Merge pull request #206 from longsleep/renegotiation

Renegotiation
Simon Eisenmann 10 years ago
parent
commit 124795e205
  1. server.conf.in (4)
  2. src/app/spreed-webrtc-server/config.go (2)
  3. src/styles/components/_audiovideo.scss (74)
  4. static/css/main.min.css (4)
  5. static/js/controllers/uicontroller.js (24)
  6. static/js/directives/audiolevel.js (19)
  7. static/js/directives/audiovideo.js (172)
  8. static/js/mediastream/dummystream.js (42)
  9. static/js/mediastream/peercall.js (10)
  10. static/js/mediastream/peerconference.js (23)
  11. static/js/mediastream/peerconnection.js (16)
  12. static/js/mediastream/usermedia.js (145)
  13. static/js/mediastream/utils.js (16)
  14. static/js/mediastream/webrtc.js (76)
  15. static/js/services/dummystream.js (29)
  16. static/js/services/mediastream.js (11)
  17. static/js/services/services.js (9)
  18. static/js/services/videolayout.js (56)
  19. static/partials/audiovideo.html (6)
  20. static/partials/audiovideopeer.html (2)

server.conf.in (4)

@@ -57,6 +57,10 @@ listen = 127.0.0.1:8080
; See http://tools.ietf.org/html/draft-uberti-behave-turn-rest-00 for details.
; A supported TURN server is https://code.google.com/p/rfc5766-turn-server/.
;turnSecret = the-default-turn-shared-secret-do-not-keep
; Enable renegotiation support. Set to true to tell clients that they can
; renegotiate peer connections when required. Firefox support is not complete,
; so do not enable if you want compatibility with Firefox clients.
;renegotiation = false
; Session secret to use for session id generator. 32 or 64 bytes of random data
; are recommended (hex encoded). A warning will be logged if hex decode fails.
; You can generate a secret easily with "xxd -ps -l 32 -c 32 /dev/random".

src/app/spreed-webrtc-server/config.go (2)

@@ -36,6 +36,7 @@ type Config struct {
S string // Static URL prefix with version
B string // Base URL
Token string // Server token
Renegotiation bool // Renegotiation flag
StunURIs []string // STUN server URIs
TurnURIs []string // TURN server URIs
Tokens bool // True when we got a tokens file
@@ -112,6 +113,7 @@ func NewConfig(container phoenix.Container, tokens bool) *Config {
S: fmt.Sprintf("static/ver=%s", ver),
B: basePath,
Token: serverToken,
Renegotiation: container.GetBoolDefault("app", "renegotiation", false),
StunURIs: stunURIs,
TurnURIs: turnURIs,
Tokens: tokens,

src/styles/components/_audiovideo.scss (74)

@@ -114,6 +114,32 @@
object-fit: cover;
}
.onlyaudio {
bottom: 0;
color: $video-onlyaudio;
display: none;
font-size: 1em;
left: 0;
pointer-events: auto;
position: absolute;
right: 0;
text-align: center;
top: 0;
&:before {
content: '';
display: inline-block;
height: 100%;
vertical-align: middle;
}
> * {
font-size: 6em;
vertical-align: middle;
}
}
.remoteContainer {
bottom: 0;
left: 0;
@@ -144,6 +170,19 @@
}
}
&.cameraMute .miniContainer,
&.cameraMute .localVideos {
background: $video-onlyaudio-background;
.onlyaudio {
display: block;
}
video {
visibility: hidden;
}
}
.miniVideo {
display: block;
height: 100%;
@@ -162,6 +201,16 @@
width: 100%;
}
.localVideos {
bottom: 0;
left: 0;
position: absolute;
right: 0;
top: 0;
transition-duration: 2s;
transition-property: opacity;
}
.remoteVideos {
bottom: 0;
left: 0;
@@ -217,7 +266,7 @@
//visibility: visible;
}
&.onlyaudio {
&.onlyaudioVideo {
background: $video-onlyaudio-background;
//visibility: visible;
@@ -226,20 +275,7 @@
}
}
.onlyaudio {
color: $video-onlyaudio;
display: none;
font-size: 80px;
left: 0;
margin-top: -40px;
pointer-events: auto;
position: absolute;
right: 0;
text-align: center;
top: 45%;
}
&.onlyaudio video,
&.onlyaudioVideo video,
&.dummy video {
visibility: hidden;
}
@@ -360,10 +396,12 @@
}
.renderer-onepeople {
.miniContainer .onlyaudio {
font-size: .4em;
}
}
.renderer-democrazy {
.remoteVideos .miniContainer {
bottom: auto;
display: inline-block;
@@ -377,7 +415,6 @@
.active .miniContainer {
opacity: 1;
}
}
.renderer-conferencekiosk {
@@ -509,7 +546,8 @@
height: 180px;
width: 320px;
video {
.remoteVideo,
.video {
height: 100%;
width: 100%;
}

static/css/main.min.css (4, vendored)

File diff suppressed because one or more lines are too long

static/js/controllers/uicontroller.js (24)

@@ -122,6 +122,7 @@ define(['jquery', 'underscore', 'bigscreen', 'moment', 'sjcl', 'modernizr', 'web
$scope.chatMessagesUnseen = 0;
$scope.autoAccept = null;
$scope.isCollapsed = true;
$scope.usermedia = null;
$scope.setStatus = function(status) {
// This is the connection status to signaling server.
@@ -147,7 +148,13 @@ define(['jquery', 'underscore', 'bigscreen', 'moment', 'sjcl', 'modernizr', 'web
$scope.refreshWebrtcSettings = function() {
// Refresh constraints.
constraints.refresh($scope.master.settings);
constraints.refresh($scope.master.settings).then(function() {
var um = $scope.usermedia;
if (um && um.renegotiation && um.started) {
// Trigger renegotiation if supported and started.
um.doGetUserMediaWithConstraints(mediaStream.webrtc.settings.mediaConstraints);
}
});
};
$scope.refreshWebrtcSettings(); // Call once for bootstrap.
@@ -407,6 +414,12 @@ define(['jquery', 'underscore', 'bigscreen', 'moment', 'sjcl', 'modernizr', 'web
alertify.dialog.alert(translation._("Oops") + "<br/>" + message);
});
mediaStream.webrtc.e.on("usermedia", function(event, usermedia) {
safeApply($scope, function(scope) {
scope.usermedia = usermedia;
});
});
appData.flags.autoreconnect = true;
appData.flags.autoreconnectDelay = 0;
@@ -704,6 +717,15 @@ define(['jquery', 'underscore', 'bigscreen', 'moment', 'sjcl', 'modernizr', 'web
alertify.dialog.alert(translation._("Your browser does not support WebRTC. No calls possible."));
return;
}
if (mediaStream.config.Renegotiation && $window.webrtcDetectedBrowser === "firefox" && $window.webrtcDetectedVersion < 38) {
// See https://bugzilla.mozilla.org/show_bug.cgi?id=1017888
// and https://bugzilla.mozilla.org/show_bug.cgi?id=840728
// and https://bugzilla.mozilla.org/show_bug.cgi?id=842455
// XXX(longsleep): It seems that firefox has implemented new API which
// supports addTrack, removeTrack see http://w3c.github.io/mediacapture-main/#dom-mediastream-removetrack
console.warn("Renegotiation enabled -> currently not compatible with Firefox.");
return;
}
});
}];

static/js/directives/audiolevel.js (19)

@@ -22,9 +22,7 @@
"use strict";
define(['jquery', 'underscore'], function($, _) {
return ["$window", "mediaStream", "safeApply", "animationFrame", function($window, mediaStream, safeApply, animationFrame) {
var webrtc = mediaStream.webrtc;
return ["$window", "webrtc", "safeApply", "animationFrame", function($window, webrtc, safeApply, animationFrame) {
// Consider anything lower than this % as no audio.
var threshhold = 5;
@@ -37,6 +35,13 @@ define(['jquery', 'underscore'], function($, _) {
// Talking status history map.
var talkingStatus = {};
// Usermedia reference.
var usermedia = null;
webrtc.e.on("usermedia", function(event, um) {
console.log("Audio level user media changed", um);
usermedia = um;
});
var controller = ['$scope', '$element', '$attrs', function($scope, $element, $attrs) {
$scope.talking = false;
@@ -47,8 +52,8 @@ define(['jquery', 'underscore'], function($, _) {
var width = 0;
this.update = _.bind(function() {
if (this.active || width > 0) {
if (webrtc.usermedia.audioLevel) {
width = Math.round(100 * webrtc.usermedia.audioLevel);
if (usermedia && usermedia.audioLevel) {
width = Math.round(100 * usermedia.audioLevel);
// Hide low volumes.
if (width < threshhold) {
width = 0;
@@ -68,8 +73,8 @@ define(['jquery', 'underscore'], function($, _) {
this.meter = _.bind(function() {
var talking;
if (this.active) {
var level = Math.round(100 * webrtc.usermedia.audioLevel);
if (this.active && usermedia) {
var level = Math.round(100 * usermedia.audioLevel);
if (level < threshhold) {
level = 0;
} else {

static/js/directives/audiovideo.js (172)

@@ -22,7 +22,7 @@
"use strict";
define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/audiovideopeer.html', 'bigscreen', 'webrtc.adapter'], function($, _, template, templatePeer, BigScreen) {
return ["$window", "$compile", "$filter", "mediaStream", "safeApply", "desktopNotify", "buddyData", "videoWaiter", "videoLayout", "animationFrame", function($window, $compile, $filter, mediaStream, safeApply, desktopNotify, buddyData, videoWaiter, videoLayout, animationFrame) {
return ["$window", "$compile", "$filter", "mediaStream", "safeApply", "desktopNotify", "buddyData", "videoWaiter", "videoLayout", "animationFrame", "$timeout", "dummyStream", function($window, $compile, $filter, mediaStream, safeApply, desktopNotify, buddyData, videoWaiter, videoLayout, animationFrame, $timeout, DummyStream) {
var peerTemplate = $compile(templatePeer);
@@ -33,19 +33,15 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
var getStreamId = function(stream, currentcall) {
var id = currentcall.id + "-" + stream.id;
console.log("Created stream ID", id);
//console.log("Created stream ID", id);
return id;
};
// Dummy stream.
var dummy = {
id: "defaultDummyStream"
};
$scope.container = $element[0];
$scope.layoutparent = $element.parent();
$scope.remoteVideos = $element.find(".remoteVideos")[0];
$scope.localVideos = $element.find(".localVideos")[0];
$scope.localVideo = $element.find(".localVideo")[0];
$scope.miniVideo = $element.find(".miniVideo")[0];
$scope.mini = $element.find(".miniContainer")[0];
@@ -63,33 +59,50 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
$scope.addRemoteStream = function(stream, currentcall) {
var id = getStreamId(stream, currentcall);
console.log("New stream", id);
if (streams.hasOwnProperty(id)) {
console.warn("Cowardly refusing to add stream id twice", id, currentcall);
console.warn("Cowardly refusing to add stream id twice", id);
return;
}
var callscope;
var subscope;
// Dummy replacement support.
if (calls.hasOwnProperty(currentcall.id)) {
subscope = calls[currentcall.id];
if (stream === dummy) {
//console.log("xxx has call", id, currentcall.id);
if (DummyStream.is(stream)) {
return;
}
if (subscope.dummy) {
subscope.$apply(function() {
subscope.attachStream(stream);
});
callscope = calls[currentcall.id];
if (callscope.dummy) {
// Current call is marked as dummy. Use it directly.
var dummyId = getStreamId(callscope.dummy, currentcall);
subscope = streams[dummyId];
if (subscope) {
subscope.dummy = null;
delete streams[dummyId];
streams[id] = subscope;
safeApply(subscope, function(scope) {
console.log("Replacing dummy with stream", id);
scope.attachStream(stream);
});
} else {
console.warn("Scope marked as dummy but target stream not found", dummyId);
}
return;
}
} else {
//console.log("xxx create call scope", currentcall.id, id);
// Create scope.
subscope = $scope.$new();
calls[currentcall.id] = subscope;
callscope = $scope.$new();
calls[currentcall.id] = callscope;
callscope.streams = 0;
console.log("Created call scope", id);
}
//console.log("Add remote stream to scope", stream.id, stream, currentcall);
// Create scope for this stream.
subscope = callscope.$new();
callscope.streams++;
var peerid = subscope.peerid = currentcall.id;
buddyData.push(peerid);
subscope.unattached = true;
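
When a real stream later arrives for a call whose scope was created from a dummy, the existing scope is re-used instead of rendering a second peer element. A minimal sketch of that decision, assuming the streams/calls maps of this directive are in scope:

var id = getStreamId(stream, currentcall);              // "<call id>-<stream id>"
var callscope = calls[currentcall.id];
if (callscope && callscope.dummy && !DummyStream.is(stream)) {
    var dummyId = getStreamId(callscope.dummy, currentcall);
    streams[id] = streams[dummyId];                      // re-key the dummy scope
    delete streams[dummyId];
    streams[id].attachStream(stream);                    // swap the content in place
}
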
@@ -100,57 +113,76 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
console.log("Stream scope is now active", id, peerid);
});
subscope.$on("$destroy", function() {
if (subscope.destroyed) {
return;
}
console.log("Destroyed scope for stream", id, peerid);
subscope.destroyed = true;
callscope.streams--;
if (callscope.streams < 1) {
callscope.$destroy();
delete calls[peerid];
console.log("Destroyed scope for call", peerid, id);
}
});
console.log("Created stream scope", id, peerid);
console.log("Created stream scope", id);
// Add created scope.
if (stream === dummy) {
subscope.dummy = true;
// If stream is a dummy, mark us in callscope.
if (DummyStream.is(stream)) {
callscope.dummy = stream;
}
// Add created scope.
streams[id] = subscope;
// Render template.
peerTemplate(subscope, function(clonedElement, scope) {
$($scope.remoteVideos).append(clonedElement);
clonedElement.data("peerid", scope.peerid);
scope.element = clonedElement;
scope.attachStream = function(stream) {
if (stream === dummy) {
if (DummyStream.is(stream)) {
scope.withvideo = false;
scope.onlyaudio = true;
$timeout(function() {
scope.$emit("active", currentcall);
$scope.redraw();
});
return;
} else {
var video = clonedElement.find("video")[0];
$window.attachMediaStream(video, stream);
// Waiter callbacks also count as connected, as browser support (FireFox 25) is not setting state changes properly.
videoWaiter.wait(video, stream, function(withvideo) {
if (scope.destroyed) {
console.log("Abort wait for video on destroyed scope.");
return;
}
if (withvideo) {
scope.$apply(function($scope) {
$scope.withvideo = true;
$scope.onlyaudio = false;
});
} else {
console.info("Incoming stream has no video tracks.");
scope.$apply(function($scope) {
$scope.withvideo = false;
$scope.onlyaudio = true;
});
}
scope.$emit("active", currentcall);
$scope.redraw();
}, function() {
if (scope.destroyed) {
console.log("No longer wait for video on destroyed scope.");
return;
}
console.warn("We did not receive video data for remote stream", currentcall, stream, video);
scope.$emit("active", currentcall);
$scope.redraw();
});
scope.dummy = null;
}
var video = clonedElement.find("video")[0];
$window.attachMediaStream(video, stream);
// Waiter callbacks also count as connected, as browser support (FireFox 25) is not setting state changes properly.
videoWaiter.wait(video, stream, function(withvideo) {
if (scope.destroyed) {
console.log("Abort wait for video on destroyed scope.");
return;
}
if (withvideo) {
scope.$apply(function($scope) {
$scope.withvideo = true;
});
} else {
console.info("Incoming stream has no video tracks.");
scope.$apply(function($scope) {
$scope.onlyaudio = true;
});
}
scope.$emit("active", currentcall);
$scope.redraw();
}, function() {
if (scope.destroyed) {
console.log("No longer wait for video on destroyed scope.");
return;
}
console.warn("We did not receive video data for remote stream", currentcall, stream, video);
scope.$emit("active", currentcall);
$scope.redraw();
});
scope.unattached = false;
scope.dummy = false;
};
scope.doChat = function() {
$scope.$emit("startchat", currentcall.id, {
@@ -159,27 +191,23 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
});
};
scope.attachStream(stream);
$($scope.remoteVideos).append(clonedElement);
});
};
$scope.removeRemoteStream = function(stream, currentcall) {
//console.log("remove stream", stream, stream.id, currentcall);
var id = getStreamId(stream, currentcall);
console.log("Stream removed", id);
var subscope = streams[id];
if (subscope) {
buddyData.pop(currentcall.id);
delete streams[id];
//console.log("remove scope", subscope);
if (subscope.element) {
subscope.element.remove();
}
var callscope = calls[currentcall.id];
if (subscope === callscope) {
delete calls[currentcall.id];
}
subscope.$destroy();
$scope.redraw();
}
@@ -202,6 +230,7 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
$element.addClass("active");
//console.log("active 3");
_.delay(function() {
$scope.localVideos.style.opacity = 0;
$scope.localVideo.style.opacity = 0;
$scope.localVideo.src = "";
}, 500);
@@ -228,6 +257,10 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
mediaStream.webrtc.e.on("usermedia", function(event, usermedia) {
if (!usermedia || !usermedia.started) {
return;
}
//console.log("XXXX XXXXXXXXXXXXXXXXXXXXX usermedia event", usermedia);
if ($scope.haveStreams) {
@@ -244,6 +277,7 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
return;
}
if ($scope.localVideo.videoWidth > 0) {
console.log("Local video size: ", $scope.localVideo.videoWidth, $scope.localVideo.videoHeight);
$scope.localVideo.style.opacity = 1;
$scope.redraw();
} else {
@@ -279,6 +313,7 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
$($scope.remoteVideos).children(".remoteVideo").remove();
}, 1500);
$($scope.mini).removeClass("visible");
$scope.localVideos.style.opacity = 1;
$scope.localVideo.style.opacity = 0;
$scope.remoteVideos.style.opacity = 0;
$element.removeClass('active');
@@ -300,6 +335,10 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
$window.reattachMediaStream($scope.miniVideo, $scope.localVideo);
$scope.haveStreams = true;
}
if (stream === null) {
// Inject dummy stream.
stream = new DummyStream();
}
$scope.addRemoteStream(stream, currentcall);
});
@@ -323,7 +362,7 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
case "connected":
case "completed":
case "failed":
$scope.addRemoteStream(dummy, currentcall);
$scope.addRemoteStream(new DummyStream(), currentcall);
break;
}
@@ -378,7 +417,14 @@ define(['jquery', 'underscore', 'text!partials/audiovideo.html', 'text!partials/
width: scope.layoutparent.width(),
height: scope.layoutparent.height()
}
var again = videoLayout.update(getRendererName(), size, scope, controller);
var name;
if (size.width < 1 || size.height < 1) {
// Use invisible renderer when no size available.
name = "invisible";
} else {
name = getRendererName();
}
var again = videoLayout.update(name, size, scope, controller);
if (again) {
// Layout needs a redraw.
needsRedraw = true;

static/js/mediastream/dummystream.js (42)

@@ -0,0 +1,42 @@
/*
* Spreed WebRTC.
* Copyright (C) 2013-2015 struktur AG
*
* This file is part of Spreed WebRTC.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
"use strict";
define([], function() {
// Dummy stream implementation.
var DummyStream = function(id) {
this.id = id ? id : "defaultDummyStream";
};
DummyStream.prototype.stop = function() {};
DummyStream.prototype.getAudioTracks = function() { return [] };
DummyStream.prototype.getVideoTracks = function() { return [] };
DummyStream.not = function(stream) {
// Helper to test if stream is a dummy.
return !stream || stream.stop !== DummyStream.prototype.stop;
};
DummyStream.is = function(stream) {
return stream && stream.stop === DummyStream.prototype.stop;
};
return DummyStream;
});
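
A minimal usage sketch for the helpers above (illustrative, not part of the diff):

var stream = new DummyStream();
DummyStream.is(stream);           // true
DummyStream.not(stream);          // false
DummyStream.not(null);            // true - no stream at all also counts as "not a dummy"
stream.getAudioTracks().length;   // 0, so nothing is ever attached or transmitted
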

static/js/mediastream/peercall.js (10)

@@ -142,13 +142,19 @@ define(['jquery', 'underscore', 'mediastream/utils', 'mediastream/peerconnection
// reason we always trigger onRemoteStream added for all streams which are available
// after the remote SDP was set successfully.
_.defer(_.bind(function() {
var streams = 0;
_.each(peerconnection.getRemoteStreams(), _.bind(function(stream) {
if (!this.streams.hasOwnProperty(stream.id) && (stream.getAudioTracks().length > 0 || stream.getVideoTracks().length > 0)) {
// NOTE(longsleep): Add stream here when it has at least one audio or video track, to avoid FF >= 33 to add it multiple times.
console.log("Adding stream after remote SDP success.", stream);
this.onRemoteStreamAdded(stream);
streams++;
}
}, this));
if (streams === 0 && this.sdpConstraints.mandatory && (this.sdpConstraints.mandatory.OfferToReceiveAudio || this.sdpConstraints.mandatory.OfferToReceiveVideo)) {
// We assume that we will eventually receive a stream, so we trigger the event to let the UI prepare for it.
this.e.triggerHandler("remoteStreamAdded", [null, this]);
}
}, this));
}, this), _.bind(function(err) {
console.error("Set remote session description failed", err);
@@ -165,6 +171,8 @@ define(['jquery', 'underscore', 'mediastream/utils', 'mediastream/peerconnection
sessionDescription.sdp = utils.maybePreferVideoReceiveCodec(sessionDescription.sdp, params);
sessionDescription.sdp = utils.maybeSetAudioReceiveBitRate(sessionDescription.sdp, params);
sessionDescription.sdp = utils.maybeSetVideoReceiveBitRate(sessionDescription.sdp, params);
// Apply workarounds.
sessionDescription.sdp = utils.fixLocal(sessionDescription.sdp, params);
};
@@ -177,6 +185,8 @@ define(['jquery', 'underscore', 'mediastream/utils', 'mediastream/peerconnection
sessionDescription.sdp = utils.maybeSetAudioSendBitRate(sessionDescription.sdp, params);
sessionDescription.sdp = utils.maybeSetVideoSendBitRate(sessionDescription.sdp, params);
sessionDescription.sdp = utils.maybeSetVideoSendInitialBitRate(sessionDescription.sdp, params);
// Apply workarounds.
sessionDescription.sdp = utils.fixRemote(sessionDescription.sdp, params);
};

static/js/mediastream/peerconference.js (23)

@@ -40,6 +40,12 @@ define(['jquery', 'underscore', 'mediastream/peercall'], function($, _, PeerCall
this.id = id;
}
this.usermedia = webrtc.usermedia;
webrtc.e.on("usermedia", _.bind(function(event, um) {
console.log("Conference user media changed", um);
this.usermedia = um;
}, this));
console.log("Created conference", this.id);
};
@@ -94,16 +100,12 @@ define(['jquery', 'underscore', 'mediastream/peercall'], function($, _, PeerCall
console.log("Creating PeerConnection", call);
call.createPeerConnection(_.bind(function(peerconnection) {
// Success call.
if (this.usermedia) {
this.usermedia.addToPeerConnection(peerconnection);
}
call.e.on("negotiationNeeded", _.bind(function(event, extracall) {
this.webrtc.sendOfferWhenNegotiationNeeded(extracall);
}, this));
if (this.webrtc.usermedia) {
this.webrtc.usermedia.addToPeerConnection(peerconnection);
}
/*call.createOffer(_.bind(function(sessionDescription, extracall) {
console.log("Sending offer with sessionDescription", sessionDescription, extracall.id);
this.webrtc.api.sendOffer(extracall.id, sessionDescription);
}, this));*/
}, this), _.bind(function() {
// Error call.
console.error("Failed to create peer connection for conference call.");
@@ -143,9 +145,12 @@ define(['jquery', 'underscore', 'mediastream/peercall'], function($, _, PeerCall
call.createPeerConnection(_.bind(function(peerconnection) {
// Success call.
call.setRemoteDescription(rtcsdp, _.bind(function() {
if (this.webrtc.usermedia) {
this.webrtc.usermedia.addToPeerConnection(peerconnection);
if (this.usermedia) {
this.usermedia.addToPeerConnection(peerconnection);
}
call.e.on("negotiationNeeded", _.bind(function(event, extracall) {
this.webrtc.sendOfferWhenNegotiationNeeded(extracall);
}, this));
call.createAnswer(_.bind(function(sessionDescription, extracall) {
console.log("Sending answer", sessionDescription, extracall.id);
this.webrtc.api.sendAnswer(extracall.id, sessionDescription);

static/js/mediastream/peerconnection.js (16)

@@ -72,10 +72,16 @@ define(['jquery', 'underscore', 'webrtc.adapter'], function($, _) {
// for example https://bugzilla.mozilla.org/show_bug.cgi?id=998546.
pc.onaddstream = _.bind(this.onRemoteStreamAdded, this);
pc.onremovestream = _.bind(this.onRemoteStreamRemoved, this);
// NOTE(longsleep): Firefox 38 has support for onaddtrack. Unfortunately Chrome does
// not support this and thus both are not compatible. For the time being this means
// that renegotiation does not work between Firefox and Chrome. Even worse, current
// spec says that the event should really be named ontrack.
if (window.webrtcDetectedBrowser === "firefox") {
// NOTE(longsleep): onnegotiationneeded is not supported by Firefox. We trigger it
// NOTE(longsleep): onnegotiationneeded is not supported by Firefox < 38.
// Also firefox does not care about streams, but has the newer API for tracks
// implemented. This does not work together with Chrome, so we trigger negotiation
// manually when a stream is added or removed.
// https://bugzilla.mozilla.org/show_bug.cgi?id=840728
// https://bugzilla.mozilla.org/show_bug.cgi?id=1017888
this.negotiationNeeded = _.bind(function() {
if (this.currentcall.initiate) {
// Trigger onNegotiationNeeded once for Firefox.
@@ -308,12 +314,18 @@ define(['jquery', 'underscore', 'webrtc.adapter'], function($, _) {
PeerConnection.prototype.getRemoteStreams = function() {
if (!this.pc) {
return [];
}
return this.pc.getRemoteStreams.apply(this.pc, arguments);
};
PeerConnection.prototype.getLocalStreams = function() {
if (!this.pc) {
return [];
}
return this.pc.getLocalStreams.apply(this.pc, arguments);
};
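
The stream-based and track-based APIs mentioned in the notes above can be probed directly; a small capability check (assumed, not part of the diff, and it presumes the adapter shim exposes window.RTCPeerConnection):

var pc = new window.RTCPeerConnection(null);
console.log("stream based events:", "onaddstream" in pc);  // legacy API used here
console.log("track based events:", "ontrack" in pc);       // where the spec is heading
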

static/js/mediastream/usermedia.js (145)

@@ -20,15 +20,10 @@
*/
"use strict";
define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _, AudioContext) {
define(['jquery', 'underscore', 'audiocontext', 'mediastream/dummystream', 'webrtc.adapter'], function($, _, AudioContext, DummyStream) {
// Create AudioContext singleton, if supported.
var context = AudioContext ? new AudioContext() : null;
var peerconnections = {};
// Disabled for now until browser support matures. If enabled this totally breaks
// Firefox and Chrome with Firefox interop.
var enableRenegotiationSupport = false;
// Converter helpers to convert media constraints to new API.
var mergeConstraints = function(constraints, k, v, mandatory) {
@@ -124,6 +119,7 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
}
})();
// UserMedia.
var UserMedia = function(options) {
this.options = $.extend({}, options);
@@ -131,15 +127,23 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
this.localStream = null;
this.started = false;
this.delay = 0;
this.audioMute = false;
this.videoMute = false;
this.peerconnections = {};
// If true, mute/unmute of audio/video creates a new stream which
// will trigger renegotiation on the peer connection.
this.renegotiation = options.renegotiation && true;
if (this.renegotiation) {
console.info("User media with renegotiation created ...");
}
this.audioMute = options.audioMute && true;
this.videoMute = options.videoMute && true;
this.mediaConstraints = null;
// Audio level.
this.audioLevel = 0;
if (!this.options.noaudio && context && context.createScriptProcessor) {
if (!this.options.noAudio && context && context.createScriptProcessor) {
this.audioSource = null;
this.audioProcessor = context.createScriptProcessor(2048, 1, 1);
@@ -167,6 +171,11 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
if (this.audioSource) {
this.audioSource.disconnect();
}
var audioTracks = stream.getAudioTracks();
if (audioTracks.length < 1) {
this.audioSource = null;
return;
}
// Connect to audioProcessor.
this.audioSource = context.createMediaStreamSource(stream);
//console.log("got source", this.audioSource);
@@ -180,9 +189,13 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
this.e.on("localstream", _.bind(function(event, stream, oldstream) {
// Update stream support.
if (oldstream) {
_.each(peerconnections, function(pc) {
pc.removeStream(oldstream);
pc.addStream(stream);
_.each(this.peerconnections, function(pc) {
if (DummyStream.not(oldstream)) {
pc.removeStream(oldstream);
}
if (DummyStream.not(stream)) {
pc.addStream(stream);
}
console.log("Updated usermedia stream at peer connection", pc, stream);
});
}
@@ -244,7 +257,6 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
if (!mediaConstraints) {
mediaConstraints = currentcall.mediaConstraints;
}
this.mediaConstraints = mediaConstraints;
return this.doGetUserMediaWithConstraints(mediaConstraints);
@@ -254,6 +266,21 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
if (!mediaConstraints) {
mediaConstraints = this.mediaConstraints;
} else {
this.mediaConstraints = mediaConstraints;
if (this.localStream) {
// Release stream early if any to be able to apply new constraints.
this.replaceStream(null);
}
}
if (this.renegotiation && this.audioMute && this.videoMute) {
// Fast path as nothing should be shared.
_.defer(_.bind(function() {
this.onUserMediaSuccess(new DummyStream());
}, this));
this.started = true;
return true
}
var constraints = $.extend(true, {}, mediaConstraints);
@@ -301,34 +328,48 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
};
UserMedia.prototype.onLocalStream = function(stream) {
UserMedia.prototype.replaceStream = function(stream) {
var oldStream = this.localStream;
if (oldStream) {
oldStream.onended = function() {};
if (oldStream && oldStream.active) {
// Let old stream silently end.
oldStream.onended = function() {
console.log("Silently ended replaced user media stream.");
};
oldStream.stop();
}
if (stream) {
// Get notified of end events.
stream.onended = _.bind(function(event) {
console.log("User media stream ended.");
if (this.started) {
this.stop();
}
}, this);
// Set new stream.
this.localStream = stream;
this.e.triggerHandler("localstream", [stream, oldStream, this]);
}
return oldStream && stream;
};
UserMedia.prototype.onLocalStream = function(stream) {
if (this.replaceStream(stream)) {
// We replaced a stream.
setTimeout(_.bind(function() {
this.e.triggerHandler("mediachanged", [this]);
}, this), 0);
} else {
// Let webrtc handle the rest.
// We are new.
setTimeout(_.bind(function() {
this.e.triggerHandler("mediasuccess", [this]);
}, this), this.delay);
}, this), 0);
}
// Get notified of end events.
stream.onended = _.bind(function(event) {
console.log("User media stream ended.");
if (this.started) {
this.stop();
}
}, this);
// Set new stream.
this.localStream = stream;
this.e.triggerHandler("localstream", [stream, oldStream, this]);
};
UserMedia.prototype.stop = function() {
@@ -352,11 +393,7 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
this.mediaConstraints = null;
console.log("Stopped user media.");
this.e.triggerHandler("stopped", [this]);
this.delay = 1500;
setTimeout(_.bind(function() {
this.delay = 0;
}, this), 2000);
this.e.off();
};
@@ -364,7 +401,7 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
var m = !!mute;
if (!enableRenegotiationSupport) {
if (!this.renegotiation) {
// Disable streams only - does not require renegotiation but keeps mic
// active and the stream will transmit silence.
@@ -394,7 +431,7 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
// Remove audio stream, by creating a new stream and doing renegotiation. This
// is the way to go to disable the mic when audio is muted.
if (this.localStream) {
if (this.started) {
if (this.audioMute !== m) {
this.audioMute = m;
this.doGetUserMediaWithConstraints();
@@ -413,7 +450,7 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
var m = !!mute;
if (!enableRenegotiationSupport) {
if (!this.renegotiation) {
// Disable streams only - does not require renegotiation but keeps camera
// active and the stream will transmit black.
@@ -438,10 +475,10 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
}
} else {
// Removevideo stream, by creating a new stream and doing renegotiation. This
// Remove video stream, by creating a new stream and doing renegotiation. This
// is the way to go to disable the camera when video is muted.
if (this.localStream) {
if (this.started) {
if (this.videoMute !== m) {
this.videoMute = m;
this.doGetUserMediaWithConstraints();
@@ -460,13 +497,15 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
console.log("Add usermedia stream to peer connection", pc, this.localStream);
if (this.localStream) {
pc.addStream(this.localStream);
if (DummyStream.not(this.localStream)) {
pc.addStream(this.localStream);
}
var id = pc.id;
if (!peerconnections.hasOwnProperty(id)) {
peerconnections[id] = pc;
pc.currentcall.e.one("closed", function() {
delete peerconnections[id];
});
if (!this.peerconnections.hasOwnProperty(id)) {
this.peerconnections[id] = pc;
pc.currentcall.e.one("closed", _.bind(function() {
delete this.peerconnections[id];
}, this));
}
}
@@ -476,9 +515,11 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
console.log("Remove usermedia stream from peer connection", pc, this.localStream);
if (this.localStream) {
pc.removeStream(this.localStream);
if (peerconnections.hasOwnProperty(pc.id)) {
delete peerconnections[pc.id];
if (DummyStream.not(this.localStream)) {
pc.removeStream(this.localStream);
}
if (this.peerconnections.hasOwnProperty(pc.id)) {
delete this.peerconnections[pc.id];
}
}
@@ -486,7 +527,9 @@ define(['jquery', 'underscore', 'audiocontext', 'webrtc.adapter'], function($, _
UserMedia.prototype.attachMediaStream = function(video) {
window.attachMediaStream(video, this.localStream);
if (this.localStream && DummyStream.not(this.localStream)) {
window.attachMediaStream(video, this.localStream);
}
};
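
Putting the pieces of this file together: with renegotiation enabled, muting replaces the local stream instead of merely disabling tracks. A rough usage sketch (names are from this diff; currentcall is assumed to be an existing PeerCall supplying media constraints):

var um = new UserMedia({renegotiation: true, audioMute: false, videoMute: false});
um.e.on("mediachanged", function(event, usermedia) {
    // Fired after replaceStream(); attached peer connections have already seen
    // removeStream()/addStream() and will renegotiate.
    console.log("Local media replaced", usermedia.localStream);
});
um.doGetUserMedia(currentcall);   // initial getUserMedia
um.applyVideoMute(true);          // new stream without video
um.applyAudioMute(true);          // both muted: a DummyStream stands in
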

static/js/mediastream/utils.js (16)

@@ -425,7 +425,21 @@ define([], function() {
maybePreferAudioSendCodec: maybePreferAudioSendCodec,
maybePreferAudioReceiveCodec: maybePreferAudioReceiveCodec,
maybePreferVideoSendCodec: maybePreferVideoSendCodec,
maybePreferVideoReceiveCodec: maybePreferVideoReceiveCodec
maybePreferVideoReceiveCodec: maybePreferVideoReceiveCodec,
fixLocal: function(sdp) {
if (window.webrtcDetectedBrowser === "chrome") {
// Remove all rtx support from locally generated sdp. Chrome
// does create this sometimes wrong.
// TODO(longsleep): Limit to Chrome version, once it is fixed upstream.
// See https://code.google.com/p/webrtc/issues/detail?id=3962
sdp = sdp.replace(/a=rtpmap:\d+ rtx\/\d+\r\n/i, "");
sdp = sdp.replace(/a=fmtp:\d+ apt=\d+\r\n/i, "");
}
return sdp;
},
fixRemote: function(sdp) {
return sdp;
}
}
});
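
A small illustration of the Chrome workaround above, using made-up SDP lines (note that without the global regex flag each replace() strips only the first matching line):

var sdp = "a=rtpmap:100 VP8/90000\r\n" +
          "a=rtpmap:96 rtx/90000\r\n" +
          "a=fmtp:96 apt=100\r\n";
sdp = sdp.replace(/a=rtpmap:\d+ rtx\/\d+\r\n/i, "");
sdp = sdp.replace(/a=fmtp:\d+ apt=\d+\r\n/i, "");
console.log(sdp);  // only the VP8 rtpmap line remains
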

static/js/mediastream/webrtc.js (76)

@@ -54,6 +54,8 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
this.started = false;
this.initiator = null;
this.usermedia = null;
this.audioMute = false;
this.videoMute = false;
@@ -112,7 +114,8 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
videoSendCodec: "VP8/90000"
//videoRecvBitrate: ,
//videoRecvCodec
}
},
renegotiation: true
};
this.screensharingSettings = {
@@ -121,17 +124,6 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
this.api.e.bind("received.offer received.candidate received.answer received.bye received.conference", _.bind(this.processReceived, this));
// Create default media (audio/video).
this.usermedia = new UserMedia();
this.usermedia.e.on("mediasuccess mediaerror", _.bind(function() {
// Start always, no matter what.
this.maybeStart();
}, this));
this.usermedia.e.on("mediachanged", _.bind(function() {
// Propagate media change events.
this.e.triggerHandler("usermedia", [this.usermedia]);
}, this));
};
WebRTC.prototype.processReceived = function(event, to, data, type, to2, from) {
@@ -245,6 +237,12 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
console.log("Offer process.");
targetcall = this.findTargetCall(from);
if (targetcall) {
if (!this.settings.renegotiation && targetcall.peerconnection && targetcall.peerconnection.pc && targetcall.peerconnection.pc.remoteDescription) {
// Call replace support without renegotiation.
this.doHangup("unsupported", from);
console.error("Processing new offers is not implemented without renegotiation.");
return;
}
// Hey we know this call.
targetcall.setRemoteDescription(new window.RTCSessionDescription(data), _.bind(function(sessionDescription, currentcall) {
if (currentcall === this.currentcall) {
@@ -396,6 +394,39 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
};
WebRTC.prototype.doUserMedia = function(currentcall) {
// Create default media (audio/video).
var usermedia = new UserMedia({
renegotiation: this.settings.renegotiation,
audioMute: this.audioMute,
videoMute: this.videoMute
});
usermedia.e.on("mediasuccess mediaerror", _.bind(function(event, um) {
this.e.triggerHandler("usermedia", [um]);
// Start always, no matter what.
this.maybeStart(um);
}, this));
usermedia.e.on("mediachanged", _.bind(function(event, um) {
// Propagate media change events.
this.e.triggerHandler("usermedia", [um]);
}, this));
usermedia.e.on("stopped", _.bind(function(event, um) {
if (um === this.usermedia) {
this.e.triggerHandler("usermedia", [null]);
this.usermedia = null;
}
}, this));
this.e.one("stop", function() {
usermedia.stop();
});
this.usermedia = usermedia;
this.e.triggerHandler("usermedia", [usermedia]);
return usermedia.doGetUserMedia(currentcall);
};
WebRTC.prototype.doCall = function(id) {
if (this.currentcall) {
@@ -413,7 +444,7 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
} else {
var currentcall = this.currentcall = this.createCall(id, null, id);
this.e.triggerHandler("peercall", [currentcall]);
var ok = this.usermedia.doGetUserMedia(currentcall);
var ok = this.doUserMedia(currentcall);
if (ok) {
this.e.triggerHandler("waitforusermedia", [currentcall]);
} else {
@@ -432,7 +463,7 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
console.warn("Trying to accept without a call.", currentcall);
return;
}
var ok = this.usermedia.doGetUserMedia(currentcall);
var ok = this.doUserMedia(currentcall);
if (ok) {
this.e.triggerHandler("waitforusermedia", [currentcall]);
} else {
@@ -501,7 +532,7 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
WebRTC.prototype.doScreenshare = function(options) {
var usermedia = new UserMedia({
noaudio: true
noAudio: true
});
var ok = usermedia.doGetUserMedia(null, PeerScreenshare.getCaptureMediaConstraints(this, options));
if (ok) {
@@ -579,11 +610,9 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
this.currentcall.close();
this.currentcall = null;
}
if (this.usermedia) {
this.usermedia.stop();
}
this.e.triggerHandler("peerconference", [null]);
this.e.triggerHandler("peercall", [null]);
this.e.triggerHandler("stop");
this.msgQueue.length = 0;
this.initiator = null;
this.started = false;
@@ -629,7 +658,7 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
}
WebRTC.prototype.maybeStart = function() {
WebRTC.prototype.maybeStart = function(usermedia) {
//console.log("maybeStart", this.started);
if (!this.started) {
@@ -640,14 +669,7 @@ function($, _, PeerCall, PeerConference, PeerXfer, PeerScreenshare, UserMedia, u
console.log('Creating PeerConnection.', currentcall);
currentcall.createPeerConnection(_.bind(function(peerconnection) {
// Success call.
if (this.usermedia) {
this.usermedia.applyVideoMute(this.videoMute);
this.usermedia.applyAudioMute(this.audioMute);
this.e.triggerHandler("usermedia", [this.usermedia]);
this.usermedia.addToPeerConnection(peerconnection);
} else {
_.defer(peerconnection.negotiationNeeded);
}
usermedia.addToPeerConnection(peerconnection);
this.started = true;
if (!this.initiator) {
this.calleeStart();

static/js/services/dummystream.js (29)

@@ -0,0 +1,29 @@
/*
* Spreed WebRTC.
* Copyright (C) 2013-2015 struktur AG
*
* This file is part of Spreed WebRTC.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
"use strict";
define([
'mediastream/dummystream'
], function(DummyStream) {
return [function() {
return DummyStream;
}];
});

static/js/services/mediastream.js (11)

@@ -26,7 +26,8 @@ define([
'ua-parser',
'sjcl',
'modernizr',
'mediastream/tokens'
'mediastream/tokens',
'webrtc.adapter'
], function($, _, uaparser, sjcl, Modernizr, tokens) {
@@ -43,6 +44,14 @@ define([
// Create encryption key from server token and browser name.
var secureKey = sjcl.codec.base64.fromBits(sjcl.hash.sha256.hash(context.Cfg.Token + uaparser().browser.name));
// Apply configuration details.
webrtc.settings.renegotiation = context.Cfg.Renegotiation && true;
if (webrtc.settings.renegotiation && $window.webrtcDetectedBrowser === "firefox") {
console.warn("Disable renegotiation in Firefox for now.");
webrtc.settings.renegotiation = false;
}
// mediaStream service API.
var mediaStream = {
version: version,
ws: url,
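
Taken together with the server-side changes earlier in this diff, the flag travels from the config file to every UserMedia instance; a condensed sketch of the client-side hops (the INI and Go sides are summarized in comments):

// server.conf: [app] renegotiation = true  ->  config.go: Renegotiation = true
// -> delivered to the client as context.Cfg.Renegotiation, then applied here:
webrtc.settings.renegotiation = context.Cfg.Renegotiation && true;
// webrtc.js then forwards it whenever media is created for a call:
// new UserMedia({renegotiation: this.settings.renegotiation, audioMute: ..., videoMute: ...})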

static/js/services/services.js (9)

@@ -68,7 +68,8 @@ define([
'services/constraints',
'services/modules',
'services/mediadevices',
'services/sandbox'], function(_,
'services/sandbox',
'services/dummystream'], function(_,
desktopNotify,
playSound,
safeApply,
@@ -114,7 +115,8 @@ roompin,
constraints,
modules,
mediaDevices,
sandbox) {
sandbox,
dummyStream) {
var services = {
desktopNotify: desktopNotify,
@@ -162,7 +164,8 @@ sandbox) {
constraints: constraints,
modules: modules,
mediaDevices: mediaDevices,
sandbox: sandbox
sandbox: sandbox,
dummyStream: dummyStream
};
var initialize = function(angModule) {

static/js/services/videolayout.js (56)

@@ -61,6 +61,13 @@ define(["jquery", "underscore", "modernizr", "injectCSS"], function($, _, Modern
// videoLayout
return ["$window", function($window) {
// Invisible layout (essentially shows nothing).
var Invisible = function(container, scope, controller) {};
Invisible.prototype.name = "invisible";
Invisible.prototype.render = function() {};
Invisible.prototype.close = function() {};
// Video layout with all videos rendered the same size.
var OnePeople = function(container, scope, controller) {};
@@ -86,7 +93,6 @@ define(["jquery", "underscore", "modernizr", "injectCSS"], function($, _, Modern
if (scope.localVideo.style.opacity === '1') {
videoWidth = scope.localVideo.videoWidth;
videoHeight = scope.localVideo.videoHeight;
console.log("Local video size: ", videoWidth, videoHeight);
videos = [null];
}
}
@@ -327,28 +333,30 @@ define(["jquery", "underscore", "modernizr", "injectCSS"], function($, _, Modern
};
// Register renderers.
renderers[Invisible.prototype.name] = Invisible;
renderers[OnePeople.prototype.name] = OnePeople;
renderers[Smally.prototype.name] = Smally;
renderers[Democrazy.prototype.name] = Democrazy;
renderers[ConferenceKiosk.prototype.name] = ConferenceKiosk;
renderers[Auditorium.prototype.name] = Auditorium;
// Helper for class name generation.
var makeName = function(prefix, n, camel) {
var r = prefix;
if (camel) {
r = r + n.charAt(0).toUpperCase() + n.slice(1);
} else {
r = r + "-" + n;
}
return r;
};
// Public api.
var current = null;
var body = $("body");
return {
update: function(name, size, scope, controller) {
var makeName = function(prefix, n, camel) {
var r = prefix;
if (camel) {
r = r + n.charAt(0).toUpperCase() + n.slice(1);
} else {
r = r + "-" + n;
}
return r;
};
var videos = _.keys(controller.streams);
var streams = controller.streams;
var container = scope.container;
@@ -356,22 +364,20 @@ define(["jquery", "underscore", "modernizr", "injectCSS"], function($, _, Modern
if (!current) {
current = new renderers[name](container, scope, controller)
//console.log("Created new video layout renderer", name, current);
console.log("Created new video layout renderer", name, current);
$(layoutparent).addClass(makeName("renderer", name));
body.addClass(makeName("videolayout", name, true));
return true;
} else if (current && current.name !== name) {
current.close(container, scope, controller);
$(container).removeAttr("style");
$(layoutparent).removeClass(makeName("renderer", current.name));
body.removeClass(makeName("videolayout", current.name, true));
current = new renderers[name](container, scope, controller)
$(layoutparent).addClass(makeName("renderer", name));
$(body).addClass(makeName("videolayout", name, true));
body.addClass(makeName("videolayout", name, true));
console.log("Switched to new video layout renderer", name, current);
return true;
} else {
if (current.name !== name) {
current.close(container, scope, controller);
$(container).removeAttr("style");
$(layoutparent).removeClass(makeName("renderer", current.name));
$(body).removeClass(makeName("videolayout", current.name, true));
current = new renderers[name](container, scope, controller)
$(layoutparent).addClass(makeName("renderer", name));
$(body).addClass(makeName("videolayout", name, true));
//console.log("Switched to new video layout renderer", name, current);
return true;
}
}
return current.render(container, size, scope, videos, streams);
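
The hoisted makeName helper above produces the CSS class names toggled on the layout parent and on the body; for illustration (assumed inputs):

makeName("renderer", "onepeople");           // "renderer-onepeople"   (layout parent class)
makeName("videolayout", "onepeople", true);  // "videolayoutOnepeople" (body class)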

static/partials/audiovideo.html (6)

@@ -1,13 +1,17 @@
<div class="audiovideo">
<div class="audiovideo" ng-class="{'cameraMute': cameraMute, 'microphoneMute': microphoneMute}">
<div class="audiovideoBase">
<div class="localContainer">
<div class="localVideos">
<video class="localVideo" autoplay="autoplay" muted="true"></video>
<div class="onlyaudio"><i class="fa fa-eye-slash"></i></div>
<div class="overlayLogo"></div>
</div>
</div>
<div class="remoteContainer">
<div class="remoteVideos nicescroll"></div>
<div class="miniContainer" ng-class="{talking: talking}">
<video class="miniVideo" autoplay="autoplay" muted="true"></video>
<div class="onlyaudio"><i class="fa fa-eye-slash"></i></div>
</div>
</div>
</div>

static/partials/audiovideopeer.html (2)

@@ -1,4 +1,4 @@
<div class="remoteVideo" ng-class="{'dummy': dummy, 'withvideo': withvideo, 'onlyaudio': onlyaudio, 'talking': peersTalking[peerid]}">
<div class="remoteVideo" ng-class="{'dummy': dummy, 'withvideo': withvideo, 'onlyaudioVideo': onlyaudio, 'talking': peersTalking[peerid]}">
<video autoplay="autoplay"></video>
<div class="peerLabel">{{peerid|displayName}}</div>
<div class="peerActions">
