From d2e54b0dfbd50c10f0ade83d7786a74fe8238048 Mon Sep 17 00:00:00 2001 From: Daniel Berteaud Date: Wed, 2 Jul 2014 15:56:20 +0200 Subject: [PATCH] Update to SimpleWebRTC 1.9.1 --- public/js/simplewebrtc.bundle.js | 2099 ++++++++++++++++++++------------------ 1 file changed, 1083 insertions(+), 1016 deletions(-) diff --git a/public/js/simplewebrtc.bundle.js b/public/js/simplewebrtc.bundle.js index a49d7f4..01ef8a0 100644 --- a/public/js/simplewebrtc.bundle.js +++ b/public/js/simplewebrtc.bundle.js @@ -428,7 +428,7 @@ SimpleWebRTC.prototype.sendFile = function () { module.exports = SimpleWebRTC; -},{"attachmediastream":5,"mockconsole":6,"socket.io-client":7,"webrtc":2,"webrtcsupport":4,"wildemitter":3}],3:[function(require,module,exports){ +},{"attachmediastream":5,"mockconsole":7,"socket.io-client":6,"webrtc":3,"webrtcsupport":4,"wildemitter":2}],2:[function(require,module,exports){ /* WildEmitter.js is a slim little event emitter by @henrikjoreteg largely based on @visionmedia's Emitter from UI Kit. @@ -649,18 +649,6 @@ module.exports = function (stream, el, options) { }; },{}],6:[function(require,module,exports){ -var methods = "assert,count,debug,dir,dirxml,error,exception,group,groupCollapsed,groupEnd,info,log,markTimeline,profile,profileEnd,time,timeEnd,trace,warn".split(","); -var l = methods.length; -var fn = function () {}; -var mockconsole = {}; - -while (l--) { - mockconsole[methods[l]] = fn; -} - -module.exports = mockconsole; - -},{}],7:[function(require,module,exports){ /*! Socket.IO.js build:0.9.16, development. Copyright(c) 2011 LearnBoost MIT Licensed */ var io = ('undefined' === typeof module ? {} : module.exports); @@ -4534,6 +4522,18 @@ if (typeof define === "function" && define.amd) { define([], function () { return io; }); } })(); +},{}],7:[function(require,module,exports){ +var methods = "assert,count,debug,dir,dirxml,error,exception,group,groupCollapsed,groupEnd,info,log,markTimeline,profile,profileEnd,time,timeEnd,trace,warn".split(","); +var l = methods.length; +var fn = function () {}; +var mockconsole = {}; + +while (l--) { + mockconsole[methods[l]] = fn; +} + +module.exports = mockconsole; + },{}],8:[function(require,module,exports){ var events = require('events'); @@ -4919,13 +4919,13 @@ module.exports = { IceCandidate: IceCandidate }; -},{}],2:[function(require,module,exports){ +},{}],3:[function(require,module,exports){ var util = require('util'); var webrtc = require('webrtcsupport'); -var PeerConnection = require('rtcpeerconnection'); var WildEmitter = require('wildemitter'); var mockconsole = require('mockconsole'); var localMedia = require('localmedia'); +var Peer = require('./peer'); function WebRTC(opts) { @@ -4937,7 +4937,7 @@ function WebRTC(opts) { peerConnectionConfig: { iceServers: [{"url": "stun:stun.l.google.com:19302"}] }, - peerConnectionContraints: { + peerConnectionConstraints: { optional: [ {DtlsSrtpKeyAgreement: true} ] @@ -5082,217 +5082,9 @@ WebRTC.prototype.sendDirectlyToAll = function (channel, message, payload) { }); }; -function Peer(options) { - var self = this; - - this.id = options.id; - this.parent = options.parent; - this.type = options.type || 'video'; - this.oneway = options.oneway || false; - this.sharemyscreen = options.sharemyscreen || false; - this.browserPrefix = options.prefix; - this.stream = options.stream; - this.enableDataChannels = options.enableDataChannels === undefined ? 
this.parent.config.enableDataChannels : options.enableDataChannels; - this.receiveMedia = options.receiveMedia || this.parent.config.receiveMedia; - this.channels = {}; - // Create an RTCPeerConnection via the polyfill - this.pc = new PeerConnection(this.parent.config.peerConnectionConfig, this.parent.config.peerConnectionContraints); - this.pc.on('ice', this.onIceCandidate.bind(this)); - this.pc.on('addStream', this.handleRemoteStreamAdded.bind(this)); - this.pc.on('addChannel', this.handleDataChannelAdded.bind(this)); - this.pc.on('removeStream', this.handleStreamRemoved.bind(this)); - // Just fire negotiation needed events for now - // When browser re-negotiation handling seems to work - // we can use this as the trigger for starting the offer/answer process - // automatically. We'll just leave it be for now while this stabalizes. - this.pc.on('negotiationNeeded', this.emit.bind(this, 'negotiationNeeded')); - this.pc.on('iceConnectionStateChange', this.emit.bind(this, 'iceConnectionStateChange')); - this.pc.on('iceConnectionStateChange', function () { - switch (self.pc.iceConnectionState) { - case 'failed': - // currently, in chrome only the initiator goes to failed - // so we need to signal this to the peer - if (self.pc.pc.peerconnection.localDescription.type === 'offer') { - self.parent.emit('iceFailed', self); - self.send('connectivityError'); - } - break; - } - }); - this.pc.on('signalingStateChange', this.emit.bind(this, 'signalingStateChange')); - this.logger = this.parent.logger; - - // handle screensharing/broadcast mode - if (options.type === 'screen') { - if (this.parent.localScreen && this.sharemyscreen) { - this.logger.log('adding local screen stream to peer connection'); - this.pc.addStream(this.parent.localScreen); - this.broadcaster = options.broadcaster; - } - } else { - this.parent.localStreams.forEach(function (stream) { - self.pc.addStream(stream); - }); - } - - // call emitter constructor - WildEmitter.call(this); - - // proxy events to parent - this.on('*', function () { - self.parent.emit.apply(self.parent, arguments); - }); -} - -Peer.prototype = Object.create(WildEmitter.prototype, { - constructor: { - value: Peer - } -}); - -Peer.prototype.handleMessage = function (message) { - var self = this; - - this.logger.log('getting', message.type, message); - - if (message.prefix) this.browserPrefix = message.prefix; - - if (message.type === 'offer') { - this.pc.handleOffer(message.payload, function (err) { - if (err) { - return; - } - // auto-accept - self.pc.answer(self.receiveMedia, function (err, sessionDescription) { - self.send('answer', sessionDescription); - }); - }); - } else if (message.type === 'answer') { - this.pc.handleAnswer(message.payload); - } else if (message.type === 'candidate') { - this.pc.processIce(message.payload); - } else if (message.type === 'connectivityError') { - this.parent.emit('connectivityError', self); - } else if (message.type === 'mute') { - this.parent.emit('mute', {id: message.from, name: message.payload.name}); - } else if (message.type === 'unmute') { - this.parent.emit('unmute', {id: message.from, name: message.payload.name}); - } else { - this.parent.emit(message.type, {id: message.from, payload: message.payload, roomType: message.roomType}); - } -}; - -// send via signalling channel -Peer.prototype.send = function (messageType, payload) { - var message = { - to: this.id, - broadcaster: this.broadcaster, - roomType: this.type, - type: messageType, - payload: payload, - prefix: webrtc.prefix - }; - this.logger.log('sending', 
messageType, message); - this.parent.emit('message', message); -}; - -// send via data channel -// returns true when message was sent and false if channel is not open -Peer.prototype.sendDirectly = function (channel, messageType, payload) { - var message = { - type: messageType, - payload: payload - }; - this.logger.log('sending via datachannel', channel, messageType, message); - var dc = this.getDataChannel(channel); - if (dc.readyState != 'open') return false; - dc.send(JSON.stringify(message)); - return true; -}; - -// Internal method registering handlers for a data channel and emitting events on the peer -Peer.prototype._observeDataChannel = function (channel) { - var self = this; - channel.onclose = this.emit.bind(this, 'channelClose', channel); - channel.onerror = this.emit.bind(this, 'channelError', channel); - channel.onmessage = function (event) { - self.emit('channelMessage', self, channel.label, JSON.parse(event.data), channel, event); - }; - channel.onopen = this.emit.bind(this, 'channelOpen', channel); -}; - -// Fetch or create a data channel by the given name -Peer.prototype.getDataChannel = function (name, opts) { - if (!webrtc.dataChannel) return this.emit('error', new Error('createDataChannel not supported')); - var channel = this.channels[name]; - opts || (opts = {}); - if (channel) return channel; - // if we don't have one by this label, create it - channel = this.channels[name] = this.pc.createDataChannel(name, opts); - this._observeDataChannel(channel); - return channel; -}; - -Peer.prototype.onIceCandidate = function (candidate) { - if (this.closed) return; - if (candidate) { - this.send('candidate', candidate); - } else { - this.logger.log("End of candidates."); - } -}; - -Peer.prototype.start = function () { - var self = this; - - // well, the webrtc api requires that we either - // a) create a datachannel a priori - // b) do a renegotiation later to add the SCTP m-line - // Let's do (a) first... - if (this.enableDataChannels) { - this.getDataChannel('simplewebrtc'); - } - - this.pc.offer(this.receiveMedia, function (err, sessionDescription) { - self.send('offer', sessionDescription); - }); -}; - -Peer.prototype.end = function () { - if (this.closed) return; - this.pc.close(); - this.handleStreamRemoved(); -}; - -Peer.prototype.handleRemoteStreamAdded = function (event) { - var self = this; - if (this.stream) { - this.logger.warn('Already have a remote stream'); - } else { - this.stream = event.stream; - // FIXME: addEventListener('ended', ...) 
would be nicer - // but does not work in firefox - this.stream.onended = function () { - self.end(); - }; - this.parent.emit('peerStreamAdded', this); - } -}; - -Peer.prototype.handleStreamRemoved = function () { - this.parent.peers.splice(this.parent.peers.indexOf(this), 1); - this.closed = true; - this.parent.emit('peerStreamRemoved', this); -}; - -Peer.prototype.handleDataChannelAdded = function (channel) { - this.channels[channel.label] = channel; - this._observeDataChannel(channel); -}; - module.exports = WebRTC; -},{"localmedia":12,"mockconsole":6,"rtcpeerconnection":11,"util":8,"webrtcsupport":10,"wildemitter":3}],13:[function(require,module,exports){ +},{"./peer":11,"localmedia":12,"mockconsole":7,"util":8,"webrtcsupport":10,"wildemitter":2}],13:[function(require,module,exports){ // shim for using process in browser var process = module.exports = {}; @@ -5543,30 +5335,585 @@ EventEmitter.listenerCount = function(emitter, type) { return ret; }; -},{"__browserify_process":13}],14:[function(require,module,exports){ -// Underscore.js 1.6.0 -// http://underscorejs.org -// (c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors -// Underscore may be freely distributed under the MIT license. - -(function() { - - // Baseline setup - // -------------- - - // Establish the root object, `window` in the browser, or `exports` on the server. - var root = this; - - // Save the previous value of the `_` variable. - var previousUnderscore = root._; +},{"__browserify_process":13}],11:[function(require,module,exports){ +var util = require('util'); +var webrtc = require('webrtcsupport'); +var PeerConnection = require('rtcpeerconnection'); +var WildEmitter = require('wildemitter'); - // Establish the object that gets returned to break out of a loop iteration. - var breaker = {}; - // Save bytes in the minified (but not gzipped) version: - var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; +function Peer(options) { + var self = this; - // Create quick reference variables for speed access to core prototypes. + this.id = options.id; + this.parent = options.parent; + this.type = options.type || 'video'; + this.oneway = options.oneway || false; + this.sharemyscreen = options.sharemyscreen || false; + this.browserPrefix = options.prefix; + this.stream = options.stream; + this.enableDataChannels = options.enableDataChannels === undefined ? this.parent.config.enableDataChannels : options.enableDataChannels; + this.receiveMedia = options.receiveMedia || this.parent.config.receiveMedia; + this.channels = {}; + // Create an RTCPeerConnection via the polyfill + this.pc = new PeerConnection(this.parent.config.peerConnectionConfig, this.parent.config.peerConnectionConstraints); + this.pc.on('ice', this.onIceCandidate.bind(this)); + this.pc.on('addStream', this.handleRemoteStreamAdded.bind(this)); + this.pc.on('addChannel', this.handleDataChannelAdded.bind(this)); + this.pc.on('removeStream', this.handleStreamRemoved.bind(this)); + // Just fire negotiation needed events for now + // When browser re-negotiation handling seems to work + // we can use this as the trigger for starting the offer/answer process + // automatically. We'll just leave it be for now while this stabalizes. 
+ this.pc.on('negotiationNeeded', this.emit.bind(this, 'negotiationNeeded')); + this.pc.on('iceConnectionStateChange', this.emit.bind(this, 'iceConnectionStateChange')); + this.pc.on('iceConnectionStateChange', function () { + switch (self.pc.iceConnectionState) { + case 'failed': + // currently, in chrome only the initiator goes to failed + // so we need to signal this to the peer + if (self.pc.pc.peerconnection.localDescription.type === 'offer') { + self.parent.emit('iceFailed', self); + self.send('connectivityError'); + } + break; + } + }); + this.pc.on('signalingStateChange', this.emit.bind(this, 'signalingStateChange')); + this.logger = this.parent.logger; + + // handle screensharing/broadcast mode + if (options.type === 'screen') { + if (this.parent.localScreen && this.sharemyscreen) { + this.logger.log('adding local screen stream to peer connection'); + this.pc.addStream(this.parent.localScreen); + this.broadcaster = options.broadcaster; + } + } else { + this.parent.localStreams.forEach(function (stream) { + self.pc.addStream(stream); + }); + } + + // call emitter constructor + WildEmitter.call(this); + + // proxy events to parent + this.on('*', function () { + self.parent.emit.apply(self.parent, arguments); + }); +} + +util.inherits(Peer, WildEmitter); + +Peer.prototype.handleMessage = function (message) { + var self = this; + + this.logger.log('getting', message.type, message); + + if (message.prefix) this.browserPrefix = message.prefix; + + if (message.type === 'offer') { + this.pc.handleOffer(message.payload, function (err) { + if (err) { + return; + } + // auto-accept + self.pc.answer(self.receiveMedia, function (err, sessionDescription) { + self.send('answer', sessionDescription); + }); + }); + } else if (message.type === 'answer') { + this.pc.handleAnswer(message.payload); + } else if (message.type === 'candidate') { + this.pc.processIce(message.payload); + } else if (message.type === 'connectivityError') { + this.parent.emit('connectivityError', self); + } else if (message.type === 'mute') { + this.parent.emit('mute', {id: message.from, name: message.payload.name}); + } else if (message.type === 'unmute') { + this.parent.emit('unmute', {id: message.from, name: message.payload.name}); + } else { + this.parent.emit(message.type, {id: message.from, payload: message.payload, roomType: message.roomType}); + } +}; + +// send via signalling channel +Peer.prototype.send = function (messageType, payload) { + var message = { + to: this.id, + broadcaster: this.broadcaster, + roomType: this.type, + type: messageType, + payload: payload, + prefix: webrtc.prefix + }; + this.logger.log('sending', messageType, message); + this.parent.emit('message', message); +}; + +// send via data channel +// returns true when message was sent and false if channel is not open +Peer.prototype.sendDirectly = function (channel, messageType, payload) { + var message = { + type: messageType, + payload: payload + }; + this.logger.log('sending via datachannel', channel, messageType, message); + var dc = this.getDataChannel(channel); + if (dc.readyState != 'open') return false; + dc.send(JSON.stringify(message)); + return true; +}; + +// Internal method registering handlers for a data channel and emitting events on the peer +Peer.prototype._observeDataChannel = function (channel) { + var self = this; + channel.onclose = this.emit.bind(this, 'channelClose', channel); + channel.onerror = this.emit.bind(this, 'channelError', channel); + channel.onmessage = function (event) { + self.emit('channelMessage', self, 
channel.label, JSON.parse(event.data), channel, event); + }; + channel.onopen = this.emit.bind(this, 'channelOpen', channel); +}; + +// Fetch or create a data channel by the given name +Peer.prototype.getDataChannel = function (name, opts) { + if (!webrtc.dataChannel) return this.emit('error', new Error('createDataChannel not supported')); + var channel = this.channels[name]; + opts || (opts = {}); + if (channel) return channel; + // if we don't have one by this label, create it + channel = this.channels[name] = this.pc.createDataChannel(name, opts); + this._observeDataChannel(channel); + return channel; +}; + +Peer.prototype.onIceCandidate = function (candidate) { + if (this.closed) return; + if (candidate) { + this.send('candidate', candidate); + } else { + this.logger.log("End of candidates."); + } +}; + +Peer.prototype.start = function () { + var self = this; + + // well, the webrtc api requires that we either + // a) create a datachannel a priori + // b) do a renegotiation later to add the SCTP m-line + // Let's do (a) first... + if (this.enableDataChannels) { + this.getDataChannel('simplewebrtc'); + } + + this.pc.offer(this.receiveMedia, function (err, sessionDescription) { + self.send('offer', sessionDescription); + }); +}; + +Peer.prototype.end = function () { + if (this.closed) return; + this.pc.close(); + this.handleStreamRemoved(); +}; + +Peer.prototype.handleRemoteStreamAdded = function (event) { + var self = this; + if (this.stream) { + this.logger.warn('Already have a remote stream'); + } else { + this.stream = event.stream; + // FIXME: addEventListener('ended', ...) would be nicer + // but does not work in firefox + this.stream.onended = function () { + self.end(); + }; + this.parent.emit('peerStreamAdded', this); + } +}; + +Peer.prototype.handleStreamRemoved = function () { + this.parent.peers.splice(this.parent.peers.indexOf(this), 1); + this.closed = true; + this.parent.emit('peerStreamRemoved', this); +}; + +Peer.prototype.handleDataChannelAdded = function (channel) { + this.channels[channel.label] = channel; + this._observeDataChannel(channel); +}; + +module.exports = Peer; + +},{"rtcpeerconnection":14,"util":8,"webrtcsupport":10,"wildemitter":2}],15:[function(require,module,exports){ +// getUserMedia helper by @HenrikJoreteg +var func = (window.navigator.getUserMedia || + window.navigator.webkitGetUserMedia || + window.navigator.mozGetUserMedia || + window.navigator.msGetUserMedia); + + +module.exports = function (constraints, cb) { + var options; + var haveOpts = arguments.length === 2; + var defaultOpts = {video: true, audio: true}; + var error; + var denied = 'PERMISSION_DENIED'; + var notSatified = 'CONSTRAINT_NOT_SATISFIED'; + + // make constraints optional + if (!haveOpts) { + cb = constraints; + constraints = defaultOpts; + } + + // treat lack of browser support like an error + if (!func) { + // throw proper error per spec + error = new Error('NavigatorUserMediaError'); + error.name = 'NOT_SUPPORTED_ERROR'; + return cb(error); + } + + func.call(window.navigator, constraints, function (stream) { + cb(null, stream); + }, function (err) { + var error; + // coerce into an error object since FF gives us a string + // there are only two valid names according to the spec + // we coerce all non-denied to "constraint not satisfied". 
+ if (typeof err === 'string') { + error = new Error('NavigatorUserMediaError'); + if (err === denied) { + error.name = denied; + } else { + error.name = notSatified; + } + } else { + // if we get an error object make sure '.name' property is set + // according to spec: http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermediaerror-and-navigatorusermediaerrorcallback + error = err; + if (!error.name) { + // this is likely chrome which + // sets a property called "ERROR_DENIED" on the error object + // if so we make sure to set a name + if (error[denied]) { + err.name = denied; + } else { + err.name = notSatified; + } + } + } + + cb(error); + }); +}; + +},{}],12:[function(require,module,exports){ +var util = require('util'); +var hark = require('hark'); +var webrtc = require('webrtcsupport'); +var getUserMedia = require('getusermedia'); +var getScreenMedia = require('getscreenmedia'); +var WildEmitter = require('wildemitter'); +var GainController = require('mediastream-gain'); +var mockconsole = require('mockconsole'); + + +function LocalMedia(opts) { + WildEmitter.call(this); + + var config = this.config = { + autoAdjustMic: false, + detectSpeakingEvents: true, + media: { + audio: true, + video: true + }, + logger: mockconsole + }; + + var item; + for (item in opts) { + this.config[item] = opts[item]; + } + + this.logger = config.logger; + this._log = this.logger.log.bind(this.logger, 'LocalMedia:'); + this._logerror = this.logger.error.bind(this.logger, 'LocalMedia:'); + + this.screenSharingSupport = webrtc.screenSharing; + + this.localStreams = []; + this.localScreens = []; + + if (!webrtc.support) { + this._logerror('Your browser does not support local media capture.'); + } +} + +util.inherits(LocalMedia, WildEmitter); + + +LocalMedia.prototype.start = function (mediaConstraints, cb) { + var self = this; + var constraints = mediaConstraints || this.config.media; + + getUserMedia(constraints, function (err, stream) { + if (!err) { + if (constraints.audio && self.config.detectSpeakingEvents) { + self.setupAudioMonitor(stream, self.config.harkOptions); + } + self.localStreams.push(stream); + + if (self.config.autoAdjustMic) { + self.gainController = new GainController(stream); + // start out somewhat muted if we can track audio + self.setMicIfEnabled(0.5); + } + + // TODO: might need to migrate to the video tracks onended + // FIXME: firefox does not seem to trigger this... 
+ stream.onended = function () { + /* + var idx = self.localStreams.indexOf(stream); + if (idx > -1) { + self.localScreens.splice(idx, 1); + } + self.emit('localStreamStopped', stream); + */ + }; + + self.emit('localStream', stream); + } + if (cb) { + return cb(err, stream); + } + }); +}; + +LocalMedia.prototype.stop = function (stream) { + var self = this; + // FIXME: duplicates cleanup code until fixed in FF + if (stream) { + stream.stop(); + self.emit('localStreamStopped', stream); + var idx = self.localStreams.indexOf(stream); + if (idx > -1) { + self.localStreams = self.localStreams.splice(idx, 1); + } + } else { + if (this.audioMonitor) { + this.audioMonitor.stop(); + delete this.audioMonitor; + } + this.localStreams.forEach(function (stream) { + stream.stop(); + self.emit('localStreamStopped', stream); + }); + this.localStreams = []; + } +}; + +LocalMedia.prototype.startScreenShare = function (cb) { + var self = this; + getScreenMedia(function (err, stream) { + if (!err) { + self.localScreens.push(stream); + + // TODO: might need to migrate to the video tracks onended + // Firefox does not support .onended but it does not support + // screensharing either + stream.onended = function () { + var idx = self.localScreens.indexOf(stream); + if (idx > -1) { + self.localScreens.splice(idx, 1); + } + self.emit('localScreenStopped', stream); + }; + self.emit('localScreen', stream); + } + + // enable the callback + if (cb) { + return cb(err, stream); + } + }); +}; + +LocalMedia.prototype.stopScreenShare = function (stream) { + if (stream) { + stream.stop(); + } else { + this.localScreens.forEach(function (stream) { + stream.stop(); + }); + this.localScreens = []; + } +}; + +// Audio controls +LocalMedia.prototype.mute = function () { + this._audioEnabled(false); + this.hardMuted = true; + this.emit('audioOff'); +}; + +LocalMedia.prototype.unmute = function () { + this._audioEnabled(true); + this.hardMuted = false; + this.emit('audioOn'); +}; + +LocalMedia.prototype.setupAudioMonitor = function (stream, harkOptions) { + this._log('Setup audio'); + var audio = this.audioMonitor = hark(stream, harkOptions); + var self = this; + var timeout; + + audio.on('speaking', function () { + self.emit('speaking'); + if (self.hardMuted) { + return; + } + self.setMicIfEnabled(1); + }); + + audio.on('stopped_speaking', function () { + if (timeout) { + clearTimeout(timeout); + } + + timeout = setTimeout(function () { + self.emit('stoppedSpeaking'); + if (self.hardMuted) { + return; + } + self.setMicIfEnabled(0.5); + }, 1000); + }); + audio.on('volume_change', function (volume, treshold) { + self.emit('volumeChange', volume, treshold); + }); +}; + +// We do this as a seperate method in order to +// still leave the "setMicVolume" as a working +// method. 
+LocalMedia.prototype.setMicIfEnabled = function (volume) { + if (!this.config.autoAdjustMic) { + return; + } + this.gainController.setGain(volume); +}; + +// Video controls +LocalMedia.prototype.pauseVideo = function () { + this._videoEnabled(false); + this.emit('videoOff'); +}; +LocalMedia.prototype.resumeVideo = function () { + this._videoEnabled(true); + this.emit('videoOn'); +}; + +// Combined controls +LocalMedia.prototype.pause = function () { + this._audioEnabled(false); + this.pauseVideo(); +}; +LocalMedia.prototype.resume = function () { + this._audioEnabled(true); + this.resumeVideo(); +}; + +// Internal methods for enabling/disabling audio/video +LocalMedia.prototype._audioEnabled = function (bool) { + // work around for chrome 27 bug where disabling tracks + // doesn't seem to work (works in canary, remove when working) + this.setMicIfEnabled(bool ? 1 : 0); + this.localStreams.forEach(function (stream) { + stream.getAudioTracks().forEach(function (track) { + track.enabled = !!bool; + }); + }); +}; +LocalMedia.prototype._videoEnabled = function (bool) { + this.localStreams.forEach(function (stream) { + stream.getVideoTracks().forEach(function (track) { + track.enabled = !!bool; + }); + }); +}; + +// check if all audio streams are enabled +LocalMedia.prototype.isAudioEnabled = function () { + var enabled = true; + this.localStreams.forEach(function (stream) { + stream.getAudioTracks().forEach(function (track) { + enabled = enabled && track.enabled; + }); + }); + return enabled; +}; + +// check if all video streams are enabled +LocalMedia.prototype.isVideoEnabled = function () { + var enabled = true; + this.localStreams.forEach(function (stream) { + stream.getVideoTracks().forEach(function (track) { + enabled = enabled && track.enabled; + }); + }); + return enabled; +}; + +// Backwards Compat +LocalMedia.prototype.startLocalMedia = LocalMedia.prototype.start; +LocalMedia.prototype.stopLocalMedia = LocalMedia.prototype.stop; + +// fallback for old .localStream behaviour +Object.defineProperty(LocalMedia.prototype, 'localStream', { + get: function () { + return this.localStreams.length > 0 ? this.localStreams[0] : null; + } +}); +// fallback for old .localScreen behaviour +Object.defineProperty(LocalMedia.prototype, 'localScreen', { + get: function () { + return this.localScreens.length > 0 ? this.localScreens[0] : null; + } +}); + +module.exports = LocalMedia; + +},{"getscreenmedia":17,"getusermedia":15,"hark":16,"mediastream-gain":18,"mockconsole":7,"util":8,"webrtcsupport":10,"wildemitter":2}],19:[function(require,module,exports){ +// Underscore.js 1.6.0 +// http://underscorejs.org +// (c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors +// Underscore may be freely distributed under the MIT license. + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `exports` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Establish the object that gets returned to break out of a loop iteration. + var breaker = {}; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. 
var push = ArrayProto.push, slice = ArrayProto.slice, @@ -6874,98 +7221,21 @@ EventEmitter.listenerCount = function(emitter, type) { }); - // AMD registration happens at the end for compatibility with AMD loaders - // that may not enforce next-turn semantics on modules. Even though general - // practice for AMD registration is to be anonymous, underscore registers - // as a named module because, like jQuery, it is a base library that is - // popular enough to be bundled in a third party lib, but not be part of - // an AMD load request. Those cases could generate an error when an - // anonymous define() is called outside of a loader request. - if (typeof define === 'function' && define.amd) { - define('underscore', [], function() { - return _; - }); - } -}).call(this); - -},{}],15:[function(require,module,exports){ -// getUserMedia helper by @HenrikJoreteg -var func = (window.navigator.getUserMedia || - window.navigator.webkitGetUserMedia || - window.navigator.mozGetUserMedia || - window.navigator.msGetUserMedia); - - -module.exports = function (constraints, cb) { - var options; - var haveOpts = arguments.length === 2; - var defaultOpts = {video: true, audio: true}; - var error; - var denied = 'PERMISSION_DENIED'; - var notSatified = 'CONSTRAINT_NOT_SATISFIED'; - - // make constraints optional - if (!haveOpts) { - cb = constraints; - constraints = defaultOpts; - } - - // treat lack of browser support like an error - if (!func) { - // throw proper error per spec - error = new Error('NavigatorUserMediaError'); - error.name = 'NOT_SUPPORTED_ERROR'; - return cb(error); - } - - func.call(window.navigator, constraints, function (stream) { - cb(null, stream); - }, function (err) { - var error; - // coerce into an error object since FF gives us a string - // there are only two valid names according to the spec - // we coerce all non-denied to "constraint not satisfied". - if (typeof err === 'string') { - error = new Error('NavigatorUserMediaError'); - if (err === denied) { - error.name = denied; - } else { - error.name = notSatified; - } - } else { - // if we get an error object make sure '.name' property is set - // according to spec: http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermediaerror-and-navigatorusermediaerrorcallback - error = err; - if (!error.name) { - // this is likely chrome which - // sets a property called "ERROR_DENIED" on the error object - // if so we make sure to set a name - if (error[denied]) { - err.name = denied; - } else { - err.name = notSatified; - } - } - } - - cb(error); - }); -}; - -},{}],16:[function(require,module,exports){ -var tosdp = require('./lib/tosdp'); -var tojson = require('./lib/tojson'); - - -exports.toSessionSDP = tosdp.toSessionSDP; -exports.toMediaSDP = tosdp.toMediaSDP; -exports.toCandidateSDP = tosdp.toCandidateSDP; - -exports.toSessionJSON = tojson.toSessionJSON; -exports.toMediaJSON = tojson.toMediaJSON; -exports.toCandidateJSON = tojson.toCandidateJSON; + // AMD registration happens at the end for compatibility with AMD loaders + // that may not enforce next-turn semantics on modules. Even though general + // practice for AMD registration is to be anonymous, underscore registers + // as a named module because, like jQuery, it is a base library that is + // popular enough to be bundled in a third party lib, but not be part of + // an AMD load request. Those cases could generate an error when an + // anonymous define() is called outside of a loader request. 
+ if (typeof define === 'function' && define.amd) { + define('underscore', [], function() { + return _; + }); + } +}).call(this); -},{"./lib/tojson":18,"./lib/tosdp":17}],11:[function(require,module,exports){ +},{}],14:[function(require,module,exports){ var _ = require('underscore'); var util = require('util'); var webrtc = require('webrtcsupport'); @@ -7387,285 +7657,100 @@ PeerConnection.prototype.getStats = function (cb) { module.exports = PeerConnection; -},{"sdp-jingle-json":16,"traceablepeerconnection":19,"underscore":14,"util":8,"webrtcsupport":10,"wildemitter":3}],12:[function(require,module,exports){ -var util = require('util'); -var hark = require('hark'); -var webrtc = require('webrtcsupport'); -var getUserMedia = require('getusermedia'); -var getScreenMedia = require('getscreenmedia'); -var WildEmitter = require('wildemitter'); -var GainController = require('mediastream-gain'); -var mockconsole = require('mockconsole'); - - -function LocalMedia(opts) { - WildEmitter.call(this); - - var config = this.config = { - autoAdjustMic: false, - detectSpeakingEvents: true, - media: { - audio: true, - video: true - }, - logger: mockconsole - }; - - var item; - for (item in opts) { - this.config[item] = opts[item]; - } - - this.logger = config.logger; - this._log = this.logger.log.bind(this.logger, 'LocalMedia:'); - this._logerror = this.logger.error.bind(this.logger, 'LocalMedia:'); - - this.screenSharingSupport = webrtc.screenSharing; - - this.localStreams = []; - this.localScreens = []; - - if (!webrtc.support) { - this._logerror('Your browser does not support local media capture.'); - } -} - -util.inherits(LocalMedia, WildEmitter); - - -LocalMedia.prototype.start = function (mediaConstraints, cb) { - var self = this; - var constraints = mediaConstraints || this.config.media; - - getUserMedia(constraints, function (err, stream) { - if (!err) { - if (constraints.audio && self.config.detectSpeakingEvents) { - self.setupAudioMonitor(stream, self.config.harkOptions); - } - self.localStreams.push(stream); - - if (self.config.autoAdjustMic) { - self.gainController = new GainController(stream); - // start out somewhat muted if we can track audio - self.setMicIfEnabled(0.5); - } - - // TODO: might need to migrate to the video tracks onended - // FIXME: firefox does not seem to trigger this... 
- stream.onended = function () { - /* - var idx = self.localStreams.indexOf(stream); - if (idx > -1) { - self.localScreens.splice(idx, 1); - } - self.emit('localStreamStopped', stream); - */ - }; - - self.emit('localStream', stream); - } - if (cb) { - return cb(err, stream); - } - }); -}; - -LocalMedia.prototype.stop = function (stream) { - var self = this; - // FIXME: duplicates cleanup code until fixed in FF - if (stream) { - stream.stop(); - self.emit('localStreamStopped', stream); - var idx = self.localStreams.indexOf(stream); - if (idx > -1) { - self.localStreams = self.localStreams.splice(idx, 1); - } - } else { - if (this.audioMonitor) { - this.audioMonitor.stop(); - delete this.audioMonitor; - } - this.localStreams.forEach(function (stream) { - stream.stop(); - self.emit('localStreamStopped', stream); - }); - this.localStreams = []; - } -}; - -LocalMedia.prototype.startScreenShare = function (cb) { - var self = this; - getScreenMedia(function (err, stream) { - if (!err) { - self.localScreens.push(stream); - - // TODO: might need to migrate to the video tracks onended - // Firefox does not support .onended but it does not support - // screensharing either - stream.onended = function () { - var idx = self.localScreens.indexOf(stream); - if (idx > -1) { - self.localScreens.splice(idx, 1); - } - self.emit('localScreenStopped', stream); - }; - self.emit('localScreen', stream); - } - - // enable the callback - if (cb) { - return cb(err, stream); - } - }); -}; - -LocalMedia.prototype.stopScreenShare = function (stream) { - if (stream) { - stream.stop(); - } else { - this.localScreens.forEach(function (stream) { - stream.stop(); - }); - this.localScreens = []; - } -}; - -// Audio controls -LocalMedia.prototype.mute = function () { - this._audioEnabled(false); - this.hardMuted = true; - this.emit('audioOff'); -}; - -LocalMedia.prototype.unmute = function () { - this._audioEnabled(true); - this.hardMuted = false; - this.emit('audioOn'); -}; - -LocalMedia.prototype.setupAudioMonitor = function (stream, harkOptions) { - this._log('Setup audio'); - var audio = this.audioMonitor = hark(stream, harkOptions); - var self = this; - var timeout; - - audio.on('speaking', function () { - self.emit('speaking'); - if (self.hardMuted) { - return; - } - self.setMicIfEnabled(1); - }); - - audio.on('stopped_speaking', function () { - if (timeout) { - clearTimeout(timeout); - } +},{"sdp-jingle-json":20,"traceablepeerconnection":21,"underscore":19,"util":8,"webrtcsupport":10,"wildemitter":2}],20:[function(require,module,exports){ +var tosdp = require('./lib/tosdp'); +var tojson = require('./lib/tojson'); - timeout = setTimeout(function () { - self.emit('stoppedSpeaking'); - if (self.hardMuted) { - return; - } - self.setMicIfEnabled(0.5); - }, 1000); - }); - audio.on('volume_change', function (volume, treshold) { - self.emit('volumeChange', volume, treshold); - }); -}; -// We do this as a seperate method in order to -// still leave the "setMicVolume" as a working -// method. 
-LocalMedia.prototype.setMicIfEnabled = function (volume) { - if (!this.config.autoAdjustMic) { - return; - } - this.gainController.setGain(volume); -}; +exports.toSessionSDP = tosdp.toSessionSDP; +exports.toMediaSDP = tosdp.toMediaSDP; +exports.toCandidateSDP = tosdp.toCandidateSDP; -// Video controls -LocalMedia.prototype.pauseVideo = function () { - this._videoEnabled(false); - this.emit('videoOff'); -}; -LocalMedia.prototype.resumeVideo = function () { - this._videoEnabled(true); - this.emit('videoOn'); -}; +exports.toSessionJSON = tojson.toSessionJSON; +exports.toMediaJSON = tojson.toMediaJSON; +exports.toCandidateJSON = tojson.toCandidateJSON; -// Combined controls -LocalMedia.prototype.pause = function () { - this._audioEnabled(false); - this.pauseVideo(); -}; -LocalMedia.prototype.resume = function () { - this._audioEnabled(true); - this.resumeVideo(); -}; +},{"./lib/tojson":22,"./lib/tosdp":23}],17:[function(require,module,exports){ +// getScreenMedia helper by @HenrikJoreteg +var getUserMedia = require('getusermedia'); -// Internal methods for enabling/disabling audio/video -LocalMedia.prototype._audioEnabled = function (bool) { - // work around for chrome 27 bug where disabling tracks - // doesn't seem to work (works in canary, remove when working) - this.setMicIfEnabled(bool ? 1 : 0); - this.localStreams.forEach(function (stream) { - stream.getAudioTracks().forEach(function (track) { - track.enabled = !!bool; - }); - }); -}; -LocalMedia.prototype._videoEnabled = function (bool) { - this.localStreams.forEach(function (stream) { - stream.getVideoTracks().forEach(function (track) { - track.enabled = !!bool; - }); - }); -}; +// cache for constraints and callback +var cache = {}; -// check if all audio streams are enabled -LocalMedia.prototype.isAudioEnabled = function () { - var enabled = true; - this.localStreams.forEach(function (stream) { - stream.getAudioTracks().forEach(function (track) { - enabled = enabled && track.enabled; - }); - }); - return enabled; -}; +module.exports = function (constraints, cb) { + var hasConstraints = arguments.length === 2; + var callback = hasConstraints ? 
cb : constraints; + var error; -// check if all video streams are enabled -LocalMedia.prototype.isVideoEnabled = function () { - var enabled = true; - this.localStreams.forEach(function (stream) { - stream.getVideoTracks().forEach(function (track) { - enabled = enabled && track.enabled; - }); - }); - return enabled; -}; + if (typeof window === 'undefined' || window.location.protocol === 'http:') { + error = new Error('NavigatorUserMediaError'); + error.name = 'HTTPS_REQUIRED'; + return callback(error); + } -// Backwards Compat -LocalMedia.prototype.startLocalMedia = LocalMedia.prototype.start; -LocalMedia.prototype.stopLocalMedia = LocalMedia.prototype.stop; + if (window.navigator.userAgent.match('Chrome')) { + var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); + var maxver = 33; + // "known" bug in chrome 34 on linux + if (window.navigator.userAgent.match('Linux')) maxver = 34; + if (chromever >= 26 && chromever <= maxver) { + // chrome 26 - chrome 33 way to do it -- requires bad chrome://flags + constraints = (hasConstraints && constraints) || { + video: { + mandatory: { + googLeakyBucket: true, + maxWidth: window.screen.width, + maxHeight: window.screen.height, + maxFrameRate: 3, + chromeMediaSource: 'screen' + } + } + }; + getUserMedia(constraints, callback); + } else { + // chrome 34+ way requiring an extension + var pending = window.setTimeout(function () { + error = new Error('NavigatorUserMediaError'); + error.name = 'EXTENSION_UNAVAILABLE'; + return callback(error); + }, 1000); + cache[pending] = [callback, hasConstraints ? constraint : null]; + window.postMessage({ type: 'getScreen', id: pending }, '*'); + } + } +}; -// fallback for old .localStream behaviour -Object.defineProperty(LocalMedia.prototype, 'localStream', { - get: function () { - return this.localStreams.length > 0 ? this.localStreams[0] : null; +window.addEventListener('message', function (event) { + if (event.origin != window.location.origin) { + return; } -}); -// fallback for old .localScreen behaviour -Object.defineProperty(LocalMedia.prototype, 'localScreen', { - get: function () { - return this.localScreens.length > 0 ? 
this.localScreens[0] : null; + if (event.data.type == 'gotScreen' && cache[event.data.id]) { + var data = cache[event.data.id]; + var constraints = data[1]; + var callback = data[0]; + delete cache[event.data.id]; + + if (event.data.sourceId === '') { // user canceled + var error = error = new Error('NavigatorUserMediaError'); + error.name = 'PERMISSION_DENIED'; + callback(error); + } else { + constraints = constraints || {audio: false, video: {mandatory: { + chromeMediaSource: 'desktop', + chromeMediaSourceId: event.data.sourceId, + googLeakyBucket: true, + maxWidth: window.screen.width, + maxHeight: window.screen.height, + maxFrameRate: 3 + }}}; + getUserMedia(constraints, callback); + } + } else if (event.data.type == 'getScreenPending') { + window.clearTimeout(event.data.id); } }); -module.exports = LocalMedia; - -},{"getscreenmedia":21,"getusermedia":15,"hark":20,"mediastream-gain":22,"mockconsole":6,"util":8,"webrtcsupport":10,"wildemitter":3}],17:[function(require,module,exports){ +},{"getusermedia":15}],23:[function(require,module,exports){ var senders = { 'initiator': 'sendonly', 'responder': 'recvonly', @@ -7707,22 +7792,36 @@ exports.toMediaSDP = function (content) { var payloads = desc.payloads || []; var fingerprints = (transport && transport.fingerprints) || []; - var mline = [desc.media, '1']; - - if ((desc.encryption && desc.encryption.length > 0) || (fingerprints.length > 0)) { - mline.push('RTP/SAVPF'); + var mline = []; + if (desc.descType == 'datachannel') { + mline.push('application'); + mline.push('1'); + mline.push('DTLS/SCTP'); + if (transport.sctp) { + transport.sctp.forEach(function (map) { + mline.push(map.number); + }); + } } else { - mline.push('RTP/AVPF'); + mline.push(desc.media); + mline.push('1'); + if ((desc.encryption && desc.encryption.length > 0) || (fingerprints.length > 0)) { + mline.push('RTP/SAVPF'); + } else { + mline.push('RTP/AVPF'); + } + payloads.forEach(function (payload) { + mline.push(payload.id); + }); } - payloads.forEach(function (payload) { - mline.push(payload.id); - }); sdp.push('m=' + mline.join(' ')); sdp.push('c=IN IP4 0.0.0.0'); - sdp.push('a=rtcp:1 IN IP4 0.0.0.0'); + if (desc.descType == 'rtp') { + sdp.push('a=rtcp:1 IN IP4 0.0.0.0'); + } if (transport) { if (transport.ufrag) { @@ -7737,9 +7836,16 @@ exports.toMediaSDP = function (content) { fingerprints.forEach(function (fingerprint) { sdp.push('a=fingerprint:' + fingerprint.hash + ' ' + fingerprint.value); }); + if (transport.sctp) { + transport.sctp.forEach(function (map) { + sdp.push('a=sctpmap:' + map.number + ' ' + map.protocol + ' ' + map.streams); + }); + } } - sdp.push('a=' + (senders[content.senders] || 'sendrecv')); + if (desc.descType == 'rtp') { + sdp.push('a=' + (senders[content.senders] || 'sendrecv')); + } sdp.push('a=mid:' + content.name); if (desc.mux) { @@ -7766,82 +7872,264 @@ exports.toMediaSDP = function (content) { sdp.push(fmtp.join(' ')); } - if (payload.feedback) { - payload.feedback.forEach(function (fb) { - if (fb.type === 'trr-int') { - sdp.push('a=rtcp-fb:' + payload.id + ' trr-int ' + fb.value ? fb.value : '0'); - } else { - sdp.push('a=rtcp-fb:' + payload.id + ' ' + fb.type + (fb.subtype ? ' ' + fb.subtype : '')); - } - }); - } - }); + if (payload.feedback) { + payload.feedback.forEach(function (fb) { + if (fb.type === 'trr-int') { + sdp.push('a=rtcp-fb:' + payload.id + ' trr-int ' + fb.value ? fb.value : '0'); + } else { + sdp.push('a=rtcp-fb:' + payload.id + ' ' + fb.type + (fb.subtype ? 
' ' + fb.subtype : '')); + } + }); + } + }); + + if (desc.feedback) { + desc.feedback.forEach(function (fb) { + if (fb.type === 'trr-int') { + sdp.push('a=rtcp-fb:* trr-int ' + fb.value ? fb.value : '0'); + } else { + sdp.push('a=rtcp-fb:* ' + fb.type + (fb.subtype ? ' ' + fb.subtype : '')); + } + }); + } + + var hdrExts = desc.headerExtensions || []; + hdrExts.forEach(function (hdr) { + sdp.push('a=extmap:' + hdr.id + (hdr.senders ? '/' + senders[hdr.senders] : '') + ' ' + hdr.uri); + }); + + var ssrcGroups = desc.sourceGroups || []; + ssrcGroups.forEach(function (ssrcGroup) { + sdp.push('a=ssrc-group:' + ssrcGroup.semantics + ' ' + ssrcGroup.sources.join(' ')); + }); + + var ssrcs = desc.sources || []; + ssrcs.forEach(function (ssrc) { + for (var i = 0; i < ssrc.parameters.length; i++) { + var param = ssrc.parameters[i]; + sdp.push('a=ssrc:' + (ssrc.ssrc || desc.ssrc) + ' ' + param.key + (param.value ? (':' + param.value) : '')); + } + }); + + var candidates = transport.candidates || []; + candidates.forEach(function (candidate) { + sdp.push(exports.toCandidateSDP(candidate)); + }); + + return sdp.join('\r\n'); +}; + +exports.toCandidateSDP = function (candidate) { + var sdp = []; + + sdp.push(candidate.foundation); + sdp.push(candidate.component); + sdp.push(candidate.protocol.toUpperCase()); + sdp.push(candidate.priority); + sdp.push(candidate.ip); + sdp.push(candidate.port); + + var type = candidate.type; + sdp.push('typ'); + sdp.push(type); + if (type === 'srflx' || type === 'prflx' || type === 'relay') { + if (candidate.relAddr && candidate.relPort) { + sdp.push('raddr'); + sdp.push(candidate.relAddr); + sdp.push('rport'); + sdp.push(candidate.relPort); + } + } + + sdp.push('generation'); + sdp.push(candidate.generation || '0'); + + // FIXME: apparently this is wrong per spec + // but then, we need this when actually putting this into + // SDP so it's going to stay. 
+ // decision needs to be revisited when browsers dont + // accept this any longer + return 'a=candidate:' + sdp.join(' '); +}; + +},{}],18:[function(require,module,exports){ +var support = require('webrtcsupport'); + + +function GainController(stream) { + this.support = support.webAudio && support.mediaStream; + + // set our starting value + this.gain = 1; + + if (this.support) { + var context = this.context = new support.AudioContext(); + this.microphone = context.createMediaStreamSource(stream); + this.gainFilter = context.createGain(); + this.destination = context.createMediaStreamDestination(); + this.outputStream = this.destination.stream; + this.microphone.connect(this.gainFilter); + this.gainFilter.connect(this.destination); + stream.addTrack(this.outputStream.getAudioTracks()[0]); + stream.removeTrack(stream.getAudioTracks()[0]); + } + this.stream = stream; +} + +// setting +GainController.prototype.setGain = function (val) { + // check for support + if (!this.support) return; + this.gainFilter.gain.value = val; + this.gain = val; +}; + +GainController.prototype.getGain = function () { + return this.gain; +}; + +GainController.prototype.off = function () { + return this.setGain(0); +}; + +GainController.prototype.on = function () { + this.setGain(1); +}; + + +module.exports = GainController; + +},{"webrtcsupport":10}],16:[function(require,module,exports){ +var WildEmitter = require('wildemitter'); + +function getMaxVolume (analyser, fftBins) { + var maxVolume = -Infinity; + analyser.getFloatFrequencyData(fftBins); + + for(var i=4, ii=fftBins.length; i < ii; i++) { + if (fftBins[i] > maxVolume && fftBins[i] < 0) { + maxVolume = fftBins[i]; + } + }; + + return maxVolume; +} + + +var audioContextType = window.webkitAudioContext || window.AudioContext; +// use a single audio context due to hardware limits +var audioContext = null; +module.exports = function(stream, options) { + var harker = new WildEmitter(); + - if (desc.feedback) { - desc.feedback.forEach(function (fb) { - if (fb.type === 'trr-int') { - sdp.push('a=rtcp-fb:* trr-int ' + fb.value ? fb.value : '0'); - } else { - sdp.push('a=rtcp-fb:* ' + fb.type + (fb.subtype ? ' ' + fb.subtype : '')); - } - }); - } + // make it not break in non-supported browsers + if (!audioContextType) return harker; - var hdrExts = desc.headerExtensions || []; - hdrExts.forEach(function (hdr) { - sdp.push('a=extmap:' + hdr.id + (hdr.senders ? '/' + senders[hdr.senders] : '') + ' ' + hdr.uri); - }); + //Config + var options = options || {}, + smoothing = (options.smoothing || 0.1), + interval = (options.interval || 50), + threshold = options.threshold, + play = options.play, + history = options.history || 10, + running = true; - var ssrcGroups = desc.sourceGroups || []; - ssrcGroups.forEach(function (ssrcGroup) { - sdp.push('a=ssrc-group:' + ssrcGroup.semantics + ' ' + ssrcGroup.sources.join(' ')); - }); + //Setup Audio Context + if (!audioContext) { + audioContext = new audioContextType(); + } + var sourceNode, fftBins, analyser; - var ssrcs = desc.sources || []; - ssrcs.forEach(function (ssrc) { - for (var i = 0; i < ssrc.parameters.length; i++) { - var param = ssrc.parameters[i]; - sdp.push('a=ssrc:' + (ssrc.ssrc || desc.ssrc) + ' ' + param.key + (param.value ? 
(':' + param.value) : '')); - } - }); + analyser = audioContext.createAnalyser(); + analyser.fftSize = 512; + analyser.smoothingTimeConstant = smoothing; + fftBins = new Float32Array(analyser.fftSize); - var candidates = transport.candidates || []; - candidates.forEach(function (candidate) { - sdp.push(exports.toCandidateSDP(candidate)); - }); + if (stream.jquery) stream = stream[0]; + if (stream instanceof HTMLAudioElement || stream instanceof HTMLVideoElement) { + //Audio Tag + sourceNode = audioContext.createMediaElementSource(stream); + if (typeof play === 'undefined') play = true; + threshold = threshold || -50; + } else { + //WebRTC Stream + sourceNode = audioContext.createMediaStreamSource(stream); + threshold = threshold || -50; + } - return sdp.join('\r\n'); -}; + sourceNode.connect(analyser); + if (play) analyser.connect(audioContext.destination); -exports.toCandidateSDP = function (candidate) { - var sdp = []; + harker.speaking = false; - sdp.push(candidate.foundation); - sdp.push(candidate.component); - sdp.push(candidate.protocol); - sdp.push(candidate.priority); - sdp.push(candidate.ip); - sdp.push(candidate.port); + harker.setThreshold = function(t) { + threshold = t; + }; - var type = candidate.type; - sdp.push('typ'); - sdp.push(type); - if (type === 'srflx' || type === 'prflx' || type === 'relay') { - if (candidate.relAddr && candidate.relPort) { - sdp.push('raddr'); - sdp.push(candidate.relAddr); - sdp.push('rport'); - sdp.push(candidate.relPort); - } + harker.setInterval = function(i) { + interval = i; + }; + + harker.stop = function() { + running = false; + harker.emit('volume_change', -100, threshold); + if (harker.speaking) { + harker.speaking = false; + harker.emit('stopped_speaking'); } + }; + harker.speakingHistory = []; + for (var i = 0; i < history; i++) { + harker.speakingHistory.push(0); + } - sdp.push('generation'); - sdp.push(candidate.generation || '0'); + // Poll the analyser node to determine if speaking + // and emit events if changed + var looper = function() { + setTimeout(function() { + + //check if stop has been called + if(!running) { + return; + } + + var currentVolume = getMaxVolume(analyser, fftBins); - return 'a=candidate:' + sdp.join(' '); -}; + harker.emit('volume_change', currentVolume, threshold); -},{}],18:[function(require,module,exports){ + var history = 0; + if (currentVolume > threshold && !harker.speaking) { + // trigger quickly, short history + for (var i = harker.speakingHistory.length - 3; i < harker.speakingHistory.length; i++) { + history += harker.speakingHistory[i]; + } + if (history >= 2) { + harker.speaking = true; + harker.emit('speaking'); + } + } else if (currentVolume < threshold && harker.speaking) { + for (var i = 0; i < harker.speakingHistory.length; i++) { + history += harker.speakingHistory[i]; + } + if (history == 0) { + harker.speaking = false; + harker.emit('stopped_speaking'); + } + } + harker.speakingHistory.shift(); + harker.speakingHistory.push(0 + (currentVolume > threshold)); + + looper(); + }, interval); + }; + looper(); + + + return harker; +} + +},{"wildemitter":2}],22:[function(require,module,exports){ var parsers = require('./parsers'); var idCounter = Math.random(); @@ -7895,17 +8183,20 @@ exports.toMediaJSON = function (media, session, creator) { transport: { transType: 'iceUdp', candidates: [], - fingerprints: [] + fingerprints: [], } }; + if (mline.media == 'application') { + // FIXME: the description is most likely to be independent + // of the SDP and should be processed by other parts of the 
library + content.description = { + descType: 'datachannel' + }; + content.transport.sctp = []; + } var desc = content.description; var trans = content.transport; - var ssrc = parsers.findLine('a=ssrc:', lines); - if (ssrc) { - desc.ssrc = ssrc.substr(7).split(' ')[0]; - } - // If we have a mid, use that for the content name instead. var mid = parsers.findLine('a=mid:', lines); if (mid) { @@ -7922,172 +8213,108 @@ exports.toMediaJSON = function (media, session, creator) { content.senders = 'none'; } - var rtpmapLines = parsers.findLines('a=rtpmap:', lines); - rtpmapLines.forEach(function (line) { - var payload = parsers.rtpmap(line); - payload.feedback = []; - - var fmtpLines = parsers.findLines('a=fmtp:' + payload.id, lines); - fmtpLines.forEach(function (line) { - payload.parameters = parsers.fmtp(line); - }); - - var fbLines = parsers.findLines('a=rtcp-fb:' + payload.id, lines); - fbLines.forEach(function (line) { - payload.feedback.push(parsers.rtcpfb(line)); - }); - - desc.payloads.push(payload); - }); - - var cryptoLines = parsers.findLines('a=crypto:', lines, sessionLines); - cryptoLines.forEach(function (line) { - desc.encryption.push(parsers.crypto(line)); - }); - - if (parsers.findLine('a=rtcp-mux', lines)) { - desc.mux = true; - } - - var fbLines = parsers.findLines('a=rtcp-fb:*', lines); - fbLines.forEach(function (line) { - desc.feedback.push(parsers.rtcpfb(line)); - }); - - var extLines = parsers.findLines('a=extmap:', lines); - extLines.forEach(function (line) { - var ext = parsers.extmap(line); - - var senders = { - sendonly: 'responder', - recvonly: 'initiator', - sendrecv: 'both', - inactive: 'none' - }; - ext.senders = senders[ext.senders]; - - desc.headerExtensions.push(ext); - }); - - var ssrcGroupLines = parsers.findLines('a=ssrc-group:', lines); - desc.sourceGroups = parsers.sourceGroups(ssrcGroupLines || []); - - var ssrcLines = parsers.findLines('a=ssrc:', lines); - desc.sources = parsers.sources(ssrcLines || []); - - var fingerprintLines = parsers.findLines('a=fingerprint:', lines, sessionLines); - fingerprintLines.forEach(function (line) { - var fp = parsers.fingerprint(line); - var setup = parsers.findLine('a=setup:', lines, sessionLines); - if (setup) { - fp.setup = setup.substr(8); + if (desc.descType == 'rtp') { + var ssrc = parsers.findLine('a=ssrc:', lines); + if (ssrc) { + desc.ssrc = ssrc.substr(7).split(' ')[0]; } - trans.fingerprints.push(fp); - }); - - var ufragLine = parsers.findLine('a=ice-ufrag:', lines, sessionLines); - var pwdLine = parsers.findLine('a=ice-pwd:', lines, sessionLines); - if (ufragLine && pwdLine) { - trans.ufrag = ufragLine.substr(12); - trans.pwd = pwdLine.substr(10); - trans.candidates = []; - - var candidateLines = parsers.findLines('a=candidate:', lines, sessionLines); - candidateLines.forEach(function (line) { - trans.candidates.push(exports.toCandidateJSON(line)); - }); - } - - return content; -}; - -exports.toCandidateJSON = function (line) { - var candidate = parsers.candidate(line.split('\r\n')[0]); - candidate.id = (idCounter++).toString(36).substr(0, 12); - return candidate; -}; -},{"./parsers":23}],21:[function(require,module,exports){ -// getScreenMedia helper by @HenrikJoreteg -var getUserMedia = require('getusermedia'); - -// cache for constraints and callback -var cache = {}; + var rtpmapLines = parsers.findLines('a=rtpmap:', lines); + rtpmapLines.forEach(function (line) { + var payload = parsers.rtpmap(line); + payload.feedback = []; -module.exports = function (constraints, cb) { - var hasConstraints = 
arguments.length === 2; - var callback = hasConstraints ? cb : constraints; - var error; + var fmtpLines = parsers.findLines('a=fmtp:' + payload.id, lines); + fmtpLines.forEach(function (line) { + payload.parameters = parsers.fmtp(line); + }); - if (typeof window === 'undefined' || window.location.protocol === 'http:') { - error = new Error('NavigatorUserMediaError'); - error.name = 'HTTPS_REQUIRED'; - return callback(error); - } + var fbLines = parsers.findLines('a=rtcp-fb:' + payload.id, lines); + fbLines.forEach(function (line) { + payload.feedback.push(parsers.rtcpfb(line)); + }); - if (window.navigator.userAgent.match('Chrome')) { - var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); - var maxver = 33; - // "known" crash in chrome 34 on linux - if (window.navigator.userAgent.match('Linux')) maxver = 34; - if (chromever >= 26 && chromever <= maxver) { - // chrome 26 - chrome 33 way to do it -- requires bad chrome://flags - constraints = (hasConstraints && constraints) || { - video: { - mandatory: { - googLeakyBucket: true, - maxWidth: window.screen.width, - maxHeight: window.screen.height, - maxFrameRate: 3, - chromeMediaSource: 'screen' - } - } - }; - getUserMedia(constraints, callback); - } else { - // chrome 34+ way requiring an extension - var pending = window.setTimeout(function () { - error = new Error('NavigatorUserMediaError'); - error.name = 'EXTENSION_UNAVAILABLE'; - return callback(error); - }, 1000); - cache[pending] = [callback, hasConstraints ? constraint : null]; - window.postMessage({ type: 'getScreen', id: pending }, '*'); + desc.payloads.push(payload); + }); + + var cryptoLines = parsers.findLines('a=crypto:', lines, sessionLines); + cryptoLines.forEach(function (line) { + desc.encryption.push(parsers.crypto(line)); + }); + + if (parsers.findLine('a=rtcp-mux', lines)) { + desc.mux = true; } - } -}; -window.addEventListener('message', function (event) { - if (event.origin != window.location.origin) { - return; + var fbLines = parsers.findLines('a=rtcp-fb:*', lines); + fbLines.forEach(function (line) { + desc.feedback.push(parsers.rtcpfb(line)); + }); + + var extLines = parsers.findLines('a=extmap:', lines); + extLines.forEach(function (line) { + var ext = parsers.extmap(line); + + var senders = { + sendonly: 'responder', + recvonly: 'initiator', + sendrecv: 'both', + inactive: 'none' + }; + ext.senders = senders[ext.senders]; + + desc.headerExtensions.push(ext); + }); + + var ssrcGroupLines = parsers.findLines('a=ssrc-group:', lines); + desc.sourceGroups = parsers.sourceGroups(ssrcGroupLines || []); + + var ssrcLines = parsers.findLines('a=ssrc:', lines); + desc.sources = parsers.sources(ssrcLines || []); } - if (event.data.type == 'gotScreen' && cache[event.data.id]) { - var data = cache[event.data.id]; - var constraints = data[1]; - var callback = data[0]; - delete cache[event.data.id]; - if (event.data.sourceId === '') { // user canceled - var error = error = new Error('NavigatorUserMediaError'); - error.name = 'PERMISSION_DENIED'; - callback(error); - } else { - constraints = constraints || {audio: false, video: {mandatory: { - chromeMediaSource: 'desktop', - chromeMediaSourceId: event.data.sourceId, - googLeakyBucket: true, - maxWidth: window.screen.width, - maxHeight: window.screen.height, - maxFrameRate: 3 - }}}; - getUserMedia(constraints, callback); + // transport specific attributes + var fingerprintLines = parsers.findLines('a=fingerprint:', lines, sessionLines); + fingerprintLines.forEach(function (line) { + var fp = 
parsers.fingerprint(line); + var setup = parsers.findLine('a=setup:', lines, sessionLines); + if (setup) { + fp.setup = setup.substr(8); } - } else if (event.data.type == 'getScreenPending') { - window.clearTimeout(event.data.id); + trans.fingerprints.push(fp); + }); + + var ufragLine = parsers.findLine('a=ice-ufrag:', lines, sessionLines); + var pwdLine = parsers.findLine('a=ice-pwd:', lines, sessionLines); + if (ufragLine && pwdLine) { + trans.ufrag = ufragLine.substr(12); + trans.pwd = pwdLine.substr(10); + trans.candidates = []; + + var candidateLines = parsers.findLines('a=candidate:', lines, sessionLines); + candidateLines.forEach(function (line) { + trans.candidates.push(exports.toCandidateJSON(line)); + }); } -}); -},{"getusermedia":15}],23:[function(require,module,exports){ + if (desc.descType == 'datachannel') { + var sctpmapLines = parsers.findLines('a=sctpmap:', lines); + sctpmapLines.forEach(function (line) { + var sctp = parsers.sctpmap(line); + trans.sctp.push(sctp); + }); + } + + return content; +}; + +exports.toCandidateJSON = function (line) { + var candidate = parsers.candidate(line.split('\r\n')[0]); + candidate.id = (idCounter++).toString(36).substr(0, 12); + return candidate; +}; + +},{"./parsers":24}],24:[function(require,module,exports){ exports.lines = function (sdp) { return sdp.split('\r\n').filter(function (line) { return line.length > 0; @@ -8164,6 +8391,18 @@ exports.rtpmap = function (line) { return parsed; }; +exports.sctpmap = function (line) { + // based on -05 draft + var parts = line.substr(10).split(' '); + var parsed = { + number: parts.shift(), + protocol: parts.shift(), + streams: parts.shift() + }; + return parsed; +}; + + exports.fmtp = function (line) { var kv, key, value; var parts = line.substr(line.indexOf(' ') + 1).split(';'); @@ -8234,7 +8473,12 @@ exports.rtcpfb = function (line) { }; exports.candidate = function (line) { - var parts = line.substring(12).split(' '); + var parts; + if (line.indexOf('a=candidate:') === 0) { + parts = line.substring(12).split(' '); + } else { // no a=candidate + parts = line.substring(10).split(' '); + } var candidate = { foundation: parts[0], @@ -8321,54 +8565,7 @@ exports.groups = function (lines) { return parsed; }; -},{}],22:[function(require,module,exports){ -var support = require('webrtcsupport'); - - -function GainController(stream) { - this.support = support.webAudio && support.mediaStream; - - // set our starting value - this.gain = 1; - - if (this.support) { - var context = this.context = new support.AudioContext(); - this.microphone = context.createMediaStreamSource(stream); - this.gainFilter = context.createGain(); - this.destination = context.createMediaStreamDestination(); - this.outputStream = this.destination.stream; - this.microphone.connect(this.gainFilter); - this.gainFilter.connect(this.destination); - stream.removeTrack(stream.getAudioTracks()[0]); - stream.addTrack(this.outputStream.getAudioTracks()[0]); - } - this.stream = stream; -} - -// setting -GainController.prototype.setGain = function (val) { - // check for support - if (!this.support) return; - this.gainFilter.gain.value = val; - this.gain = val; -}; - -GainController.prototype.getGain = function () { - return this.gain; -}; - -GainController.prototype.off = function () { - return this.setGain(0); -}; - -GainController.prototype.on = function () { - this.setGain(1); -}; - - -module.exports = GainController; - -},{"webrtcsupport":10}],19:[function(require,module,exports){ +},{}],21:[function(require,module,exports){ // based on 
https://github.com/ESTOS/strophe.jingle/ // adds wildemitter support var util = require('util'); @@ -8588,136 +8785,6 @@ TraceablePeerConnection.prototype.getStats = function (callback, errback) { module.exports = TraceablePeerConnection; -},{"util":8,"webrtcsupport":10,"wildemitter":3}],20:[function(require,module,exports){ -var WildEmitter = require('wildemitter'); - -function getMaxVolume (analyser, fftBins) { - var maxVolume = -Infinity; - analyser.getFloatFrequencyData(fftBins); - - for(var i=4, ii=fftBins.length; i < ii; i++) { - if (fftBins[i] > maxVolume && fftBins[i] < 0) { - maxVolume = fftBins[i]; - } - }; - - return maxVolume; -} - - -var audioContextType = window.webkitAudioContext || window.AudioContext; -// use a single audio context due to hardware limits -var audioContext = null; -module.exports = function(stream, options) { - var harker = new WildEmitter(); - - - // make it not break in non-supported browsers - if (!audioContextType) return harker; - - //Config - var options = options || {}, - smoothing = (options.smoothing || 0.1), - interval = (options.interval || 50), - threshold = options.threshold, - play = options.play, - history = options.history || 10, - running = true; - - //Setup Audio Context - if (!audioContext) { - audioContext = new audioContextType(); - } - var sourceNode, fftBins, analyser; - - analyser = audioContext.createAnalyser(); - analyser.fftSize = 512; - analyser.smoothingTimeConstant = smoothing; - fftBins = new Float32Array(analyser.fftSize); - - if (stream.jquery) stream = stream[0]; - if (stream instanceof HTMLAudioElement || stream instanceof HTMLVideoElement) { - //Audio Tag - sourceNode = audioContext.createMediaElementSource(stream); - if (typeof play === 'undefined') play = true; - threshold = threshold || -50; - } else { - //WebRTC Stream - sourceNode = audioContext.createMediaStreamSource(stream); - threshold = threshold || -50; - } - - sourceNode.connect(analyser); - if (play) analyser.connect(audioContext.destination); - - harker.speaking = false; - - harker.setThreshold = function(t) { - threshold = t; - }; - - harker.setInterval = function(i) { - interval = i; - }; - - harker.stop = function() { - running = false; - harker.emit('volume_change', -100, threshold); - if (harker.speaking) { - harker.speaking = false; - harker.emit('stopped_speaking'); - } - }; - harker.speakingHistory = []; - for (var i = 0; i < history; i++) { - harker.speakingHistory.push(0); - } - - // Poll the analyser node to determine if speaking - // and emit events if changed - var looper = function() { - setTimeout(function() { - - //check if stop has been called - if(!running) { - return; - } - - var currentVolume = getMaxVolume(analyser, fftBins); - - harker.emit('volume_change', currentVolume, threshold); - - var history = 0; - if (currentVolume > threshold && !harker.speaking) { - // trigger quickly, short history - for (var i = harker.speakingHistory.length - 3; i < harker.speakingHistory.length; i++) { - history += harker.speakingHistory[i]; - } - if (history >= 2) { - harker.speaking = true; - harker.emit('speaking'); - } - } else if (currentVolume < threshold && harker.speaking) { - for (var i = 0; i < harker.speakingHistory.length; i++) { - history += harker.speakingHistory[i]; - } - if (history == 0) { - harker.speaking = false; - harker.emit('stopped_speaking'); - } - } - harker.speakingHistory.shift(); - harker.speakingHistory.push(0 + (currentVolume > threshold)); - - looper(); - }, interval); - }; - looper(); - - - return harker; -} - 
-},{"wildemitter":3}]},{},[1])(1) +},{"util":8,"webrtcsupport":10,"wildemitter":2}]},{},[1])(1) }); ;