diff --git a/public/js/simplewebrtc.bundle.js b/public/js/simplewebrtc.bundle.js index f214b41..7bcaec8 100644 --- a/public/js/simplewebrtc.bundle.js +++ b/public/js/simplewebrtc.bundle.js @@ -423,98 +423,7 @@ SimpleWebRTC.prototype.sendFile = function () { module.exports = SimpleWebRTC; -},{"attachmediastream":5,"mockconsole":4,"socket.io-client":6,"webrtc":2,"webrtcsupport":3,"wildemitter":7}],3:[function(require,module,exports){ -// created by @HenrikJoreteg -var prefix; -var isChrome = false; -var isFirefox = false; -var ua = window.navigator.userAgent.toLowerCase(); - -// basic sniffing -if (ua.indexOf('firefox') !== -1) { - prefix = 'moz'; - isFirefox = true; -} else if (ua.indexOf('chrome') !== -1) { - prefix = 'webkit'; - isChrome = true; -} - -var PC = window.mozRTCPeerConnection || window.webkitRTCPeerConnection; -var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate; -var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription; -var MediaStream = window.webkitMediaStream || window.MediaStream; -var screenSharing = window.location.protocol === 'https:' && window.navigator.userAgent.match('Chrome') && parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10) >= 26; -var AudioContext = window.webkitAudioContext || window.AudioContext; - - -// export support flags and constructors.prototype && PC -module.exports = { - support: !!PC, - dataChannel: isChrome || isFirefox || (PC && PC.prototype && PC.prototype.createDataChannel), - prefix: prefix, - webAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource), - mediaStream: !!(MediaStream && MediaStream.prototype.removeTrack), - screenSharing: !!screenSharing, - AudioContext: AudioContext, - PeerConnection: PC, - SessionDescription: SessionDescription, - IceCandidate: IceCandidate -}; - -},{}],4:[function(require,module,exports){ -var methods = 
"assert,count,debug,dir,dirxml,error,exception,group,groupCollapsed,groupEnd,info,log,markTimeline,profile,profileEnd,time,timeEnd,trace,warn".split(","); -var l = methods.length; -var fn = function () {}; -var mockconsole = {}; - -while (l--) { - mockconsole[methods[l]] = fn; -} - -module.exports = mockconsole; - -},{}],5:[function(require,module,exports){ -module.exports = function (stream, el, options) { - var URL = window.URL; - var opts = { - autoplay: true, - mirror: false, - muted: false - }; - var element = el || document.createElement('video'); - var item; - - if (options) { - for (item in options) { - opts[item] = options[item]; - } - } - - if (opts.autoplay) element.autoplay = 'autoplay'; - if (opts.muted) element.muted = true; - if (opts.mirror) { - ['', 'moz', 'webkit', 'o', 'ms'].forEach(function (prefix) { - var styleName = prefix ? prefix + 'Transform' : 'transform'; - element.style[styleName] = 'scaleX(-1)'; - }); - } - - // this first one should work most everywhere now - // but we have a few fallbacks just in case. - if (URL && URL.createObjectURL) { - element.src = URL.createObjectURL(stream); - } else if (element.srcObject) { - element.srcObject = stream; - } else if (element.mozSrcObject) { - element.mozSrcObject = stream; - } else { - return false; - } - - return element; -}; - -},{}],7:[function(require,module,exports){ +},{"attachmediastream":5,"mockconsole":6,"socket.io-client":7,"webrtc":2,"webrtcsupport":4,"wildemitter":3}],3:[function(require,module,exports){ /* WildEmitter.js is a slim little event emitter by @henrikjoreteg largely based on @visionmedia's Emitter from UI Kit. 
@@ -655,7 +564,98 @@ WildEmitter.prototype.getWildcardCallbacks = function (eventName) { return result; }; +},{}],4:[function(require,module,exports){ +// created by @HenrikJoreteg +var prefix; +var isChrome = false; +var isFirefox = false; +var ua = window.navigator.userAgent.toLowerCase(); + +// basic sniffing +if (ua.indexOf('firefox') !== -1) { + prefix = 'moz'; + isFirefox = true; +} else if (ua.indexOf('chrome') !== -1) { + prefix = 'webkit'; + isChrome = true; +} + +var PC = window.mozRTCPeerConnection || window.webkitRTCPeerConnection; +var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate; +var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription; +var MediaStream = window.webkitMediaStream || window.MediaStream; +var screenSharing = window.location.protocol === 'https:' && window.navigator.userAgent.match('Chrome') && parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10) >= 26; +var AudioContext = window.webkitAudioContext || window.AudioContext; + + +// export support flags and constructors.prototype && PC +module.exports = { + support: !!PC, + dataChannel: isChrome || isFirefox || (PC && PC.prototype && PC.prototype.createDataChannel), + prefix: prefix, + webAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource), + mediaStream: !!(MediaStream && MediaStream.prototype.removeTrack), + screenSharing: !!screenSharing, + AudioContext: AudioContext, + PeerConnection: PC, + SessionDescription: SessionDescription, + IceCandidate: IceCandidate +}; + +},{}],5:[function(require,module,exports){ +module.exports = function (stream, el, options) { + var URL = window.URL; + var opts = { + autoplay: true, + mirror: false, + muted: false + }; + var element = el || document.createElement('video'); + var item; + + if (options) { + for (item in options) { + opts[item] = options[item]; + } + } + + if (opts.autoplay) element.autoplay = 'autoplay'; + if (opts.muted) element.muted = true; + if 
(opts.mirror) { + ['', 'moz', 'webkit', 'o', 'ms'].forEach(function (prefix) { + var styleName = prefix ? prefix + 'Transform' : 'transform'; + element.style[styleName] = 'scaleX(-1)'; + }); + } + + // this first one should work most everywhere now + // but we have a few fallbacks just in case. + if (URL && URL.createObjectURL) { + element.src = URL.createObjectURL(stream); + } else if (element.srcObject) { + element.srcObject = stream; + } else if (element.mozSrcObject) { + element.mozSrcObject = stream; + } else { + return false; + } + + return element; +}; + },{}],6:[function(require,module,exports){ +var methods = "assert,count,debug,dir,dirxml,error,exception,group,groupCollapsed,groupEnd,info,log,markTimeline,profile,profileEnd,time,timeEnd,trace,warn".split(","); +var l = methods.length; +var fn = function () {}; +var mockconsole = {}; + +while (l--) { + mockconsole[methods[l]] = fn; +} + +module.exports = mockconsole; + +},{}],7:[function(require,module,exports){ /*! Socket.IO.js build:0.9.16, development. Copyright(c) 2011 LearnBoost MIT Licensed */ var io = ('undefined' === typeof module ? 
{} : module.exports); @@ -4530,44 +4530,6 @@ if (typeof define === "function" && define.amd) { } })(); },{}],8:[function(require,module,exports){ -// created by @HenrikJoreteg -var prefix; -var isChrome = false; -var isFirefox = false; -var ua = navigator.userAgent.toLowerCase(); - -// basic sniffing -if (ua.indexOf('firefox') !== -1) { - prefix = 'moz'; - isFirefox = true; -} else if (ua.indexOf('chrome') !== -1) { - prefix = 'webkit'; - isChrome = true; -} - -var PC = window.mozRTCPeerConnection || window.webkitRTCPeerConnection; -var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate; -var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription; -var MediaStream = window.webkitMediaStream || window.MediaStream; -var screenSharing = navigator.userAgent.match('Chrome') && parseInt(navigator.userAgent.match(/Chrome\/(.*) /)[1], 10) >= 26; -var AudioContext = window.webkitAudioContext || window.AudioContext; - - -// export support flags and constructors.prototype && PC -module.exports = { - support: !!PC, - dataChannel: isChrome || isFirefox || (PC && PC.prototype && PC.prototype.createDataChannel), - prefix: prefix, - webAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource), - mediaStream: !!(MediaStream && MediaStream.prototype.removeTrack), - screenSharing: !!screenSharing, - AudioContext: AudioContext, - PeerConnection: PC, - SessionDescription: SessionDescription, - IceCandidate: IceCandidate -}; - -},{}],9:[function(require,module,exports){ var events = require('events'); exports.isArray = isArray; @@ -4914,32 +4876,70 @@ exports.format = function(f) { return str; }; -},{"events":10}],2:[function(require,module,exports){ -var util = require('util'); -var webrtc = require('webrtcsupport'); -var PeerConnection = require('rtcpeerconnection'); -var WildEmitter = require('wildemitter'); -var mockconsole = require('mockconsole'); -var localMedia = require('localmedia'); 
+},{"events":9}],10:[function(require,module,exports){ +// created by @HenrikJoreteg +var prefix; +var isChrome = false; +var isFirefox = false; +var ua = navigator.userAgent.toLowerCase(); +// basic sniffing +if (ua.indexOf('firefox') !== -1) { + prefix = 'moz'; + isFirefox = true; +} else if (ua.indexOf('chrome') !== -1) { + prefix = 'webkit'; + isChrome = true; +} -function WebRTC(opts) { - var self = this; - var options = opts || {}; - var config = this.config = { - debug: false, - // makes the entire PC config overridable - peerConnectionConfig: { - iceServers: [{"url": "stun:stun.l.google.com:19302"}] - }, - peerConnectionContraints: { - optional: [ - {DtlsSrtpKeyAgreement: true} - ] - }, - receiveMedia: { - mandatory: { - OfferToReceiveAudio: true, +var PC = window.mozRTCPeerConnection || window.webkitRTCPeerConnection; +var IceCandidate = window.mozRTCIceCandidate || window.RTCIceCandidate; +var SessionDescription = window.mozRTCSessionDescription || window.RTCSessionDescription; +var MediaStream = window.webkitMediaStream || window.MediaStream; +var screenSharing = navigator.userAgent.match('Chrome') && parseInt(navigator.userAgent.match(/Chrome\/(.*) /)[1], 10) >= 26; +var AudioContext = window.webkitAudioContext || window.AudioContext; + + +// export support flags and constructors.prototype && PC +module.exports = { + support: !!PC, + dataChannel: isChrome || isFirefox || (PC && PC.prototype && PC.prototype.createDataChannel), + prefix: prefix, + webAudio: !!(AudioContext && AudioContext.prototype.createMediaStreamSource), + mediaStream: !!(MediaStream && MediaStream.prototype.removeTrack), + screenSharing: !!screenSharing, + AudioContext: AudioContext, + PeerConnection: PC, + SessionDescription: SessionDescription, + IceCandidate: IceCandidate +}; + +},{}],2:[function(require,module,exports){ +var util = require('util'); +var webrtc = require('webrtcsupport'); +var PeerConnection = require('rtcpeerconnection'); +var WildEmitter = require('wildemitter'); 
+var mockconsole = require('mockconsole'); +var localMedia = require('localmedia'); + + +function WebRTC(opts) { + var self = this; + var options = opts || {}; + var config = this.config = { + debug: false, + // makes the entire PC config overridable + peerConnectionConfig: { + iceServers: [{"url": "stun:stun.l.google.com:19302"}] + }, + peerConnectionContraints: { + optional: [ + {DtlsSrtpKeyAgreement: true} + ] + }, + receiveMedia: { + mandatory: { + OfferToReceiveAudio: true, OfferToReceiveVideo: true } }, @@ -4985,12 +4985,26 @@ function WebRTC(opts) { this.on('speaking', function () { if (!self.hardMuted) { - self.sendToAll('speaking'); + // FIXME: should use sendDirectlyToAll, but currently has different semantics wrt payload + self.peers.forEach(function (peer) { + if (peer.enableDataChannels) { + var dc = peer.getDataChannel('hark'); + if (dc.readyState != 'open') return; + dc.send(JSON.stringify({type: 'speaking'})); + } + }); } }); this.on('stoppedSpeaking', function () { if (!self.hardMuted) { - self.sendToAll('stopped_speaking'); + // FIXME: should use sendDirectlyToAll, but currently has different semantics wrt payload + self.peers.forEach(function (peer) { + if (peer.enableDataChannels) { + var dc = peer.getDataChannel('hark'); + if (dc.readyState != 'open') return; + dc.send(JSON.stringify({type: 'stoppedSpeaking'})); + } + }); } }); this.on('volumeChange', function (volume, treshold) { @@ -5154,10 +5168,6 @@ Peer.prototype.handleMessage = function (message) { this.pc.processIce(message.payload); } else if (message.type === 'connectivityError') { this.parent.emit('connectivityError', self); - } else if (message.type === 'speaking') { - this.parent.emit('speaking', {id: message.from}); - } else if (message.type === 'stopped_speaking') { - this.parent.emit('stopped_speaking', {id: message.from}); } else if (message.type === 'mute') { this.parent.emit('mute', {id: message.from, name: message.payload.name}); } else if (message.type === 'unmute') { @@ 
-5277,7 +5287,7 @@ Peer.prototype.handleDataChannelAdded = function (channel) { module.exports = WebRTC; -},{"localmedia":11,"mockconsole":4,"rtcpeerconnection":12,"util":9,"webrtcsupport":8,"wildemitter":7}],13:[function(require,module,exports){ +},{"localmedia":12,"mockconsole":6,"rtcpeerconnection":11,"util":8,"webrtcsupport":10,"wildemitter":3}],13:[function(require,module,exports){ // shim for using process in browser var process = module.exports = {}; @@ -5332,7 +5342,7 @@ process.chdir = function (dir) { throw new Error('process.chdir is not supported'); }; -},{}],10:[function(require,module,exports){ +},{}],9:[function(require,module,exports){ var process=require("__browserify_process");if (!process.EventEmitter) process.EventEmitter = function () {}; var EventEmitter = exports.EventEmitter = process.EventEmitter; @@ -5529,70 +5539,6 @@ EventEmitter.listenerCount = function(emitter, type) { }; },{"__browserify_process":13}],14:[function(require,module,exports){ -// getUserMedia helper by @HenrikJoreteg -var func = (window.navigator.getUserMedia || - window.navigator.webkitGetUserMedia || - window.navigator.mozGetUserMedia || - window.navigator.msGetUserMedia); - - -module.exports = function (constraints, cb) { - var options; - var haveOpts = arguments.length === 2; - var defaultOpts = {video: true, audio: true}; - var error; - var denied = 'PERMISSION_DENIED'; - var notSatified = 'CONSTRAINT_NOT_SATISFIED'; - - // make constraints optional - if (!haveOpts) { - cb = constraints; - constraints = defaultOpts; - } - - // treat lack of browser support like an error - if (!func) { - // throw proper error per spec - error = new Error('NavigatorUserMediaError'); - error.name = 'NOT_SUPPORTED_ERROR'; - return cb(error); - } - - func.call(window.navigator, constraints, function (stream) { - cb(null, stream); - }, function (err) { - var error; - // coerce into an error object since FF gives us a string - // there are only two valid names according to the spec - // we 
coerce all non-denied to "constraint not satisfied". - if (typeof err === 'string') { - error = new Error('NavigatorUserMediaError'); - if (err === denied) { - error.name = denied; - } else { - error.name = notSatified; - } - } else { - // if we get an error object make sure '.name' property is set - // according to spec: http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermediaerror-and-navigatorusermediaerrorcallback - error = err; - if (!error.name) { - // this is likely chrome which - // sets a property called "ERROR_DENIED" on the error object - // if so we make sure to set a name - if (error[denied]) { - err.name = denied; - } else { - err.name = notSatified; - } - } - } - - cb(error); - }); -}; - -},{}],15:[function(require,module,exports){ // Underscore.js 1.6.0 // http://underscorejs.org // (c) 2009-2014 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors @@ -6937,297 +6883,104 @@ module.exports = function (constraints, cb) { } }).call(this); -},{}],11:[function(require,module,exports){ -var util = require('util'); -var hark = require('hark'); -var webrtc = require('webrtcsupport'); -var getUserMedia = require('getusermedia'); -var getScreenMedia = require('getscreenmedia'); -var WildEmitter = require('wildemitter'); -var GainController = require('mediastream-gain'); -var mockconsole = require('mockconsole'); +},{}],15:[function(require,module,exports){ +// getUserMedia helper by @HenrikJoreteg +var func = (window.navigator.getUserMedia || + window.navigator.webkitGetUserMedia || + window.navigator.mozGetUserMedia || + window.navigator.msGetUserMedia); -function LocalMedia(opts) { - WildEmitter.call(this); +module.exports = function (constraints, cb) { + var options; + var haveOpts = arguments.length === 2; + var defaultOpts = {video: true, audio: true}; + var error; + var denied = 'PERMISSION_DENIED'; + var notSatified = 'CONSTRAINT_NOT_SATISFIED'; - var config = this.config = { - autoAdjustMic: false, - 
detectSpeakingEvents: true, - media: { - audio: true, - video: true - }, - logger: mockconsole - }; + // make constraints optional + if (!haveOpts) { + cb = constraints; + constraints = defaultOpts; + } - var item; - for (item in opts) { - this.config[item] = opts[item]; + // treat lack of browser support like an error + if (!func) { + // throw proper error per spec + error = new Error('NavigatorUserMediaError'); + error.name = 'NOT_SUPPORTED_ERROR'; + return cb(error); } - this.logger = config.logger; - this._log = this.logger.log.bind(this.logger, 'LocalMedia:'); - this._logerror = this.logger.error.bind(this.logger, 'LocalMedia:'); + func.call(window.navigator, constraints, function (stream) { + cb(null, stream); + }, function (err) { + var error; + // coerce into an error object since FF gives us a string + // there are only two valid names according to the spec + // we coerce all non-denied to "constraint not satisfied". + if (typeof err === 'string') { + error = new Error('NavigatorUserMediaError'); + if (err === denied) { + error.name = denied; + } else { + error.name = notSatified; + } + } else { + // if we get an error object make sure '.name' property is set + // according to spec: http://dev.w3.org/2011/webrtc/editor/getusermedia.html#navigatorusermediaerror-and-navigatorusermediaerrorcallback + error = err; + if (!error.name) { + // this is likely chrome which + // sets a property called "ERROR_DENIED" on the error object + // if so we make sure to set a name + if (error[denied]) { + err.name = denied; + } else { + err.name = notSatified; + } + } + } - this.screenSharingSupport = webrtc.screenSharing; + cb(error); + }); +}; - this.localStreams = []; - this.localScreens = []; +},{}],16:[function(require,module,exports){ +var tosdp = require('./lib/tosdp'); +var tojson = require('./lib/tojson'); - if (!webrtc.support) { - this._logerror('Your browser does not support local media capture.'); - } -} -util.inherits(LocalMedia, WildEmitter); 
+exports.toSessionSDP = tosdp.toSessionSDP; +exports.toMediaSDP = tosdp.toMediaSDP; +exports.toCandidateSDP = tosdp.toCandidateSDP; +exports.toSessionJSON = tojson.toSessionJSON; +exports.toMediaJSON = tojson.toMediaJSON; +exports.toCandidateJSON = tojson.toCandidateJSON; -LocalMedia.prototype.start = function (mediaConstraints, cb) { +},{"./lib/tojson":18,"./lib/tosdp":17}],11:[function(require,module,exports){ +var _ = require('underscore'); +var util = require('util'); +var webrtc = require('webrtcsupport'); +var SJJ = require('sdp-jingle-json'); +var WildEmitter = require('wildemitter'); +var peerconn = require('traceablepeerconnection'); + +function PeerConnection(config, constraints) { var self = this; - var constraints = mediaConstraints || this.config.media; + var item; + WildEmitter.call(this); - getUserMedia(constraints, function (err, stream) { - if (!err) { - if (constraints.audio && self.config.detectSpeakingEvents) { - self.setupAudioMonitor(stream, self.config.harkOptions); - } - self.localStreams.push(stream); + config = config || {}; + config.iceServers = config.iceServers || []; - if (self.config.autoAdjustMic) { - self.gainController = new GainController(stream); - // start out somewhat muted if we can track audio - self.setMicIfEnabled(0.5); - } + this.pc = new peerconn(config, constraints); - // TODO: might need to migrate to the video tracks onended - // FIXME: firefox does not seem to trigger this... 
- stream.onended = function () { - /* - var idx = self.localStreams.indexOf(stream); - if (idx > -1) { - self.localScreens.splice(idx, 1); - } - self.emit('localStreamStopped', stream); - */ - }; - - self.emit('localStream', stream); - } - if (cb) { - return cb(err, stream); - } - }); -}; - -LocalMedia.prototype.stop = function (stream) { - var self = this; - // FIXME: duplicates cleanup code until fixed in FF - if (stream) { - stream.stop(); - self.emit('localStreamStopped', stream); - var idx = self.localStreams.indexOf(stream); - if (idx > -1) { - self.localStreams = self.localStreams.splice(idx, 1); - } - } else { - this.localStreams.forEach(function (stream) { - stream.stop(); - self.emit('localStreamStopped', stream); - }); - this.localStreams = []; - } -}; - -LocalMedia.prototype.startScreenShare = function (cb) { - var self = this; - getScreenMedia(function (err, stream) { - if (!err) { - self.localScreens.push(stream); - - // TODO: might need to migrate to the video tracks onended - // Firefox does not support .onended but it does not support - // screensharing either - stream.onended = function () { - var idx = self.localScreens.indexOf(stream); - if (idx > -1) { - self.localScreens.splice(idx, 1); - } - self.emit('localScreenStopped', stream); - }; - self.emit('localScreen', stream); - } - - // enable the callback - if (cb) { - return cb(err, stream); - } - }); -}; - -LocalMedia.prototype.stopScreenShare = function (stream) { - if (stream) { - stream.stop(); - } else { - this.localScreens.forEach(function (stream) { - stream.stop(); - }); - this.localScreens = []; - } -}; - -// Audio controls -LocalMedia.prototype.mute = function () { - this._audioEnabled(false); - this.hardMuted = true; - this.emit('audioOff'); -}; - -LocalMedia.prototype.unmute = function () { - this._audioEnabled(true); - this.hardMuted = false; - this.emit('audioOn'); -}; - -LocalMedia.prototype.setupAudioMonitor = function (stream, harkOptions) { - this._log('Setup audio'); - var 
audio = hark(stream, harkOptions); - var self = this; - var timeout; - - audio.on('speaking', function () { - self.emit('speaking'); - if (self.hardMuted) { - return; - } - self.setMicIfEnabled(1); - }); - - audio.on('stopped_speaking', function () { - if (timeout) { - clearTimeout(timeout); - } - - timeout = setTimeout(function () { - self.emit('stoppedSpeaking'); - if (self.hardMuted) { - return; - } - self.setMicIfEnabled(0.5); - }, 1000); - }); - audio.on('volume_change', function (volume, treshold) { - self.emit('volumeChange', volume, treshold); - }); -}; - -// We do this as a seperate method in order to -// still leave the "setMicVolume" as a working -// method. -LocalMedia.prototype.setMicIfEnabled = function (volume) { - if (!this.config.autoAdjustMic) { - return; - } - this.gainController.setGain(volume); -}; - -// Video controls -LocalMedia.prototype.pauseVideo = function () { - this._videoEnabled(false); - this.emit('videoOff'); -}; -LocalMedia.prototype.resumeVideo = function () { - this._videoEnabled(true); - this.emit('videoOn'); -}; - -// Combined controls -LocalMedia.prototype.pause = function () { - this._audioEnabled(false); - this.pauseVideo(); -}; -LocalMedia.prototype.resume = function () { - this._audioEnabled(true); - this.resumeVideo(); -}; - -// Internal methods for enabling/disabling audio/video -LocalMedia.prototype._audioEnabled = function (bool) { - // work around for chrome 27 bug where disabling tracks - // doesn't seem to work (works in canary, remove when working) - this.setMicIfEnabled(bool ? 
1 : 0); - this.localStreams.forEach(function (stream) { - stream.getAudioTracks().forEach(function (track) { - track.enabled = !!bool; - }); - }); -}; -LocalMedia.prototype._videoEnabled = function (bool) { - this.localStreams.forEach(function (stream) { - stream.getVideoTracks().forEach(function (track) { - track.enabled = !!bool; - }); - }); -}; - -// check if all audio streams are enabled -LocalMedia.prototype.isAudioEnabled = function () { - var enabled = true; - this.localStreams.forEach(function (stream) { - stream.getAudioTracks().forEach(function (track) { - enabled = enabled && track.enabled; - }); - }); - return enabled; -}; - -// check if all video streams are enabled -LocalMedia.prototype.isVideoEnabled = function () { - var enabled = true; - this.localStreams.forEach(function (stream) { - stream.getVideoTracks().forEach(function (track) { - enabled = enabled && track.enabled; - }); - }); - return enabled; -}; - -// Backwards Compat -LocalMedia.prototype.startLocalMedia = LocalMedia.prototype.start; -LocalMedia.prototype.stopLocalMedia = LocalMedia.prototype.stop; - -// fallback for old .localStream behaviour -Object.defineProperty(LocalMedia.prototype, 'localStream', { - get: function () { - return this.localStreams.length > 0 ? this.localStreams[0] : null; - } -}); -// fallback for old .localScreen behaviour -Object.defineProperty(LocalMedia.prototype, 'localScreen', { - get: function () { - return this.localScreens.length > 0 ? 
this.localScreens[0] : null; - } -}); - -module.exports = LocalMedia; - -},{"getscreenmedia":17,"getusermedia":14,"hark":16,"mediastream-gain":18,"mockconsole":4,"util":9,"webrtcsupport":8,"wildemitter":7}],12:[function(require,module,exports){ -var _ = require('underscore'); -var util = require('util'); -var webrtc = require('webrtcsupport'); -var SJJ = require('sdp-jingle-json'); -var WildEmitter = require('wildemitter'); -var peerconn = require('traceablepeerconnection'); - -function PeerConnection(config, constraints) { - var self = this; - var item; - WildEmitter.call(this); - - config = config || {}; - config.iceServers = config.iceServers || []; + this.getLocalStreams = this.pc.getLocalStreams.bind(this.pc); + this.getRemoteStreams = this.pc.getRemoteStreams.bind(this.pc); - this.pc = new peerconn(config, constraints); // proxy events this.pc.on('*', function () { self.emit.apply(self, arguments); @@ -7629,30 +7382,291 @@ PeerConnection.prototype.getStats = function (cb) { module.exports = PeerConnection; -},{"sdp-jingle-json":19,"traceablepeerconnection":20,"underscore":15,"util":9,"webrtcsupport":8,"wildemitter":7}],19:[function(require,module,exports){ -var tosdp = require('./lib/tosdp'); -var tojson = require('./lib/tojson'); +},{"sdp-jingle-json":16,"traceablepeerconnection":19,"underscore":14,"util":8,"webrtcsupport":10,"wildemitter":3}],12:[function(require,module,exports){ +var util = require('util'); +var hark = require('hark'); +var webrtc = require('webrtcsupport'); +var getUserMedia = require('getusermedia'); +var getScreenMedia = require('getscreenmedia'); +var WildEmitter = require('wildemitter'); +var GainController = require('mediastream-gain'); +var mockconsole = require('mockconsole'); -exports.toSessionSDP = tosdp.toSessionSDP; -exports.toMediaSDP = tosdp.toMediaSDP; -exports.toCandidateSDP = tosdp.toCandidateSDP; +function LocalMedia(opts) { + WildEmitter.call(this); -exports.toSessionJSON = tojson.toSessionJSON; -exports.toMediaJSON = 
tojson.toMediaJSON; -exports.toCandidateJSON = tojson.toCandidateJSON; + var config = this.config = { + autoAdjustMic: false, + detectSpeakingEvents: true, + media: { + audio: true, + video: true + }, + logger: mockconsole + }; -},{"./lib/tojson":22,"./lib/tosdp":21}],21:[function(require,module,exports){ -var senders = { - 'initiator': 'sendonly', - 'responder': 'recvonly', - 'both': 'sendrecv', - 'none': 'inactive', - 'sendonly': 'initator', - 'recvonly': 'responder', - 'sendrecv': 'both', - 'inactive': 'none' -}; + var item; + for (item in opts) { + this.config[item] = opts[item]; + } + + this.logger = config.logger; + this._log = this.logger.log.bind(this.logger, 'LocalMedia:'); + this._logerror = this.logger.error.bind(this.logger, 'LocalMedia:'); + + this.screenSharingSupport = webrtc.screenSharing; + + this.localStreams = []; + this.localScreens = []; + + if (!webrtc.support) { + this._logerror('Your browser does not support local media capture.'); + } +} + +util.inherits(LocalMedia, WildEmitter); + + +LocalMedia.prototype.start = function (mediaConstraints, cb) { + var self = this; + var constraints = mediaConstraints || this.config.media; + + getUserMedia(constraints, function (err, stream) { + if (!err) { + if (constraints.audio && self.config.detectSpeakingEvents) { + self.setupAudioMonitor(stream, self.config.harkOptions); + } + self.localStreams.push(stream); + + if (self.config.autoAdjustMic) { + self.gainController = new GainController(stream); + // start out somewhat muted if we can track audio + self.setMicIfEnabled(0.5); + } + + // TODO: might need to migrate to the video tracks onended + // FIXME: firefox does not seem to trigger this... 
+ stream.onended = function () { + /* + var idx = self.localStreams.indexOf(stream); + if (idx > -1) { + self.localScreens.splice(idx, 1); + } + self.emit('localStreamStopped', stream); + */ + }; + + self.emit('localStream', stream); + } + if (cb) { + return cb(err, stream); + } + }); +}; + +LocalMedia.prototype.stop = function (stream) { + var self = this; + // FIXME: duplicates cleanup code until fixed in FF + if (stream) { + stream.stop(); + self.emit('localStreamStopped', stream); + var idx = self.localStreams.indexOf(stream); + if (idx > -1) { + self.localStreams = self.localStreams.splice(idx, 1); + } + } else { + this.localStreams.forEach(function (stream) { + stream.stop(); + self.emit('localStreamStopped', stream); + }); + this.localStreams = []; + } +}; + +LocalMedia.prototype.startScreenShare = function (cb) { + var self = this; + getScreenMedia(function (err, stream) { + if (!err) { + self.localScreens.push(stream); + + // TODO: might need to migrate to the video tracks onended + // Firefox does not support .onended but it does not support + // screensharing either + stream.onended = function () { + var idx = self.localScreens.indexOf(stream); + if (idx > -1) { + self.localScreens.splice(idx, 1); + } + self.emit('localScreenStopped', stream); + }; + self.emit('localScreen', stream); + } + + // enable the callback + if (cb) { + return cb(err, stream); + } + }); +}; + +LocalMedia.prototype.stopScreenShare = function (stream) { + if (stream) { + stream.stop(); + } else { + this.localScreens.forEach(function (stream) { + stream.stop(); + }); + this.localScreens = []; + } +}; + +// Audio controls +LocalMedia.prototype.mute = function () { + this._audioEnabled(false); + this.hardMuted = true; + this.emit('audioOff'); +}; + +LocalMedia.prototype.unmute = function () { + this._audioEnabled(true); + this.hardMuted = false; + this.emit('audioOn'); +}; + +LocalMedia.prototype.setupAudioMonitor = function (stream, harkOptions) { + this._log('Setup audio'); + var 
audio = hark(stream, harkOptions); + var self = this; + var timeout; + + audio.on('speaking', function () { + self.emit('speaking'); + if (self.hardMuted) { + return; + } + self.setMicIfEnabled(1); + }); + + audio.on('stopped_speaking', function () { + if (timeout) { + clearTimeout(timeout); + } + + timeout = setTimeout(function () { + self.emit('stoppedSpeaking'); + if (self.hardMuted) { + return; + } + self.setMicIfEnabled(0.5); + }, 1000); + }); + audio.on('volume_change', function (volume, treshold) { + self.emit('volumeChange', volume, treshold); + }); +}; + +// We do this as a seperate method in order to +// still leave the "setMicVolume" as a working +// method. +LocalMedia.prototype.setMicIfEnabled = function (volume) { + if (!this.config.autoAdjustMic) { + return; + } + this.gainController.setGain(volume); +}; + +// Video controls +LocalMedia.prototype.pauseVideo = function () { + this._videoEnabled(false); + this.emit('videoOff'); +}; +LocalMedia.prototype.resumeVideo = function () { + this._videoEnabled(true); + this.emit('videoOn'); +}; + +// Combined controls +LocalMedia.prototype.pause = function () { + this._audioEnabled(false); + this.pauseVideo(); +}; +LocalMedia.prototype.resume = function () { + this._audioEnabled(true); + this.resumeVideo(); +}; + +// Internal methods for enabling/disabling audio/video +LocalMedia.prototype._audioEnabled = function (bool) { + // work around for chrome 27 bug where disabling tracks + // doesn't seem to work (works in canary, remove when working) + this.setMicIfEnabled(bool ? 
1 : 0); + this.localStreams.forEach(function (stream) { + stream.getAudioTracks().forEach(function (track) { + track.enabled = !!bool; + }); + }); +}; +LocalMedia.prototype._videoEnabled = function (bool) { + this.localStreams.forEach(function (stream) { + stream.getVideoTracks().forEach(function (track) { + track.enabled = !!bool; + }); + }); +}; + +// check if all audio streams are enabled +LocalMedia.prototype.isAudioEnabled = function () { + var enabled = true; + this.localStreams.forEach(function (stream) { + stream.getAudioTracks().forEach(function (track) { + enabled = enabled && track.enabled; + }); + }); + return enabled; +}; + +// check if all video streams are enabled +LocalMedia.prototype.isVideoEnabled = function () { + var enabled = true; + this.localStreams.forEach(function (stream) { + stream.getVideoTracks().forEach(function (track) { + enabled = enabled && track.enabled; + }); + }); + return enabled; +}; + +// Backwards Compat +LocalMedia.prototype.startLocalMedia = LocalMedia.prototype.start; +LocalMedia.prototype.stopLocalMedia = LocalMedia.prototype.stop; + +// fallback for old .localStream behaviour +Object.defineProperty(LocalMedia.prototype, 'localStream', { + get: function () { + return this.localStreams.length > 0 ? this.localStreams[0] : null; + } +}); +// fallback for old .localScreen behaviour +Object.defineProperty(LocalMedia.prototype, 'localScreen', { + get: function () { + return this.localScreens.length > 0 ? 
this.localScreens[0] : null; + } +}); + +module.exports = LocalMedia; + +},{"getscreenmedia":21,"getusermedia":15,"hark":20,"mediastream-gain":22,"mockconsole":6,"util":8,"webrtcsupport":10,"wildemitter":3}],17:[function(require,module,exports){ +var senders = { + 'initiator': 'sendonly', + 'responder': 'recvonly', + 'both': 'sendrecv', + 'none': 'inactive', + 'sendonly': 'initator', + 'recvonly': 'responder', + 'sendrecv': 'both', + 'inactive': 'none' +}; exports.toSessionSDP = function (session, sid, time) { @@ -7818,85 +7832,7 @@ exports.toCandidateSDP = function (candidate) { return 'a=candidate:' + sdp.join(' '); }; -},{}],17:[function(require,module,exports){ -// getScreenMedia helper by @HenrikJoreteg -var getUserMedia = require('getusermedia'); - -// cache for constraints and callback -var cache = {}; - -module.exports = function (constraints, cb) { - var hasConstraints = arguments.length === 2; - var callback = hasConstraints ? cb : constraints; - var error; - - if (typeof window === 'undefined' || window.location.protocol === 'http:') { - error = new Error('NavigatorUserMediaError'); - error.name = 'HTTPS_REQUIRED'; - return callback(error); - } - - if (window.navigator.userAgent.match('Chrome')) { - var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); - var maxver = 33; - // "known‶ bug in chrome 34 on linux - if (window.navigator.userAgent.match('Linux')) maxver = 34; - if (chromever >= 26 && chromever <= maxver) { - // chrome 26 - chrome 33 way to do it -- requires bad chrome://flags - constraints = (hasConstraints && constraints) || { - video: { - mandatory: { - maxWidth: window.screen.width, - maxHeight: window.screen.height, - maxFrameRate: 3, - chromeMediaSource: 'screen' - } - } - }; - getUserMedia(constraints, callback); - } else { - // chrome 34+ way requiring an extension - var pending = window.setTimeout(function () { - error = new Error('NavigatorUserMediaError'); - error.name = 'EXTENSION_UNAVAILABLE'; - return 
callback(error); - }, 1000); - cache[pending] = [callback, hasConstraints ? constraint : null]; - window.postMessage({ type: 'getScreen', id: pending }, '*'); - } - } -}; - -window.addEventListener('message', function (event) { - if (event.origin != window.location.origin) { - return; - } - if (event.data.type == 'gotScreen' && cache[event.data.id]) { - var data = cache[event.data.id]; - var constraints = data[1]; - var callback = data[0]; - delete cache[event.data.id]; - - if (event.data.sourceId === '') { // user canceled - var error = error = new Error('NavigatorUserMediaError'); - error.name = 'PERMISSION_DENIED'; - callback(error); - } else { - constraints = constraints || {audio: false, video: {mandatory: { - chromeMediaSource: 'desktop', - chromeMediaSourceId: event.data.sourceId, - maxWidth: window.screen.width, - maxHeight: window.screen.height, - maxFrameRate: 3, - }}}; - getUserMedia(constraints, callback); - } - } else if (event.data.type == 'getScreenPending') { - window.clearTimeout(event.data.id); - } -}); - -},{"getusermedia":14}],22:[function(require,module,exports){ +},{}],18:[function(require,module,exports){ var parsers = require('./parsers'); var idCounter = Math.random(); @@ -8037,32 +7973,112 @@ exports.toMediaJSON = function (media, session, creator) { if (setup) { fp.setup = setup.substr(8); } - trans.fingerprints.push(fp); - }); - - var ufragLine = parsers.findLine('a=ice-ufrag:', lines, sessionLines); - var pwdLine = parsers.findLine('a=ice-pwd:', lines, sessionLines); - if (ufragLine && pwdLine) { - trans.ufrag = ufragLine.substr(12); - trans.pwd = pwdLine.substr(10); - trans.candidates = []; - - var candidateLines = parsers.findLines('a=candidate:', lines, sessionLines); - candidateLines.forEach(function (line) { - trans.candidates.push(exports.toCandidateJSON(line)); - }); + trans.fingerprints.push(fp); + }); + + var ufragLine = parsers.findLine('a=ice-ufrag:', lines, sessionLines); + var pwdLine = parsers.findLine('a=ice-pwd:', lines, 
sessionLines); + if (ufragLine && pwdLine) { + trans.ufrag = ufragLine.substr(12); + trans.pwd = pwdLine.substr(10); + trans.candidates = []; + + var candidateLines = parsers.findLines('a=candidate:', lines, sessionLines); + candidateLines.forEach(function (line) { + trans.candidates.push(exports.toCandidateJSON(line)); + }); + } + + return content; +}; + +exports.toCandidateJSON = function (line) { + var candidate = parsers.candidate(line.split('\r\n')[0]); + candidate.id = (idCounter++).toString(36).substr(0, 12); + return candidate; +}; + +},{"./parsers":23}],21:[function(require,module,exports){ +// getScreenMedia helper by @HenrikJoreteg +var getUserMedia = require('getusermedia'); + +// cache for constraints and callback +var cache = {}; + +module.exports = function (constraints, cb) { + var hasConstraints = arguments.length === 2; + var callback = hasConstraints ? cb : constraints; + var error; + + if (typeof window === 'undefined' || window.location.protocol === 'http:') { + error = new Error('NavigatorUserMediaError'); + error.name = 'HTTPS_REQUIRED'; + return callback(error); + } + + if (window.navigator.userAgent.match('Chrome')) { + var chromever = parseInt(window.navigator.userAgent.match(/Chrome\/(.*) /)[1], 10); + var maxver = 33; + // "known" crash in chrome 34 and 35 on linux + if (window.navigator.userAgent.match('Linux')) maxver = 35; + if (chromever >= 26 && chromever <= maxver) { + // chrome 26 - chrome 33 way to do it -- requires bad chrome://flags + constraints = (hasConstraints && constraints) || { + video: { + mandatory: { + googLeakyBucket: true, + maxWidth: window.screen.width, + maxHeight: window.screen.height, + maxFrameRate: 3, + chromeMediaSource: 'screen' + } + } + }; + getUserMedia(constraints, callback); + } else { + // chrome 34+ way requiring an extension + var pending = window.setTimeout(function () { + error = new Error('NavigatorUserMediaError'); + error.name = 'EXTENSION_UNAVAILABLE'; + return callback(error); + }, 1000); + 
cache[pending] = [callback, hasConstraints ? constraints : null];
+            window.postMessage({ type: 'getScreen', id: pending }, '*');
+        }
+    }
+};
+
+window.addEventListener('message', function (event) {
+    if (event.origin != window.location.origin) {
+        return;
+    }
+    if (event.data.type == 'gotScreen' && cache[event.data.id]) {
+        var data = cache[event.data.id];
+        var constraints = data[1];
+        var callback = data[0];
+        delete cache[event.data.id];
+
+        if (event.data.sourceId === '') { // user canceled
+            var error = new Error('NavigatorUserMediaError');
+            error.name = 'PERMISSION_DENIED';
+            callback(error);
+        } else {
+            constraints = constraints || {audio: false, video: {mandatory: {
+                chromeMediaSource: 'desktop',
+                chromeMediaSourceId: event.data.sourceId,
+                googLeakyBucket: true,
+                maxWidth: window.screen.width,
+                maxHeight: window.screen.height,
+                maxFrameRate: 3
+            }}};
+            getUserMedia(constraints, callback);
+        }
+    } else if (event.data.type == 'getScreenPending') {
+        window.clearTimeout(event.data.id);
     }
+});
 
-    return content;
-};
-
-exports.toCandidateJSON = function (line) {
-    var candidate = parsers.candidate(line.split('\r\n')[0]);
-    candidate.id = (idCounter++).toString(36).substr(0, 12);
-    return candidate;
-};
-
-},{"./parsers":23}],23:[function(require,module,exports){
+},{"getusermedia":15}],23:[function(require,module,exports){
 exports.lines = function (sdp) {
     return sdp.split('\r\n').filter(function (line) {
         return line.length > 0;
@@ -8296,7 +8312,7 @@ exports.groups = function (lines) {
     return parsed;
 };
 
-},{}],18:[function(require,module,exports){
+},{}],22:[function(require,module,exports){
 var support = require('webrtcsupport');
@@ -8343,122 +8359,7 @@ GainController.prototype.on = function () {
 module.exports = GainController;
 
-},{"webrtcsupport":8}],16:[function(require,module,exports){
-var WildEmitter = require('wildemitter');
-
-function getMaxVolume (analyser, fftBins) {
-    var maxVolume = -Infinity;
-    
analyser.getFloatFrequencyData(fftBins); - - for(var i=0, ii=fftBins.length; i < ii; i++) { - if (fftBins[i] > maxVolume && fftBins[i] < 0) { - maxVolume = fftBins[i]; - } - }; - - return maxVolume; -} - - -var audioContextType = window.webkitAudioContext || window.AudioContext; -// use a single audio context due to hardware limits -var audioContext = null; -module.exports = function(stream, options) { - var harker = new WildEmitter(); - - - // make it not break in non-supported browsers - if (!audioContextType) return harker; - - //Config - var options = options || {}, - smoothing = (options.smoothing || 0.5), - interval = (options.interval || 100), - threshold = options.threshold, - play = options.play, - running = true; - - //Setup Audio Context - if (!audioContext) { - audioContext = new audioContextType(); - } - var sourceNode, fftBins, analyser; - - analyser = audioContext.createAnalyser(); - analyser.fftSize = 512; - analyser.smoothingTimeConstant = smoothing; - fftBins = new Float32Array(analyser.fftSize); - - if (stream.jquery) stream = stream[0]; - if (stream instanceof HTMLAudioElement) { - //Audio Tag - sourceNode = audioContext.createMediaElementSource(stream); - if (typeof play === 'undefined') play = true; - threshold = threshold || -65; - } else { - //WebRTC Stream - sourceNode = audioContext.createMediaStreamSource(stream); - threshold = threshold || -45; - } - - sourceNode.connect(analyser); - if (play) analyser.connect(audioContext.destination); - - harker.speaking = false; - - harker.setThreshold = function(t) { - threshold = t; - }; - - harker.setInterval = function(i) { - interval = i; - }; - - harker.stop = function() { - running = false; - harker.emit('volume_change', -100, threshold); - if (harker.speaking) { - harker.speaking = false; - harker.emit('stopped_speaking'); - } - }; - - // Poll the analyser node to determine if speaking - // and emit events if changed - var looper = function() { - setTimeout(function() { - - //check if stop has 
been called - if(!running) { - return; - } - - var currentVolume = getMaxVolume(analyser, fftBins); - - harker.emit('volume_change', currentVolume, threshold); - - if (currentVolume > threshold) { - if (!harker.speaking) { - harker.speaking = true; - harker.emit('speaking'); - } - } else { - if (harker.speaking) { - harker.speaking = false; - harker.emit('stopped_speaking'); - } - } - - looper(); - }, interval); - }; - looper(); - - - return harker; -} - -},{"wildemitter":7}],20:[function(require,module,exports){ +},{"webrtcsupport":10}],19:[function(require,module,exports){ // based on https://github.com/ESTOS/strophe.jingle/ // adds wildemitter support var util = require('util'); @@ -8532,6 +8433,8 @@ function TraceablePeerConnection(config, constraints) { self.ondatachannel(event); } }; + this.getLocalStreams = this.peerconnection.getLocalStreams.bind(this.peerconnection); + this.getRemoteStreams = this.peerconnection.getRemoteStreams.bind(this.peerconnection); } util.inherits(TraceablePeerConnection, WildEmitter); @@ -8676,6 +8579,121 @@ TraceablePeerConnection.prototype.getStats = function (callback, errback) { module.exports = TraceablePeerConnection; -},{"util":9,"webrtcsupport":8,"wildemitter":7}]},{},[1])(1) +},{"util":8,"webrtcsupport":10,"wildemitter":3}],20:[function(require,module,exports){ +var WildEmitter = require('wildemitter'); + +function getMaxVolume (analyser, fftBins) { + var maxVolume = -Infinity; + analyser.getFloatFrequencyData(fftBins); + + for(var i=0, ii=fftBins.length; i < ii; i++) { + if (fftBins[i] > maxVolume && fftBins[i] < 0) { + maxVolume = fftBins[i]; + } + }; + + return maxVolume; +} + + +var audioContextType = window.webkitAudioContext || window.AudioContext; +// use a single audio context due to hardware limits +var audioContext = null; +module.exports = function(stream, options) { + var harker = new WildEmitter(); + + + // make it not break in non-supported browsers + if (!audioContextType) return harker; + + //Config + var 
options = options || {}, + smoothing = (options.smoothing || 0.5), + interval = (options.interval || 100), + threshold = options.threshold, + play = options.play, + running = true; + + //Setup Audio Context + if (!audioContext) { + audioContext = new audioContextType(); + } + var sourceNode, fftBins, analyser; + + analyser = audioContext.createAnalyser(); + analyser.fftSize = 512; + analyser.smoothingTimeConstant = smoothing; + fftBins = new Float32Array(analyser.fftSize); + + if (stream.jquery) stream = stream[0]; + if (stream instanceof HTMLAudioElement) { + //Audio Tag + sourceNode = audioContext.createMediaElementSource(stream); + if (typeof play === 'undefined') play = true; + threshold = threshold || -65; + } else { + //WebRTC Stream + sourceNode = audioContext.createMediaStreamSource(stream); + threshold = threshold || -45; + } + + sourceNode.connect(analyser); + if (play) analyser.connect(audioContext.destination); + + harker.speaking = false; + + harker.setThreshold = function(t) { + threshold = t; + }; + + harker.setInterval = function(i) { + interval = i; + }; + + harker.stop = function() { + running = false; + harker.emit('volume_change', -100, threshold); + if (harker.speaking) { + harker.speaking = false; + harker.emit('stopped_speaking'); + } + }; + + // Poll the analyser node to determine if speaking + // and emit events if changed + var looper = function() { + setTimeout(function() { + + //check if stop has been called + if(!running) { + return; + } + + var currentVolume = getMaxVolume(analyser, fftBins); + + harker.emit('volume_change', currentVolume, threshold); + + if (currentVolume > threshold) { + if (!harker.speaking) { + harker.speaking = true; + harker.emit('speaking'); + } + } else { + if (harker.speaking) { + harker.speaking = false; + harker.emit('stopped_speaking'); + } + } + + looper(); + }, interval); + }; + looper(); + + + return harker; +} + +},{"wildemitter":3}]},{},[1])(1) }); ;