videojs-contrib-media-sources
Comparing version 2.3.0 to 2.4.0
package.json
 {
   "name": "videojs-contrib-media-sources",
-  "version": "2.3.0",
+  "version": "2.4.0",
   "description": "A Media Source Extensions plugin for video.js",
@@ -5,0 +5,0 @@ "main": "videojs-media-sources.js",
videojs-media-sources.js
@@ -10,3 +10,2 @@ (function(window, muxjs, undefined){
-    interceptBufferCreation,
-    addSourceBuffer,
     aggregateUpdateHandler,
@@ -64,8 +63,8 @@ scheduleTick,
-  createTextTracksIfNecessary = function (sourceHandler, mediaSource, segment) {
+  createTextTracksIfNecessary = function (sourceBuffer, mediaSource, segment) {
     // create an in-band caption track if one is present in the segment
     if (segment.captions &&
         segment.captions.length &&
-        !sourceHandler.inbandTextTrack_) {
-      sourceHandler.inbandTextTrack_ = mediaSource.player_.addTextTrack('captions');
+        !sourceBuffer.inbandTextTrack_) {
+      sourceBuffer.inbandTextTrack_ = mediaSource.player_.addTextTrack('captions');
     }
@@ -75,5 +74,5 @@
         segment.metadata.length &&
-        !sourceHandler.metadataTrack_) {
-      sourceHandler.metadataTrack_ = mediaSource.player_.addTextTrack('metadata', 'Timed Metadata');
-      sourceHandler.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
+        !sourceBuffer.metadataTrack_) {
+      sourceBuffer.metadataTrack_ = mediaSource.player_.addTextTrack('metadata', 'Timed Metadata');
+      sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType = segment.metadata.dispatchType;
     }
@@ -127,33 +126,16 @@ };
-videojs.MediaSource = videojs.extend(EventTarget, {
-  constructor: function(options){
-    var self;
-    this.settings_ = videojs.mergeOptions(defaults, options);
-    // determine whether native MediaSources should be used
-    if ((this.settings_.mode === 'auto' &&
-         videojs.MediaSource.supportsNativeMediaSources()) ||
-        this.settings_.mode === 'html5') {
-      self = new window.MediaSource();
-      interceptBufferCreation(self);
-      // capture the associated player when the MediaSource is
-      // successfully attached
-      self.addEventListener('sourceopen', function() {
-        var video = document.querySelector('[src="' + self.url_ + '"]');
-        if (!video) {
-          return;
-        }
-        self.player_ = videojs(video.parentNode);
-      });
-      return self;
-    }
-    // otherwise, emulate them through the SWF
-    return new videojs.FlashMediaSource();
-  }
-});
+videojs.MediaSource = function(options) {
+  var settings = videojs.mergeOptions(defaults, options);
+  // determine whether HTML MediaSources should be used
+  if (settings.mode === 'html5' ||
+      (settings.mode === 'auto' &&
+       videojs.MediaSource.supportsNativeMediaSources())) {
+    return new videojs.HtmlMediaSource();
+  }
+  // otherwise, emulate them through the SWF
+  return new videojs.FlashMediaSource();
+};
 videojs.MediaSource.supportsNativeMediaSources = function() {
@@ -167,57 +149,133 @@ return !!window.MediaSource;
-interceptBufferCreation = function(mediaSource) {
-  // virtual source buffers will be created as needed to transmux
-  // MPEG-2 TS into supported ones
-  mediaSource.virtualBuffers = [];
-  // intercept calls to addSourceBuffer so video/mp2t can be
-  // transmuxed to mp4s
-  mediaSource.addSourceBuffer_ = mediaSource.addSourceBuffer;
-  mediaSource.addSourceBuffer = addSourceBuffer;
-};
-addSourceBuffer = function(type) {
-  var
-    buffer,
-    codecs,
-    avcCodec,
-    mp4aCodec,
-    avcRegEx = /avc1\.[\da-f]+/i,
-    mp4aRegEx = /mp4a\.\d+.\d+/i;
-  // create a virtual source buffer to transmux MPEG-2 transport
-  // stream segments into fragmented MP4s
-  if ((/^video\/mp2t/i).test(type)) {
-    codecs = type.split(';').slice(1).join(';');
-    // Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
-    // `avc1.<hhhhhh>`
-    codecs = codecs.replace(/avc1\.(\d+)\.(\d+)/i, function(orig, profile, avcLevel) {
-      var
-        profileHex = ('00' + Number(profile).toString(16)).slice(-2),
-        avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
-      return 'avc1.' + profileHex + '00' + avcLevelHex;
-    });
-    // Pull out each individual codec string if it exists
-    avcCodec = (codecs.match(avcRegEx) || [])[0];
-    mp4aCodec = (codecs.match(mp4aRegEx) || [])[0];
-    // If a codec is unspecified, use the defaults
-    if (!avcCodec || !avcCodec.length) {
-      avcCodec = 'avc1.4d400d';
-    }
-    if (!mp4aCodec || !mp4aCodec.length) {
-      mp4aCodec = 'mp4a.40.2';
-    }
-    buffer = new VirtualSourceBuffer(this, [avcCodec, mp4aCodec]);
-    this.virtualBuffers.push(buffer);
-    return buffer;
-  }
-  // delegate to the native implementation
-  return this.addSourceBuffer_(type);
-};
+videojs.HtmlMediaSource = videojs.extend(EventTarget, {
+  constructor: function() {
+    var self = this, property;
+    this.mediaSource_ = new window.MediaSource();
+    // delegate to the native MediaSource's methods by default
+    for (property in this.mediaSource_) {
+      if (!(property in videojs.HtmlMediaSource.prototype) &&
+          typeof this.mediaSource_[property] === 'function') {
+        this[property] = this.mediaSource_[property].bind(this.mediaSource_);
+      }
+    }
+    // emulate `duration` and `seekable` until seeking can be
+    // handled uniformly for live streams
+    // see https://github.com/w3c/media-source/issues/5
+    this.duration_ = NaN;
+    Object.defineProperty(this, 'duration', {
+      get: function() {
+        return self.duration_;
+      },
+      set: function(duration) {
+        var currentDuration;
+        self.duration_ = duration;
+        if (duration !== Infinity) {
+          self.mediaSource_.duration = duration;
+          return;
+        }
+      }
+    });
+    Object.defineProperty(this, 'seekable', {
+      get: function() {
+        if (this.duration_ === Infinity) {
+          return videojs.createTimeRanges([[0, self.mediaSource_.duration]]);
+        }
+        return self.mediaSource_.seekable;
+      }
+    });
+    Object.defineProperty(this, 'readyState', {
+      get: function() {
+        return self.mediaSource_.readyState;
+      }
+    });
+    // the list of virtual and native SourceBuffers created by this
+    // MediaSource
+    this.sourceBuffers = [];
+    // capture the associated player when the MediaSource is
+    // successfully attached
+    this.mediaSource_.addEventListener('sourceopen', function(event) {
+      var video = document.querySelector('[src="' + self.url_ + '"]');
+      if (!video) {
+        return;
+      }
+      self.player_ = videojs(video.parentNode);
+      self.trigger(event);
+    });
+  },
+  addSeekableRange_: function(start, end) {
+    var error;
+    if (this.duration !== Infinity) {
+      error = new Error('MediaSource.addSeekableRange() can only be invoked ' +
+                        'when the duration is Infinity');
+      error.name = 'InvalidStateError';
+      error.code = 11;
+      throw error;
+    }
+    if (end > this.mediaSource_.duration ||
+        isNaN(this.mediaSource_.duration)) {
+      this.mediaSource_.duration = end;
+    }
+  },
+  addSourceBuffer: function(type) {
+    var
+      buffer,
+      codecs,
+      avcCodec,
+      mp4aCodec,
+      avcRegEx = /avc1\.[\da-f]+/i,
+      mp4aRegEx = /mp4a\.\d+.\d+/i;
+    // create a virtual source buffer to transmux MPEG-2 transport
+    // stream segments into fragmented MP4s
+    if ((/^video\/mp2t/i).test(type)) {
+      codecs = type.split(';').slice(1).join(';');
+      codecs = translateLegacyCodecs(codecs);
+      // Pull out each individual codec string if it exists
+      avcCodec = (codecs.match(avcRegEx) || [])[0];
+      mp4aCodec = (codecs.match(mp4aRegEx) || [])[0];
+      // If a codec is unspecified, use the defaults
+      if (!avcCodec || !avcCodec.length) {
+        avcCodec = 'avc1.4d400d';
+      }
+      if (!mp4aCodec || !mp4aCodec.length) {
+        mp4aCodec = 'mp4a.40.2';
+      }
+      buffer = new VirtualSourceBuffer(this, [avcCodec, mp4aCodec]);
+      this.sourceBuffers.push(buffer);
+      return buffer;
+    }
+    // delegate to the native implementation
+    buffer = this.mediaSource_.addSourceBuffer(type);
+    this.sourceBuffers.push(buffer);
+    return buffer;
+  }
+});
+// Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
+// `avc1.<hhhhhh>`
+var translateLegacyCodecs = function(codecs) {
+  return codecs.replace(/avc1\.(\d+)\.(\d+)/i, function(orig, profile, avcLevel) {
+    var
+      profileHex = ('00' + Number(profile).toString(16)).slice(-2),
+      avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
+    return 'avc1.' + profileHex + '00' + avcLevelHex;
+  });
+};
@@ -240,2 +298,4 @@
   this.bufferUpdating_ = false;
+  this.mediaSource_ = mediaSource;
+  this.codecs_ = codecs;
@@ -249,61 +309,7 @@ // append muxed segments to their respective native buffers as
   if (event.data.action === 'data') {
-    var
-      segment = event.data.segment;
-    // Cast to type
-    segment.data = new Uint8Array(segment.data);
-    // If any sourceBuffers have not been created, do so now
-    if (segment.type === 'video') {
-      if (!self.videoBuffer_) {
-        // Some common mp4 codec strings. Saved for future twittling:
-        // 4d400d
-        // 42c01e & 42c01f
-        self.videoBuffer_ = mediaSource.addSourceBuffer_('video/mp4;codecs="' + codecs[0] + '"');
-        self.videoBuffer_.timestampOffset = self.timestampOffset_;
-        // aggregate buffer events
-        self.videoBuffer_.addEventListener('updatestart',
-          aggregateUpdateHandler(self, 'audioBuffer_', 'updatestart'));
-        self.videoBuffer_.addEventListener('update',
-          aggregateUpdateHandler(self, 'audioBuffer_', 'update'));
-        self.videoBuffer_.addEventListener('updateend',
-          aggregateUpdateHandler(self, 'audioBuffer_', 'updateend'));
-      }
-    } else if (segment.type === 'audio') {
-      if (!self.audioBuffer_) {
-        self.audioBuffer_ = mediaSource.addSourceBuffer_('audio/mp4;codecs="' + codecs[1] + '"');
-        self.audioBuffer_.timestampOffset = self.timestampOffset_;
-        // aggregate buffer events
-        self.audioBuffer_.addEventListener('updatestart',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'updatestart'));
-        self.audioBuffer_.addEventListener('update',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'update'));
-        self.audioBuffer_.addEventListener('updateend',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'updateend'));
-      }
-    } else if (segment.type === 'combined') {
-      if (!self.videoBuffer_) {
-        self.videoBuffer_ = mediaSource.addSourceBuffer_('video/mp4;codecs="' + codecs.join(',') + '"');
-        self.videoBuffer_.timestampOffset = self.timestampOffset_;
-        // aggregate buffer events
-        self.videoBuffer_.addEventListener('updatestart',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'updatestart'));
-        self.videoBuffer_.addEventListener('update',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'update'));
-        self.videoBuffer_.addEventListener('updateend',
-          aggregateUpdateHandler(self, 'videoBuffer_', 'updateend'));
-      }
-    }
-    createTextTracksIfNecessary(self, mediaSource, segment);
-    // Add the segments to the pendingBuffers array
-    self.pendingBuffers_.push(segment);
-    return;
+    return self.data_(event);
   }
   if (event.data.action === 'done') {
-    // All buffers should have been flushed from the muxer
-    // start processing anything we have received
-    self.processPendingSegments_();
-    return;
+    return self.done_(event);
   }
@@ -434,2 +440,66 @@ };
   },
+  // Transmuxer message handlers
+  data_: function(event) {
+    var
+      segment = event.data.segment,
+      nativeMediaSource = this.mediaSource_.mediaSource_;
+    // Cast to type
+    segment.data = new Uint8Array(segment.data);
+    // If any sourceBuffers have not been created, do so now
+    if (segment.type === 'video') {
+      if (!this.videoBuffer_) {
+        this.videoBuffer_ = nativeMediaSource.addSourceBuffer('video/mp4;codecs="' + this.codecs_[0] + '"');
+        this.videoBuffer_.timestampOffset = this.timestampOffset_;
+        // aggregate buffer events
+        this.videoBuffer_.addEventListener('updatestart',
+          aggregateUpdateHandler(this, 'audioBuffer_', 'updatestart'));
+        this.videoBuffer_.addEventListener('update',
+          aggregateUpdateHandler(this, 'audioBuffer_', 'update'));
+        this.videoBuffer_.addEventListener('updateend',
+          aggregateUpdateHandler(this, 'audioBuffer_', 'updateend'));
+      }
+    } else if (segment.type === 'audio') {
+      if (!this.audioBuffer_) {
+        this.audioBuffer_ = nativeMediaSource.addSourceBuffer('audio/mp4;codecs="' + this.codecs_[1] + '"');
+        this.audioBuffer_.timestampOffset = this.timestampOffset_;
+        // aggregate buffer events
+        this.audioBuffer_.addEventListener('updatestart',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'updatestart'));
+        this.audioBuffer_.addEventListener('update',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'update'));
+        this.audioBuffer_.addEventListener('updateend',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'updateend'));
+      }
+    } else if (segment.type === 'combined') {
+      if (!this.videoBuffer_) {
+        this.videoBuffer_ = nativeMediaSource.addSourceBuffer('video/mp4;codecs="' + this.codecs_.join(',') + '"');
+        this.videoBuffer_.timestampOffset = this.timestampOffset_;
+        // aggregate buffer events
+        this.videoBuffer_.addEventListener('updatestart',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'updatestart'));
+        this.videoBuffer_.addEventListener('update',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'update'));
+        this.videoBuffer_.addEventListener('updateend',
+          aggregateUpdateHandler(this, 'videoBuffer_', 'updateend'));
+      }
+    }
+    createTextTracksIfNecessary(this, this.mediaSource_, segment);
+    // Add the segments to the pendingBuffers array
+    this.pendingBuffers_.push(segment);
+    return;
+  },
+  done_: function() {
+    // All buffers should have been flushed from the muxer
+    // start processing anything we have received
+    this.processPendingSegments_();
+    return;
+  },
   // SourceBuffer Implementation
   appendBuffer: function(segment) {
@@ -456,2 +526,3 @@ // Start the internal "updating" state
   },
   /**
@@ -581,2 +652,5 @@ * Process any segments that the muxer has output
   });
   },
+  addSeekableRange_: function() {
+    // intentional no-op
+  }
@@ -1029,2 +1103,9 @@ });
+  // use the native MediaSource to generate an object URL
+  if (object instanceof videojs.HtmlMediaSource) {
+    url = window.URL.createObjectURL(object.mediaSource_);
+    object.url_ = url;
+    return url;
+  }
   // if the object isn't an emulated MediaSource, delegate to the
@@ -1031,0 +1112,0 @@ // native implementation
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is too big to display
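The headline change in 2.4.0 is that videojs.MediaSource becomes a thin factory: it returns a videojs.HtmlMediaSource wrapper when native Media Source Extensions are available (or when mode is forced to 'html5'), and falls back to videojs.FlashMediaSource otherwise. Below is a minimal usage sketch of the native path; it is not taken from the package, and the element, codec string, and segment source are illustrative assumptions.

// Minimal sketch of the 2.4.0 factory behaviour shown in the diff above.
// Assumes video.js and this plugin are loaded; the selector, codec string,
// and segment bytes are illustrative.
var mediaSource = new videojs.MediaSource({ mode: 'auto' });
var video = document.querySelector('video');

mediaSource.addEventListener('sourceopen', function() {
  // 'video/mp2t' requests get a VirtualSourceBuffer that transmuxes MPEG-2 TS
  // into fragmented MP4; any other type is delegated to window.MediaSource.
  var sourceBuffer = mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d, mp4a.40.2"');
  // sourceBuffer.appendBuffer(tsSegmentBytes); // append transport stream bytes here
});

// videojs.URL.createObjectURL is the plugin's object-URL helper; its
// HtmlMediaSource branch appears at the end of the diff above.
video.src = videojs.URL.createObjectURL(mediaSource);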
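The codec handling that previously lived inline in addSourceBuffer is now factored into a module-private translateLegacyCodecs helper. The standalone copy below only illustrates what that rewrite does to an Apple-style codec string; the wrapper itself is not exported by the package.

// Standalone copy of the translation logic from the diff, for illustration only.
var translateLegacyCodecs = function(codecs) {
  return codecs.replace(/avc1\.(\d+)\.(\d+)/i, function(orig, profile, avcLevel) {
    var profileHex = ('00' + Number(profile).toString(16)).slice(-2),
        avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
    return 'avc1.' + profileHex + '00' + avcLevelHex;
  });
};

// The old Apple-style `avc1.<profile>.<level>` form becomes the standard
// hex form: profile 66 -> 0x42, level 30 -> 0x1e.
translateLegacyCodecs('avc1.66.30, mp4a.40.2'); // 'avc1.42001e, mp4a.40.2'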
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package