@videojs/http-streaming - npm Package Compare versions

Comparing version 0.1.0 to 0.2.0

es5/dash-playlist-loader.js


CHANGELOG.md

@@ -9,4 +9,7 @@ CHANGELOG

--------------------
## 0.2.0
* Initial DASH Support ([#8](https://github.com/videojs/http-streaming/issues/8))
## 0.1.0
* Initial release, based on [videojs-contrib-hls 5.12.2](https://github.com/videojs/videojs-contrib-hls)

@@ -24,2 +24,6 @@ /**

var _dashPlaylistLoader = require('./dash-playlist-loader');
var _dashPlaylistLoader2 = _interopRequireDefault(_dashPlaylistLoader);
var _playlistJs = require('./playlist.js');

@@ -153,2 +157,25 @@

var audioProfileFromDefault = function audioProfileFromDefault(master, audioGroupId) {
if (!master.mediaGroups.AUDIO || !audioGroupId) {
return null;
}
var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
if (!audioGroup) {
return null;
}
for (var _name in audioGroup) {
var audioType = audioGroup[_name];
if (audioType['default'] && audioType.playlists) {
// codec should be the same for all playlists within the audio type
return (0, _utilCodecsJs.parseCodecs)(audioType.playlists[0].attributes.CODECS).audioProfile;
}
}
return null;
};
/**

@@ -191,3 +218,6 @@ * Calculates the MIME type strings for a working configuration of

for (var groupId in audioGroup) {
if (!audioGroup[groupId].uri) {
// either a uri is present (in the case of HLS and an external playlist), or
// playlists is present (in the case of DASH where we don't have external audio
// playlists)
if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
isMuxed = true;

@@ -203,4 +233,13 @@ break;

if (isMaat && !codecInfo.audioProfile) {
_videoJs2['default'].log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
codecInfo.audioProfile = defaultCodecs.audioProfile;
if (!isMuxed) {
// It is possible for codecs to be specified on the audio media group playlist but
// not on the rendition playlist. This is mostly the case for DASH, where audio and
// video are always separate (and separately specified).
codecInfo.audioProfile = audioProfileFromDefault(master, mediaAttributes.AUDIO);
}
if (!codecInfo.audioProfile) {
_videoJs2['default'].log.warn('Multiple audio tracks present but no audio codec string is specified. ' + 'Attempting to use the default audio codec (mp4a.40.2)');
codecInfo.audioProfile = defaultCodecs.audioProfile;
}
}

@@ -276,2 +315,3 @@

var enableLowInitialPlaylist = options.enableLowInitialPlaylist;
var sourceType = options.sourceType;

@@ -288,2 +328,3 @@ if (!url) {

this.mode_ = mode;
this.sourceType_ = sourceType;
this.useCueTags_ = useCueTags;

@@ -343,7 +384,7 @@ this.blacklistDuration = blacklistDuration;

syncController: this.syncController_,
decrypter: this.decrypter_
decrypter: this.decrypter_,
sourceType: this.sourceType_
};
// setup playlist loaders
this.masterPlaylistLoader_ = new _playlistLoader2['default'](url, this.hls_, this.withCredentials);
this.masterPlaylistLoader_ = this.sourceType_ === 'dash' ? new _dashPlaylistLoader2['default'](url, this.hls_, this.withCredentials) : new _playlistLoader2['default'](url, this.hls_, this.withCredentials);
this.setupMasterPlaylistLoaderListeners_();

@@ -409,2 +450,3 @@

(0, _mediaGroups.setupMediaGroups)({
sourceType: _this2.sourceType_,
segmentLoaders: {

@@ -1236,7 +1278,4 @@ AUDIO: _this2.audioSegmentLoader_,

}
this.mainSegmentLoader_.mimeType(mimeTypes[0]);
if (mimeTypes[1]) {
this.audioSegmentLoader_.mimeType(mimeTypes[1]);
}
this.configureLoaderMimeTypes_(mimeTypes);
// exclude any incompatible variant streams from future playlist

@@ -1246,3 +1285,20 @@ // selection

}
}, {
key: 'configureLoaderMimeTypes_',
value: function configureLoaderMimeTypes_(mimeTypes) {
// If the content is demuxed, we can't start appending segments to a source buffer
// until both source buffers are set up, or else the browser may not let us add the
// second source buffer (it will assume we are playing either audio only or video
// only).
var sourceBufferEmitter =
// if the first mime type has muxed video and audio then we shouldn't wait on the
// second source buffer
mimeTypes.length > 1 && mimeTypes[0].indexOf(',') === -1 ? new _videoJs2['default'].EventTarget() : null;
this.mainSegmentLoader_.mimeType(mimeTypes[0], sourceBufferEmitter);
if (mimeTypes[1]) {
this.audioSegmentLoader_.mimeType(mimeTypes[1], sourceBufferEmitter);
}
}
/**

@@ -1249,0 +1305,0 @@ * Blacklist playlists that are known to be codec or

@@ -17,2 +17,6 @@ 'use strict';

var _dashPlaylistLoader = require('./dash-playlist-loader');
var _dashPlaylistLoader2 = _interopRequireDefault(_dashPlaylistLoader);
var noop = function noop() {};

@@ -368,2 +372,3 @@

var hls = settings.hls;
var sourceType = settings.sourceType;
var segmentLoader = settings.segmentLoaders[type];

@@ -394,2 +399,4 @@ var withCredentials = settings.requestOptions.withCredentials;

playlistLoader = new _playlistLoader2['default'](properties.resolvedUri, hls, withCredentials);
} else if (properties.playlists && sourceType === 'dash') {
playlistLoader = new _dashPlaylistLoader2['default'](properties.playlists[0], hls, withCredentials);
} else {

@@ -437,2 +444,3 @@ // no resolvedUri means the audio is muxed with the video when using this

var hls = settings.hls;
var sourceType = settings.sourceType;
var segmentLoader = settings.segmentLoaders[type];

@@ -465,5 +473,13 @@ var withCredentials = settings.requestOptions.withCredentials;

var playlistLoader = undefined;
if (sourceType === 'hls') {
playlistLoader = new _playlistLoader2['default'](properties.resolvedUri, hls, withCredentials);
} else if (sourceType === 'dash') {
playlistLoader = new _dashPlaylistLoader2['default'](properties.playlists[0], hls, withCredentials);
}
properties = _videoJs2['default'].mergeOptions({
id: variantLabel,
playlistLoader: new _playlistLoader2['default'](properties.resolvedUri, hls, withCredentials)
playlistLoader: playlistLoader
}, properties);

@@ -470,0 +486,0 @@


es5/segment-loader.js

@@ -189,2 +189,3 @@ /**

this.goalBufferLength_ = settings.goalBufferLength;
this.sourceType_ = settings.sourceType;

@@ -441,3 +442,3 @@ // private instance variables

this.state = 'READY';
this.sourceUpdater_ = new _sourceUpdater2['default'](this.mediaSource_, this.mimeType_);
this.sourceUpdater_ = new _sourceUpdater2['default'](this.mediaSource_, this.mimeType_, this.sourceBufferEmitter_);
this.resetEverything();

@@ -560,6 +561,8 @@ return this.monitorBuffer_();

* @param {String} mimeType the mime type string to use
* @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer
* is added to the media source
*/
}, {
key: 'mimeType',
value: function mimeType(_mimeType) {
value: function mimeType(_mimeType, sourceBufferEmitter) {
if (this.mimeType_) {

@@ -570,2 +573,3 @@ return;

this.mimeType_ = _mimeType;
this.sourceBufferEmitter_ = sourceBufferEmitter;
// if we were unpaused but waiting for a sourceUpdater, start

@@ -572,0 +576,0 @@ // buffering now

@@ -33,19 +33,60 @@ /**

* SourceBuffer
* @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer is
* added to the media source
*/
var SourceUpdater = (function () {
function SourceUpdater(mediaSource, mimeType) {
var _this = this;
function SourceUpdater(mediaSource, mimeType, sourceBufferEmitter) {
_classCallCheck(this, SourceUpdater);
var createSourceBuffer = function createSourceBuffer() {
_this.sourceBuffer_ = mediaSource.addSourceBuffer(mimeType);
this.callbacks_ = [];
this.pendingCallback_ = null;
this.timestampOffset_ = 0;
this.mediaSource = mediaSource;
this.processedAppend_ = false;
if (mediaSource.readyState === 'closed') {
mediaSource.addEventListener('sourceopen', this.createSourceBuffer_.bind(this, mimeType, sourceBufferEmitter));
} else {
this.createSourceBuffer_(mimeType, sourceBufferEmitter);
}
}
_createClass(SourceUpdater, [{
key: 'createSourceBuffer_',
value: function createSourceBuffer_(mimeType, sourceBufferEmitter) {
var _this = this;
this.sourceBuffer_ = this.mediaSource.addSourceBuffer(mimeType);
if (sourceBufferEmitter) {
sourceBufferEmitter.trigger('sourcebufferadded');
if (this.mediaSource.sourceBuffers.length < 2) {
// There's another source buffer we must wait for before we can start updating
// our own (or else we can get into a bad state, i.e., appending video/audio data
// before the other video/audio source buffer is available and leading to a video
// or audio only buffer).
sourceBufferEmitter.on('sourcebufferadded', function () {
_this.start_();
});
return;
}
}
this.start_();
}
}, {
key: 'start_',
value: function start_() {
var _this2 = this;
this.started_ = true;
// run completion handlers and process callbacks as updateend
// events fire
_this.onUpdateendCallback_ = function () {
var pendingCallback = _this.pendingCallback_;
this.onUpdateendCallback_ = function () {
var pendingCallback = _this2.pendingCallback_;
_this.pendingCallback_ = null;
_this2.pendingCallback_ = null;

@@ -56,38 +97,24 @@ if (pendingCallback) {

_this.runCallback_();
_this2.runCallback_();
};
_this.sourceBuffer_.addEventListener('updateend', _this.onUpdateendCallback_);
this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);
_this.runCallback_();
};
this.callbacks_ = [];
this.pendingCallback_ = null;
this.timestampOffset_ = 0;
this.mediaSource = mediaSource;
this.processedAppend_ = false;
if (mediaSource.readyState === 'closed') {
mediaSource.addEventListener('sourceopen', createSourceBuffer);
} else {
createSourceBuffer();
this.runCallback_();
}
}
/**
* Aborts the current segment and resets the segment parser.
*
* @param {Function} done function to call when done
* @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
*/
_createClass(SourceUpdater, [{
/**
* Aborts the current segment and resets the segment parser.
*
* @param {Function} done function to call when done
* @see http://w3c.github.io/media-source/#widl-SourceBuffer-abort-void
*/
}, {
key: 'abort',
value: function abort(done) {
var _this2 = this;
var _this3 = this;
if (this.processedAppend_) {
this.queueCallback_(function () {
_this2.sourceBuffer_.abort();
_this3.sourceBuffer_.abort();
}, done);

@@ -107,7 +134,7 @@ }

value: function appendBuffer(bytes, done) {
var _this3 = this;
var _this4 = this;
this.processedAppend_ = true;
this.queueCallback_(function () {
_this3.sourceBuffer_.appendBuffer(bytes);
_this4.sourceBuffer_.appendBuffer(bytes);
}, done);

@@ -140,7 +167,7 @@ }

value: function remove(start, end) {
var _this4 = this;
var _this5 = this;
if (this.processedAppend_) {
this.queueCallback_(function () {
_this4.sourceBuffer_.remove(start, end);
_this5.sourceBuffer_.remove(start, end);
}, noop);

@@ -169,7 +196,7 @@ }

value: function timestampOffset(offset) {
var _this5 = this;
var _this6 = this;
if (typeof offset !== 'undefined') {
this.queueCallback_(function () {
_this5.sourceBuffer_.timestampOffset = offset;
_this6.sourceBuffer_.timestampOffset = offset;
});

@@ -199,3 +226,3 @@ this.timestampOffset_ = offset;

if (!this.updating() && this.callbacks_.length) {
if (!this.updating() && this.callbacks_.length && this.started_) {
callbacks = this.callbacks_.shift();

@@ -202,0 +229,0 @@ this.pendingCallback_ = callbacks[1];

@@ -564,3 +564,3 @@ /**

};
} else if (playlist.discontinuityStarts.length) {
} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
// Search for future discontinuities that we can provide better timing

@@ -567,0 +567,0 @@ // information for and save that information for sync purposes

@@ -9,2 +9,6 @@ /**

Object.defineProperty(exports, '__esModule', {
value: true
});
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })();

@@ -115,2 +119,19 @@

var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
var mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
if (mpegurlRE.test(type)) {
return 'hls';
}
var dashRE = /^application\/dash\+xml/i;
if (dashRE.test(type)) {
return 'dash';
}
return null;
};
exports.simpleTypeFromSourceType = simpleTypeFromSourceType;
/**

@@ -161,2 +182,38 @@ * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.

var emeOptions = function emeOptions(keySystemOptions, videoPlaylist, audioPlaylist) {
if (!keySystemOptions) {
return keySystemOptions;
}
// upsert the content types based on the selected playlist
var keySystemContentTypes = {};
for (var keySystem in keySystemOptions) {
keySystemContentTypes[keySystem] = {
audioContentType: 'audio/mp4; codecs="' + audioPlaylist.attributes.CODECS + '"',
videoContentType: 'video/mp4; codecs="' + videoPlaylist.attributes.CODECS + '"'
};
// videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
// so we need to prevent overwriting the URL entirely
if (typeof keySystemOptions[keySystem] === 'string') {
keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
}
}
return {
keySystems: _videoJs2['default'].mergeOptions(keySystemOptions, keySystemContentTypes)
};
};
var setupEmeOptions = function setupEmeOptions(hlsHandler) {
if (hlsHandler.options_.sourceType === 'dash') {
var player = _videoJs2['default'].players[hlsHandler.tech_.options_.playerId];
if (player.eme) {
player.eme.options = emeOptions(hlsHandler.source_.keySystems, hlsHandler.playlists.media(), hlsHandler.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader.media());
}
}
};
/**

@@ -192,2 +249,11 @@ * Whether the browser has built-in HLS support.

Hls.supportsNativeDash = (function () {
if (!_videoJs2['default'].getTech('Html5').isSupported()) {
return false;
}
return (/maybe|probably/i.test(_globalDocument2['default'].createElement('video').canPlayType('application/dash+xml'))
);
})();
/**

@@ -238,2 +304,11 @@ * HLS is a source handler, not a tech. Make sure attempts to use it

}
// Set up a reference to the HlsHandler from player.vhs. This allows users to start
// migrating from player.tech_.hls... to player.vhs... for API access. Although this
// isn't the most appropriate form of reference for video.js (since all APIs should
// be provided through core video.js), it is a common pattern for plugins, and vhs
// will act accordingly.
_player.vhs = this;
// deprecated, for backwards compatibility
_player.dash = this;
}

@@ -327,3 +402,3 @@

key: 'src',
value: function src(_src) {
value: function src(_src, type) {
var _this3 = this;

@@ -340,2 +415,3 @@

this.options_.externHls = Hls;
this.options_.sourceType = simpleTypeFromSourceType(type);

@@ -489,2 +565,3 @@ this.masterPlaylistController_ = new _masterPlaylistController.MasterPlaylistController(this.options_);

(0, _renditionMixin2['default'])(_this3);
setupEmeOptions(_this3);
});

@@ -631,3 +708,3 @@

tech.hls.src(source.src);
tech.hls.src(source.src, source.type);
return tech.hls;

@@ -654,4 +731,11 @@ },

var mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
var sourceType = simpleTypeFromSourceType(type);
if (sourceType === 'dash') {
if (!options.hls.overrideNative && Hls.supportsNativeDash) {
return false;
}
return true;
}
// favor native HLS support if it's available

@@ -661,3 +745,4 @@ if (!options.hls.overrideNative && Hls.supportsNativeHls) {

}
return mpegurlRE.test(type);
return sourceType === 'hls';
};

@@ -698,3 +783,5 @@

HlsHandler: HlsHandler,
HlsSourceHandler: HlsSourceHandler
HlsSourceHandler: HlsSourceHandler,
emeOptions: emeOptions,
simpleTypeFromSourceType: simpleTypeFromSourceType
};
{
"name": "@videojs/http-streaming",
"version": "0.1.0",
"version": "0.2.0",
"description": "Play back HLS with video.js, even where it's not natively supported",

@@ -95,2 +95,3 @@ "main": "es5/videojs-http-streaming.js",

"mux.js": "4.3.2",
"mpd-parser": "0.2.0",
"url-toolkit": "1.0.9",

@@ -118,5 +119,5 @@ "video.js": "^5.19.1 || ^6.2.0",

"jsdoc": "^3.4.0",
"karma": "^0.13.0",
"karma": "^1.7.1",
"karma-browserify": "^4.4.0",
"karma-chrome-launcher": "^0.2.0",
"karma-chrome-launcher": "^2.2.0",
"karma-coverage": "^1.1.1",

@@ -139,2 +140,3 @@ "karma-detect-browsers": "^2.0.0",

"uglify-js": "^2.5.0",
"videojs-contrib-eme": "^3.0.0",
"videojs-contrib-quality-levels": "^2.0.2",

@@ -141,0 +143,0 @@ "videojs-flash": "^2.0.0",

@@ -1,2 +0,2 @@

# video.js HLS Source Handler
# videojs-http-streaming (VHS)

@@ -6,7 +6,13 @@ [![Build Status][travis-icon]][travis-link]

Play HLS, DASH, and future HTTP streaming protocols with video.js, even where they're not
natively supported.
Play back HLS with video.js, even where it's not natively supported.
Lead Maintainers:
- Jon-Carlos Rivera [@imbcmdth](https://github.com/imbcmdth)
- Joe Forbes [@forbesjo](https://github.com/forbesjo)
- Matthew Neil [@mjneil](https://github.com/mjneil)
- Oshin Karamian [@OshinKaramian](https://github.com/OshinKaramian)
- Garrett Singer [@gesinger](https://github.com/gesinger)
- Chuck Wilson [@squarebracket](https://github.com/squarebracket)
Lead Maintainer: Jon-Carlos Rivera [@imbcmdth](https://github.com/imbcmdth)
Maintenance Status: Stable
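
As a rough illustration of the expanded scope described above, here is a minimal sketch of feeding video.js an HLS or a DASH source (the element id and URLs are placeholders, not taken from this package):

var player = videojs('example-video');

// HLS source
player.src({
  src: 'https://example.com/master.m3u8',
  type: 'application/x-mpegURL'
});

// or a DASH source, newly supported in 0.2.0
player.src({
  src: 'https://example.com/manifest.mpd',
  type: 'application/dash+xml'
});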

@@ -13,0 +19,0 @@

@@ -17,5 +17,5 @@ var fs = require('fs');

if (extname === '.m3u8') {
if (extname === '.m3u8' || extname === '.mpd') {
// translate this manifest
manifests += ' \'' + path.basename(file, '.m3u8') + '\': ';
manifests += ' \'' + path.basename(file, extname) + '\': ';
manifests += fs.readFileSync(file, 'utf8')

@@ -22,0 +22,0 @@ .split(/\r\n|\n/)

@@ -5,2 +5,3 @@ /**

import PlaylistLoader from './playlist-loader';
import DashPlaylistLoader from './dash-playlist-loader';
import { isEnabled, isLowestEnabledRendition } from './playlist.js';

@@ -109,2 +110,25 @@ import SegmentLoader from './segment-loader';

const audioProfileFromDefault = (master, audioGroupId) => {
if (!master.mediaGroups.AUDIO || !audioGroupId) {
return null;
}
const audioGroup = master.mediaGroups.AUDIO[audioGroupId];
if (!audioGroup) {
return null;
}
for (let name in audioGroup) {
const audioType = audioGroup[name];
if (audioType.default && audioType.playlists) {
// codec should be the same for all playlists within the audio type
return parseCodecs(audioType.playlists[0].attributes.CODECS).audioProfile;
}
}
return null;
};
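
To make the lookup above concrete, a hypothetical master object (the group name and codec string are assumptions for illustration only) and the call it supports:

// Only the fields read by audioProfileFromDefault are shown.
const master = {
  mediaGroups: {
    AUDIO: {
      audio: {
        en: {
          default: true,
          playlists: [{ attributes: { CODECS: 'mp4a.40.2' } }]
        }
      }
    }
  }
};

// Returns the audioProfile parsed from 'mp4a.40.2' on the default track's
// first playlist, or null if no default track with playlists exists.
audioProfileFromDefault(master, 'audio');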
/**

@@ -147,3 +171,6 @@ * Calculates the MIME type strings for a working configuration of

for (let groupId in audioGroup) {
if (!audioGroup[groupId].uri) {
// either a uri is present (in the case of HLS and an external playlist), or
// playlists is present (in the case of DASH where we don't have external audio
// playlists)
if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
isMuxed = true;

@@ -159,6 +186,15 @@ break;

if (isMaat && !codecInfo.audioProfile) {
videojs.log.warn(
'Multiple audio tracks present but no audio codec string is specified. ' +
'Attempting to use the default audio codec (mp4a.40.2)');
codecInfo.audioProfile = defaultCodecs.audioProfile;
if (!isMuxed) {
// It is possible for codecs to be specified on the audio media group playlist but
// not on the rendition playlist. This is mostly the case for DASH, where audio and
// video are always separate (and separately specified).
codecInfo.audioProfile = audioProfileFromDefault(master, mediaAttributes.AUDIO);
}
if (!codecInfo.audioProfile) {
videojs.log.warn(
'Multiple audio tracks present but no audio codec string is specified. ' +
'Attempting to use the default audio codec (mp4a.40.2)');
codecInfo.audioProfile = defaultCodecs.audioProfile;
}
}

@@ -239,3 +275,4 @@

blacklistDuration,
enableLowInitialPlaylist
enableLowInitialPlaylist,
sourceType
} = options;

@@ -253,2 +290,3 @@

this.mode_ = mode;
this.sourceType_ = sourceType;
this.useCueTags_ = useCueTags;

@@ -297,7 +335,9 @@ this.blacklistDuration = blacklistDuration;

syncController: this.syncController_,
decrypter: this.decrypter_
decrypter: this.decrypter_,
sourceType: this.sourceType_
};
// setup playlist loaders
this.masterPlaylistLoader_ = new PlaylistLoader(url, this.hls_, this.withCredentials);
this.masterPlaylistLoader_ = this.sourceType_ === 'dash' ?
new DashPlaylistLoader(url, this.hls_, this.withCredentials) :
new PlaylistLoader(url, this.hls_, this.withCredentials);
this.setupMasterPlaylistLoaderListeners_();

@@ -362,2 +402,3 @@

setupMediaGroups({
sourceType: this.sourceType_,
segmentLoaders: {

@@ -1145,7 +1186,4 @@ AUDIO: this.audioSegmentLoader_,

}
this.mainSegmentLoader_.mimeType(mimeTypes[0]);
if (mimeTypes[1]) {
this.audioSegmentLoader_.mimeType(mimeTypes[1]);
}
this.configureLoaderMimeTypes_(mimeTypes);
// exclude any incompatible variant streams from future playlist

@@ -1156,2 +1194,19 @@ // selection

configureLoaderMimeTypes_(mimeTypes) {
// If the content is demuxed, we can't start appending segments to a source buffer
// until both source buffers are set up, or else the browser may not let us add the
// second source buffer (it will assume we are playing either audio only or video
// only).
const sourceBufferEmitter =
// if the first mime type has muxed video and audio then we shouldn't wait on the
// second source buffer
mimeTypes.length > 1 && mimeTypes[0].indexOf(',') === -1 ?
new videojs.EventTarget() : null;
this.mainSegmentLoader_.mimeType(mimeTypes[0], sourceBufferEmitter);
if (mimeTypes[1]) {
this.audioSegmentLoader_.mimeType(mimeTypes[1], sourceBufferEmitter);
}
}
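
A sketch of the two MIME-type shapes the ternary above distinguishes (the codec strings are assumed examples):

// Demuxed content: two single-codec MIME types, so a shared emitter is
// created and each loader waits for the other's source buffer.
const demuxed = ['video/mp2t; codecs="avc1.4d401f"', 'audio/mp2t; codecs="mp4a.40.2"'];
demuxed.length > 1 && demuxed[0].indexOf(',') === -1; // true, emitter created

// Muxed video and audio in the first MIME type: no second source buffer to
// wait on, so no emitter is needed.
const muxed = ['video/mp2t; codecs="avc1.4d401f, mp4a.40.2"'];
muxed.length > 1 && muxed[0].indexOf(',') === -1; // false, emitter is null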
/**

@@ -1158,0 +1213,0 @@ * Blacklist playlists that are known to be codec or

import videojs from 'video.js';
import PlaylistLoader from './playlist-loader';
import DashPlaylistLoader from './dash-playlist-loader';

@@ -354,2 +355,3 @@ const noop = () => {};

hls,
sourceType,
segmentLoaders: { [type]: segmentLoader },

@@ -388,2 +390,6 @@ requestOptions: { withCredentials },

withCredentials);
} else if (properties.playlists && sourceType === 'dash') {
playlistLoader = new DashPlaylistLoader(properties.playlists[0],
hls,
withCredentials);
} else {

@@ -433,2 +439,3 @@ // no resolvedUri means the audio is muxed with the video when using this

hls,
sourceType,
segmentLoaders: { [type]: segmentLoader },

@@ -465,7 +472,15 @@ requestOptions: { withCredentials },

let playlistLoader;
if (sourceType === 'hls') {
playlistLoader =
new PlaylistLoader(properties.resolvedUri, hls, withCredentials);
} else if (sourceType === 'dash') {
playlistLoader =
new DashPlaylistLoader(properties.playlists[0], hls, withCredentials);
}
properties = videojs.mergeOptions({
id: variantLabel,
playlistLoader: new PlaylistLoader(properties.resolvedUri,
hls,
withCredentials)
playlistLoader
}, properties);

@@ -472,0 +487,0 @@

@@ -148,2 +148,3 @@ /**

this.goalBufferLength_ = settings.goalBufferLength;
this.sourceType_ = settings.sourceType;

@@ -375,3 +376,5 @@ // private instance variables

this.state = 'READY';
this.sourceUpdater_ = new SourceUpdater(this.mediaSource_, this.mimeType_);
this.sourceUpdater_ = new SourceUpdater(this.mediaSource_,
this.mimeType_,
this.sourceBufferEmitter_);
this.resetEverything();

@@ -486,4 +489,6 @@ return this.monitorBuffer_();

* @param {String} mimeType the mime type string to use
* @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer
* is added to the media source
*/
mimeType(mimeType) {
mimeType(mimeType, sourceBufferEmitter) {
if (this.mimeType_) {

@@ -494,2 +499,3 @@ return;

this.mimeType_ = mimeType;
this.sourceBufferEmitter_ = sourceBufferEmitter;
// if we were unpaused but waiting for a sourceUpdater, start

@@ -496,0 +502,0 @@ // buffering now

@@ -19,38 +19,62 @@ /**

* SourceBuffer
* @param {Object} sourceBufferEmitter an event emitter that fires when a source buffer is
* added to the media source
*/
export default class SourceUpdater {
constructor(mediaSource, mimeType) {
let createSourceBuffer = () => {
this.sourceBuffer_ = mediaSource.addSourceBuffer(mimeType);
constructor(mediaSource, mimeType, sourceBufferEmitter) {
this.callbacks_ = [];
this.pendingCallback_ = null;
this.timestampOffset_ = 0;
this.mediaSource = mediaSource;
this.processedAppend_ = false;
// run completion handlers and process callbacks as updateend
// events fire
this.onUpdateendCallback_ = () => {
let pendingCallback = this.pendingCallback_;
if (mediaSource.readyState === 'closed') {
mediaSource.addEventListener(
'sourceopen', this.createSourceBuffer_.bind(this, mimeType, sourceBufferEmitter));
} else {
this.createSourceBuffer_(mimeType, sourceBufferEmitter);
}
}
this.pendingCallback_ = null;
createSourceBuffer_(mimeType, sourceBufferEmitter) {
this.sourceBuffer_ = this.mediaSource.addSourceBuffer(mimeType);
if (pendingCallback) {
pendingCallback();
}
if (sourceBufferEmitter) {
sourceBufferEmitter.trigger('sourcebufferadded');
this.runCallback_();
};
if (this.mediaSource.sourceBuffers.length < 2) {
// There's another source buffer we must wait for before we can start updating
// our own (or else we can get into a bad state, i.e., appending video/audio data
// before the other video/audio source buffer is available and leading to a video
// or audio only buffer).
sourceBufferEmitter.on('sourcebufferadded', () => {
this.start_();
});
return;
}
}
this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);
this.start_();
}
start_() {
this.started_ = true;
// run completion handlers and process callbacks as updateend
// events fire
this.onUpdateendCallback_ = () => {
let pendingCallback = this.pendingCallback_;
this.pendingCallback_ = null;
if (pendingCallback) {
pendingCallback();
}
this.runCallback_();
};
this.callbacks_ = [];
this.pendingCallback_ = null;
this.timestampOffset_ = 0;
this.mediaSource = mediaSource;
this.processedAppend_ = false;
this.sourceBuffer_.addEventListener('updateend', this.onUpdateendCallback_);
if (mediaSource.readyState === 'closed') {
mediaSource.addEventListener('sourceopen', createSourceBuffer);
} else {
createSourceBuffer();
}
this.runCallback_();
}

@@ -152,3 +176,4 @@

if (!this.updating() &&
this.callbacks_.length) {
this.callbacks_.length &&
this.started_) {
callbacks = this.callbacks_.shift();

@@ -155,0 +180,0 @@ this.pendingCallback_ = callbacks[1];

@@ -519,3 +519,3 @@ /**

};
} else if (playlist.discontinuityStarts.length) {
} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
// Search for future discontinuities that we can provide better timing

@@ -522,0 +522,0 @@ // information for and save that information for sync purposes

@@ -76,2 +76,18 @@ /**

export const simpleTypeFromSourceType = (type) => {
const mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
if (mpegurlRE.test(type)) {
return 'hls';
}
const dashRE = /^application\/dash\+xml/i;
if (dashRE.test(type)) {
return 'dash';
}
return null;
};
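
For reference, a sketch of the mappings the two regular expressions above produce:

simpleTypeFromSourceType('application/x-mpegURL');         // 'hls'
simpleTypeFromSourceType('application/vnd.apple.mpegurl'); // 'hls'
simpleTypeFromSourceType('application/dash+xml');          // 'dash'
simpleTypeFromSourceType('video/mp4');                     // null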
/**

@@ -123,2 +139,42 @@ * Updates the selectedIndex of the QualityLevelList when a mediachange happens in hls.

const emeOptions = (keySystemOptions, videoPlaylist, audioPlaylist) => {
if (!keySystemOptions) {
return keySystemOptions;
}
// upsert the content types based on the selected playlist
const keySystemContentTypes = {};
for (let keySystem in keySystemOptions) {
keySystemContentTypes[keySystem] = {
audioContentType: `audio/mp4; codecs="${audioPlaylist.attributes.CODECS}"`,
videoContentType: `video/mp4; codecs="${videoPlaylist.attributes.CODECS}"`
};
// videojs-contrib-eme accepts the option of specifying: 'com.some.cdm': 'url'
// so we need to prevent overwriting the URL entirely
if (typeof keySystemOptions[keySystem] === 'string') {
keySystemContentTypes[keySystem].url = keySystemOptions[keySystem];
}
}
return {
keySystems: videojs.mergeOptions(keySystemOptions, keySystemContentTypes)
};
};
const setupEmeOptions = (hlsHandler) => {
if (hlsHandler.options_.sourceType === 'dash') {
const player = videojs.players[hlsHandler.tech_.options_.playerId];
if (player.eme) {
player.eme.options = emeOptions(
hlsHandler.source_.keySystems,
hlsHandler.playlists.media(),
hlsHandler.masterPlaylistController_.mediaTypes_.AUDIO.activePlaylistLoader.media()
);
}
}
};
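
A sketch of the shape emeOptions produces, using a hypothetical key system and codec strings (none of these values come from this diff):

const keySystems = { 'com.widevine.alpha': 'https://example.com/license' };
const videoPlaylist = { attributes: { CODECS: 'avc1.4d401f' } };
const audioPlaylist = { attributes: { CODECS: 'mp4a.40.2' } };

emeOptions(keySystems, videoPlaylist, audioPlaylist);
// => {
//      keySystems: {
//        'com.widevine.alpha': {
//          url: 'https://example.com/license',
//          audioContentType: 'audio/mp4; codecs="mp4a.40.2"',
//          videoContentType: 'video/mp4; codecs="avc1.4d401f"'
//        }
//      }
//    }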
/**

@@ -156,2 +212,11 @@ * Whether the browser has built-in HLS support.

Hls.supportsNativeDash = (function() {
if (!videojs.getTech('Html5').isSupported()) {
return false;
}
return (/maybe|probably/i).test(
document.createElement('video').canPlayType('application/dash+xml'));
}());
/**

@@ -196,2 +261,11 @@ * HLS is a source handler, not a tech. Make sure attempts to use it

}
// Set up a reference to the HlsHandler from player.vhs. This allows users to start
// migrating from player.tech_.hls... to player.vhs... for API access. Although this
// isn't the most appropriate form of reference for video.js (since all APIs should
// be provided through core video.js), it is a common pattern for plugins, and vhs
// will act accordingly.
_player.vhs = this;
// deprecated, for backwards compatibility
_player.dash = this;
}
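
A small sketch of the access paths referenced in the comment above (player is assumed to be an existing video.js player whose source is handled by VHS):

player.vhs;        // new, preferred reference to the HlsHandler
player.dash;       // deprecated alias kept for backwards compatibility
player.tech_.hls;  // older access path user code can migrate away from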

@@ -280,3 +354,3 @@

*/
src(src) {
src(src, type) {
// do nothing if the src is falsey

@@ -291,2 +365,3 @@ if (!src) {

this.options_.externHls = Hls;
this.options_.sourceType = simpleTypeFromSourceType(type);

@@ -427,2 +502,3 @@ this.masterPlaylistController_ = new MasterPlaylistController(this.options_);

renditionSelectionMixin(this);
setupEmeOptions(this);
});

@@ -556,3 +632,3 @@

tech.hls.src(source.src);
tech.hls.src(source.src, source.type);
return tech.hls;

@@ -577,4 +653,11 @@ },

let mpegurlRE = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
const sourceType = simpleTypeFromSourceType(type);
if (sourceType === 'dash') {
if (!options.hls.overrideNative && Hls.supportsNativeDash) {
return false;
}
return true;
}
// favor native HLS support if it's available

@@ -584,3 +667,4 @@ if (!options.hls.overrideNative && Hls.supportsNativeHls) {

}
return mpegurlRE.test(type);
return sourceType === 'hls';
};

@@ -622,3 +706,5 @@

HlsHandler,
HlsSourceHandler
HlsSourceHandler,
emeOptions,
simpleTypeFromSourceType
};

@@ -21,3 +21,3 @@ var istanbul = require('browserify-istanbul');

if (process.env.TRAVIS) {
config.browsers = ['travisChrome'];
config.browsers = ['ChromeHeadless'];
}

@@ -70,8 +70,2 @@

},
customLaunchers: {
travisChrome: {
base: 'Chrome',
flags: ['--no-sandbox']
}
},
detectBrowsers: detectBrowsers,

@@ -78,0 +72,0 @@ reporters: ['dots'],

@@ -6,2 +6,4 @@ import QUnit from 'qunit';

import * as MediaGroups from '../src/media-groups';
import PlaylistLoader from '../src/playlist-loader';
import DashPlaylistLoader from '../src/dash-playlist-loader';

@@ -622,3 +624,4 @@ QUnit.module('MediaGroups', {

mediaTypes: this.mediaTypes,
blacklistCurrentPlaylist() {}
blacklistCurrentPlaylist() {},
sourceType: 'hls'
};

@@ -752,1 +755,97 @@ }

});
QUnit.test('initialize audio correctly uses HLS source type', function(assert) {
this.master.mediaGroups.AUDIO.aud1 = {
en: { default: true, language: 'en' },
fr: { default: false, language: 'fr', resolvedUri: 'aud1/fr.m3u8' }
};
this.settings.sourceType = 'hls';
MediaGroups.initialize.AUDIO('AUDIO', this.settings);
assert.notOk(this.mediaTypes.AUDIO.groups.aud1[0].playlistLoader,
'no playlist loader because muxed (no URI)');
assert.ok(this.mediaTypes.AUDIO.groups.aud1[1].playlistLoader instanceof PlaylistLoader,
'playlist loader is an HLS playlist loader');
});
QUnit.test('initialize audio correctly uses DASH source type', function(assert) {
// allow async methods to resolve before next test
const done = assert.async();
this.master.mediaGroups.AUDIO.aud1 = {
// playlists are resolved, no URI for DASH
en: { default: true, language: 'en', playlists: [{}] },
fr: { default: false, language: 'fr', playlists: [{}] }
};
this.settings.sourceType = 'dash';
MediaGroups.initialize.AUDIO('AUDIO', this.settings);
assert.ok(
this.mediaTypes.AUDIO.groups.aud1[0].playlistLoader instanceof DashPlaylistLoader,
'playlist loader is a DASH playlist loader');
assert.ok(
this.mediaTypes.AUDIO.groups.aud1[1].playlistLoader instanceof DashPlaylistLoader,
'playlist loader is a DASH playlist loader');
done();
});
QUnit.test('initialize audio does not create DASH playlist loader if no playlists',
function(assert) {
this.master.mediaGroups.AUDIO.aud1 = {
en: { default: true, language: 'en' },
fr: { default: false, language: 'fr' }
};
this.settings.sourceType = 'dash';
MediaGroups.initialize.AUDIO('AUDIO', this.settings);
assert.notOk(this.mediaTypes.AUDIO.groups.aud1[0].playlistLoader,
'no playlist loader when misconfigured');
assert.notOk(this.mediaTypes.AUDIO.groups.aud1[1].playlistLoader,
'no playlist loader when misconfigured');
});
QUnit.test('initialize subtitles correctly uses HLS source type', function(assert) {
this.master.mediaGroups.SUBTITLES.sub1 = {
en: { language: 'en', resolvedUri: 'sub1/en.m3u8' },
fr: { language: 'fr', resolvedUri: 'sub1/fr.m3u8' }
};
this.settings.sourceType = 'hls';
MediaGroups.initialize.SUBTITLES('SUBTITLES', this.settings);
assert.ok(
this.mediaTypes.SUBTITLES.groups.sub1[0].playlistLoader instanceof PlaylistLoader,
'playlist loader is an HLS playlist loader');
assert.ok(
this.mediaTypes.SUBTITLES.groups.sub1[1].playlistLoader instanceof PlaylistLoader,
'playlist loader is an HLS playlist loader');
});
QUnit.test('initialize subtitles correctly uses DASH source type', function(assert) {
// allow async methods to resolve before next test
const done = assert.async();
this.master.mediaGroups.SUBTITLES.sub1 = {
// playlists are resolved, no URI for DASH
en: { language: 'en', playlists: [{}] },
fr: { language: 'fr', playlists: [{}] }
};
this.settings.sourceType = 'dash';
MediaGroups.initialize.AUDIO('AUDIO', this.settings);
MediaGroups.initialize.SUBTITLES('SUBTITLES', this.settings);
assert.ok(
this.mediaTypes.SUBTITLES.groups.sub1[0].playlistLoader instanceof DashPlaylistLoader,
'playlist loader is a DASH playlist loader');
assert.ok(
this.mediaTypes.SUBTITLES.groups.sub1[1].playlistLoader instanceof DashPlaylistLoader,
'playlist loader is a DASH playlist loader');
done();
});

@@ -42,2 +42,43 @@ import SourceUpdater from '../src/source-updater';

QUnit.test('runs callback if a media source exists when passed source buffer emitter',
function(assert) {
let sourceBufferEmitter = new videojs.EventTarget();
let sourceBuffer;
this.mediaSource.trigger('sourceopen');
// create other media source
this.mediaSource.addSourceBuffer('audio/mp2t');
let updater = new SourceUpdater(this.mediaSource, 'video/mp2t', sourceBufferEmitter);
updater.appendBuffer(new Uint8Array([0, 1, 2]));
sourceBuffer = this.mediaSource.sourceBuffers[1];
assert.equal(sourceBuffer.updates_.length, 1, 'called the source buffer once');
assert.deepEqual(sourceBuffer.updates_[0].append, new Uint8Array([0, 1, 2]),
'appended the bytes');
});
QUnit.test('runs callback after source buffer emitter triggers if other source buffer ' +
'doesn\'t exist at creation',
function(assert) {
let sourceBufferEmitter = new videojs.EventTarget();
let updater = new SourceUpdater(this.mediaSource, 'video/mp2t', sourceBufferEmitter);
let sourceBuffer;
updater.appendBuffer(new Uint8Array([0, 1, 2]));
this.mediaSource.trigger('sourceopen');
sourceBuffer = this.mediaSource.sourceBuffers[0];
assert.equal(sourceBuffer.updates_.length, 0, 'did not call the source buffer');
// create other media source
this.mediaSource.addSourceBuffer('audio/mp2t');
sourceBufferEmitter.trigger('sourcebufferadded');
assert.equal(sourceBuffer.updates_.length, 1, 'called the source buffer once');
assert.deepEqual(sourceBuffer.updates_[0].append, new Uint8Array([0, 1, 2]),
'appended the bytes');
});
QUnit.test('runs the completion callback when updateend fires', function(assert) {

@@ -44,0 +85,0 @@ let updater = new SourceUpdater(this.mediaSource, 'video/mp2t');

@@ -336,3 +336,3 @@ import document from 'global/document';

// contents off the global object
let manifestName = (/(?:.*\/)?(.*)\.m3u8/).exec(request.url);
let manifestName = (/(?:.*\/)?(.*)\.(m3u8|mpd)/).exec(request.url);

@@ -349,2 +349,4 @@ if (manifestName) {

contentType = 'video/MP2T';
} else if (/\.mpd/.test(request.url)) {
contentType = 'application/dash+xml';
}

@@ -351,0 +353,0 @@

@@ -190,2 +190,27 @@ export default {

'#EXT-X-ENDLIST\n',
'dash': '<?xml version="1.0"?>\n' +
'<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" profiles="urn:mpeg:dash:profile:full:2011" minBufferTime="1.5" mediaPresentationDuration="PT4S">\n' +
' <Period>\n' +
' <BaseURL>main/</BaseURL>\n' +
' <AdaptationSet mimeType="video/mp4">\n' +
' <BaseURL>video/</BaseURL>\n' +
' <Representation id="1080p" bandwidth="6800000" width="1920" height="1080">\n' +
' <BaseURL>1080/</BaseURL>\n' +
' <SegmentTemplate media="$RepresentationID$-segment-$Number$.mp4" initialization="$RepresentationID$-init.mp4" duration="10" timescale="10" startNumber="0" />\n' +
' </Representation>\n' +
' <Representation id="720p" bandwidth="2400000" width="1280" height="720">\n' +
' <BaseURL>720/</BaseURL>\n' +
' <SegmentTemplate media="$RepresentationID$-segment-$Number$.mp4" initialization="$RepresentationID$-init.mp4" duration="10" timescale="10" startNumber="0" />\n' +
' </Representation>\n' +
' </AdaptationSet>\n' +
' <AdaptationSet mimeType="audio/mp4">\n' +
' <BaseURL>audio/</BaseURL>\n' +
' <Representation id="audio" bandwidth="128000">\n' +
' <BaseURL>720/</BaseURL>\n' +
' <SegmentTemplate media="segment-$Number$.mp4" initialization="$RepresentationID$-init.mp4" duration="10" timescale="10" startNumber="0" />\n' +
' </Representation>\n' +
' </AdaptationSet>\n' +
' </Period>\n' +
'</MPD>\n' +
'\n',
'demuxed': '#EXTM3U\n' +

@@ -192,0 +217,0 @@ '#EXT-X-VERSION:4\n' +

(Diffs of several additional files in this comparison are too large or not supported to display.)
