Socket
Socket
Sign in · Demo · Install

xgplayer-flv.js

Package Overview
Dependencies
13
Maintainers
5
Versions
129
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 1.1.7 to 2.0.0

2

package.json
{
"name": "xgplayer-flv.js",
"version": "1.1.7",
"version": "2.0.0",
"description": "web video player",

@@ -5,0 +5,0 @@ "main": "./dist/index.js",

@@ -39,3 +39,3 @@ /*

}
this.definitionChange = false;
this.e = {

@@ -163,26 +163,3 @@ onSourceOpen: this._onSourceOpen.bind(this),

}
// Rebuild the SourceBuffer for `initSegment.type` when a new init segment
// (e.g. after a definition/quality switch) carries a different
// container/codec combination.
// NOTE(review): assumes the MediaSource is open when this is called — confirm
// against the caller.
newSourceInitSegment (initSegment) {
let is = initSegment;
// MIME string: container plus an optional codec list, e.g. "video/mp4;codecs=avc1...".
let mimeType = `${is.container}`;
if (is.codec && is.codec.length > 0) {
mimeType += `;codecs=${is.codec}`;
}
// Discard every queued segment, pending remove-range and cached init
// segment for this track type — they belong to the old stream.
let ps = this._pendingSegments[is.type];
ps.splice(0, ps.length);
this._pendingSegments[is.type] = [];
this._pendingRemoveRanges[is.type] = [];
this._lastInitSegments[is.type] = [];
let ms = this._mediaSource;
// Flag read elsewhere to hold off appends while the buffer is swapped.
this.definitionChange = true;
if (this._sourceBuffers[is.type]) {
// Replace the old SourceBuffer with one matching the new MIME type and
// re-attach the shared error/updateend handlers.
ms.removeSourceBuffer(this._sourceBuffers[is.type]);
let sb = this._sourceBuffers[is.type] = this._mediaSource.addSourceBuffer(mimeType);
sb.addEventListener('error', this.e.onSourceBufferError);
sb.addEventListener('updateend', this.e.onSourceBufferUpdateEnd);
}
// this.definitionChange = false;
}
appendInitSegment(initSegment, deferred) {

@@ -254,3 +231,3 @@ if (!this._mediaSource || this._mediaSource.readyState !== 'open') {

let sb = this._sourceBuffers[ms.type];
if (sb && !sb.updating && !this._hasPendingRemoveRanges() && this.definitionChange === false) {
if (sb && !sb.updating && !this._hasPendingRemoveRanges()) {
this._doAppendSegments();

@@ -437,2 +414,3 @@ }

let pendingSegments = this._pendingSegments;
for (let type in pendingSegments) {

@@ -439,0 +417,0 @@ if (!this._sourceBuffers[type] || this._sourceBuffers[type].updating) {

@@ -32,3 +32,3 @@ /*

this._emitter = new EventEmitter();
this.isDefinitionChanging = false;
if (config.enableWorker && typeof (Worker) !== 'undefined') {

@@ -55,6 +55,2 @@ try {

this._controller._emitter.on('metadata_arrived', onMetaData => {
this._emitter.emit('metadata_arrived', onMetaData);
})
if (this._controller) {

@@ -91,7 +87,7 @@ let ctl = this._controller;

on(event, listener) {
this._emitter && this._emitter.addListener(event, listener);
this._emitter.addListener(event, listener);
}
off(event, listener) {
this._emitter && this._emitter.removeListener(event, listener);
this._emitter.removeListener(event, listener);
}

@@ -146,3 +142,3 @@

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.INIT_SEGMENT, type, initSegment);
this._emitter.emit(TransmuxingEvents.INIT_SEGMENT, type, initSegment);
});

@@ -153,3 +149,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.MEDIA_SEGMENT, type, mediaSegment);
this._emitter.emit(TransmuxingEvents.MEDIA_SEGMENT, type, mediaSegment);
});

@@ -160,3 +156,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.LOADING_COMPLETE);
this._emitter.emit(TransmuxingEvents.LOADING_COMPLETE);
});

@@ -167,3 +163,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.RECOVERED_EARLY_EOF);
this._emitter.emit(TransmuxingEvents.RECOVERED_EARLY_EOF);
});

@@ -174,3 +170,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.MEDIA_INFO, mediaInfo);
this._emitter.emit(TransmuxingEvents.MEDIA_INFO, mediaInfo);
});

@@ -181,3 +177,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.STATISTICS_INFO, statisticsInfo);
this._emitter.emit(TransmuxingEvents.STATISTICS_INFO, statisticsInfo);
});

@@ -188,3 +184,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.IO_ERROR, type, info);
this._emitter.emit(TransmuxingEvents.IO_ERROR, type, info);
});

@@ -195,3 +191,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.DEMUX_ERROR, type, info);
this._emitter.emit(TransmuxingEvents.DEMUX_ERROR, type, info);
});

@@ -202,3 +198,3 @@ }

Promise.resolve().then(() => {
this._emitter && this._emitter.emit(TransmuxingEvents.RECOMMEND_SEEKPOINT, milliseconds);
this._emitter.emit(TransmuxingEvents.RECOMMEND_SEEKPOINT, milliseconds);
});

@@ -257,2 +253,2 @@ }

// Two `export default` statements are a SyntaxError in an ES module —
// the duplicate diff line is removed.
export default Transmuxer;

@@ -249,6 +249,2 @@ /*

this._demuxer._emitter.on('metadata_arrived', onMetaData => {
this._emitter.emit('metadata_arrived', onMetaData);
})
if (!this._remuxer) {

@@ -332,2 +328,3 @@ this._remuxer = new MP4Remuxer(this._config);

this._internalAbort();
this._remuxer.flushStashedSamples();
this._loadSegment(nextSegmentIndex);

@@ -434,2 +431,2 @@ } else {

// Deduplicated: a module may have only one default export.
export default TransmuxingController;

@@ -25,3 +25,2 @@ /*

import {IllegalStateException} from '../utils/exception.js';
import EventEmitter from 'events';

@@ -53,4 +52,2 @@ function Swap16(src) {

this._emitter = new EventEmitter();
this._config = config;

@@ -132,5 +129,2 @@

this._onDataAvailable = null;
this._emitter.removeAllListeners();
this._emitter = null;
}

@@ -370,3 +364,2 @@

let onMetaData = this._metadata.onMetaData;
this._emitter.emit('metadata_arrived', onMetaData);

@@ -1082,2 +1075,2 @@ if (typeof onMetaData.hasAudio === 'boolean') { // hasAudio

// Deduplicated: a module may have only one default export.
export default FLVDemuxer;

@@ -87,2 +87,2 @@ /*

// Deduplicated: a module may have only one default export.
export default flvjs;

@@ -164,5 +164,19 @@ /*

if (result.done) {
this._status = LoaderStatus.kComplete;
if (this._onComplete) {
this._onComplete(this._range.from, this._range.from + this._receivedLength - 1);
// First check received length
if (this._contentLength !== null && this._receivedLength < this._contentLength) {
// Report Early-EOF
this._status = LoaderStatus.kError;
let type = LoaderErrors.EARLY_EOF;
let info = {code: -1, msg: 'Fetch stream meet Early-EOF'};
if (this._onError) {
this._onError(type, info);
} else {
throw new RuntimeException(info.msg);
}
} else {
// OK. Download complete
this._status = LoaderStatus.kComplete;
if (this._onComplete) {
this._onComplete(this._range.from, this._range.from + this._receivedLength - 1);
}
}

@@ -169,0 +183,0 @@ } else {

@@ -19,730 +19,587 @@ /*

import EventEmitter from 'events'
import Log from '../utils/logger.js'
import Browser from '../utils/browser.js'
import PlayerEvents from './player-events.js'
import Transmuxer from '../core/transmuxer.js'
import TransmuxingEvents from '../core/transmuxing-events.js'
import MSEController from '../core/mse-controller.js'
import MSEEvents from '../core/mse-events.js'
import {ErrorTypes, ErrorDetails} from './player-errors.js'
import {createDefaultConfig} from '../config.js'
import {InvalidArgumentException, IllegalStateException} from '../utils/exception.js'
/* eslint-disable */
import EventEmitter from 'events';
import Log from '../utils/logger.js';
import Browser from '../utils/browser.js';
import PlayerEvents from './player-events.js';
import Transmuxer from '../core/transmuxer.js';
import TransmuxingEvents from '../core/transmuxing-events.js';
import MSEController from '../core/mse-controller.js';
import MSEEvents from '../core/mse-events.js';
import {ErrorTypes, ErrorDetails} from './player-errors.js';
import {createDefaultConfig} from '../config.js';
import {InvalidArgumentException, IllegalStateException} from '../utils/exception.js';
class FlvPlayer {
constructor (mediaDataSource, config) {
this.TAG = 'FlvPlayer'
this._type = 'FlvPlayer'
this._emitter = new EventEmitter()
this._config = createDefaultConfig()
if (typeof config === 'object') {
Object.assign(this._config, config)
}
constructor(mediaDataSource, config) {
this.TAG = 'FlvPlayer';
this._type = 'FlvPlayer';
this._emitter = new EventEmitter();
if (mediaDataSource.type.toLowerCase() !== 'flv') {
throw new InvalidArgumentException('FlvPlayer requires an flv MediaDataSource input!')
}
this._config = createDefaultConfig();
if (typeof config === 'object') {
Object.assign(this._config, config);
}
if (mediaDataSource.isLive === true) {
this._config.isLive = true
}
if (mediaDataSource.type.toLowerCase() !== 'flv') {
throw new InvalidArgumentException('FlvPlayer requires an flv MediaDataSource input!');
}
this.e = {
onvLoadedMetadata: this._onvLoadedMetadata.bind(this),
onvSeeking: this._onvSeeking.bind(this),
onvCanPlay: this._onvCanPlay.bind(this),
onvStalled: this._onvStalled.bind(this),
onvProgress: this._onvProgress.bind(this)
}
if (mediaDataSource.isLive === true) {
this._config.isLive = true;
}
if (window.performance && window.performance.now) {
this._now = window.performance.now.bind(window.performance)
} else {
this._now = Date.now
}
this.e = {
onvLoadedMetadata: this._onvLoadedMetadata.bind(this),
onvSeeking: this._onvSeeking.bind(this),
onvCanPlay: this._onvCanPlay.bind(this),
onvStalled: this._onvStalled.bind(this),
onvProgress: this._onvProgress.bind(this)
};
this._pendingSeekTime = null // in seconds
this._requestSetTime = false
this._seekpointRecord = null
this._progressChecker = null
if (self.performance && self.performance.now) {
this._now = self.performance.now.bind(self.performance);
} else {
this._now = Date.now;
}
this._mediaDataSource = mediaDataSource
this._mediaElement = null
this._msectl = null
this._transmuxer = null
this._pendingSeekTime = null; // in seconds
this._requestSetTime = false;
this._seekpointRecord = null;
this._progressChecker = null;
this._mseSourceOpened = false
this._hasPendingLoad = false
this._receivedCanPlay = false
this._mediaDataSource = mediaDataSource;
this._mediaElement = null;
this._msectl = null;
this._transmuxer = null;
this._mediaInfo = null
this._statisticsInfo = null
this._mseSourceOpened = false;
this._hasPendingLoad = false;
this._receivedCanPlay = false;
let chromeNeedIDRFix = (Browser.chrome &&
(Browser.version.major < 50 ||
(Browser.version.major === 50 && Browser.version.build < 2661)))
this._alwaysSeekKeyframe = !!((chromeNeedIDRFix || Browser.msedge || Browser.msie))
this._mediaInfo = null;
this._statisticsInfo = null;
if (this._alwaysSeekKeyframe) {
this._config.accurateSeek = false
let chromeNeedIDRFix = (Browser.chrome &&
(Browser.version.major < 50 ||
(Browser.version.major === 50 && Browser.version.build < 2661)));
this._alwaysSeekKeyframe = (chromeNeedIDRFix || Browser.msedge || Browser.msie) ? true : false;
if (this._alwaysSeekKeyframe) {
this._config.accurateSeek = false;
}
}
this._tempPendingSegments = {
audio: [],
video: []
// Tear down the player: stop the lazy-load progress timer, close the
// transmuxer, detach from the media element and release the emitter.
destroy() {
if (this._progressChecker != null) {
// Stop the periodic buffered-range check started by _suspendTransmuxer().
window.clearInterval(this._progressChecker);
this._progressChecker = null;
}
if (this._transmuxer) {
this.unload();
}
if (this._mediaElement) {
this.detachMediaElement();
}
// Drop bound handlers and the data-source reference so GC can reclaim them.
this.e = null;
this._mediaDataSource = null;
this._emitter.removeAllListeners();
this._emitter = null;
}
this._definitionRetryTimes = 0
}
destroy () {
if (this._progressChecker != null) {
window.clearInterval(this._progressChecker)
this._progressChecker = null
// Register a listener. For MEDIA_INFO / STATISTICS_INFO, if the data has
// already arrived, replay the event asynchronously so late subscribers
// still receive it.
on(event, listener) {
if (event === PlayerEvents.MEDIA_INFO) {
if (this._mediaInfo != null) {
// Emit on a microtask so the listener added below is invoked too.
Promise.resolve().then(() => {
this._emitter.emit(PlayerEvents.MEDIA_INFO, this.mediaInfo);
});
}
} else if (event === PlayerEvents.STATISTICS_INFO) {
if (this._statisticsInfo != null) {
Promise.resolve().then(() => {
this._emitter.emit(PlayerEvents.STATISTICS_INFO, this.statisticsInfo);
});
}
}
this._emitter.addListener(event, listener);
}
if (this._transmuxer) {
this.unload()
}
if (this._mediaElement) {
this.detachMediaElement()
}
this.e = null
this._mediaDataSource = null
this._emitter.removeAllListeners()
this._emitter = null
}
on (event, listener) {
if (event === PlayerEvents.MEDIA_INFO) {
if (this._mediaInfo != null) {
Promise.resolve().then(() => {
this._emitter.emit(PlayerEvents.MEDIA_INFO, this.mediaInfo)
})
}
} else if (event === PlayerEvents.STATISTICS_INFO) {
if (this._statisticsInfo != null) {
Promise.resolve().then(() => {
this._emitter.emit(PlayerEvents.STATISTICS_INFO, this.statisticsInfo)
})
}
// Unregister a previously added event listener.
off(event, listener) {
this._emitter.removeListener(event, listener);
}
this._emitter.addListener(event, listener)
}
off (event, listener) {
this._emitter.removeListener(event, listener)
}
attachMediaElement(mediaElement) {
this._mediaElement = mediaElement;
mediaElement.addEventListener('loadedmetadata', this.e.onvLoadedMetadata);
mediaElement.addEventListener('seeking', this.e.onvSeeking);
mediaElement.addEventListener('canplay', this.e.onvCanPlay);
mediaElement.addEventListener('stalled', this.e.onvStalled);
mediaElement.addEventListener('progress', this.e.onvProgress);
attachMediaElement (mediaElement) {
this._mediaElement = mediaElement
mediaElement.addEventListener('loadedmetadata', this.e.onvLoadedMetadata)
mediaElement.addEventListener('seeking', this.e.onvSeeking)
mediaElement.addEventListener('canplay', this.e.onvCanPlay)
mediaElement.addEventListener('stalled', this.e.onvStalled)
mediaElement.addEventListener('progress', this.e.onvProgress)
this._msectl = new MSEController(this._config);
this._msectl = new MSEController(this._config)
this._msectl.on(MSEEvents.UPDATE_END, this._onmseUpdateEnd.bind(this));
this._msectl.on(MSEEvents.BUFFER_FULL, this._onmseBufferFull.bind(this));
this._msectl.on(MSEEvents.SOURCE_OPEN, () => {
this._mseSourceOpened = true;
if (this._hasPendingLoad) {
this._hasPendingLoad = false;
this.load();
}
});
this._msectl.on(MSEEvents.ERROR, (info) => {
this._emitter.emit(PlayerEvents.ERROR,
ErrorTypes.MEDIA_ERROR,
ErrorDetails.MEDIA_MSE_ERROR,
info
);
});
this._msectl.on(MSEEvents.UPDATE_END, this._onmseUpdateEnd.bind(this))
this._msectl.on(MSEEvents.BUFFER_FULL, this._onmseBufferFull.bind(this))
this._msectl.on(MSEEvents.SOURCE_OPEN, () => {
this._mseSourceOpened = true
if (this._hasPendingLoad) {
this._hasPendingLoad = false
this.load()
}
})
this._msectl.on(MSEEvents.ERROR, (info) => {
this._emitter.emit(PlayerEvents.ERROR,
ErrorTypes.MEDIA_ERROR,
ErrorDetails.MEDIA_MSE_ERROR,
info
)
})
this._msectl.attachMediaElement(mediaElement);
this._msectl.attachMediaElement(mediaElement)
if (this._pendingSeekTime != null) {
try {
mediaElement.currentTime = this._pendingSeekTime
this._pendingSeekTime = null
} catch (e) {
// IE11 may throw InvalidStateError if readyState === 0
// We can defer set currentTime operation after loadedmetadata
}
if (this._pendingSeekTime != null) {
try {
mediaElement.currentTime = this._pendingSeekTime;
this._pendingSeekTime = null;
} catch (e) {
// IE11 may throw InvalidStateError if readyState === 0
// We can defer set currentTime operation after loadedmetadata
}
}
}
}
detachMediaElement () {
if (this._mediaElement) {
this._msectl.detachMediaElement()
this._mediaElement.removeEventListener('loadedmetadata', this.e.onvLoadedMetadata)
this._mediaElement.removeEventListener('seeking', this.e.onvSeeking)
this._mediaElement.removeEventListener('canplay', this.e.onvCanPlay)
this._mediaElement.removeEventListener('stalled', this.e.onvStalled)
this._mediaElement.removeEventListener('progress', this.e.onvProgress)
this._mediaElement = null
// Detach from the current media element: remove the DOM event handlers
// added in attachMediaElement() and destroy the MSE controller.
detachMediaElement() {
if (this._mediaElement) {
this._msectl.detachMediaElement();
this._mediaElement.removeEventListener('loadedmetadata', this.e.onvLoadedMetadata);
this._mediaElement.removeEventListener('seeking', this.e.onvSeeking);
this._mediaElement.removeEventListener('canplay', this.e.onvCanPlay);
this._mediaElement.removeEventListener('stalled', this.e.onvStalled);
this._mediaElement.removeEventListener('progress', this.e.onvProgress);
this._mediaElement = null;
}
if (this._msectl) {
this._msectl.destroy();
this._msectl = null;
}
}
if (this._msectl) {
this._msectl.destroy()
this._msectl = null
}
}
onDefinitionChange (url, expectTime = 3) {
// setTimeout(() => {
// if (!this.isDefinitionDataReady && this._definitionRetryTimes < 3) {
// this._definitionRetryTimes += 1
// this.onDefinitionChange(url, expectTime)
// } else if (this.isDefinitionDataReady) {
// if (this._transmuxer !== this._tempTransmuxer) {
// this._transmuxer.destroy()
// this._transmuxer = this._tempTransmuxer
// let currentTime = this._mediaElement.currentTime
// this._tempTransmuxer.seek(currentTime * 1000)
// }
// Object.keys(this._tempPendingSegments).forEach(key => {
// this._msectl._pendingSegments[key] = this._tempPendingSegments[key]
// })
// this._tempPendingSegments = {
// audio: [],
// video: []
// }
//
// this._definitionRetryTimes = 0
// } else if (this._definitionRetryTimes >= 3) {
// this._definitionRetryTimes = 0
// if (this._tempTransmuxer) {
// this._tempTransmuxer.destroy()
// this._tempTransmuxer = null
// this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.NETWORK_ERROR, '', '清晰度切换失败!')
// }
// this._definitionRetryTimes = 0
// }
// this._tempPendingSegments = {
// audio: [],
// video: []
// }
// }, expectTime * 1000)
this._mediaDataSource.segments[0].url = url
this._tempMds = Object.assign({}, this._mediaDataSource, {url})
this._tempTransmuxer = new Transmuxer(this._tempMds, this._config)
this._tempTransmuxer._emitter.on('metadata_arrived', onMetaData => {
this._emitter.emit('metadata_arrived', onMetaData);
})
this._tempTransmuxer.on(TransmuxingEvents.INIT_SEGMENT, (type, is) => {
if (!this._config.isLive) {
this._tempPendingSegments[type] = [is]
// this._msectl.appendInitSegment(is)
if (this._transmuxer !== this._tempTransmuxer && this._tempTransmuxer) {
const currentTime = this._mediaElement.currentTime
this._tempTransmuxer.seek(currentTime * 1000)
load() {
if (!this._mediaElement) {
throw new IllegalStateException('HTMLMediaElement must be attached before load()!');
}
// if (this._transmuxer !== this._tempTransmuxer) {
// this._transmuxer.destroy()
// }
// this._transmuxer = this._tempTransmuxer
} else {
this._msectl.doClearSourceBuffer()
this._msectl.appendInitSegment(is)
if (this._transmuxer !== this._tempTransmuxer) {
this._transmuxer.destroy()
if (this._transmuxer) {
throw new IllegalStateException('FlvPlayer.load() has been called, please call unload() first!');
}
this._transmuxer = this._tempTransmuxer
}
})
if (this._hasPendingLoad) {
return;
}
this._tempTransmuxer.on(TransmuxingEvents.MEDIA_SEGMENT, (type, ms) => {
if (!this._config.isLive) {
if (!this._tempTransmuxer) {
this._msectl.appendMediaSegment(ms)
} else {
this._tempPendingSegments[type] && this._tempPendingSegments[type].push(ms)
if (this.isDefinitionDataReady) {
Object.keys(this._tempPendingSegments).forEach(key => {
this._msectl._pendingSegments[key] = this._tempPendingSegments[key]
})
this._tempPendingSegments = {
audio: [],
video: []
}
this._transmuxer.destroy()
this._transmuxer = this._tempTransmuxer
delete this._tempTransmuxer
}
if (this._config.deferLoadAfterSourceOpen && this._mseSourceOpened === false) {
this._hasPendingLoad = true;
return;
}
} else {
this._msectl.appendMediaSegment(ms)
}
// lazyLoad check
if (this._config.lazyLoad && !this._config.isLive) {
let currentTime = this._mediaElement.currentTime
if (ms.info.endDts >= (currentTime + this._config.lazyLoadMaxDuration) * 1000) {
if (this._progressChecker == null) {
Log.v(this.TAG, 'Maximum buffering duration exceeded, suspend transmuxing task')
this._suspendTransmuxer()
}
if (this._mediaElement.readyState > 0) {
this._requestSetTime = true;
// IE11 may throw InvalidStateError if readyState === 0
this._mediaElement.currentTime = 0;
}
}
})
this._tempTransmuxer.on(TransmuxingEvents.LOADING_COMPLETE, () => {
this._msectl.endOfStream()
this._emitter.emit(PlayerEvents.LOADING_COMPLETE)
})
this._tempTransmuxer.on(TransmuxingEvents.RECOVERED_EARLY_EOF, () => {
this._emitter.emit(PlayerEvents.RECOVERED_EARLY_EOF)
})
this._tempTransmuxer.on(TransmuxingEvents.IO_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.NETWORK_ERROR, detail, info)
})
this._tempTransmuxer.on(TransmuxingEvents.DEMUX_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.MEDIA_ERROR, detail, {code: -1, msg: info})
})
this._tempTransmuxer.on(TransmuxingEvents.MEDIA_INFO, (mediaInfo) => {
this._mediaInfo = mediaInfo
this._tempTransmuxer.seek((this._mediaElement.currentTime + expectTime) * 1000)
this._emitter.emit(PlayerEvents.MEDIA_INFO, Object.assign({}, mediaInfo))
})
this._tempTransmuxer.on(TransmuxingEvents.STATISTICS_INFO, (statInfo) => {
this._statisticsInfo = this._fillStatisticsInfo(statInfo)
this._emitter.emit(PlayerEvents.STATISTICS_INFO, Object.assign({}, this._statisticsInfo))
})
this._tempTransmuxer.on(TransmuxingEvents.RECOMMEND_SEEKPOINT, (milliseconds) => {
if (this._transmuxer === this._tempTransmuxer && this._mediaElement && !this._config.accurateSeek) {
this._requestSetTime = true
this._mediaElement.currentTime = milliseconds / 1000
}
})
this._tempTransmuxer.open()
}
this._transmuxer = new Transmuxer(this._mediaDataSource, this._config);
load () {
if (!this._mediaElement) {
throw new IllegalStateException('HTMLMediaElement must be attached before load()!')
}
if (this._transmuxer) {
throw new IllegalStateException('FlvPlayer.load() has been called, please call unload() first!')
}
if (this._hasPendingLoad) {
return
}
this._transmuxer.on(TransmuxingEvents.INIT_SEGMENT, (type, is) => {
this._msectl.appendInitSegment(is);
});
this._transmuxer.on(TransmuxingEvents.MEDIA_SEGMENT, (type, ms) => {
this._msectl.appendMediaSegment(ms);
if (this._config.deferLoadAfterSourceOpen && this._mseSourceOpened === false) {
this._hasPendingLoad = true
return
}
// lazyLoad check
if (this._config.lazyLoad && !this._config.isLive) {
let currentTime = this._mediaElement.currentTime;
if (ms.info.endDts >= (currentTime + this._config.lazyLoadMaxDuration) * 1000) {
if (this._progressChecker == null) {
Log.v(this.TAG, 'Maximum buffering duration exceeded, suspend transmuxing task');
this._suspendTransmuxer();
}
}
}
});
this._transmuxer.on(TransmuxingEvents.LOADING_COMPLETE, () => {
this._msectl.endOfStream();
this._emitter.emit(PlayerEvents.LOADING_COMPLETE);
});
this._transmuxer.on(TransmuxingEvents.RECOVERED_EARLY_EOF, () => {
this._emitter.emit(PlayerEvents.RECOVERED_EARLY_EOF);
});
this._transmuxer.on(TransmuxingEvents.IO_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.NETWORK_ERROR, detail, info);
});
this._transmuxer.on(TransmuxingEvents.DEMUX_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.MEDIA_ERROR, detail, {code: -1, msg: info});
});
this._transmuxer.on(TransmuxingEvents.MEDIA_INFO, (mediaInfo) => {
this._mediaInfo = mediaInfo;
this._emitter.emit(PlayerEvents.MEDIA_INFO, Object.assign({}, mediaInfo));
});
this._transmuxer.on(TransmuxingEvents.STATISTICS_INFO, (statInfo) => {
this._statisticsInfo = this._fillStatisticsInfo(statInfo);
this._emitter.emit(PlayerEvents.STATISTICS_INFO, Object.assign({}, this._statisticsInfo));
});
this._transmuxer.on(TransmuxingEvents.RECOMMEND_SEEKPOINT, (milliseconds) => {
if (this._mediaElement && !this._config.accurateSeek) {
this._requestSetTime = true;
this._mediaElement.currentTime = milliseconds / 1000;
}
});
if (this._mediaElement.readyState > 0) {
this._requestSetTime = true
// IE11 may throw InvalidStateError if readyState === 0
this._mediaElement.currentTime = 0
this._transmuxer.open();
}
this._transmuxer = new Transmuxer(this._mediaDataSource, this._config)
this._transmuxer._emitter.on('metadata_arrived', onMetaData => {
this._emitter.emit('metadata_arrived', onMetaData);
})
this._transmuxer.on(TransmuxingEvents.INIT_SEGMENT, (type, is) => {
this._msectl.appendInitSegment(is)
})
this._transmuxer.on(TransmuxingEvents.MEDIA_SEGMENT, (type, ms) => {
this._msectl.appendMediaSegment(ms)
// lazyLoad check
if (this._config.lazyLoad && !this._config.isLive) {
let currentTime = this._mediaElement.currentTime
if (ms.info.endDts >= (currentTime + this._config.lazyLoadMaxDuration) * 1000) {
if (this._progressChecker == null) {
Log.v(this.TAG, 'Maximum buffering duration exceeded, suspend transmuxing task')
this._suspendTransmuxer()
}
unload() {
if (this._mediaElement) {
this._mediaElement.pause();
}
}
})
this._transmuxer.on(TransmuxingEvents.LOADING_COMPLETE, () => {
this._msectl.endOfStream()
this._emitter.emit(PlayerEvents.LOADING_COMPLETE)
})
this._transmuxer.on(TransmuxingEvents.RECOVERED_EARLY_EOF, () => {
this._emitter.emit(PlayerEvents.RECOVERED_EARLY_EOF)
})
this._transmuxer.on(TransmuxingEvents.IO_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.NETWORK_ERROR, detail, info)
})
this._transmuxer.on(TransmuxingEvents.DEMUX_ERROR, (detail, info) => {
this._emitter.emit(PlayerEvents.ERROR, ErrorTypes.MEDIA_ERROR, detail, {code: -1, msg: info})
})
this._transmuxer.on(TransmuxingEvents.MEDIA_INFO, (mediaInfo) => {
this._mediaInfo = mediaInfo
this._emitter.emit(PlayerEvents.MEDIA_INFO, Object.assign({}, mediaInfo))
})
this._transmuxer.on(TransmuxingEvents.STATISTICS_INFO, (statInfo) => {
this._statisticsInfo = this._fillStatisticsInfo(statInfo)
this._emitter.emit(PlayerEvents.STATISTICS_INFO, Object.assign({}, this._statisticsInfo))
})
this._transmuxer.on(TransmuxingEvents.RECOMMEND_SEEKPOINT, (milliseconds) => {
if (this._mediaElement && !this._config.accurateSeek) {
this._requestSetTime = true
this._mediaElement.currentTime = milliseconds / 1000
}
})
if (this._msectl) {
this._msectl.seek(0);
}
if (this._transmuxer) {
this._transmuxer.close();
this._transmuxer.destroy();
this._transmuxer = null;
}
}
this._transmuxer.open()
}
play() {
return this._mediaElement.play().catch(function() {});
}
unload () {
if (this._mediaElement) {
this._mediaElement.pause()
// Pause playback on the attached media element.
pause() {
this._mediaElement.pause();
}
if (this._msectl) {
this._msectl.seek(0)
// Player type tag ('FlvPlayer').
get type() {
return this._type;
}
if (this._transmuxer) {
this._transmuxer.close()
this._transmuxer.destroy()
this._transmuxer = null
// Buffered TimeRanges, proxied from the attached media element.
get buffered() {
return this._mediaElement.buffered;
}
}
play () {
return this._mediaElement.play()
}
// Media duration in seconds, proxied from the attached media element.
get duration() {
return this._mediaElement.duration;
}
pause () {
this._mediaElement.pause()
}
// Current volume (0.0–1.0), proxied from the media element.
get volume() {
return this._mediaElement.volume;
}
get type () {
return this._type
}
// Set the volume (0.0–1.0) on the media element.
set volume(value) {
this._mediaElement.volume = value;
}
get buffered () {
return this._mediaElement.buffered
}
// Whether the media element is muted.
get muted() {
return this._mediaElement.muted;
}
get duration () {
return this._mediaElement.duration
}
// Mute or unmute the media element.
set muted(muted) {
this._mediaElement.muted = muted;
}
get volume () {
return this._mediaElement.volume
}
// Current playback position in seconds; 0 when no media element is attached.
get currentTime() {
if (this._mediaElement) {
return this._mediaElement.currentTime;
}
return 0;
}
set volume (value) {
this._mediaElement.volume = value
}
// Seek to `seconds`. If no media element is attached yet, remember the
// target; attachMediaElement() applies it once attachment happens.
set currentTime(seconds) {
if (this._mediaElement) {
this._internalSeek(seconds);
} else {
this._pendingSeekTime = seconds;
}
}
get muted () {
return this._mediaElement.muted
}
set muted (muted) {
this._mediaElement.muted = muted
}
get currentTime () {
if (this._mediaElement) {
return this._mediaElement.currentTime
// Shallow copy of the most recent MEDIA_INFO payload (empty object before
// any media info has arrived).
get mediaInfo() {
return Object.assign({}, this._mediaInfo);
}
return 0
}
set currentTime (seconds) {
if (this._mediaElement) {
this._internalSeek(seconds)
} else {
this._pendingSeekTime = seconds
// Shallow copy of the statistics info, refreshed with current
// playback-quality data via _fillStatisticsInfo().
get statisticsInfo() {
if (this._statisticsInfo == null) {
this._statisticsInfo = {};
}
this._statisticsInfo = this._fillStatisticsInfo(this._statisticsInfo);
return Object.assign({}, this._statisticsInfo);
}
}
get mediaInfo () {
return Object.assign({}, this._mediaInfo)
}
_fillStatisticsInfo(statInfo) {
statInfo.playerType = this._type;
get statisticsInfo () {
if (this._statisticsInfo == null) {
this._statisticsInfo = {}
}
this._statisticsInfo = this._fillStatisticsInfo(this._statisticsInfo)
return Object.assign({}, this._statisticsInfo)
}
if (!(this._mediaElement instanceof HTMLVideoElement)) {
return statInfo;
}
_fillStatisticsInfo (statInfo) {
statInfo.playerType = this._type
let hasQualityInfo = true;
let decoded = 0;
let dropped = 0;
if (!(this._mediaElement instanceof window.HTMLVideoElement)) {
return statInfo
}
if (this._mediaElement.getVideoPlaybackQuality) {
let quality = this._mediaElement.getVideoPlaybackQuality();
decoded = quality.totalVideoFrames;
dropped = quality.droppedVideoFrames;
} else if (this._mediaElement.webkitDecodedFrameCount != undefined) {
decoded = this._mediaElement.webkitDecodedFrameCount;
dropped = this._mediaElement.webkitDroppedFrameCount;
} else {
hasQualityInfo = false;
}
let hasQualityInfo = true
let decoded = 0
let dropped = 0
if (hasQualityInfo) {
statInfo.decodedFrames = decoded;
statInfo.droppedFrames = dropped;
}
if (this._mediaElement.getVideoPlaybackQuality) {
let quality = this._mediaElement.getVideoPlaybackQuality()
decoded = quality.totalVideoFrames
dropped = quality.droppedVideoFrames
} else if (this._mediaElement.webkitDecodedFrameCount != undefined) {
decoded = this._mediaElement.webkitDecodedFrameCount
dropped = this._mediaElement.webkitDroppedFrameCount
} else {
hasQualityInfo = false
return statInfo;
}
if (hasQualityInfo) {
statInfo.decodedFrames = decoded
statInfo.droppedFrames = dropped
}
_onmseUpdateEnd() {
if (!this._config.lazyLoad || this._config.isLive) {
return;
}
return statInfo
}
let buffered = this._mediaElement.buffered;
let currentTime = this._mediaElement.currentTime;
let currentRangeStart = 0;
let currentRangeEnd = 0;
_onmseUpdateEnd () {
if (!this._config.lazyLoad || this._config.isLive) {
return
}
for (let i = 0; i < buffered.length; i++) {
let start = buffered.start(i);
let end = buffered.end(i);
if (start <= currentTime && currentTime < end) {
currentRangeStart = start;
currentRangeEnd = end;
break;
}
}
let buffered = this._mediaElement.buffered
let currentTime = this._mediaElement.currentTime
let currentRangeStart = 0
let currentRangeEnd = 0
for (let i = 0; i < buffered.length; i++) {
let start = buffered.start(i)
let end = buffered.end(i)
if (start <= currentTime && currentTime < end) {
currentRangeStart = start
currentRangeEnd = end
break
}
if (currentRangeEnd >= currentTime + this._config.lazyLoadMaxDuration && this._progressChecker == null) {
Log.v(this.TAG, 'Maximum buffering duration exceeded, suspend transmuxing task');
this._suspendTransmuxer();
}
}
if (currentRangeEnd >= currentTime + this._config.lazyLoadMaxDuration && this._progressChecker == null) {
Log.v(this.TAG, 'Maximum buffering duration exceeded, suspend transmuxing task')
this._suspendTransmuxer()
// MSE buffer-full callback: suspend the transmuxing task until playback
// frees buffer space (resumed later by the progress checker).
_onmseBufferFull() {
Log.v(this.TAG, 'MSE SourceBuffer is full, suspend transmuxing task');
if (this._progressChecker == null) {
this._suspendTransmuxer();
}
}
}
_onmseBufferFull () {
Log.v(this.TAG, 'MSE SourceBuffer is full, suspend transmuxing task')
if (this._progressChecker == null) {
this._suspendTransmuxer()
}
}
_suspendTransmuxer() {
if (this._transmuxer) {
this._transmuxer.pause();
_suspendTransmuxer () {
if (this._transmuxer) {
this._transmuxer.pause()
if (this._progressChecker == null) {
this._progressChecker = window.setInterval(this._checkProgressAndResume.bind(this), 1000)
}
if (this._progressChecker == null) {
this._progressChecker = window.setInterval(this._checkProgressAndResume.bind(this), 1000);
}
}
}
}
_checkProgressAndResume () {
let currentTime = this._mediaElement.currentTime
let buffered = this._mediaElement.buffered
_checkProgressAndResume() {
let currentTime = this._mediaElement.currentTime;
let buffered = this._mediaElement.buffered;
let needResume = false
let needResume = false;
for (let i = 0; i < buffered.length; i++) {
let from = buffered.start(i)
let to = buffered.end(i)
if (currentTime >= from && currentTime < to) {
if (currentTime >= to - this._config.lazyLoadRecoverDuration) {
needResume = true
for (let i = 0; i < buffered.length; i++) {
let from = buffered.start(i);
let to = buffered.end(i);
if (currentTime >= from && currentTime < to) {
if (currentTime >= to - this._config.lazyLoadRecoverDuration) {
needResume = true;
}
break;
}
}
break
}
}
if (needResume) {
window.clearInterval(this._progressChecker)
this._progressChecker = null
if (needResume) {
Log.v(this.TAG, 'Continue loading from paused position')
this._transmuxer.resume()
}
if (needResume) {
window.clearInterval(this._progressChecker);
this._progressChecker = null;
if (needResume) {
Log.v(this.TAG, 'Continue loading from paused position');
this._transmuxer.resume();
}
}
}
}
_isTimepointBuffered (seconds) {
let buffered = this._mediaElement.buffered
_isTimepointBuffered(seconds) {
let buffered = this._mediaElement.buffered;
for (let i = 0; i < buffered.length; i++) {
let from = buffered.start(i)
let to = buffered.end(i)
if (seconds >= from && seconds < to) {
return true
}
for (let i = 0; i < buffered.length; i++) {
let from = buffered.start(i);
let to = buffered.end(i);
if (seconds >= from && seconds < to) {
return true;
}
}
return false;
}
return false
}
_internalSeek (seconds) {
let directSeek = this._isTimepointBuffered(seconds)
_internalSeek(seconds) {
let directSeek = this._isTimepointBuffered(seconds);
let directSeekBegin = false
let directSeekBeginTime = 0
let directSeekBegin = false;
let directSeekBeginTime = 0;
if (seconds < 1.0 && this._mediaElement.buffered.length > 0) {
let videoBeginTime = this._mediaElement.buffered.start(0)
if ((videoBeginTime < 1.0 && seconds < videoBeginTime) || Browser.safari) {
directSeekBegin = true
// also workaround for Safari: Seek to 0 may cause video stuck, use 0.1 to avoid
directSeekBeginTime = Browser.safari ? 0.1 : videoBeginTime
}
}
if (seconds < 1.0 && this._mediaElement.buffered.length > 0) {
let videoBeginTime = this._mediaElement.buffered.start(0);
if ((videoBeginTime < 1.0 && seconds < videoBeginTime) || Browser.safari) {
directSeekBegin = true;
// also workaround for Safari: Seek to 0 may cause video stuck, use 0.1 to avoid
directSeekBeginTime = Browser.safari ? 0.1 : videoBeginTime;
}
}
if (directSeekBegin) { // seek to video begin, set currentTime directly if beginPTS buffered
this._requestSetTime = true
this._mediaElement.currentTime = directSeekBeginTime
} else if (directSeek) { // buffered position
if (!this._alwaysSeekKeyframe) {
this._requestSetTime = true
this._mediaElement.currentTime = seconds
} else {
let idr = this._msectl.getNearestKeyframe(Math.floor(seconds * 1000))
this._requestSetTime = true
if (idr != null) {
this._mediaElement.currentTime = idr.dts / 1000
if (directSeekBegin) { // seek to video begin, set currentTime directly if beginPTS buffered
this._requestSetTime = true;
this._mediaElement.currentTime = directSeekBeginTime;
} else if (directSeek) { // buffered position
if (!this._alwaysSeekKeyframe) {
this._requestSetTime = true;
this._mediaElement.currentTime = seconds;
} else {
let idr = this._msectl.getNearestKeyframe(Math.floor(seconds * 1000));
this._requestSetTime = true;
if (idr != null) {
this._mediaElement.currentTime = idr.dts / 1000;
} else {
this._mediaElement.currentTime = seconds;
}
}
if (this._progressChecker != null) {
this._checkProgressAndResume();
}
} else {
this._mediaElement.currentTime = seconds
if (this._progressChecker != null) {
window.clearInterval(this._progressChecker);
this._progressChecker = null;
}
this._msectl.seek(seconds);
this._transmuxer.seek(Math.floor(seconds * 1000)); // in milliseconds
// no need to set mediaElement.currentTime if non-accurateSeek,
// just wait for the recommend_seekpoint callback
if (this._config.accurateSeek) {
this._requestSetTime = true;
this._mediaElement.currentTime = seconds;
}
}
}
if (this._progressChecker != null) {
this._checkProgressAndResume()
}
} else {
if (this._progressChecker != null) {
window.clearInterval(this._progressChecker)
this._progressChecker = null
}
this._msectl.seek(seconds)
this._transmuxer.seek(Math.floor(seconds * 1000)) // in milliseconds
// no need to set mediaElement.currentTime if non-accurateSeek,
// just wait for the recommend_seekpoint callback
if (this._config.accurateSeek) {
this._requestSetTime = true
this._mediaElement.currentTime = seconds
}
}
}
_checkAndApplyUnbufferedSeekpoint () {
if (this._seekpointRecord) {
if (this._seekpointRecord.recordTime <= this._now() - 100) {
let target = this._mediaElement.currentTime
this._seekpointRecord = null
if (!this._isTimepointBuffered(target)) {
if (this._progressChecker != null) {
window.clearTimeout(this._progressChecker)
this._progressChecker = null
}
// .currentTime is consists with .buffered timestamp
// Chrome/Edge use DTS, while FireFox/Safari use PTS
this._msectl.seek(target)
this._transmuxer.seek(Math.floor(target * 1000))
// set currentTime if accurateSeek, or wait for recommend_seekpoint callback
if (this._config.accurateSeek) {
this._requestSetTime = true
this._mediaElement.currentTime = target
}
_checkAndApplyUnbufferedSeekpoint() {
if (this._seekpointRecord) {
if (this._seekpointRecord.recordTime <= this._now() - 100) {
let target = this._mediaElement.currentTime;
this._seekpointRecord = null;
if (!this._isTimepointBuffered(target)) {
if (this._progressChecker != null) {
window.clearTimeout(this._progressChecker);
this._progressChecker = null;
}
// .currentTime is consists with .buffered timestamp
// Chrome/Edge use DTS, while FireFox/Safari use PTS
this._msectl.seek(target);
this._transmuxer.seek(Math.floor(target * 1000));
// set currentTime if accurateSeek, or wait for recommend_seekpoint callback
if (this._config.accurateSeek) {
this._requestSetTime = true;
this._mediaElement.currentTime = target;
}
}
} else {
window.setTimeout(this._checkAndApplyUnbufferedSeekpoint.bind(this), 50);
}
}
} else {
window.setTimeout(this._checkAndApplyUnbufferedSeekpoint.bind(this), 50)
}
}
}
_checkAndResumeStuckPlayback (stalled) {
let media = this._mediaElement
if (stalled || !this._receivedCanPlay || media.readyState < 2) { // HAVE_CURRENT_DATA
let buffered = media.buffered
if (buffered.length > 0 && media.currentTime < buffered.start(0)) {
Log.w(this.TAG, `Playback seems stuck at ${media.currentTime}, seek to ${buffered.start(0)}`)
this._requestSetTime = true
this._mediaElement.currentTime = buffered.start(0)
this._mediaElement.removeEventListener('progress', this.e.onvProgress)
}
} else {
// Playback didn't stuck, remove progress event listener
this._mediaElement.removeEventListener('progress', this.e.onvProgress)
_checkAndResumeStuckPlayback(stalled) {
let media = this._mediaElement;
if (stalled || !this._receivedCanPlay || media.readyState < 2) { // HAVE_CURRENT_DATA
let buffered = media.buffered;
if (buffered.length > 0 && media.currentTime < buffered.start(0)) {
Log.w(this.TAG, `Playback seems stuck at ${media.currentTime}, seek to ${buffered.start(0)}`);
this._requestSetTime = true;
this._mediaElement.currentTime = buffered.start(0);
this._mediaElement.removeEventListener('progress', this.e.onvProgress);
}
} else {
// Playback didn't stuck, remove progress event listener
this._mediaElement.removeEventListener('progress', this.e.onvProgress);
}
}
}
_onvLoadedMetadata (e) {
if (this._pendingSeekTime != null) {
this._mediaElement.currentTime = this._pendingSeekTime
this._pendingSeekTime = null
_onvLoadedMetadata(e) {
if (this._pendingSeekTime != null) {
this._mediaElement.currentTime = this._pendingSeekTime;
this._pendingSeekTime = null;
}
}
}
_onvSeeking (e) { // handle seeking request from browser's progress bar
let target = this._mediaElement.currentTime
let buffered = this._mediaElement.buffered
_onvSeeking(e) { // handle seeking request from browser's progress bar
let target = this._mediaElement.currentTime;
let buffered = this._mediaElement.buffered;
if (this._requestSetTime) {
this._requestSetTime = false
return
}
if (this._requestSetTime) {
this._requestSetTime = false;
return;
}
if (target < 1.0 && buffered.length > 0) {
// seek to video begin, set currentTime directly if beginPTS buffered
let videoBeginTime = buffered.start(0)
if ((videoBeginTime < 1.0 && target < videoBeginTime) || Browser.safari) {
this._requestSetTime = true
// also workaround for Safari: Seek to 0 may cause video stuck, use 0.1 to avoid
this._mediaElement.currentTime = Browser.safari ? 0.1 : videoBeginTime
return
}
}
if (target < 1.0 && buffered.length > 0) {
// seek to video begin, set currentTime directly if beginPTS buffered
let videoBeginTime = buffered.start(0);
if ((videoBeginTime < 1.0 && target < videoBeginTime) || Browser.safari) {
this._requestSetTime = true;
// also workaround for Safari: Seek to 0 may cause video stuck, use 0.1 to avoid
this._mediaElement.currentTime = Browser.safari ? 0.1 : videoBeginTime;
return;
}
}
if (this._isTimepointBuffered(target)) {
if (this._alwaysSeekKeyframe) {
let idr = this._msectl.getNearestKeyframe(Math.floor(target * 1000))
if (idr != null) {
this._requestSetTime = true
this._mediaElement.currentTime = idr.dts / 1000
if (this._isTimepointBuffered(target)) {
if (this._alwaysSeekKeyframe) {
let idr = this._msectl.getNearestKeyframe(Math.floor(target * 1000));
if (idr != null) {
this._requestSetTime = true;
this._mediaElement.currentTime = idr.dts / 1000;
}
}
if (this._progressChecker != null) {
this._checkProgressAndResume();
}
return;
}
}
if (this._progressChecker != null) {
this._checkProgressAndResume()
}
return
this._seekpointRecord = {
seekPoint: target,
recordTime: this._now()
};
window.setTimeout(this._checkAndApplyUnbufferedSeekpoint.bind(this), 50);
}
this._seekpointRecord = {
seekPoint: target,
recordTime: this._now()
_onvCanPlay(e) {
this._receivedCanPlay = true;
this._mediaElement.removeEventListener('canplay', this.e.onvCanPlay);
}
window.setTimeout(this._checkAndApplyUnbufferedSeekpoint.bind(this), 50)
}
  // First 'canplay' from the media element: record it for the stuck-playback
  // heuristic (_checkAndResumeStuckPlayback) and detach this one-shot listener.
  _onvCanPlay (e) {
    this._receivedCanPlay = true
    this._mediaElement.removeEventListener('canplay', this.e.onvCanPlay)
  }
_onvStalled(e) {
this._checkAndResumeStuckPlayback(true);
}
_onvStalled (e) {
this._checkAndResumeStuckPlayback(true)
}
_onvProgress(e) {
this._checkAndResumeStuckPlayback();
}
_onvProgress (e) {
this._checkAndResumeStuckPlayback()
}
get isDefinitionDataReady () {
const minSegmentLen = 10
return Object.keys(this._tempPendingSegments).every((key) => this._tempPendingSegments[key].length >= minSegmentLen)
}
}
export default FlvPlayer
export default FlvPlayer;

@@ -24,3 +24,3 @@ /*

// Player wrapper for browser's native player (HTMLVideoElement) without MediaSource src.
// Player wrapper for browser's native player (HTMLVideoElement) without MediaSource src.
class NativePlayer {

@@ -145,3 +145,3 @@

play() {
return this._mediaElement.play();
return this._mediaElement.play().catch(function() {});
}

@@ -240,3 +240,3 @@

}
return info;

@@ -259,2 +259,2 @@ }

export default NativePlayer;
export default NativePlayer;

@@ -22,574 +22,549 @@ /*

class MP4 {
static init () {
MP4.types = {
avc1: [],
avcC: [],
btrt: [],
dinf: [],
dref: [],
esds: [],
ftyp: [],
hdlr: [],
mdat: [],
mdhd: [],
mdia: [],
mfhd: [],
minf: [],
moof: [],
moov: [],
mp4a: [],
mvex: [],
mvhd: [],
sdtp: [],
stbl: [],
stco: [],
stsc: [],
stsd: [],
stsz: [],
stts: [],
tfdt: [],
tfhd: [],
traf: [],
trak: [],
trun: [],
trex: [],
tkhd: [],
vmhd: [],
smhd: [],
'.mp3': []
}
for (let name in MP4.types) {
if (MP4.types.hasOwnProperty(name)) {
MP4.types[name] = [
name.charCodeAt(0),
name.charCodeAt(1),
name.charCodeAt(2),
name.charCodeAt(3)
]
}
}
static init() {
MP4.types = {
avc1: [], avcC: [], btrt: [], dinf: [],
dref: [], esds: [], ftyp: [], hdlr: [],
mdat: [], mdhd: [], mdia: [], mfhd: [],
minf: [], moof: [], moov: [], mp4a: [],
mvex: [], mvhd: [], sdtp: [], stbl: [],
stco: [], stsc: [], stsd: [], stsz: [],
stts: [], tfdt: [], tfhd: [], traf: [],
trak: [], trun: [], trex: [], tkhd: [],
vmhd: [], smhd: [], '.mp3': []
};
let constants = MP4.constants = {}
for (let name in MP4.types) {
if (MP4.types.hasOwnProperty(name)) {
MP4.types[name] = [
name.charCodeAt(0),
name.charCodeAt(1),
name.charCodeAt(2),
name.charCodeAt(3)
];
}
}
constants.FTYP = new Uint8Array([
0x69, 0x73, 0x6F, 0x6D, // major_brand: isom
0x0, 0x0, 0x0, 0x1, // minor_version: 0x01
0x69, 0x73, 0x6F, 0x6D, // isom
0x61, 0x76, 0x63, 0x31 // avc1
])
let constants = MP4.constants = {};
constants.STSD_PREFIX = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x01 // entry_count
])
constants.FTYP = new Uint8Array([
0x69, 0x73, 0x6F, 0x6D, // major_brand: isom
0x0, 0x0, 0x0, 0x1, // minor_version: 0x01
0x69, 0x73, 0x6F, 0x6D, // isom
0x61, 0x76, 0x63, 0x31 // avc1
]);
constants.STTS = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00 // entry_count
])
constants.STSD_PREFIX = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x01 // entry_count
]);
constants.STSC = constants.STCO = constants.STTS
constants.STTS = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00 // entry_count
]);
constants.STSZ = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // sample_size
0x00, 0x00, 0x00, 0x00 // sample_count
])
constants.STSC = constants.STCO = constants.STTS;
constants.HDLR_VIDEO = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x56, 0x69, 0x64, 0x65,
0x6F, 0x48, 0x61, 0x6E,
0x64, 0x6C, 0x65, 0x72, 0x00 // name: VideoHandler
])
constants.STSZ = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // sample_size
0x00, 0x00, 0x00, 0x00 // sample_count
]);
constants.HDLR_AUDIO = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x73, 0x6F, 0x75, 0x6E, // handler_type: 'soun'
0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x53, 0x6F, 0x75, 0x6E,
0x64, 0x48, 0x61, 0x6E,
0x64, 0x6C, 0x65, 0x72, 0x00 // name: SoundHandler
])
constants.HDLR_VIDEO = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x76, 0x69, 0x64, 0x65, // handler_type: 'vide'
0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x56, 0x69, 0x64, 0x65,
0x6F, 0x48, 0x61, 0x6E,
0x64, 0x6C, 0x65, 0x72, 0x00 // name: VideoHandler
]);
constants.DREF = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x01, // entry_count
0x00, 0x00, 0x00, 0x0C, // entry_size
0x75, 0x72, 0x6C, 0x20, // type 'url '
0x00, 0x00, 0x00, 0x01 // version(0) + flags
])
constants.HDLR_AUDIO = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // pre_defined
0x73, 0x6F, 0x75, 0x6E, // handler_type: 'soun'
0x00, 0x00, 0x00, 0x00, // reserved: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x53, 0x6F, 0x75, 0x6E,
0x64, 0x48, 0x61, 0x6E,
0x64, 0x6C, 0x65, 0x72, 0x00 // name: SoundHandler
]);
// Sound media header
constants.SMHD = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00 // balance(2) + reserved(2)
])
constants.DREF = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x01, // entry_count
0x00, 0x00, 0x00, 0x0C, // entry_size
0x75, 0x72, 0x6C, 0x20, // type 'url '
0x00, 0x00, 0x00, 0x01 // version(0) + flags
]);
// video media header
constants.VMHD = new Uint8Array([
0x00, 0x00, 0x00, 0x01, // version(0) + flags
0x00, 0x00, // graphicsmode: 2 bytes
0x00, 0x00, 0x00, 0x00, // opcolor: 3 * 2 bytes
0x00, 0x00
])
}
// Sound media header
constants.SMHD = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00 // balance(2) + reserved(2)
]);
// Generate a box
static box (type) {
let size = 8
let result = null
let datas = Array.prototype.slice.call(arguments, 1)
let arrayCount = datas.length
for (let i = 0; i < arrayCount; i++) {
size += datas[i].byteLength
// video media header
constants.VMHD = new Uint8Array([
0x00, 0x00, 0x00, 0x01, // version(0) + flags
0x00, 0x00, // graphicsmode: 2 bytes
0x00, 0x00, 0x00, 0x00, // opcolor: 3 * 2 bytes
0x00, 0x00
]);
}
result = new Uint8Array(size)
result[0] = (size >>> 24) & 0xFF // size
result[1] = (size >>> 16) & 0xFF
result[2] = (size >>> 8) & 0xFF
result[3] = (size) & 0xFF
// Generate a box
static box(type) {
let size = 8;
let result = null;
let datas = Array.prototype.slice.call(arguments, 1);
let arrayCount = datas.length;
result.set(type, 4) // type
for (let i = 0; i < arrayCount; i++) {
size += datas[i].byteLength;
}
let offset = 8
for (let i = 0; i < arrayCount; i++) { // data body
result.set(datas[i], offset)
offset += datas[i].byteLength
}
result = new Uint8Array(size);
result[0] = (size >>> 24) & 0xFF; // size
result[1] = (size >>> 16) & 0xFF;
result[2] = (size >>> 8) & 0xFF;
result[3] = (size) & 0xFF;
return result
}
result.set(type, 4); // type
// emit ftyp & moov
static generateInitSegment (meta) {
let ftyp = MP4.box(MP4.types.ftyp, MP4.constants.FTYP)
let moov = MP4.moov(meta)
let offset = 8;
for (let i = 0; i < arrayCount; i++) { // data body
result.set(datas[i], offset);
offset += datas[i].byteLength;
}
let result = new Uint8Array(ftyp.byteLength + moov.byteLength)
result.set(ftyp, 0)
result.set(moov, ftyp.byteLength)
return result
}
return result;
}
// Movie metadata box
static moov (meta) {
let mvhd = MP4.mvhd(meta.timescale, meta.duration)
let trak = MP4.trak(meta)
let mvex = MP4.mvex(meta)
return MP4.box(MP4.types.moov, mvhd, trak, mvex)
}
// emit ftyp & moov
static generateInitSegment(meta) {
let ftyp = MP4.box(MP4.types.ftyp, MP4.constants.FTYP);
let moov = MP4.moov(meta);
// Movie header box
static mvhd (timescale, duration) {
return MP4.box(MP4.types.mvhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(timescale >>> 24) & 0xFF, // timescale: 4 bytes
(timescale >>> 16) & 0xFF,
(timescale >>> 8) & 0xFF,
(timescale) & 0xFF,
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x00, 0x01, 0x00, 0x00, // Preferred rate: 1.0
0x01, 0x00, 0x00, 0x00, // PreferredVolume(1.0, 2bytes) + reserved(2bytes)
0x00, 0x00, 0x00, 0x00, // reserved: 4 + 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00, // ----begin composition matrix----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // ----end composition matrix----
0x00, 0x00, 0x00, 0x00, // ----begin pre_defined 6 * 4 bytes----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // ----end pre_defined 6 * 4 bytes----
0xFF, 0xFF, 0xFF, 0xFF // next_track_ID
]))
}
let result = new Uint8Array(ftyp.byteLength + moov.byteLength);
result.set(ftyp, 0);
result.set(moov, ftyp.byteLength);
return result;
}
// Track box
static trak (meta) {
return MP4.box(MP4.types.trak, MP4.tkhd(meta), MP4.mdia(meta))
}
// Movie metadata box
static moov(meta) {
let mvhd = MP4.mvhd(meta.timescale, meta.duration);
let trak = MP4.trak(meta);
let mvex = MP4.mvex(meta);
return MP4.box(MP4.types.moov, mvhd, trak, mvex);
}
// Track header box
static tkhd (meta) {
let trackId = meta.id, duration = meta.duration
let width = meta.presentWidth, height = meta.presentHeight
// Movie header box
static mvhd(timescale, duration) {
return MP4.box(MP4.types.mvhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(timescale >>> 24) & 0xFF, // timescale: 4 bytes
(timescale >>> 16) & 0xFF,
(timescale >>> 8) & 0xFF,
(timescale) & 0xFF,
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x00, 0x01, 0x00, 0x00, // Preferred rate: 1.0
0x01, 0x00, 0x00, 0x00, // PreferredVolume(1.0, 2bytes) + reserved(2bytes)
0x00, 0x00, 0x00, 0x00, // reserved: 4 + 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00, // ----begin composition matrix----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // ----end composition matrix----
0x00, 0x00, 0x00, 0x00, // ----begin pre_defined 6 * 4 bytes----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // ----end pre_defined 6 * 4 bytes----
0xFF, 0xFF, 0xFF, 0xFF // next_track_ID
]));
}
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, 0x00, 0x00, 0x07, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(trackId >>> 24) & 0xFF, // track_ID: 4 bytes
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF,
0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // layer(2bytes) + alternate_group(2bytes)
0x00, 0x00, 0x00, 0x00, // volume(2bytes) + reserved(2bytes)
0x00, 0x01, 0x00, 0x00, // ----begin composition matrix----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // ----end composition matrix----
(width >>> 8) & 0xFF, // width and height
(width) & 0xFF,
0x00, 0x00,
(height >>> 8) & 0xFF,
(height) & 0xFF,
0x00, 0x00
]))
}
// Track box
static trak(meta) {
return MP4.box(MP4.types.trak, MP4.tkhd(meta), MP4.mdia(meta));
}
// Media Box
static mdia (meta) {
return MP4.box(MP4.types.mdia, MP4.mdhd(meta), MP4.hdlr(meta), MP4.minf(meta))
}
// Track header box
static tkhd(meta) {
let trackId = meta.id, duration = meta.duration;
let width = meta.presentWidth, height = meta.presentHeight;
// Media header box
static mdhd (meta) {
let timescale = meta.timescale
let duration = meta.duration
return MP4.box(MP4.types.mdhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(timescale >>> 24) & 0xFF, // timescale: 4 bytes
(timescale >>> 16) & 0xFF,
(timescale >>> 8) & 0xFF,
(timescale) & 0xFF,
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x55, 0xC4, // language: und (undetermined)
0x00, 0x00 // pre_defined = 0
]))
}
return MP4.box(MP4.types.tkhd, new Uint8Array([
0x00, 0x00, 0x00, 0x07, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(trackId >>> 24) & 0xFF, // track_ID: 4 bytes
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF,
0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, // layer(2bytes) + alternate_group(2bytes)
0x00, 0x00, 0x00, 0x00, // volume(2bytes) + reserved(2bytes)
0x00, 0x01, 0x00, 0x00, // ----begin composition matrix----
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, // ----end composition matrix----
(width >>> 8) & 0xFF, // width and height
(width) & 0xFF,
0x00, 0x00,
(height >>> 8) & 0xFF,
(height) & 0xFF,
0x00, 0x00
]));
}
// Media handler reference box
static hdlr (meta) {
let data = null
if (meta.type === 'audio') {
data = MP4.constants.HDLR_AUDIO
} else {
data = MP4.constants.HDLR_VIDEO
// Media Box
static mdia(meta) {
return MP4.box(MP4.types.mdia, MP4.mdhd(meta), MP4.hdlr(meta), MP4.minf(meta));
}
return MP4.box(MP4.types.hdlr, data)
}
// Media infomation box
static minf (meta) {
let xmhd = null
if (meta.type === 'audio') {
xmhd = MP4.box(MP4.types.smhd, MP4.constants.SMHD)
} else {
xmhd = MP4.box(MP4.types.vmhd, MP4.constants.VMHD)
// Media header box
static mdhd(meta) {
let timescale = meta.timescale;
let duration = meta.duration;
return MP4.box(MP4.types.mdhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
0x00, 0x00, 0x00, 0x00, // creation_time
0x00, 0x00, 0x00, 0x00, // modification_time
(timescale >>> 24) & 0xFF, // timescale: 4 bytes
(timescale >>> 16) & 0xFF,
(timescale >>> 8) & 0xFF,
(timescale) & 0xFF,
(duration >>> 24) & 0xFF, // duration: 4 bytes
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
0x55, 0xC4, // language: und (undetermined)
0x00, 0x00 // pre_defined = 0
]));
}
return MP4.box(MP4.types.minf, xmhd, MP4.dinf(), MP4.stbl(meta))
}
// Data infomation box
static dinf () {
let result = MP4.box(MP4.types.dinf,
MP4.box(MP4.types.dref, MP4.constants.DREF)
)
return result
}
// Media handler reference box
static hdlr(meta) {
let data = null;
if (meta.type === 'audio') {
data = MP4.constants.HDLR_AUDIO;
} else {
data = MP4.constants.HDLR_VIDEO;
}
return MP4.box(MP4.types.hdlr, data);
}
// Sample table box
static stbl (meta) {
let result = MP4.box(MP4.types.stbl, // type: stbl
MP4.stsd(meta), // Sample Description Table
MP4.box(MP4.types.stts, MP4.constants.STTS), // Time-To-Sample
MP4.box(MP4.types.stsc, MP4.constants.STSC), // Sample-To-Chunk
MP4.box(MP4.types.stsz, MP4.constants.STSZ), // Sample size
MP4.box(MP4.types.stco, MP4.constants.STCO) // Chunk offset
)
return result
}
// Media infomation box
static minf(meta) {
let xmhd = null;
if (meta.type === 'audio') {
xmhd = MP4.box(MP4.types.smhd, MP4.constants.SMHD);
} else {
xmhd = MP4.box(MP4.types.vmhd, MP4.constants.VMHD);
}
return MP4.box(MP4.types.minf, xmhd, MP4.dinf(), MP4.stbl(meta));
}
// Sample description box
static stsd (meta) {
if (meta.type === 'audio') {
if (meta.codec === 'mp3') {
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.mp3(meta))
}
// else: aac -> mp4a
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.mp4a(meta))
} else {
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.avc1(meta))
// Data infomation box
static dinf() {
let result = MP4.box(MP4.types.dinf,
MP4.box(MP4.types.dref, MP4.constants.DREF)
);
return result;
}
}
static mp3 (meta) {
let channelCount = meta.channelCount
let sampleRate = meta.audioSampleRate
// Sample table box
static stbl(meta) {
let result = MP4.box(MP4.types.stbl, // type: stbl
MP4.stsd(meta), // Sample Description Table
MP4.box(MP4.types.stts, MP4.constants.STTS), // Time-To-Sample
MP4.box(MP4.types.stsc, MP4.constants.STSC), // Sample-To-Chunk
MP4.box(MP4.types.stsz, MP4.constants.STSZ), // Sample size
MP4.box(MP4.types.stco, MP4.constants.STCO) // Chunk offset
);
return result;
}
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, channelCount, // channelCount(2)
0x00, 0x10, // sampleSize(2)
0x00, 0x00, 0x00, 0x00, // reserved(4)
(sampleRate >>> 8) & 0xFF, // Audio sample rate
(sampleRate) & 0xFF,
0x00, 0x00
])
// Sample description box
static stsd(meta) {
if (meta.type === 'audio') {
if (meta.codec === 'mp3') {
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.mp3(meta));
}
// else: aac -> mp4a
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.mp4a(meta));
} else {
return MP4.box(MP4.types.stsd, MP4.constants.STSD_PREFIX, MP4.avc1(meta));
}
}
return MP4.box(MP4.types['.mp3'], data)
}
static mp3(meta) {
let channelCount = meta.channelCount;
let sampleRate = meta.audioSampleRate;
static mp4a (meta) {
let channelCount = meta.channelCount
let sampleRate = meta.audioSampleRate
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, channelCount, // channelCount(2)
0x00, 0x10, // sampleSize(2)
0x00, 0x00, 0x00, 0x00, // reserved(4)
(sampleRate >>> 8) & 0xFF, // Audio sample rate
(sampleRate) & 0xFF,
0x00, 0x00
]);
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, channelCount, // channelCount(2)
0x00, 0x10, // sampleSize(2)
0x00, 0x00, 0x00, 0x00, // reserved(4)
(sampleRate >>> 8) & 0xFF, // Audio sample rate
(sampleRate) & 0xFF,
0x00, 0x00
])
return MP4.box(MP4.types['.mp3'], data);
}
return MP4.box(MP4.types.mp4a, data, MP4.esds(meta))
}
static mp4a(meta) {
let channelCount = meta.channelCount;
let sampleRate = meta.audioSampleRate;
static esds (meta) {
let config = meta.config || []
let configSize = config.length
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version 0 + flags
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // reserved: 2 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, channelCount, // channelCount(2)
0x00, 0x10, // sampleSize(2)
0x00, 0x00, 0x00, 0x00, // reserved(4)
(sampleRate >>> 8) & 0xFF, // Audio sample rate
(sampleRate) & 0xFF,
0x00, 0x00
]);
0x03, // descriptor_type
0x17 + configSize, // length3
0x00, 0x01, // es_id
0x00, // stream_priority
return MP4.box(MP4.types.mp4a, data, MP4.esds(meta));
}
0x04, // descriptor_type
0x0F + configSize, // length
0x40, // codec: mpeg4_audio
0x15, // stream_type: Audio
0x00, 0x00, 0x00, // buffer_size
0x00, 0x00, 0x00, 0x00, // maxBitrate
0x00, 0x00, 0x00, 0x00, // avgBitrate
static esds(meta) {
let config = meta.config || [];
let configSize = config.length;
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version 0 + flags
0x05 // descriptor_type
].concat([
configSize
]).concat(
config
).concat([
0x06, 0x01, 0x02 // GASpecificConfig
]))
return MP4.box(MP4.types.esds, data)
}
0x03, // descriptor_type
0x17 + configSize, // length3
0x00, 0x01, // es_id
0x00, // stream_priority
static avc1 (meta) {
let avcc = meta.avcc
let width = meta.codecWidth, height = meta.codecHeight
0x04, // descriptor_type
0x0F + configSize, // length
0x40, // codec: mpeg4_audio
0x15, // stream_type: Audio
0x00, 0x00, 0x00, // buffer_size
0x00, 0x00, 0x00, 0x00, // maxBitrate
0x00, 0x00, 0x00, 0x00, // avgBitrate
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // pre_defined(2) + reserved(2)
0x00, 0x00, 0x00, 0x00, // pre_defined: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
(width >>> 8) & 0xFF, // width: 2 bytes
(width) & 0xFF,
(height >>> 8) & 0xFF, // height: 2 bytes
(height) & 0xFF,
0x00, 0x48, 0x00, 0x00, // horizresolution: 4 bytes
0x00, 0x48, 0x00, 0x00, // vertresolution: 4 bytes
0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes
0x00, 0x01, // frame_count
0x0A, // strlen
0x78, 0x71, 0x71, 0x2F, // compressorname: 32 bytes
0x66, 0x6C, 0x76, 0x2E,
0x6A, 0x73, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00,
0x00, 0x18, // depth
0xFF, 0xFF // pre_defined = -1
])
return MP4.box(MP4.types.avc1, data, MP4.box(MP4.types.avcC, avcc))
}
0x05 // descriptor_type
].concat([
configSize
]).concat(
config
).concat([
0x06, 0x01, 0x02 // GASpecificConfig
]));
return MP4.box(MP4.types.esds, data);
}
// Movie Extends box
static mvex (meta) {
return MP4.box(MP4.types.mvex, MP4.trex(meta))
}
static avc1(meta) {
let avcc = meta.avcc;
let width = meta.codecWidth, height = meta.codecHeight;
// Track Extends box
static trex (meta) {
let trackId = meta.id
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
(trackId >>> 24) & 0xFF, // track_ID
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF,
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
])
return MP4.box(MP4.types.trex, data)
}
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // reserved(4)
0x00, 0x00, 0x00, 0x01, // reserved(2) + data_reference_index(2)
0x00, 0x00, 0x00, 0x00, // pre_defined(2) + reserved(2)
0x00, 0x00, 0x00, 0x00, // pre_defined: 3 * 4 bytes
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
(width >>> 8) & 0xFF, // width: 2 bytes
(width) & 0xFF,
(height >>> 8) & 0xFF, // height: 2 bytes
(height) & 0xFF,
0x00, 0x48, 0x00, 0x00, // horizresolution: 4 bytes
0x00, 0x48, 0x00, 0x00, // vertresolution: 4 bytes
0x00, 0x00, 0x00, 0x00, // reserved: 4 bytes
0x00, 0x01, // frame_count
0x0A, // strlen
0x78, 0x71, 0x71, 0x2F, // compressorname: 32 bytes
0x66, 0x6C, 0x76, 0x2E,
0x6A, 0x73, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00,
0x00, 0x18, // depth
0xFF, 0xFF // pre_defined = -1
]);
return MP4.box(MP4.types.avc1, data, MP4.box(MP4.types.avcC, avcc));
}
// Movie fragment box
static moof (track, baseMediaDecodeTime) {
// Movie fragment box (moof): mfhd carries the fragment sequence number,
// traf carries the per-track sample table for this fragment
return MP4.box(MP4.types.moof, MP4.mfhd(track.sequenceNumber), MP4.traf(track, baseMediaDecodeTime))
}
// Movie Extends box
static mvex(meta) {
return MP4.box(MP4.types.mvex, MP4.trex(meta));
}
static mfhd (sequenceNumber) {
// Movie Fragment Header box (mfhd): version/flags (all zero) followed by the
// monotonically increasing fragment sequence_number, big-endian
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00,
(sequenceNumber >>> 24) & 0xFF, // sequence_number: int32
(sequenceNumber >>> 16) & 0xFF,
(sequenceNumber >>> 8) & 0xFF,
(sequenceNumber) & 0xFF
])
return MP4.box(MP4.types.mfhd, data)
}
// Track Extends box
static trex(meta) {
let trackId = meta.id;
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) + flags
(trackId >>> 24) & 0xFF, // track_ID
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF,
0x00, 0x00, 0x00, 0x01, // default_sample_description_index
0x00, 0x00, 0x00, 0x00, // default_sample_duration
0x00, 0x00, 0x00, 0x00, // default_sample_size
0x00, 0x01, 0x00, 0x01 // default_sample_flags
]);
return MP4.box(MP4.types.trex, data);
}
// Track fragment box
static traf (track, baseMediaDecodeTime) {
let trackId = track.id
// Movie fragment box
static moof(track, baseMediaDecodeTime) {
return MP4.box(MP4.types.moof, MP4.mfhd(track.sequenceNumber), MP4.traf(track, baseMediaDecodeTime));
}
// Track fragment header box
let tfhd = MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) & flags
(trackId >>> 24) & 0xFF, // track_ID
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF
]))
// Track Fragment Decode Time
let tfdt = MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) & flags
(baseMediaDecodeTime >>> 24) & 0xFF, // baseMediaDecodeTime: int32
(baseMediaDecodeTime >>> 16) & 0xFF,
(baseMediaDecodeTime >>> 8) & 0xFF,
(baseMediaDecodeTime) & 0xFF
]))
let sdtp = MP4.sdtp(track)
let trun = MP4.trun(track, sdtp.byteLength + 16 + 16 + 8 + 16 + 8 + 8)
static mfhd(sequenceNumber) {
let data = new Uint8Array([
0x00, 0x00, 0x00, 0x00,
(sequenceNumber >>> 24) & 0xFF, // sequence_number: int32
(sequenceNumber >>> 16) & 0xFF,
(sequenceNumber >>> 8) & 0xFF,
(sequenceNumber) & 0xFF
]);
return MP4.box(MP4.types.mfhd, data);
}
return MP4.box(MP4.types.traf, tfhd, tfdt, trun, sdtp)
}
// Track fragment box
static traf(track, baseMediaDecodeTime) {
let trackId = track.id;
// Sample Dependency Type box
static sdtp (track) {
let samples = track.samples || []
let sampleCount = samples.length
let data = new Uint8Array(4 + sampleCount)
// 0~4 bytes: version(0) & flags
for (let i = 0; i < sampleCount; i++) {
let flags = samples[i].flags
data[i + 4] = (flags.isLeading << 6) | // is_leading: 2 (bit)
(flags.dependsOn << 4) | // sample_depends_on
(flags.isDependedOn << 2) | // sample_is_depended_on
(flags.hasRedundancy) // sample_has_redundancy
// Track fragment header box
let tfhd = MP4.box(MP4.types.tfhd, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) & flags
(trackId >>> 24) & 0xFF, // track_ID
(trackId >>> 16) & 0xFF,
(trackId >>> 8) & 0xFF,
(trackId) & 0xFF
]));
// Track Fragment Decode Time
let tfdt = MP4.box(MP4.types.tfdt, new Uint8Array([
0x00, 0x00, 0x00, 0x00, // version(0) & flags
(baseMediaDecodeTime >>> 24) & 0xFF, // baseMediaDecodeTime: int32
(baseMediaDecodeTime >>> 16) & 0xFF,
(baseMediaDecodeTime >>> 8) & 0xFF,
(baseMediaDecodeTime) & 0xFF
]));
let sdtp = MP4.sdtp(track);
let trun = MP4.trun(track, sdtp.byteLength + 16 + 16 + 8 + 16 + 8 + 8);
return MP4.box(MP4.types.traf, tfhd, tfdt, trun, sdtp);
}
return MP4.box(MP4.types.sdtp, data)
}
// Track fragment run box
static trun (track, offset) {
let samples = track.samples || []
let sampleCount = samples.length
let dataSize = 12 + 16 * sampleCount
let data = new Uint8Array(dataSize)
offset += 8 + dataSize
// Sample Dependency Type box
static sdtp(track) {
let samples = track.samples || [];
let sampleCount = samples.length;
let data = new Uint8Array(4 + sampleCount);
// 0~4 bytes: version(0) & flags
for (let i = 0; i < sampleCount; i++) {
let flags = samples[i].flags;
data[i + 4] = (flags.isLeading << 6) // is_leading: 2 (bit)
| (flags.dependsOn << 4) // sample_depends_on
| (flags.isDependedOn << 2) // sample_is_depended_on
| (flags.hasRedundancy); // sample_has_redundancy
}
return MP4.box(MP4.types.sdtp, data);
}
data.set([
0x00, 0x00, 0x0F, 0x01, // version(0) & flags
(sampleCount >>> 24) & 0xFF, // sample_count
(sampleCount >>> 16) & 0xFF,
(sampleCount >>> 8) & 0xFF,
(sampleCount) & 0xFF,
(offset >>> 24) & 0xFF, // data_offset
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
(offset) & 0xFF
], 0)
// Track fragment run box
static trun(track, offset) {
let samples = track.samples || [];
let sampleCount = samples.length;
let dataSize = 12 + 16 * sampleCount;
let data = new Uint8Array(dataSize);
offset += 8 + dataSize;
for (let i = 0; i < sampleCount; i++) {
// console.log(samples[i].duration)
let duration = samples[i].duration
let size = samples[i].size
let flags = samples[i].flags
let cts = samples[i].cts
data.set([
(duration >>> 24) & 0xFF, // sample_duration
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
(size >>> 24) & 0xFF, // sample_size
(size >>> 16) & 0xFF,
(size >>> 8) & 0xFF,
(size) & 0xFF,
(flags.isLeading << 2) | flags.dependsOn, // sample_flags
(flags.isDependedOn << 6) | (flags.hasRedundancy << 4) | flags.isNonSync,
0x00, 0x00, // sample_degradation_priority
(cts >>> 24) & 0xFF, // sample_composition_time_offset
(cts >>> 16) & 0xFF,
(cts >>> 8) & 0xFF,
(cts) & 0xFF
], 12 + 16 * i)
data.set([
0x00, 0x00, 0x0F, 0x01, // version(0) & flags
(sampleCount >>> 24) & 0xFF, // sample_count
(sampleCount >>> 16) & 0xFF,
(sampleCount >>> 8) & 0xFF,
(sampleCount) & 0xFF,
(offset >>> 24) & 0xFF, // data_offset
(offset >>> 16) & 0xFF,
(offset >>> 8) & 0xFF,
(offset) & 0xFF
], 0);
for (let i = 0; i < sampleCount; i++) {
let duration = samples[i].duration;
let size = samples[i].size;
let flags = samples[i].flags;
let cts = samples[i].cts;
data.set([
(duration >>> 24) & 0xFF, // sample_duration
(duration >>> 16) & 0xFF,
(duration >>> 8) & 0xFF,
(duration) & 0xFF,
(size >>> 24) & 0xFF, // sample_size
(size >>> 16) & 0xFF,
(size >>> 8) & 0xFF,
(size) & 0xFF,
(flags.isLeading << 2) | flags.dependsOn, // sample_flags
(flags.isDependedOn << 6) | (flags.hasRedundancy << 4) | flags.isNonSync,
0x00, 0x00, // sample_degradation_priority
(cts >>> 24) & 0xFF, // sample_composition_time_offset
(cts >>> 16) & 0xFF,
(cts >>> 8) & 0xFF,
(cts) & 0xFF
], 12 + 16 * i);
}
return MP4.box(MP4.types.trun, data);
}
return MP4.box(MP4.types.trun, data)
}
static mdat (data) {
// Media Data box (mdat): wraps the raw sample bytes for a fragment
return MP4.box(MP4.types.mdat, data)
}
static mdat(data) {
// Media Data box (mdat): wraps the raw sample bytes for a fragment.
return MP4.box(MP4.types.mdat, data);
}
}
MP4.init()
MP4.init();
export default MP4
export default MP4;

@@ -19,71 +19,73 @@ /*

import Log from '../utils/logger.js'
import MP4 from './mp4-generator.js'
import AAC from './aac-silent.js'
import Browser from '../utils/browser.js'
import {SampleInfo, MediaSegmentInfo, MediaSegmentInfoList} from '../core/media-segment-info.js'
import {IllegalStateException} from '../utils/exception.js'
import Log from '../utils/logger.js';
import MP4 from './mp4-generator.js';
import AAC from './aac-silent.js';
import Browser from '../utils/browser.js';
import { SampleInfo, MediaSegmentInfo, MediaSegmentInfoList } from '../core/media-segment-info.js';
import { IllegalStateException } from '../utils/exception.js';
// Fragmented mp4 remuxer
class MP4Remuxer {
constructor (config) {
this.TAG = 'MP4Remuxer'
this._config = config
this._isLive = (config.isLive === true)
constructor(config) {
this.TAG = 'MP4Remuxer';
this._dtsBase = -1
this._dtsBaseInited = false
this._audioDtsBase = Infinity
this._videoDtsBase = Infinity
this._audioNextDts = undefined
this._videoNextDts = undefined
this._audioStashedLastSample = null
this._videoStashedLastSample = null
this._config = config;
this._isLive = (config.isLive === true) ? true : false;
this._audioMeta = null
this._videoMeta = null
this._dtsBase = -1;
this._dtsBaseInited = false;
this._audioDtsBase = Infinity;
this._videoDtsBase = Infinity;
this._audioNextDts = undefined;
this._videoNextDts = undefined;
this._audioStashedLastSample = null;
this._videoStashedLastSample = null;
this._audioSegmentInfoList = new MediaSegmentInfoList('audio')
this._videoSegmentInfoList = new MediaSegmentInfoList('video')
this._audioMeta = null;
this._videoMeta = null;
this._onInitSegment = null
this._onMediaSegment = null
this._audioSegmentInfoList = new MediaSegmentInfoList('audio');
this._videoSegmentInfoList = new MediaSegmentInfoList('video');
// Workaround for chrome < 50: Always force first sample as a Random Access Point in media segment
// see https://bugs.chromium.org/p/chromium/issues/detail?id=229412
this._forceFirstIDR = !!((Browser.chrome &&
(Browser.version.major < 50 ||
(Browser.version.major === 50 && Browser.version.build < 2661))))
this._onInitSegment = null;
this._onMediaSegment = null;
// Workaround for IE11/Edge: Fill silent aac frame after keyframe-seeking
// Make audio beginDts equals with video beginDts, in order to fix seek freeze
this._fillSilentAfterSeek = (Browser.msedge || Browser.msie)
// Workaround for chrome < 50: Always force first sample as a Random Access Point in media segment
// see https://bugs.chromium.org/p/chromium/issues/detail?id=229412
this._forceFirstIDR = (Browser.chrome &&
(Browser.version.major < 50 ||
(Browser.version.major === 50 && Browser.version.build < 2661))) ? true : false;
// While only FireFox supports 'audio/mp4, codecs="mp3"', use 'audio/mpeg' for chrome, safari, ...
this._mp3UseMpegAudio = !Browser.firefox
// Workaround for IE11/Edge: Fill silent aac frame after keyframe-seeking
// Make audio beginDts equals with video beginDts, in order to fix seek freeze
this._fillSilentAfterSeek = (Browser.msedge || Browser.msie);
this._fillAudioTimestampGap = this._config.fixAudioTimestampGap
}
// While only FireFox supports 'audio/mp4, codecs="mp3"', use 'audio/mpeg' for chrome, safari, ...
this._mp3UseMpegAudio = !Browser.firefox;
destroy () {
// Tear down the remuxer: reset dts-base state, drop cached metadata,
// clear+release both segment-info lists, and detach output callbacks.
// The instance is unusable after this call
this._dtsBase = -1
this._dtsBaseInited = false
this._audioMeta = null
this._videoMeta = null
this._audioSegmentInfoList.clear()
this._audioSegmentInfoList = null
this._videoSegmentInfoList.clear()
this._videoSegmentInfoList = null
this._onInitSegment = null
this._onMediaSegment = null
}
this._fillAudioTimestampGap = this._config.fixAudioTimestampGap;
}
bindDataSource (producer) {
// Hook this remuxer onto a demuxer-like producer: its data and metadata
// callbacks are bound to this instance. Returns `this` for chaining
producer.onDataAvailable = this.remux.bind(this)
producer.onTrackMetadata = this._onTrackMetadataReceived.bind(this)
return this
}
destroy() {
this._dtsBase = -1;
this._dtsBaseInited = false;
this._audioMeta = null;
this._videoMeta = null;
this._audioSegmentInfoList.clear();
this._audioSegmentInfoList = null;
this._videoSegmentInfoList.clear();
this._videoSegmentInfoList = null;
this._onInitSegment = null;
this._onMediaSegment = null;
}
/* prototype: function onInitSegment(type: string, initSegment: ArrayBuffer): void
bindDataSource(producer) {
producer.onDataAvailable = this.remux.bind(this);
producer.onTrackMetadata = this._onTrackMetadataReceived.bind(this);
return this;
}
/* prototype: function onInitSegment(type: string, initSegment: ArrayBuffer): void
InitSegment: {

@@ -96,11 +98,11 @@ type: string,

*/
get onInitSegment () {
// Callback fired with each generated Initialization Segment (see prototype above)
return this._onInitSegment
}
get onInitSegment() {
// Callback fired with each generated Initialization Segment (see prototype above).
return this._onInitSegment;
}
set onInitSegment (callback) {
// Register the Initialization Segment callback; required before remuxing
this._onInitSegment = callback
}
set onInitSegment(callback) {
// Register the Initialization Segment callback; required before remuxing.
this._onInitSegment = callback;
}
/* prototype: function onMediaSegment(type: string, mediaSegment: MediaSegment): void
/* prototype: function onMediaSegment(type: string, mediaSegment: MediaSegment): void
MediaSegment: {

@@ -113,626 +115,656 @@ type: string,

*/
get onMediaSegment () {
// Callback fired with each generated Media Segment (see prototype above)
return this._onMediaSegment
}
get onMediaSegment() {
// Callback fired with each generated Media Segment (see prototype above).
return this._onMediaSegment;
}
set onMediaSegment (callback) {
// Register the Media Segment callback; remux() throws if this is unset
this._onMediaSegment = callback
}
set onMediaSegment(callback) {
// Register the Media Segment callback; remux() throws if this is unset.
this._onMediaSegment = callback;
}
insertDiscontinuity () {
// Forget the expected-next timestamps so the next batch re-anchors timing
this._audioNextDts = this._videoNextDts = undefined
}
insertDiscontinuity() {
this._audioNextDts = this._videoNextDts = undefined;
}
seek (originalDts) {
// Reset stashed samples and accumulated segment info on both tracks.
// NOTE(review): originalDts is currently unused by this implementation
this._audioStashedLastSample = null
this._videoStashedLastSample = null
this._videoSegmentInfoList.clear()
this._audioSegmentInfoList.clear()
}
seek(originalDts) {
this._audioStashedLastSample = null;
this._videoStashedLastSample = null;
this._videoSegmentInfoList.clear();
this._audioSegmentInfoList.clear();
}
remux (audioTrack, videoTrack) {
if (!this._onMediaSegment) {
throw new IllegalStateException('MP4Remuxer: onMediaSegment callback must be specificed!')
remux(audioTrack, videoTrack) {
// Entry point for each demuxed batch: requires onMediaSegment to be set,
// lazily establishes the shared dts base from the first samples, then
// remuxes video before audio.
if (!this._onMediaSegment) {
// (sic: "specificed" typo preserved — runtime error text)
throw new IllegalStateException('MP4Remuxer: onMediaSegment callback must be specificed!');
}
if (!this._dtsBaseInited) {
this._calculateDtsBase(audioTrack, videoTrack);
}
this._remuxVideo(videoTrack);
this._remuxAudio(audioTrack);
}
if (!this._dtsBaseInited) {
this._calculateDtsBase(audioTrack, videoTrack)
}
this._remuxVideo(videoTrack)
this._remuxAudio(audioTrack)
}
_onTrackMetadataReceived (type, metadata) {
let metabox = null
_onTrackMetadataReceived(type, metadata) {
let metabox = null;
let container = 'mp4'
let codec = metadata.codec
let container = 'mp4';
let codec = metadata.codec;
if (type === 'audio') {
this._audioMeta = metadata
if (metadata.codec === 'mp3' && this._mp3UseMpegAudio) {
// 'audio/mpeg' for MP3 audio track
container = 'mpeg'
codec = ''
metabox = new Uint8Array()
} else {
// 'audio/mp4, codecs="codec"'
metabox = MP4.generateInitSegment(metadata)
}
} else if (type === 'video') {
this._videoMeta = metadata
metabox = MP4.generateInitSegment(metadata)
} else {
return
}
if (type === 'audio') {
this._audioMeta = metadata;
if (metadata.codec === 'mp3' && this._mp3UseMpegAudio) {
// 'audio/mpeg' for MP3 audio track
container = 'mpeg';
codec = '';
metabox = new Uint8Array();
} else {
// 'audio/mp4, codecs="codec"'
metabox = MP4.generateInitSegment(metadata);
}
} else if (type === 'video') {
this._videoMeta = metadata;
metabox = MP4.generateInitSegment(metadata);
} else {
return;
}
// dispatch metabox (Initialization Segment)
if (!this._onInitSegment) {
throw new IllegalStateException('MP4Remuxer: onInitSegment callback must be specified!')
// dispatch metabox (Initialization Segment)
if (!this._onInitSegment) {
throw new IllegalStateException('MP4Remuxer: onInitSegment callback must be specified!');
}
this._onInitSegment(type, {
type: type,
data: metabox.buffer,
codec: codec,
container: `${type}/${container}`,
mediaDuration: metadata.duration // in timescale 1000 (milliseconds)
});
}
this._onInitSegment(type, {
type: type,
data: metabox.buffer,
codec: codec,
container: `${type}/${container}`,
mediaDuration: metadata.duration // in timescale 1000 (milliseconds)
})
}
_calculateDtsBase (audioTrack, videoTrack) {
if (this._dtsBaseInited) {
return
}
_calculateDtsBase(audioTrack, videoTrack) {
if (this._dtsBaseInited) {
return;
}
if (audioTrack.samples && audioTrack.samples.length) {
this._audioDtsBase = audioTrack.samples[0].dts
if (audioTrack.samples && audioTrack.samples.length) {
this._audioDtsBase = audioTrack.samples[0].dts;
}
if (videoTrack.samples && videoTrack.samples.length) {
this._videoDtsBase = videoTrack.samples[0].dts;
}
this._dtsBase = Math.min(this._audioDtsBase, this._videoDtsBase);
this._dtsBaseInited = true;
}
if (videoTrack.samples && videoTrack.samples.length) {
this._videoDtsBase = videoTrack.samples[0].dts
}
this._dtsBase = Math.min(this._audioDtsBase, this._videoDtsBase)
this._dtsBaseInited = true
}
flushStashedSamples() {
let videoSample = this._videoStashedLastSample;
let audioSample = this._audioStashedLastSample;
flushStashedSamples () {
let videoSample = this._videoStashedLastSample
let audioSample = this._audioStashedLastSample
let videoTrack = {
type: 'video',
id: 1,
sequenceNumber: 0,
samples: [],
length: 0
};
let videoTrack = {
type: 'video',
id: 1,
sequenceNumber: 0,
samples: [],
length: 0
}
if (videoSample != null) {
videoTrack.samples.push(videoSample);
videoTrack.length = videoSample.length;
}
if (videoSample != null) {
videoTrack.samples.push(videoSample)
videoTrack.length = videoSample.length
}
let audioTrack = {
type: 'audio',
id: 2,
sequenceNumber: 0,
samples: [],
length: 0
};
let audioTrack = {
type: 'audio',
id: 2,
sequenceNumber: 0,
samples: [],
length: 0
}
if (audioSample != null) {
audioTrack.samples.push(audioSample);
audioTrack.length = audioSample.length;
}
if (audioSample != null) {
audioTrack.samples.push(audioSample)
audioTrack.length = audioSample.length
this._videoStashedLastSample = null;
this._audioStashedLastSample = null;
this._remuxVideo(videoTrack, true);
this._remuxAudio(audioTrack, true);
}
this._videoStashedLastSample = null
this._audioStashedLastSample = null
_remuxAudio(audioTrack, force) {
if (this._audioMeta == null) {
return;
}
this._remuxVideo(videoTrack, true)
this._remuxAudio(audioTrack, true)
}
let track = audioTrack;
let samples = track.samples;
let dtsCorrection = undefined;
let firstDts = -1, lastDts = -1, lastPts = -1;
let refSampleDuration = this._audioMeta.refSampleDuration;
_remuxAudio (audioTrack, force) {
if (this._audioMeta == null) {
return
}
let mpegRawTrack = this._audioMeta.codec === 'mp3' && this._mp3UseMpegAudio;
let firstSegmentAfterSeek = this._dtsBaseInited && this._audioNextDts === undefined;
let track = audioTrack
let samples = track.samples
let dtsCorrection
let firstDts = -1, lastDts = -1, lastPts = -1
let refSampleDuration = this._audioMeta.refSampleDuration
let insertPrefixSilentFrame = false;
let mpegRawTrack = this._audioMeta.codec === 'mp3' && this._mp3UseMpegAudio
let firstSegmentAfterSeek = this._dtsBaseInited && this._audioNextDts === undefined
if (!samples || samples.length === 0) {
return;
}
if (samples.length === 1 && !force) {
// If [sample count in current batch] === 1 && (force != true)
// Ignore and keep in demuxer's queue
return;
} // else if (force === true) do remux
let insertPrefixSilentFrame = false
let offset = 0;
let mdatbox = null;
let mdatBytes = 0;
if (!samples || samples.length === 0) {
return
}
if (samples.length === 1 && !force) {
// If [sample count in current batch] === 1 && (force != true)
// Ignore and keep in demuxer's queue
return
} // else if (force === true) do remux
// calculate initial mdat size
if (mpegRawTrack) {
// for raw mpeg buffer
offset = 0;
mdatBytes = track.length;
} else {
// for fmp4 mdat box
offset = 8; // size + type
mdatBytes = 8 + track.length;
}
let offset = 0
let mdatbox = null
let mdatBytes = 0
// calculate initial mdat size
if (mpegRawTrack) {
// for raw mpeg buffer
offset = 0
mdatBytes = track.length
} else {
// for fmp4 mdat box
offset = 8 // size + type
mdatBytes = 8 + track.length
}
let lastSample = null;
let lastSample = null
// Pop the lastSample and waiting for stash
if (samples.length > 1) {
lastSample = samples.pop();
mdatBytes -= lastSample.length;
}
// Pop the lastSample and waiting for stash
if (samples.length > 1) {
lastSample = samples.pop()
mdatBytes -= lastSample.length
}
// Insert [stashed lastSample in the previous batch] to the front
if (this._audioStashedLastSample != null) {
let sample = this._audioStashedLastSample;
this._audioStashedLastSample = null;
samples.unshift(sample);
mdatBytes += sample.length;
}
// Insert [stashed lastSample in the previous batch] to the front
if (this._audioStashedLastSample != null) {
let sample = this._audioStashedLastSample
this._audioStashedLastSample = null
samples.unshift(sample)
mdatBytes += sample.length
}
// Stash the lastSample of current batch, waiting for next batch
if (lastSample != null) {
this._audioStashedLastSample = lastSample;
}
// Stash the lastSample of current batch, waiting for next batch
if (lastSample != null) {
this._audioStashedLastSample = lastSample
}
let firstSampleOriginalDts = samples[0].dts - this._dtsBase
let firstSampleOriginalDts = samples[0].dts - this._dtsBase;
// calculate dtsCorrection
if (this._audioNextDts) {
dtsCorrection = firstSampleOriginalDts - this._audioNextDts
} else { // this._audioNextDts == undefined
if (this._audioSegmentInfoList.isEmpty()) {
dtsCorrection = 0
if (this._fillSilentAfterSeek && !this._videoSegmentInfoList.isEmpty()) {
if (this._audioMeta.originalCodec !== 'mp3') {
insertPrefixSilentFrame = true
}
// calculate dtsCorrection
if (this._audioNextDts) {
dtsCorrection = firstSampleOriginalDts - this._audioNextDts;
} else { // this._audioNextDts == undefined
if (this._audioSegmentInfoList.isEmpty()) {
dtsCorrection = 0;
if (this._fillSilentAfterSeek && !this._videoSegmentInfoList.isEmpty()) {
if (this._audioMeta.originalCodec !== 'mp3') {
insertPrefixSilentFrame = true;
}
}
} else {
let lastSample = this._audioSegmentInfoList.getLastSampleBefore(firstSampleOriginalDts);
if (lastSample != null) {
let distance = (firstSampleOriginalDts - (lastSample.originalDts + lastSample.duration));
if (distance <= 3) {
distance = 0;
}
let expectedDts = lastSample.dts + lastSample.duration + distance;
dtsCorrection = firstSampleOriginalDts - expectedDts;
} else { // lastSample == null, cannot found
dtsCorrection = 0;
}
}
}
} else {
let lastSample = this._audioSegmentInfoList.getLastSampleBefore(firstSampleOriginalDts)
if (lastSample != null) {
let distance = (firstSampleOriginalDts - (lastSample.originalDts + lastSample.duration))
if (distance <= 3) {
distance = 0
}
let expectedDts = lastSample.dts + lastSample.duration + distance
dtsCorrection = firstSampleOriginalDts - expectedDts
} else { // lastSample == null, cannot found
dtsCorrection = 0
if (insertPrefixSilentFrame) {
// align audio segment beginDts to match with current video segment's beginDts
let firstSampleDts = firstSampleOriginalDts - dtsCorrection;
let videoSegment = this._videoSegmentInfoList.getLastSegmentBefore(firstSampleOriginalDts);
if (videoSegment != null && videoSegment.beginDts < firstSampleDts) {
let silentUnit = AAC.getSilentFrame(this._audioMeta.originalCodec, this._audioMeta.channelCount);
if (silentUnit) {
let dts = videoSegment.beginDts;
let silentFrameDuration = firstSampleDts - videoSegment.beginDts;
Log.v(this.TAG, `InsertPrefixSilentAudio: dts: ${dts}, duration: ${silentFrameDuration}`);
samples.unshift({ unit: silentUnit, dts: dts, pts: dts });
mdatBytes += silentUnit.byteLength;
} // silentUnit == null: Cannot generate, skip
} else {
insertPrefixSilentFrame = false;
}
}
}
}
if (insertPrefixSilentFrame) {
// align audio segment beginDts to match with current video segment's beginDts
let firstSampleDts = firstSampleOriginalDts - dtsCorrection
let videoSegment = this._videoSegmentInfoList.getLastSegmentBefore(firstSampleOriginalDts)
if (videoSegment != null && videoSegment.beginDts < firstSampleDts) {
let silentUnit = AAC.getSilentFrame(this._audioMeta.originalCodec, this._audioMeta.channelCount)
if (silentUnit) {
let dts = videoSegment.beginDts
let silentFrameDuration = firstSampleDts - videoSegment.beginDts
Log.v(this.TAG, `InsertPrefixSilentAudio: dts: ${dts}, duration: ${silentFrameDuration}`)
samples.unshift({unit: silentUnit, dts: dts, pts: dts})
mdatBytes += silentUnit.byteLength
} // silentUnit == null: Cannot generate, skip
} else {
insertPrefixSilentFrame = false
}
}
let mp4Samples = [];
let mp4Samples = []
// Correct dts for each sample, and calculate sample duration. Then output to mp4Samples
for (let i = 0; i < samples.length; i++) {
let sample = samples[i];
let unit = sample.unit;
let originalDts = sample.dts - this._dtsBase;
let dts = originalDts;
let needFillSilentFrames = false;
let silentFrames = null;
let sampleDuration = 0;
// Correct dts for each sample, and calculate sample duration. Then output to mp4Samples
for (let i = 0; i < samples.length; i++) {
let sample = samples[i]
let unit = sample.unit
let originalDts = sample.dts - this._dtsBase
let dts = originalDts - dtsCorrection
if (originalDts < -0.001) {
continue; //pass the first sample with the invalid dts
}
if (firstDts === -1) {
firstDts = dts
}
if (this._audioMeta.codec !== 'mp3') {
// for AAC codec, we need to keep dts increase based on refSampleDuration
let curRefDts = originalDts;
const maxAudioFramesDrift = 3;
if (this._audioNextDts) {
curRefDts = this._audioNextDts;
}
let sampleDuration = 0
dtsCorrection = originalDts - curRefDts;
if (dtsCorrection <= -maxAudioFramesDrift * refSampleDuration) {
// If we're overlapping by more than maxAudioFramesDrift number of frame, drop this sample
Log.w(this.TAG, `Dropping 1 audio frame (originalDts: ${originalDts} ms ,curRefDts: ${curRefDts} ms) due to dtsCorrection: ${dtsCorrection} ms overlap.`);
continue;
}
else if (dtsCorrection >= maxAudioFramesDrift * refSampleDuration && this._fillAudioTimestampGap && !Browser.safari) {
// Silent frame generation, if large timestamp gap detected && config.fixAudioTimestampGap
needFillSilentFrames = true;
// We need to insert silent frames to fill timestamp gap
let frameCount = Math.floor(dtsCorrection / refSampleDuration);
Log.w(this.TAG, 'Large audio timestamp gap detected, may cause AV sync to drift. ' +
'Silent frames will be generated to avoid unsync.\n' +
`originalDts: ${originalDts} ms, curRefDts: ${curRefDts} ms, ` +
`dtsCorrection: ${Math.round(dtsCorrection)} ms, generate: ${frameCount} frames`);
if (i !== samples.length - 1) {
let nextDts = samples[i + 1].dts - this._dtsBase - dtsCorrection
sampleDuration = nextDts - dts
} else { // the last sample
if (lastSample != null) { // use stashed sample's dts to calculate sample duration
let nextDts = lastSample.dts - this._dtsBase - dtsCorrection
sampleDuration = nextDts - dts
} else if (mp4Samples.length >= 1) { // use second last sample duration
sampleDuration = mp4Samples[mp4Samples.length - 1].duration
} else { // the only one sample, use reference sample duration
sampleDuration = Math.floor(refSampleDuration)
}
}
let needFillSilentFrames = false
let silentFrames = null
dts = Math.floor(curRefDts);
sampleDuration = Math.floor(curRefDts + refSampleDuration) - dts;
// Silent frame generation, if large timestamp gap detected && config.fixAudioTimestampGap
if (sampleDuration > refSampleDuration * 1.5 && this._audioMeta.codec !== 'mp3' && this._fillAudioTimestampGap && !Browser.safari) {
// We need to insert silent frames to fill timestamp gap
needFillSilentFrames = true
let delta = Math.abs(sampleDuration - refSampleDuration)
let frameCount = Math.ceil(delta / refSampleDuration)
let currentDts = dts + refSampleDuration // Notice: in float
let silentUnit = AAC.getSilentFrame(this._audioMeta.originalCodec, this._audioMeta.channelCount);
if (silentUnit == null) {
Log.w(this.TAG, 'Unable to generate silent frame for ' +
`${this._audioMeta.originalCodec} with ${this._audioMeta.channelCount} channels, repeat last frame`);
// Repeat last frame
silentUnit = unit;
}
silentFrames = [];
Log.w(this.TAG, 'Large audio timestamp gap detected, may cause AV sync to drift. ' +
'Silent frames will be generated to avoid unsync.\n' +
`dts: ${dts + sampleDuration} ms, expected: ${dts + Math.round(refSampleDuration)} ms, ` +
`delta: ${Math.round(delta)} ms, generate: ${frameCount} frames`)
for (let j = 0; j < frameCount; j++) {
curRefDts = curRefDts + refSampleDuration;
let intDts = Math.floor(curRefDts); // change to integer
let intDuration = Math.floor(curRefDts + refSampleDuration) - intDts;
let frame = {
dts: intDts,
pts: intDts,
cts: 0,
unit: silentUnit,
size: silentUnit.byteLength,
duration: intDuration, // wait for next sample
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: 1,
isDependedOn: 0,
hasRedundancy: 0
}
};
silentFrames.push(frame);
mdatBytes += unit.byteLength;
let silentUnit = AAC.getSilentFrame(this._audioMeta.originalCodec, this._audioMeta.channelCount)
if (silentUnit == null) {
Log.w(this.TAG, 'Unable to generate silent frame for ' +
`${this._audioMeta.originalCodec} with ${this._audioMeta.channelCount} channels, repeat last frame`)
// Repeat last frame
silentUnit = unit
}
silentFrames = []
}
for (let j = 0; j < frameCount; j++) {
let intDts = Math.round(currentDts) // round to integer
if (silentFrames.length > 0) {
// Set previous frame sample duration
let previousFrame = silentFrames[silentFrames.length - 1]
previousFrame.duration = intDts - previousFrame.dts
}
let frame = {
dts: intDts,
pts: intDts,
cts: 0,
unit: silentUnit,
size: silentUnit.byteLength,
duration: 0, // wait for next sample
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: 1,
isDependedOn: 0,
hasRedundancy: 0
this._audioNextDts = curRefDts + refSampleDuration;
} else {
dts = Math.floor(curRefDts);
sampleDuration = Math.floor(curRefDts + refSampleDuration) - dts;
this._audioNextDts = curRefDts + refSampleDuration;
}
} else {
// keep the original dts calculate algorithm for mp3
dts = originalDts - dtsCorrection;
if (i !== samples.length - 1) {
let nextDts = samples[i + 1].dts - this._dtsBase - dtsCorrection;
sampleDuration = nextDts - dts;
} else { // the last sample
if (lastSample != null) { // use stashed sample's dts to calculate sample duration
let nextDts = lastSample.dts - this._dtsBase - dtsCorrection;
sampleDuration = nextDts - dts;
} else if (mp4Samples.length >= 1) { // use second last sample duration
sampleDuration = mp4Samples[mp4Samples.length - 1].duration;
} else { // the only one sample, use reference sample duration
sampleDuration = Math.floor(refSampleDuration);
}
}
this._audioNextDts = dts + sampleDuration;
}
}
silentFrames.push(frame)
mdatBytes += unit.byteLength
currentDts += refSampleDuration
}
// last frame: align end time to next frame dts
let lastFrame = silentFrames[silentFrames.length - 1]
lastFrame.duration = dts + sampleDuration - lastFrame.dts
if (firstDts === -1) {
firstDts = dts;
}
mp4Samples.push({
dts: dts,
pts: dts,
cts: 0,
unit: sample.unit,
size: sample.unit.byteLength,
duration: sampleDuration,
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: 1,
isDependedOn: 0,
hasRedundancy: 0
}
});
// silentFrames.forEach((frame) => {
// Log.w(this.TAG, `SilentAudio: dts: ${frame.dts}, duration: ${frame.duration}`);
// });
if (needFillSilentFrames) {
// Silent frames should be inserted after wrong-duration frame
mp4Samples.push.apply(mp4Samples, silentFrames);
}
}
// Set correct sample duration for current frame
sampleDuration = Math.round(refSampleDuration)
}
if (mp4Samples.length === 0) {
//no samples need to remux
track.samples = [];
track.length = 0;
return;
}
mp4Samples.push({
dts: dts,
pts: dts,
cts: 0,
unit: sample.unit,
size: sample.unit.byteLength,
duration: sampleDuration,
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: 1,
isDependedOn: 0,
hasRedundancy: 0
// allocate mdatbox
if (mpegRawTrack) {
// allocate for raw mpeg buffer
mdatbox = new Uint8Array(mdatBytes);
} else {
// allocate for fmp4 mdat box
mdatbox = new Uint8Array(mdatBytes);
// size field
mdatbox[0] = (mdatBytes >>> 24) & 0xFF;
mdatbox[1] = (mdatBytes >>> 16) & 0xFF;
mdatbox[2] = (mdatBytes >>> 8) & 0xFF;
mdatbox[3] = (mdatBytes) & 0xFF;
// type field (fourCC)
mdatbox.set(MP4.types.mdat, 4);
}
})
if (needFillSilentFrames) {
// Silent frames should be inserted after wrong-duration frame
mp4Samples.push.apply(mp4Samples, silentFrames)
}
}
// Write samples into mdatbox
for (let i = 0; i < mp4Samples.length; i++) {
let unit = mp4Samples[i].unit;
mdatbox.set(unit, offset);
offset += unit.byteLength;
}
// allocate mdatbox
if (mpegRawTrack) {
// allocate for raw mpeg buffer
mdatbox = new Uint8Array(mdatBytes)
} else {
// allocate for fmp4 mdat box
mdatbox = new Uint8Array(mdatBytes)
// size field
mdatbox[0] = (mdatBytes >>> 24) & 0xFF
mdatbox[1] = (mdatBytes >>> 16) & 0xFF
mdatbox[2] = (mdatBytes >>> 8) & 0xFF
mdatbox[3] = (mdatBytes) & 0xFF
// type field (fourCC)
mdatbox.set(MP4.types.mdat, 4)
}
let latest = mp4Samples[mp4Samples.length - 1];
lastDts = latest.dts + latest.duration;
//this._audioNextDts = lastDts;
// Write samples into mdatbox
for (let i = 0; i < mp4Samples.length; i++) {
let unit = mp4Samples[i].unit
mdatbox.set(unit, offset)
offset += unit.byteLength
}
// fill media segment info & add to info list
let info = new MediaSegmentInfo();
info.beginDts = firstDts;
info.endDts = lastDts;
info.beginPts = firstDts;
info.endPts = lastDts;
info.originalBeginDts = mp4Samples[0].originalDts;
info.originalEndDts = latest.originalDts + latest.duration;
info.firstSample = new SampleInfo(mp4Samples[0].dts,
mp4Samples[0].pts,
mp4Samples[0].duration,
mp4Samples[0].originalDts,
false);
info.lastSample = new SampleInfo(latest.dts,
latest.pts,
latest.duration,
latest.originalDts,
false);
if (!this._isLive) {
this._audioSegmentInfoList.append(info);
}
let latest = mp4Samples[mp4Samples.length - 1]
lastDts = latest.dts + latest.duration
this._audioNextDts = lastDts
track.samples = mp4Samples;
track.sequenceNumber++;
// fill media segment info & add to info list
let info = new MediaSegmentInfo()
info.beginDts = firstDts
info.endDts = lastDts
info.beginPts = firstDts
info.endPts = lastDts
info.originalBeginDts = mp4Samples[0].originalDts
info.originalEndDts = latest.originalDts + latest.duration
info.firstSample = new SampleInfo(mp4Samples[0].dts,
mp4Samples[0].pts,
mp4Samples[0].duration,
mp4Samples[0].originalDts,
false)
info.lastSample = new SampleInfo(latest.dts,
latest.pts,
latest.duration,
latest.originalDts,
false)
if (!this._isLive) {
this._audioSegmentInfoList.append(info)
}
let moofbox = null;
track.samples = mp4Samples
track.sequenceNumber++
if (mpegRawTrack) {
// Generate empty buffer, because useless for raw mpeg
moofbox = new Uint8Array();
} else {
// Generate moof for fmp4 segment
moofbox = MP4.moof(track, firstDts);
}
let moofbox = null
track.samples = [];
track.length = 0;
if (mpegRawTrack) {
// Generate empty buffer, because useless for raw mpeg
moofbox = new Uint8Array()
} else {
// Generate moof for fmp4 segment
moofbox = MP4.moof(track, firstDts)
}
let segment = {
type: 'audio',
data: this._mergeBoxes(moofbox, mdatbox).buffer,
sampleCount: mp4Samples.length,
info: info
};
track.samples = []
track.length = 0
if (mpegRawTrack && firstSegmentAfterSeek) {
// For MPEG audio stream in MSE, if seeking occurred, before appending new buffer
// We need explicitly set timestampOffset to the desired point in timeline for mpeg SourceBuffer.
segment.timestampOffset = firstDts;
}
let segment = {
type: 'audio',
data: this._mergeBoxes(moofbox, mdatbox).buffer,
sampleCount: mp4Samples.length,
info: info
this._onMediaSegment('audio', segment);
}
if (mpegRawTrack && firstSegmentAfterSeek) {
// For MPEG audio stream in MSE, if seeking occurred, before appending new buffer
// We need explicitly set timestampOffset to the desired point in timeline for mpeg SourceBuffer.
segment.timestampOffset = firstDts
}
_remuxVideo(videoTrack, force) {
if (this._videoMeta == null) {
return;
}
this._onMediaSegment('audio', segment)
}
let track = videoTrack;
let samples = track.samples;
let dtsCorrection = undefined;
let firstDts = -1, lastDts = -1;
let firstPts = -1, lastPts = -1;
_remuxVideo (videoTrack, force) {
if (this._videoMeta == null) {
return
}
if (!samples || samples.length === 0) {
return;
}
if (samples.length === 1 && !force) {
// If [sample count in current batch] === 1 && (force != true)
// Ignore and keep in demuxer's queue
return;
} // else if (force === true) do remux
let track = videoTrack
let samples = track.samples
let dtsCorrection
let firstDts = -1, lastDts = -1
let firstPts = -1, lastPts = -1
let offset = 8;
let mdatbox = null;
let mdatBytes = 8 + videoTrack.length;
if (!samples || samples.length === 0) {
return
}
if (samples.length === 1 && !force) {
// If [sample count in current batch] === 1 && (force != true)
// Ignore and keep in demuxer's queue
return
} // else if (force === true) do remux
let offset = 8
let mdatbox = null
let mdatBytes = 8 + videoTrack.length
let lastSample = null;
let lastSample = null
// Pop the lastSample and waiting for stash
if (samples.length > 1) {
lastSample = samples.pop();
mdatBytes -= lastSample.length;
}
// Pop the lastSample and waiting for stash
if (samples.length > 1) {
lastSample = samples.pop()
mdatBytes -= lastSample.length
}
// Insert [stashed lastSample in the previous batch] to the front
if (this._videoStashedLastSample != null) {
let sample = this._videoStashedLastSample;
this._videoStashedLastSample = null;
samples.unshift(sample);
mdatBytes += sample.length;
}
// Insert [stashed lastSample in the previous batch] to the front
if (this._videoStashedLastSample != null) {
let sample = this._videoStashedLastSample
this._videoStashedLastSample = null
samples.unshift(sample)
mdatBytes += sample.length
}
// Stash the lastSample of current batch, waiting for next batch
if (lastSample != null) {
this._videoStashedLastSample = lastSample;
}
// Stash the lastSample of current batch, waiting for next batch
if (lastSample != null) {
this._videoStashedLastSample = lastSample
}
let firstSampleOriginalDts = samples[0].dts - this._dtsBase
let firstSampleOriginalDts = samples[0].dts - this._dtsBase;
// calculate dtsCorrection
if (this._videoNextDts) {
dtsCorrection = firstSampleOriginalDts - this._videoNextDts
} else { // this._videoNextDts == undefined
if (this._videoSegmentInfoList.isEmpty()) {
dtsCorrection = 0
} else {
let lastSample = this._videoSegmentInfoList.getLastSampleBefore(firstSampleOriginalDts)
if (lastSample != null) {
let distance = (firstSampleOriginalDts - (lastSample.originalDts + lastSample.duration))
if (distance <= 3) {
distance = 0
}
let expectedDts = lastSample.dts + lastSample.duration + distance
dtsCorrection = firstSampleOriginalDts - expectedDts
} else { // lastSample == null, cannot found
dtsCorrection = 0
// calculate dtsCorrection
if (this._videoNextDts) {
dtsCorrection = firstSampleOriginalDts - this._videoNextDts;
} else { // this._videoNextDts == undefined
if (this._videoSegmentInfoList.isEmpty()) {
dtsCorrection = 0;
} else {
let lastSample = this._videoSegmentInfoList.getLastSampleBefore(firstSampleOriginalDts);
if (lastSample != null) {
let distance = (firstSampleOriginalDts - (lastSample.originalDts + lastSample.duration));
if (distance <= 3) {
distance = 0;
}
let expectedDts = lastSample.dts + lastSample.duration + distance;
dtsCorrection = firstSampleOriginalDts - expectedDts;
} else { // lastSample == null, cannot found
dtsCorrection = 0;
}
}
}
}
}
let info = new MediaSegmentInfo()
let mp4Samples = []
// Correct dts for each sample, and calculate sample duration. Then output to mp4Samples
for (let i = 0; i < samples.length; i++) {
let sample = samples[i]
let originalDts = sample.dts - this._dtsBase
let isKeyframe = sample.isKeyframe
let dts = originalDts - dtsCorrection
let cts = sample.cts
let pts = dts + cts
let info = new MediaSegmentInfo();
let mp4Samples = [];
if (firstDts === -1) {
firstDts = dts
firstPts = pts
}
// Correct dts for each sample, and calculate sample duration. Then output to mp4Samples
for (let i = 0; i < samples.length; i++) {
let sample = samples[i];
let originalDts = sample.dts - this._dtsBase;
let isKeyframe = sample.isKeyframe;
let dts = originalDts - dtsCorrection;
let cts = sample.cts;
let pts = dts + cts;
let sampleDuration = 0
if (firstDts === -1) {
firstDts = dts;
firstPts = pts;
}
if (i !== samples.length - 1) {
let nextDts = samples[i + 1].dts - this._dtsBase - dtsCorrection
sampleDuration = nextDts - dts
} else { // the last sample
if (lastSample != null) { // use stashed sample's dts to calculate sample duration
let nextDts = lastSample.dts - this._dtsBase - dtsCorrection
sampleDuration = nextDts - dts
} else if (mp4Samples.length >= 1) { // use second last sample duration
sampleDuration = mp4Samples[mp4Samples.length - 1].duration
} else { // the only one sample, use reference sample duration
sampleDuration = Math.floor(this._videoMeta.refSampleDuration)
let sampleDuration = 0;
if (i !== samples.length - 1) {
let nextDts = samples[i + 1].dts - this._dtsBase - dtsCorrection;
sampleDuration = nextDts - dts;
} else { // the last sample
if (lastSample != null) { // use stashed sample's dts to calculate sample duration
let nextDts = lastSample.dts - this._dtsBase - dtsCorrection;
sampleDuration = nextDts - dts;
} else if (mp4Samples.length >= 1) { // use second last sample duration
sampleDuration = mp4Samples[mp4Samples.length - 1].duration;
} else { // the only one sample, use reference sample duration
sampleDuration = Math.floor(this._videoMeta.refSampleDuration);
}
}
if (isKeyframe) {
let syncPoint = new SampleInfo(dts, pts, sampleDuration, sample.dts, true);
syncPoint.fileposition = sample.fileposition;
info.appendSyncPoint(syncPoint);
}
mp4Samples.push({
dts: dts,
pts: pts,
cts: cts,
units: sample.units,
size: sample.length,
isKeyframe: isKeyframe,
duration: sampleDuration,
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: isKeyframe ? 2 : 1,
isDependedOn: isKeyframe ? 1 : 0,
hasRedundancy: 0,
isNonSync: isKeyframe ? 0 : 1
}
});
}
}
if (isKeyframe) {
let syncPoint = new SampleInfo(dts, pts, sampleDuration, sample.dts, true)
syncPoint.fileposition = sample.fileposition
info.appendSyncPoint(syncPoint)
}
// allocate mdatbox
mdatbox = new Uint8Array(mdatBytes);
mdatbox[0] = (mdatBytes >>> 24) & 0xFF;
mdatbox[1] = (mdatBytes >>> 16) & 0xFF;
mdatbox[2] = (mdatBytes >>> 8) & 0xFF;
mdatbox[3] = (mdatBytes) & 0xFF;
mdatbox.set(MP4.types.mdat, 4);
mp4Samples.push({
dts: dts,
pts: pts,
cts: cts,
units: sample.units,
size: sample.length,
isKeyframe: isKeyframe,
duration: sampleDuration,
originalDts: originalDts,
flags: {
isLeading: 0,
dependsOn: isKeyframe ? 2 : 1,
isDependedOn: isKeyframe ? 1 : 0,
hasRedundancy: 0,
isNonSync: isKeyframe ? 0 : 1
// Write samples into mdatbox
for (let i = 0; i < mp4Samples.length; i++) {
let units = mp4Samples[i].units;
while (units.length) {
let unit = units.shift();
let data = unit.data;
mdatbox.set(data, offset);
offset += data.byteLength;
}
}
})
}
// allocate mdatbox
mdatbox = new Uint8Array(mdatBytes)
mdatbox[0] = (mdatBytes >>> 24) & 0xFF
mdatbox[1] = (mdatBytes >>> 16) & 0xFF
mdatbox[2] = (mdatBytes >>> 8) & 0xFF
mdatbox[3] = (mdatBytes) & 0xFF
mdatbox.set(MP4.types.mdat, 4)
let latest = mp4Samples[mp4Samples.length - 1];
lastDts = latest.dts + latest.duration;
lastPts = latest.pts + latest.duration;
this._videoNextDts = lastDts;
// Write samples into mdatbox
for (let i = 0; i < mp4Samples.length; i++) {
let units = mp4Samples[i].units
while (units.length) {
let unit = units.shift()
let data = unit.data
mdatbox.set(data, offset)
offset += data.byteLength
}
}
// fill media segment info & add to info list
info.beginDts = firstDts;
info.endDts = lastDts;
info.beginPts = firstPts;
info.endPts = lastPts;
info.originalBeginDts = mp4Samples[0].originalDts;
info.originalEndDts = latest.originalDts + latest.duration;
info.firstSample = new SampleInfo(mp4Samples[0].dts,
mp4Samples[0].pts,
mp4Samples[0].duration,
mp4Samples[0].originalDts,
mp4Samples[0].isKeyframe);
info.lastSample = new SampleInfo(latest.dts,
latest.pts,
latest.duration,
latest.originalDts,
latest.isKeyframe);
if (!this._isLive) {
this._videoSegmentInfoList.append(info);
}
let latest = mp4Samples[mp4Samples.length - 1]
lastDts = latest.dts + latest.duration
lastPts = latest.pts + latest.duration
this._videoNextDts = lastDts
track.samples = mp4Samples;
track.sequenceNumber++;
// fill media segment info & add to info list
info.beginDts = firstDts
info.endDts = lastDts
info.beginPts = firstPts
info.endPts = lastPts
info.originalBeginDts = mp4Samples[0].originalDts
info.originalEndDts = latest.originalDts + latest.duration
info.firstSample = new SampleInfo(mp4Samples[0].dts,
mp4Samples[0].pts,
mp4Samples[0].duration,
mp4Samples[0].originalDts,
mp4Samples[0].isKeyframe)
info.lastSample = new SampleInfo(latest.dts,
latest.pts,
latest.duration,
latest.originalDts,
latest.isKeyframe)
if (!this._isLive) {
this._videoSegmentInfoList.append(info)
}
// workaround for chrome < 50: force first sample as a random access point
// see https://bugs.chromium.org/p/chromium/issues/detail?id=229412
if (this._forceFirstIDR) {
let flags = mp4Samples[0].flags;
flags.dependsOn = 2;
flags.isNonSync = 0;
}
track.samples = mp4Samples
track.sequenceNumber++
let moofbox = MP4.moof(track, firstDts);
track.samples = [];
track.length = 0;
// workaround for chrome < 50: force first sample as a random access point
// see https://bugs.chromium.org/p/chromium/issues/detail?id=229412
if (this._forceFirstIDR) {
let flags = mp4Samples[0].flags
flags.dependsOn = 2
flags.isNonSync = 0
this._onMediaSegment('video', {
type: 'video',
data: this._mergeBoxes(moofbox, mdatbox).buffer,
sampleCount: mp4Samples.length,
info: info
});
}
let moofbox = MP4.moof(track, firstDts)
track.samples = []
track.length = 0
_mergeBoxes(moof, mdat) {
let result = new Uint8Array(moof.byteLength + mdat.byteLength);
result.set(moof, 0);
result.set(mdat, moof.byteLength);
return result;
}
this._onMediaSegment('video', {
type: 'video',
data: this._mergeBoxes(moofbox, mdatbox).buffer,
sampleCount: mp4Samples.length,
info: info
})
}
  _mergeBoxes (moof, mdat) {
    // Concatenate the moof box and the mdat box into one contiguous
    // Uint8Array, moof first, so the pair can be emitted as a single
    // media-segment buffer.
    let result = new Uint8Array(moof.byteLength + mdat.byteLength)
    result.set(moof, 0)
    result.set(mdat, moof.byteLength)
    return result
  }
}
export default MP4Remuxer
export default MP4Remuxer;

@@ -19,116 +19,95 @@ /*

import EventEmitter from 'events'
import EventEmitter from 'events';
class Log {
static e (tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG) { tag = Log.GLOBAL_TAG }
let str = `[${tag}] > ${msg}`
static e(tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG)
tag = Log.GLOBAL_TAG;
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'error', str)
}
let str = `[${tag}] > ${msg}`;
if (!Log.ENABLE_ERROR) {
return
}
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'error', str);
}
if (console.error) {
console.error(str)
} else if (console.warn) {
console.warn(str)
} else {
console.log(str)
if (!Log.ENABLE_ERROR) {
return;
}
}
}
static i (tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG) { tag = Log.GLOBAL_TAG }
static i(tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG)
tag = Log.GLOBAL_TAG;
let str = `[${tag}] > ${msg}`
let str = `[${tag}] > ${msg}`;
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'info', str)
}
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'info', str);
}
if (!Log.ENABLE_INFO) {
return
if (!Log.ENABLE_INFO) {
return;
}
}
if (console.info) {
console.info(str)
} else {
console.log(str)
}
}
static w(tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG)
tag = Log.GLOBAL_TAG;
static w (tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG) { tag = Log.GLOBAL_TAG }
let str = `[${tag}] > ${msg}`;
let str = `[${tag}] > ${msg}`
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'warn', str);
}
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'warn', str)
if (!Log.ENABLE_WARN) {
return;
}
}
if (!Log.ENABLE_WARN) {
return
}
static d(tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG)
tag = Log.GLOBAL_TAG;
if (console.warn) {
console.warn(str)
} else {
console.log(str)
}
}
let str = `[${tag}] > ${msg}`;
static d (tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG) { tag = Log.GLOBAL_TAG }
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'debug', str);
}
let str = `[${tag}] > ${msg}`
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'debug', str)
if (!Log.ENABLE_DEBUG) {
return;
}
}
if (!Log.ENABLE_DEBUG) {
return
}
static v(tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG)
tag = Log.GLOBAL_TAG;
if (console.debug) {
console.debug(str)
} else {
console.log(str)
}
}
let str = `[${tag}] > ${msg}`;
static v (tag, msg) {
if (!tag || Log.FORCE_GLOBAL_TAG) { tag = Log.GLOBAL_TAG }
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'verbose', str);
}
let str = `[${tag}] > ${msg}`
if (Log.ENABLE_CALLBACK) {
Log.emitter.emit('log', 'verbose', str)
if (!Log.ENABLE_VERBOSE) {
return;
}
}
if (!Log.ENABLE_VERBOSE) {
return
}
console.log(str)
}
}
Log.GLOBAL_TAG = 'flv.js'
Log.FORCE_GLOBAL_TAG = false
Log.ENABLE_ERROR = true
Log.ENABLE_INFO = true
Log.ENABLE_WARN = true
Log.ENABLE_DEBUG = true
Log.ENABLE_VERBOSE = true
Log.GLOBAL_TAG = 'flv.js';
Log.FORCE_GLOBAL_TAG = false;
Log.ENABLE_ERROR = true;
Log.ENABLE_INFO = true;
Log.ENABLE_WARN = true;
Log.ENABLE_DEBUG = true;
Log.ENABLE_VERBOSE = true;
Log.ENABLE_CALLBACK = false
Log.ENABLE_CALLBACK = false;
Log.emitter = new EventEmitter()
Log.emitter = new EventEmitter();
export default Log
export default Log;

@@ -12,18 +12,42 @@ import Player from 'xgplayer'

const player = this
Object.defineProperty(player, 'src', {
get () {
return player.currentSrc
},
set (url) {
player.flv_load(url)
let oldVol = player.volume
player.video.muted = true
Player.util.addClass(player.root, 'xgplayer-is-enter')
player.once('playing', function(){
Player.util.removeClass(player.root, 'xgplayer-is-enter')
player.video.muted = false
})
player.once('canplay', function () {
player.play()
})
},
configurable: true
})
player.once('complete', () => {
player.__flv__ = Flv.createPlayer(this.flvOpts, this.optionalConfig)
player.createInstance(player.__flv__)
if(player.config.isLive) {
Player.util.addClass(player.root, 'xgplayer-is-live')
const live = Player.util.createDom('xg-live', '正在直播', {}, 'xgplayer-live')
player.controls.appendChild(live)
}
})
}
createInstance (flv) {
const player = this
const util = Player.util
player.video.addEventListener('contextmenu', function (e) {
e.preventDefault()
})
flv.attachMediaElement(player.video)
flv.load()
flv.play()
if (this.flvOpts.isLive) {
util.addClass(player.root, 'xgplayer-is-live')
const live = util.createDom('xg-live', '正在直播', {}, 'xgplayer-live')
player.controls.appendChild(live)
}
flv.on(Flv.Events.ERROR, (e) => {

@@ -38,14 +62,63 @@ player.emit('error', new Player.Errors('other', player.config.url))

}
flv_load (newUrl) {
let mediaDataSource = {
type: 'flv'
}
mediaDataSource.segments = [
{
cors: true,
duration: undefined,
filesize: undefined,
timestampBase: 0,
url: newUrl,
withCredentials: false
}
]
mediaDataSource.cors = true
mediaDataSource.hasAudio = true
mediaDataSource.hasVideo = true
mediaDataSource.isLive = true
mediaDataSource.url = newUrl
mediaDataSource.withCredentials = false
this.flv_load_mds(mediaDataSource)
}
flv_load_mds (mediaDataSource) {
let player = this
if (typeof player.__flv__ !== 'undefined') {
if (player.__flv__ != null) {
player.__flv__.unload()
player.__flv__.detachMediaElement()
player.__flv__.destroy()
player.__flv__ = null
}
}
player.__flv__ = Flv.createPlayer(mediaDataSource, {
enableWorker: false,
lazyLoadMaxDuration: 3 * 60,
seekType: 'range'
})
player.__flv__.attachMediaElement(player.video)
player.__flv__.load()
}
switchURL (url) {
const player = this
const flvPlayer = player.__flv__
player.config.url = url
if (!player.config.isLive) {
flvPlayer.onDefinitionChange(url, player.config.retryTimes)
} else {
const tempFlvPlayer = Flv.createPlayer(player.flvOpts)
flvPlayer.destroy()
player.createInstance(tempFlvPlayer)
player.__flv__ = tempFlvPlayer
let curTime = 0
if(!player.config.isLive) {
curTime = player.currentTime
}
player.flv_load(url)
let oldVol = player.volume
player.video.muted = true
Player.util.addClass(player.root, 'xgplayer-is-enter')
player.once('playing', function(){
Player.util.removeClass(player.root, 'xgplayer-is-enter')
player.video.muted = false
})
player.once('canplay', function () {
if(!player.config.isLive) {
player.currentTime = curTime
}
player.play()
})
}

@@ -52,0 +125,0 @@ }

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Packages

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc