Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@brightcove/videojs-flashls-source-handler

Package Overview
Dependencies
Maintainers
41
Versions
15
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@brightcove/videojs-flashls-source-handler - npm Package Compare versions

Comparing version 1.0.0 to 1.1.0

1137

dist/videojs-flashls-source-handler.js
/**
* @brightcove/videojs-flashls-source-handler
* @version 1.0.0
* @version 1.1.0
* @copyright 2016 Brightcove

@@ -9,4 +9,861 @@ * @license Apache-2.0

(function (global){
// Resolve a reference to the global object: prefer `window` (browsers),
// then the injected `global` (Node), then `self` (web workers); fall back
// to an empty object when none of them exist.
var globalObject;
if (typeof window !== "undefined") {
  globalObject = window;
} else if (typeof global !== "undefined") {
  globalObject = global;
} else if (typeof self !== "undefined") {
  globalObject = self;
} else {
  globalObject = {};
}
module.exports = globalObject;
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],2:[function(require,module,exports){
/**
* mux.js
*
* Copyright (c) 2015 Brightcove
* All rights reserved.
*
* Reads in-band caption information from a video elementary
* stream. Captions must follow the CEA-708 standard for injection
* into an MPEG-2 transport stream.
* @see https://en.wikipedia.org/wiki/CEA-708
*/
'use strict';
// -----------------
// Link To Transport
// -----------------
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
// 0x80 is the first byte of rbsp_trailing_bits, i.e. the end of the SEI payload
RBSP_TRAILING_BITS = 128,
Stream = require('../utils/stream');
/**
 * Parse a supplemental enhancement information (SEI) NAL unit.
 * Stops parsing once a message of type ITU T T35 has been found.
 *
 * @param bytes {Uint8Array} the bytes of a SEI NAL unit
 * @return {object} the parsed SEI payload; payloadType is -1 when no
 * ITU T T35 message was present
 * @see Rec. ITU-T H.264, 7.3.2.3.1
 */
var parseSei = function(bytes) {
  var offset = 0;
  var result = {
    payloadType: -1,
    payloadSize: 0
  };
  // walk each sei_message in the sei_rbsp
  while (offset < bytes.byteLength) {
    // 0x80 marks rbsp_trailing_bits: the payload is finished
    if (bytes[offset] === RBSP_TRAILING_BITS) {
      break;
    }
    // the payload type is coded as a run of 0xFF bytes plus a final byte
    var messageType = 0;
    while (bytes[offset] === 0xFF) {
      messageType += 255;
      offset++;
    }
    messageType += bytes[offset++];
    // the payload size uses the same variable-length coding
    var messageSize = 0;
    while (bytes[offset] === 0xFF) {
      messageSize += 255;
      offset++;
    }
    messageSize += bytes[offset++];
    // keep the first (and only possible) 608/708 caption message and stop;
    // there can only ever be one caption message in a frame's sei
    if (!result.payload && messageType === USER_DATA_REGISTERED_ITU_T_T35) {
      result.payloadType = messageType;
      result.payloadSize = messageSize;
      result.payload = bytes.subarray(offset, offset + messageSize);
      break;
    }
    // otherwise skip this message's payload and continue with the next one
    offset += messageSize;
  }
  return result;
};
// see ANSI/SCTE 128-1 (2013), section 8.1
var parseUserData = function(sei) {
  var payload = sei.payload;
  // itu_t_t35_country_code must be 181 (United States) for captions
  var isUS = payload[0] === 181;
  // itu_t_t35_provider_code should be 49 (ATSC) for captions
  var isAtsc = ((payload[1] << 8) | payload[2]) === 49;
  // the user_identifier should be "GA94" to indicate ATSC1 data
  var isGa94 = String.fromCharCode(payload[3],
                                   payload[4],
                                   payload[5],
                                   payload[6]) === 'GA94';
  // finally, user_data_type_code should be 0x03 for caption data
  var isCaptionData = payload[7] === 0x03;
  if (!isUS || !isAtsc || !isGa94 || !isCaptionData) {
    return null;
  }
  // return the user_data_type_structure, stripping the trailing marker bits
  return payload.subarray(8, payload.length - 1);
};
// see CEA-708-D, section 4.4
var parseCaptionPackets = function(pts, userData) {
  var packets = [];
  // a cleared process_cc_data_flag means this block is only filler
  if (!(userData[0] & 0x40)) {
    return packets;
  }
  // cc_count: how many byte pairs follow
  var count = userData[0] & 0x1f;
  for (var i = 0; i < count; i++) {
    var base = i * 3;
    // only keep byte pairs whose cc_valid flag is set
    if (userData[base + 2] & 0x04) {
      packets.push({
        type: userData[base + 2] & 0x03,
        pts: pts,
        ccData: (userData[base + 3] << 8) | userData[base + 4]
      });
    }
  }
  return packets;
};
// Collects caption packets parsed from SEI NAL units and feeds them to a
// CEA-608 decoder, re-emitting the decoder's events.
var CaptionStream = function() {
  CaptionStream.prototype.init.call(this);
  this.captionPackets_ = [];
  this.field1_ = new Cea608Stream(); // eslint-disable-line no-use-before-define
  // forward data and done events from field1_ to this CaptionStream
  var self = this;
  ['data', 'done'].forEach(function(eventName) {
    self.field1_.on(eventName, self.trigger.bind(self, eventName));
  });
};
CaptionStream.prototype = new Stream();
// Accept a parsed NAL unit and buffer any 608/708 caption packets it carries.
CaptionStream.prototype.push = function(event) {
  // captions can only appear in SEI NAL units
  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  }
  var sei = parseSei(event.escapedRBSP);
  // everything except user_data_registered_itu_t_t35 is irrelevant here
  if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  }
  var userData = parseUserData(sei);
  if (!userData) {
    // not ATSC1 caption data
    return;
  }
  // accumulate the packets; flush() orders and decodes them later
  this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
};
// Order any buffered caption packets by PTS and push them through the
// 608 decoder, then flush the decoder itself.
CaptionStream.prototype.flush = function() {
  // nothing buffered: just flush the downstream field
  if (!this.captionPackets_.length) {
    this.field1_.flush();
    return;
  }
  // In Chrome, the Array#sort function is not stable so add a
  // presortIndex that we can use to ensure we get a stable-sort
  this.captionPackets_.forEach(function(packet, index) {
    packet.presortIndex = index;
  });
  // sort caption byte-pairs based on their PTS values
  this.captionPackets_.sort(function(a, b) {
    if (a.pts === b.pts) {
      return a.presortIndex - b.presortIndex;
    }
    return a.pts - b.pts;
  });
  // push each caption into Cea608Stream, then empty the buffer
  this.captionPackets_.forEach(this.field1_.push, this.field1_);
  this.captionPackets_.length = 0;
  this.field1_.flush();
};
// ----------------------
// Session to Application
// ----------------------
// CEA-608 byte values that do not line up with ASCII; each entry maps a
// 608 code to the Unicode code point that should actually be rendered.
var BASIC_CHARACTER_TRANSLATION = {
  0x2a: 0xe1,  // a with acute accent
  0x5c: 0xe9,  // e with acute accent
  0x5e: 0xed,  // i with acute accent
  0x5f: 0xf3,  // o with acute accent
  0x60: 0xfa,  // u with acute accent
  0x7b: 0xe7,  // c with cedilla
  0x7c: 0xf7,  // division sign
  0x7d: 0xd1,  // capital N with tilde
  0x7e: 0xf1,  // n with tilde
  0x7f: 0x2588 // full block (solid rectangle)
};
// Translate a 608 character code into a one-character string; null codes
// (placeholders for "no character") become the empty string.
var getCharFromCode = function(code) {
  if (code === null) {
    return '';
  }
  var translated = BASIC_CHARACTER_TRANSLATION[code];
  return String.fromCharCode(translated === undefined ? code : translated);
};
// Constants for the byte codes recognized by Cea608Stream. This
// list is not exhaustive. For a more comprehensive listing and
// semantics see
// http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
var PADDING = 0x0000,
    // Pop-on Mode
    RESUME_CAPTION_LOADING = 0x1420,
    END_OF_CAPTION = 0x142f,
    // Roll-up Mode
    ROLL_UP_2_ROWS = 0x1425,
    ROLL_UP_3_ROWS = 0x1426,
    ROLL_UP_4_ROWS = 0x1427,
    CARRIAGE_RETURN = 0x142d,
    // Erasure
    BACKSPACE = 0x1421,
    ERASE_DISPLAYED_MEMORY = 0x142c,
    ERASE_NON_DISPLAYED_MEMORY = 0x142e;
// the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14;
// CEA-608 captions are rendered onto a 34x15 matrix of character cells;
// a display buffer is one string per row, with the bottom row last.
var createDisplayBuffer = function() {
  var rows = [];
  for (var row = 0; row <= BOTTOM_ROW; row++) {
    rows.push('');
  }
  return rows;
};
// Decodes a stream of CEA-608 byte pairs ({type, pts, ccData} packets, as
// produced by parseCaptionPackets) into caption text; flushDisplayed emits
// the resulting {startPts, endPts, text} cues as 'data' events.
var Cea608Stream = function() {
Cea608Stream.prototype.init.call(this);
// start out in pop-on mode; control codes below switch between modes
this.mode_ = 'popOn';
// When in roll-up mode, the index of the last row that will
// actually display captions. If a caption is shifted to a row
// with a lower index than this, it is cleared from the display
// buffer
this.topRow_ = 0;
// PTS at which the caption currently on screen began displaying
this.startPts_ = 0;
// on-screen rows, and the off-screen rows composed during pop-on mode
this.displayed_ = createDisplayBuffer();
this.nonDisplayed_ = createDisplayBuffer();
// control codes are transmitted twice in a row; track the last one seen
// so the duplicate can be ignored
this.lastControlCode_ = null;
this.push = function(packet) {
// Ignore packets from other channels; only type 0 is decoded here
if (packet.type !== 0) {
return;
}
var data, swap, char0, char1;
// remove the parity bits
data = packet.ccData & 0x7f7f;
// ignore duplicate control codes
if (data === this.lastControlCode_) {
this.lastControlCode_ = null;
return;
}
// Store control codes
if ((data & 0xf000) === 0x1000) {
this.lastControlCode_ = data;
} else {
this.lastControlCode_ = null;
}
switch (data) {
case PADDING:
break;
case RESUME_CAPTION_LOADING:
this.mode_ = 'popOn';
break;
case END_OF_CAPTION:
// if a caption was being displayed, it's gone now
this.flushDisplayed(packet.pts);
// flip memory
swap = this.displayed_;
this.displayed_ = this.nonDisplayed_;
this.nonDisplayed_ = swap;
// start measuring the time to display the caption
this.startPts_ = packet.pts;
break;
case ROLL_UP_2_ROWS:
this.topRow_ = BOTTOM_ROW - 1;
this.mode_ = 'rollUp';
break;
case ROLL_UP_3_ROWS:
this.topRow_ = BOTTOM_ROW - 2;
this.mode_ = 'rollUp';
break;
case ROLL_UP_4_ROWS:
this.topRow_ = BOTTOM_ROW - 3;
this.mode_ = 'rollUp';
break;
case CARRIAGE_RETURN:
this.flushDisplayed(packet.pts);
this.shiftRowsUp_();
this.startPts_ = packet.pts;
break;
case BACKSPACE:
// delete the most recently buffered character from the active buffer
if (this.mode_ === 'popOn') {
this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
} else {
this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
}
break;
case ERASE_DISPLAYED_MEMORY:
this.flushDisplayed(packet.pts);
this.displayed_ = createDisplayBuffer();
break;
case ERASE_NON_DISPLAYED_MEMORY:
this.nonDisplayed_ = createDisplayBuffer();
break;
default:
// not a recognized global control code: treat the pair as two
// character codes
char0 = data >>> 8;
char1 = data & 0xff;
// Look for a Channel 1 Preamble Address Code
if (char0 >= 0x10 && char0 <= 0x17 &&
char1 >= 0x40 && char1 <= 0x7F &&
(char0 !== 0x10 || char1 < 0x60)) {
// Follow Safari's lead and replace the PAC with a space
char0 = 0x20;
// we only want one space so make the second character null
// which will become '' in getCharFromCode
char1 = null;
}
// Look for special character sets
if ((char0 === 0x11 || char0 === 0x19) &&
(char1 >= 0x30 && char1 <= 0x3F)) {
// Put in eighth note and space
char0 = 0x266A;
char1 = '';
}
// ignore unsupported control codes
if ((char0 & 0xf0) === 0x10) {
return;
}
// remove null chars
if (char0 === 0x00) {
char0 = null;
}
if (char1 === 0x00) {
char1 = null;
}
// character handling is dependent on the current mode
this[this.mode_](packet.pts, char0, char1);
break;
}
};
};
Cea608Stream.prototype = new Stream();
// Trigger a 'data' event carrying whatever text is currently visible in
// the display buffer, spanning [startPts_, pts].
Cea608Stream.prototype.flushDisplayed = function(pts) {
  var rows = [];
  this.displayed_.forEach(function(row) {
    // remove spaces from the start and end of each row; skip blank rows
    var text = row.trim();
    if (text.length) {
      rows.push(text);
    }
  });
  // only emit a cue when there is something to show
  if (rows.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      text: rows.join('\n')
    });
  }
};
// Mode Implementations
// Pop-on text accumulates off-screen until END_OF_CAPTION flips buffers.
Cea608Stream.prototype.popOn = function(pts, char0, char1) {
  this.nonDisplayed_[BOTTOM_ROW] += getCharFromCode(char0) + getCharFromCode(char1);
};
// Roll-up text appends directly to the visible bottom row.
Cea608Stream.prototype.rollUp = function(pts, char0, char1) {
  var row = this.displayed_[BOTTOM_ROW];
  if (row === '') {
    // a fresh line of input is starting: emit what is currently on
    // screen and restart the display timer
    this.flushDisplayed(pts);
    this.startPts_ = pts;
  }
  this.displayed_[BOTTOM_ROW] = row + getCharFromCode(char0) + getCharFromCode(char1);
};
Cea608Stream.prototype.shiftRowsUp_ = function() {
var i;
// clear out inactive rows
for (i = 0; i < this.topRow_; i++) {
this.displayed_[i] = '';
}
// shift displayed rows up
for (i = this.topRow_; i < BOTTOM_ROW; i++) {
this.displayed_[i] = this.displayed_[i + 1];
}
// clear out the bottom row
this.displayed_[BOTTOM_ROW] = '';
};
// exports
module.exports = {
CaptionStream: CaptionStream,
Cea608Stream: Cea608Stream
};
},{"../utils/stream":5}],3:[function(require,module,exports){
/**
* Accepts program elementary stream (PES) data events and parses out
* ID3 metadata from them, if present.
* @see http://id3.org/id3v2.3.0
*/
'use strict';
var
Stream = require('../utils/stream'),
StreamTypes = require('./stream-types'),
// return a percent-encoded representation of the specified byte range
// @see http://en.wikipedia.org/wiki/Percent-encoding
percentEncode = function(bytes, start, end) {
var i, result = '';
for (i = start; i < end; i++) {
result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
}
return result;
},
// return the string representation of the specified byte range,
// interpreted as UTF-8.
parseUtf8 = function(bytes, start, end) {
return decodeURIComponent(percentEncode(bytes, start, end));
},
// return the string representation of the specified byte range,
// interpreted as ISO-8859-1.
parseIso88591 = function(bytes, start, end) {
return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
},
// decode a 28-bit ID3 "syncsafe" integer: four bytes carrying seven
// significant bits each (the high bit of every byte is dropped)
parseSyncSafeInteger = function(data) {
return (data[0] << 21) |
(data[1] << 14) |
(data[2] << 7) |
(data[3]);
},
// frame-specific parsers, keyed by ID3 frame id; each mutates the frame
// object it is handed
tagParsers = {
// user-defined text information frame
TXXX: function(tag) {
var i;
if (tag.data[0] !== 3) {
// ignore frames with unrecognized character encodings
// (encoding byte 3 is the one this parser handles — presumably
// UTF-8; confirm against the ID3v2 spec)
return;
}
for (i = 1; i < tag.data.length; i++) {
if (tag.data[i] === 0) {
// parse the text fields
tag.description = parseUtf8(tag.data, 1, i);
// do not include the null terminator in the tag value
tag.value = parseUtf8(tag.data, i + 1, tag.data.length - 1);
break;
}
}
tag.data = tag.value;
},
// user-defined URL link frame
WXXX: function(tag) {
var i;
if (tag.data[0] !== 3) {
// ignore frames with unrecognized character encodings
return;
}
for (i = 1; i < tag.data.length; i++) {
if (tag.data[i] === 0) {
// parse the description and URL fields
tag.description = parseUtf8(tag.data, 1, i);
tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
break;
}
}
},
// private frame: a null-terminated owner identifier followed by opaque bytes
PRIV: function(tag) {
var i;
for (i = 0; i < tag.data.length; i++) {
if (tag.data[i] === 0) {
// parse the description and URL fields
tag.owner = parseIso88591(tag.data, 0, i);
break;
}
}
tag.privateData = tag.data.subarray(i + 1);
tag.data = tag.privateData;
}
},
MetadataStream;
// Buffers 'timed-metadata' PES chunks until a complete ID3 tag has arrived,
// parses its frames, and emits the parsed tag as a 'data' event.
MetadataStream = function(options) {
var
settings = {
debug: !!(options && options.debug),
// the bytes of the program-level descriptor field in MP2T
// see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
// program element descriptors"
descriptor: options && options.descriptor
},
// the total size in bytes of the ID3 tag being parsed
tagSize = 0,
// tag data that is not complete enough to be parsed
buffer = [],
// the total number of bytes currently in the buffer
bufferSize = 0,
i;
MetadataStream.prototype.init.call(this);
// calculate the text track in-band metadata track dispatch type
// https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
if (settings.descriptor) {
for (i = 0; i < settings.descriptor.length; i++) {
this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
}
}
this.push = function(chunk) {
var tag, frameStart, frameSize, frame, i, frameHeader;
if (chunk.type !== 'timed-metadata') {
return;
}
// if data_alignment_indicator is set in the PES header,
// we must have the start of a new ID3 tag. Assume anything
// remaining in the buffer was malformed and throw it out
if (chunk.dataAlignmentIndicator) {
bufferSize = 0;
buffer.length = 0;
}
// ignore events that don't look like ID3 data (a fresh tag must begin
// with the "ID3" file identifier)
if (buffer.length === 0 &&
(chunk.data.length < 10 ||
chunk.data[0] !== 'I'.charCodeAt(0) ||
chunk.data[1] !== 'D'.charCodeAt(0) ||
chunk.data[2] !== '3'.charCodeAt(0))) {
if (settings.debug) {
// eslint-disable-next-line no-console
console.log('Skipping unrecognized metadata packet');
}
return;
}
// add this chunk to the data we've collected so far
buffer.push(chunk);
bufferSize += chunk.data.byteLength;
// grab the size of the entire frame from the ID3 header
if (buffer.length === 1) {
// the frame size is transmitted as a 28-bit integer in the
// last four bytes of the ID3 header.
// The most significant bit of each byte is dropped and the
// results concatenated to recover the actual value.
tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));
// ID3 reports the tag size excluding the header but it's more
// convenient for our comparisons to include it
tagSize += 10;
}
// if the entire frame has not arrived, wait for more data
if (bufferSize < tagSize) {
return;
}
// collect the entire frame so it can be parsed
tag = {
data: new Uint8Array(tagSize),
frames: [],
pts: buffer[0].pts,
dts: buffer[0].dts
};
// copy the buffered chunks into one contiguous byte array, consuming
// the buffer as we go
for (i = 0; i < tagSize;) {
tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
i += buffer[0].data.byteLength;
bufferSize -= buffer[0].data.byteLength;
buffer.shift();
}
// find the start of the first frame and the end of the tag
frameStart = 10;
// flag byte bit 0x40 signals that an extended header is present
if (tag.data[5] & 0x40) {
// advance the frame start past the extended header
frameStart += 4; // header size field
frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));
// clip any padding off the end
tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
}
// parse one or more ID3 frames
// http://id3.org/id3v2.3.0#ID3v2_frame_overview
do {
// determine the number of bytes in this frame
frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
if (frameSize < 1) {
// eslint-disable-next-line no-console
return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
}
// the frame id is the first four bytes of the frame header
frameHeader = String.fromCharCode(tag.data[frameStart],
tag.data[frameStart + 1],
tag.data[frameStart + 2],
tag.data[frameStart + 3]);
frame = {
id: frameHeader,
data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
};
frame.key = frame.id;
// run the frame-specific parser, when one exists for this frame id
if (tagParsers[frame.id]) {
tagParsers[frame.id](frame);
// handle the special PRIV frame used to indicate the start
// time for raw AAC data
if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
var
d = frame.data,
// reassemble the timestamp value spread across payload bytes 3-7
size = ((d[3] & 0x01) << 30) |
(d[4] << 22) |
(d[5] << 14) |
(d[6] << 6) |
(d[7] >>> 2);
size *= 4;
size += d[7] & 0x03;
frame.timeStamp = size;
// in raw AAC, all subsequent data will be timestamped based
// on the value of this frame
// we couldn't have known the appropriate pts and dts before
// parsing this ID3 tag so set those values now
if (tag.pts === undefined && tag.dts === undefined) {
tag.pts = frame.timeStamp;
tag.dts = frame.timeStamp;
}
this.trigger('timestamp', frame);
}
}
tag.frames.push(frame);
frameStart += 10; // advance past the frame header
frameStart += frameSize; // advance past the frame body
} while (frameStart < tagSize);
this.trigger('data', tag);
};
};
MetadataStream.prototype = new Stream();
module.exports = MetadataStream;
},{"../utils/stream":5,"./stream-types":4}],4:[function(require,module,exports){
'use strict';
// MPEG-2 transport stream stream_type values recognized by the demuxer
// (see ISO/IEC 13818-1, Table 2-34)
module.exports = {
H264_STREAM_TYPE: 0x1B, // H.264/AVC video
ADTS_STREAM_TYPE: 0x0F, // AAC audio in ADTS framing
METADATA_STREAM_TYPE: 0x15 // timed ID3 metadata carried in PES
};
},{}],5:[function(require,module,exports){
/**
* mux.js
*
* Copyright (c) 2014 Brightcove
* All rights reserved.
*
* A lightweight readable stream implementation that handles event dispatching.
* Objects that inherit from streams should call init in their constructors.
*/
'use strict';
/**
 * A lightweight readable stream implementation that handles event
 * dispatching. Objects that inherit from streams should call init in
 * their constructors.
 */
var Stream = function() {
  this.init = function() {
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */
    this.on = function(type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }
      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     * @return {boolean} whether the listener was found and removed
     */
    this.off = function(type, listener) {
      var index;
      if (!listeners[type]) {
        return false;
      }
      index = listeners[type].indexOf(listener);
      // copy before mutating (presumably so handler lists captured by an
      // in-flight trigger() are not modified underneath it — the `on`
      // path above is copy-on-write for the same reason)
      listeners[type] = listeners[type].slice();
      // BUGFIX: only splice when the listener was actually registered;
      // splice(-1, 1) would otherwise remove the LAST listener whenever
      // an unknown listener was passed in
      if (index > -1) {
        listeners[type].splice(index, 1);
      }
      return index > -1;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */
    this.trigger = function(type) {
      var callbacks, i, length, args;
      callbacks = listeners[type];
      if (!callbacks) {
        return;
      }
      // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument
      if (arguments.length === 2) {
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        args = [];
        for (i = 1; i < arguments.length; ++i) {
          args.push(arguments[i]);
        }
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].apply(this, args);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */
    this.dispose = function() {
      listeners = {};
    };
  };
};
/**
 * Forwards all `data` events on this stream to the destination stream. The
 * destination stream should provide a method `push` to receive the data
 * events as they arrive.
 * @param destination {stream} the stream that will receive all `data` events
 * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
 */
Stream.prototype.pipe = function(destination) {
  this.on('data', function(chunk) {
    destination.push(chunk);
  });
  this.on('done', function(flushSource) {
    destination.flush(flushSource);
  });
  return destination;
};
// Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above.
Stream.prototype.push = function(data) {
  this.trigger('data', data);
};
Stream.prototype.flush = function(flushSource) {
  this.trigger('done', flushSource);
};
module.exports = Stream;
},{}],6:[function(require,module,exports){
(function (global){
'use strict';
exports.__esModule = true;

@@ -18,4 +875,136 @@

var _window = require('global/window');
var _window2 = _interopRequireDefault(_window);
var _captionStream = require('mux.js/lib/m2ts/caption-stream');
var _metadataStream = require('mux.js/lib/m2ts/metadata-stream');
var _metadataStream2 = _interopRequireDefault(_metadataStream);
// Pass real ES-module namespaces through untouched; wrap CommonJS exports
// so they can be consumed uniformly via `.default`.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) {
    return obj;
  }
  return { default: obj };
}
/**
 * Define properties on a cue for backwards compatibility,
 * but warn the user that the way that they are using them
 * is deprecated and will be removed at a later date.
 *
 * @param {Cue} cue the cue to add the properties on
 * @private
 */
var deprecateOldCue = function deprecateOldCue(cue) {
  // install a warning getter on cue.frame for one legacy property
  var addDeprecatedGetter = function(prop, message, read) {
    Object.defineProperty(cue.frame, prop, {
      get: function() {
        _video2.default.log.warn(message);
        return read();
      }
    });
  };
  addDeprecatedGetter('id',
    'cue.frame.id is deprecated. Use cue.value.key instead.',
    function() { return cue.value.key; });
  addDeprecatedGetter('value',
    'cue.frame.value is deprecated. Use cue.value.data instead.',
    function() { return cue.value.data; });
  addDeprecatedGetter('privateData',
    'cue.frame.privateData is deprecated. Use cue.value.data instead.',
    function() { return cue.value.data; });
};
/**
 * Remove any existing remote text track matching the given kind and label
 * from the tech.
 *
 * @param {Object} tech the tech whose remote text tracks should be pruned
 * @param {string} kind the track kind to match (e.g. 'captions')
 * @param {string} label the track label to match (e.g. 'cc1')
 */
var removeExistingTrack = function removeExistingTrack(tech, kind, label) {
  var tracks = tech.remoteTextTracks() || [];
  // BUGFIX: iterate backwards. removeRemoteTextTrack removes the track
  // from the (live) track list, so a forward loop would skip the element
  // immediately after each removal and could leave matching tracks behind.
  for (var i = tracks.length - 1; i >= 0; i--) {
    var track = tracks[i];
    if (track.kind === kind && track.label === label) {
      tech.removeRemoteTextTrack(track);
    }
  }
};
/**
 * Convert a binary string (e.g. the result of window.atob) into a
 * Uint8Array containing one byte per character.
 *
 * @param {string} data the string to convert
 * @return {Uint8Array} the char codes of `data`
 */
var stringToByteArray = function stringToByteArray(data) {
  var length = data.length;
  var bytes = new Uint8Array(length);
  for (var index = 0; index < length; index++) {
    bytes[index] = data.charCodeAt(index);
  }
  return bytes;
};
// see CEA-708-D, section 4.4
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var packets = [];
  // a cleared process_cc_data_flag means this block carries no caption data
  if (!(userData[0] & 0x40)) {
    return packets;
  }
  // cc_count: number of byte pairs in this block
  var count = userData[0] & 0x1f;
  for (var i = 0; i < count; i++) {
    var base = i * 3;
    // only keep byte pairs whose cc_valid flag is set
    if (userData[base + 2] & 0x04) {
      packets.push({
        type: userData[base + 2] & 0x03,
        pts: pts,
        ccData: userData[base + 3] << 8 | userData[base + 4]
      });
    }
  }
  return packets;
};
/**
 * Remove cues from a track on video.js.
 *
 * @param {Double} start start of where we should remove the cue
 * @param {Double} end end of where we should remove the cue
 * @param {Object} track the text track to remove the cues from
 * @private
 */
var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  if (!track || !track.cues) {
    return;
  }
  // walk backwards so removals do not disturb the remaining indices
  for (var index = track.cues.length - 1; index >= 0; index--) {
    var cue = track.cues[index];
    // remove any cue overlapping [start, end]
    if (cue.startTime <= end && cue.endTime >= start) {
      track.removeCue(cue);
    }
  }
};
/*

@@ -76,9 +1065,143 @@ * Registers the SWF as a handler for HLS video.

// Wire the Flash tech up to this handler: translate the swf's 'captiondata'
// events into CEA-608 caption cues and its 'id3updated' events into a
// timed-metadata text track.
FlashlsSourceHandler.handleSource = function (source, tech, options) {
this.tech = tech;
var cea608Stream = new _captionStream.Cea608Stream();
var metadataStream = new _metadataStream2.default();
// 608 byte pairs accumulated between 'captiondata' events
var captionPackets = [];
var inbandTextTrack = void 0;
var metadataTrack = void 0;
this.onSeeked = function () {
// metadata cues are regenerated from id3 events, so drop them all
removeCuesFromTrack(0, Infinity, metadataTrack);
var buffered = tech.buffered();
// keep caption cues only for the currently buffered range
if (buffered.length === 1) {
removeCuesFromTrack(0, buffered.start(0), inbandTextTrack);
removeCuesFromTrack(buffered.end(0), Infinity, inbandTextTrack);
} else {
removeCuesFromTrack(0, Infinity, inbandTextTrack);
}
};
this.onId3updated = function (event, data) {
// data[0] is base64-encoded ID3 bytes; decode and feed the metadata stream
var id3tag = _window2.default.atob(data[0]);
var bytes = stringToByteArray(id3tag);
var chunk = {
type: 'timed-metadata',
dataAlignmentIndicator: true,
data: bytes
};
metadataStream.push(chunk);
};
metadataStream.on('data', function (tag) {
// lazily create the metadata track the first time a tag arrives
if (!metadataTrack) {
metadataTrack = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, true).track;
metadataTrack.inBandMetadataTrackDispatchType = '';
}
// cues are stamped with the current playback time rather than the tag PTS
var time = tech.currentTime();
tag.frames.forEach(function (frame) {
var cue = new _window2.default.VTTCue(time, time + 0.1, frame.value || frame.url || frame.data || '');
cue.frame = frame;
cue.value = frame;
// attach the legacy (deprecated) cue.frame.* getters
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
// stretch each cue's end time to the start of the next cue (and the last
// cue to the duration) so a tag stays "active" until the next one begins
if (metadataTrack.cues && metadataTrack.cues.length) {
var cues = metadataTrack.cues;
var cuesArray = [];
var duration = tech.duration();
// live/unknown durations become a sentinel so the last cue never expires
if (isNaN(duration) || Math.abs(duration) === Infinity) {
duration = Number.MAX_VALUE;
}
for (var i = 0; i < cues.length; i++) {
cuesArray.push(cues[i]);
}
cuesArray.sort(function (a, b) {
return a.startTime - b.startTime;
});
for (var _i = 0; _i < cuesArray.length - 1; _i++) {
if (cuesArray[_i].endTime !== cuesArray[_i + 1].startTime) {
cuesArray[_i].endTime = cuesArray[_i + 1].startTime;
}
}
cuesArray[cuesArray.length - 1].endTime = duration;
}
});
cea608Stream.on('data', function (caption) {
if (caption) {
// lazily create the caption track, replacing any stale 'cc1' track
if (!inbandTextTrack) {
removeExistingTrack(tech, 'captions', 'cc1');
inbandTextTrack = tech.addRemoteTextTrack({
kind: 'captions',
label: 'cc1'
}, true).track;
}
// caption PTS values are 90kHz clock ticks; VTTCue wants seconds
inbandTextTrack.addCue(new _window2.default.VTTCue(caption.startPts / 90000, caption.endPts / 90000, caption.text));
}
});
this.onCaptiondata = function (event, data) {
// d.pos appears to be seconds (scaled by 90000 into 90kHz PTS ticks —
// confirm against the flashls API); d.data is base64 cc_data bytes
var captions = data[0].map(function (d) {
return {
pts: d.pos * 90000,
bytes: stringToByteArray(_window2.default.atob(d.data))
};
});
captions.forEach(function (caption) {
captionPackets = captionPackets.concat(parseCaptionPackets(caption.pts, caption.bytes));
});
if (captionPackets.length) {
// In Chrome, the Array#sort function is not stable so add a
// presortIndex that we can use to ensure we get a stable-sort
captionPackets.forEach(function (elem, idx) {
elem.presortIndex = idx;
});
// sort caption byte-pairs based on their PTS values
captionPackets.sort(function (a, b) {
if (a.pts === b.pts) {
return a.presortIndex - b.presortIndex;
}
return a.pts - b.pts;
});
// Push each caption into Cea608Stream
captionPackets.forEach(cea608Stream.push, cea608Stream);
captionPackets.length = 0;
cea608Stream.flush();
}
};
tech.on('seeked', this.onSeeked);
tech.on('id3updated', this.onId3updated);
tech.on('captiondata', this.onCaptiondata);
tech.setSrc(source.src);
};
/**
* No extra cleanup is necessary on dispose.
*/
FlashlsSourceHandler.dispose = function () {};
// NOTE(review): this diff view shows both the 1.0.0 no-op above and the
// 1.1.0 implementation below; in the flattened text the later assignment
// wins, detaching the tech listeners installed by handleSource.
FlashlsSourceHandler.dispose = function () {
this.tech.off('seeked', this.onSeeked);
this.tech.off('id3updated', this.onId3updated);
this.tech.off('captiondata', this.onCaptiondata);
};

@@ -93,7 +1216,7 @@ // Register the source handler and make sure it takes precedence over

// Include the version number.
FlashlsSourceHandler.VERSION = '1.0.0';
FlashlsSourceHandler.VERSION = '1.1.0';
exports.default = FlashlsSourceHandler;
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}]},{},[1])(1)
},{"global/window":1,"mux.js/lib/m2ts/caption-stream":2,"mux.js/lib/m2ts/metadata-stream":3}]},{},[6])(6)
});

2

dist/videojs-flashls-source-handler.min.js

@@ -1,1 +0,1 @@

!function e(n,t,o){function r(u,f){if(!t[u]){if(!n[u]){var d="function"==typeof require&&require;if(!f&&d)return d(u,!0);if(i)return i(u,!0);var a=new Error("Cannot find module '"+u+"'");throw a.code="MODULE_NOT_FOUND",a}var l=t[u]={exports:{}};n[u][0].call(l.exports,function(e){var t=n[u][1][e];return r(t?t:e)},l,l.exports,e,n,t,o)}return t[u].exports}for(var i="function"==typeof require&&require,u=0;u<o.length;u++)r(o[u]);return r}({1:[function(e,n,t){(function(e){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}t.__esModule=!0;var o="undefined"!=typeof window?window.videojs:"undefined"!=typeof e?e.videojs:null,r=n(o),i={},u=/^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;i.canPlayType=function(e){return u.test(e)?"maybe":""},i.canHandleSource=function(e,n){return"maybe"===i.canPlayType(e.type)},i.handleSource=function(e,n,t){n.setSrc(e.src)},i.dispose=function(){},r.default.getTech("Flash").registerSourceHandler(i,0),r.default.options.flash.swf="https://players.brightcove.net/videojs-flashls/video-js.swf",i.VERSION="1.0.0",t.default=i}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}]},{},[1]);
!function t(e,a,n){function i(r,s){if(!a[r]){if(!e[r]){var d="function"==typeof require&&require;if(!s&&d)return d(r,!0);if(o)return o(r,!0);var p=new Error("Cannot find module '"+r+"'");throw p.code="MODULE_NOT_FOUND",p}var u=a[r]={exports:{}};e[r][0].call(u.exports,function(t){var a=e[r][1][t];return i(a?a:t)},u,u.exports,t,e,a,n)}return a[r].exports}for(var o="function"==typeof require&&require,r=0;r<n.length;r++)i(n[r]);return i}({1:[function(t,e,a){(function(t){"undefined"!=typeof window?e.exports=window:"undefined"!=typeof t?e.exports=t:"undefined"!=typeof self?e.exports=self:e.exports={}}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],2:[function(t,e,a){"use strict";var n=4,i=128,o=t(5),r=function(t){for(var e=0,a={payloadType:-1,payloadSize:0},o=0,r=0;e<t.byteLength&&t[e]!==i;){for(;255===t[e];)o+=255,e++;for(o+=t[e++];255===t[e];)r+=255,e++;if(r+=t[e++],!a.payload&&o===n){a.payloadType=o,a.payloadSize=r,a.payload=t.subarray(e,e+r);break}e+=r,o=0,r=0}return a},s=function(t){return 181!==t.payload[0]?null:49!==(t.payload[1]<<8|t.payload[2])?null:"GA94"!==String.fromCharCode(t.payload[3],t.payload[4],t.payload[5],t.payload[6])?null:3!==t.payload[7]?null:t.payload.subarray(8,t.payload.length-1)},d=function(t,e){var a,n,i,o,r=[];if(!(64&e[0]))return r;for(n=31&e[0],a=0;a<n;a++)i=3*a,o={type:3&e[i+2],pts:t},4&e[i+2]&&(o.ccData=e[i+3]<<8|e[i+4],r.push(o));return r},p=function(){p.prototype.init.call(this),this.captionPackets_=[],this.field1_=new C,this.field1_.on("data",this.trigger.bind(this,"data")),this.field1_.on("done",this.trigger.bind(this,"done"))};p.prototype=new o,p.prototype.push=function(t){var e,a;"sei_rbsp"===t.nalUnitType&&(e=r(t.escapedRBSP),e.payloadType===n&&(a=s(e),a&&(this.captionPackets_=this.captionPackets_.concat(d(t.pts,a)))))},p.prototype.flush=function(){return 
this.captionPackets_.length?(this.captionPackets_.forEach(function(t,e){t.presortIndex=e}),this.captionPackets_.sort(function(t,e){return t.pts===e.pts?t.presortIndex-e.presortIndex:t.pts-e.pts}),this.captionPackets_.forEach(this.field1_.push,this.field1_),this.captionPackets_.length=0,void this.field1_.flush()):void this.field1_.flush()};var u={42:225,92:233,94:237,95:243,96:250,123:231,124:247,125:209,126:241,127:9608},l=function(t){return null===t?"":(t=u[t]||t,String.fromCharCode(t))},f=0,c=5152,h=5167,y=5157,g=5158,v=5159,_=5165,m=5153,b=5164,T=5166,w=14,k=function(){for(var t=[],e=w+1;e--;)t.push("");return t},C=function(){C.prototype.init.call(this),this.mode_="popOn",this.topRow_=0,this.startPts_=0,this.displayed_=k(),this.nonDisplayed_=k(),this.lastControlCode_=null,this.push=function(t){if(0===t.type){var e,a,n,i;if(e=32639&t.ccData,e===this.lastControlCode_)return void(this.lastControlCode_=null);switch(4096===(61440&e)?this.lastControlCode_=e:this.lastControlCode_=null,e){case f:break;case c:this.mode_="popOn";break;case h:this.flushDisplayed(t.pts),a=this.displayed_,this.displayed_=this.nonDisplayed_,this.nonDisplayed_=a,this.startPts_=t.pts;break;case y:this.topRow_=w-1,this.mode_="rollUp";break;case g:this.topRow_=w-2,this.mode_="rollUp";break;case v:this.topRow_=w-3,this.mode_="rollUp";break;case _:this.flushDisplayed(t.pts),this.shiftRowsUp_(),this.startPts_=t.pts;break;case m:"popOn"===this.mode_?this.nonDisplayed_[w]=this.nonDisplayed_[w].slice(0,-1):this.displayed_[w]=this.displayed_[w].slice(0,-1);break;case b:this.flushDisplayed(t.pts),this.displayed_=k();break;case T:this.nonDisplayed_=k();break;default:if(n=e>>>8,i=255&e,n>=16&&n<=23&&i>=64&&i<=127&&(16!==n||i<96)&&(n=32,i=null),(17===n||25===n)&&i>=48&&i<=63&&(n=9834,i=""),16===(240&n))return;0===n&&(n=null),0===i&&(i=null),this[this.mode_](t.pts,n,i)}}}};C.prototype=new o,C.prototype.flushDisplayed=function(t){var e=this.displayed_.map(function(t){return 
t.trim()}).filter(function(t){return t.length}).join("\n");e.length&&this.trigger("data",{startPts:this.startPts_,endPts:t,text:e})},C.prototype.popOn=function(t,e,a){var n=this.nonDisplayed_[w];n+=l(e),n+=l(a),this.nonDisplayed_[w]=n},C.prototype.rollUp=function(t,e,a){var n=this.displayed_[w];""===n&&(this.flushDisplayed(t),this.startPts_=t),n+=l(e),n+=l(a),this.displayed_[w]=n},C.prototype.shiftRowsUp_=function(){var t;for(t=0;t<this.topRow_;t++)this.displayed_[t]="";for(t=this.topRow_;t<w;t++)this.displayed_[t]=this.displayed_[t+1];this.displayed_[w]=""},e.exports={CaptionStream:p,Cea608Stream:C}},{}],3:[function(t,e,a){"use strict";var n,i=t(5),o=t(4),r=function(t,e,a){var n,i="";for(n=e;n<a;n++)i+="%"+("00"+t[n].toString(16)).slice(-2);return i},s=function(t,e,a){return decodeURIComponent(r(t,e,a))},d=function(t,e,a){return unescape(r(t,e,a))},p=function(t){return t[0]<<21|t[1]<<14|t[2]<<7|t[3]},u={TXXX:function(t){var e;if(3===t.data[0]){for(e=1;e<t.data.length;e++)if(0===t.data[e]){t.description=s(t.data,1,e),t.value=s(t.data,e+1,t.data.length-1);break}t.data=t.value}},WXXX:function(t){var e;if(3===t.data[0])for(e=1;e<t.data.length;e++)if(0===t.data[e]){t.description=s(t.data,1,e),t.url=s(t.data,e+1,t.data.length);break}},PRIV:function(t){var e;for(e=0;e<t.data.length;e++)if(0===t.data[e]){t.owner=d(t.data,0,e);break}t.privateData=t.data.subarray(e+1),t.data=t.privateData}};n=function(t){var e,a={debug:!(!t||!t.debug),descriptor:t&&t.descriptor},i=0,r=[],s=0;if(n.prototype.init.call(this),this.dispatchType=o.METADATA_STREAM_TYPE.toString(16),a.descriptor)for(e=0;e<a.descriptor.length;e++)this.dispatchType+=("00"+a.descriptor[e].toString(16)).slice(-2);this.push=function(t){var e,n,o,d,l,f;if("timed-metadata"===t.type){if(t.dataAlignmentIndicator&&(s=0,r.length=0),0===r.length&&(t.data.length<10||t.data[0]!=="I".charCodeAt(0)||t.data[1]!=="D".charCodeAt(0)||t.data[2]!=="3".charCodeAt(0)))return void(a.debug&&console.log("Skipping unrecognized metadata 
packet"));if(r.push(t),s+=t.data.byteLength,1===r.length&&(i=p(t.data.subarray(6,10)),i+=10),!(s<i)){for(e={data:new Uint8Array(i),frames:[],pts:r[0].pts,dts:r[0].dts},l=0;l<i;)e.data.set(r[0].data.subarray(0,i-l),l),l+=r[0].data.byteLength,s-=r[0].data.byteLength,r.shift();n=10,64&e.data[5]&&(n+=4,n+=p(e.data.subarray(10,14)),i-=p(e.data.subarray(16,20)));do{if(o=p(e.data.subarray(n+4,n+8)),o<1)return console.log("Malformed ID3 frame encountered. Skipping metadata parsing.");if(f=String.fromCharCode(e.data[n],e.data[n+1],e.data[n+2],e.data[n+3]),d={id:f,data:e.data.subarray(n+10,n+o+10)},d.key=d.id,u[d.id]&&(u[d.id](d),"com.apple.streaming.transportStreamTimestamp"===d.owner)){var c=d.data,h=(1&c[3])<<30|c[4]<<22|c[5]<<14|c[6]<<6|c[7]>>>2;h*=4,h+=3&c[7],d.timeStamp=h,void 0===e.pts&&void 0===e.dts&&(e.pts=d.timeStamp,e.dts=d.timeStamp),this.trigger("timestamp",d)}e.frames.push(d),n+=10,n+=o}while(n<i);this.trigger("data",e)}}}},n.prototype=new i,e.exports=n},{}],4:[function(t,e,a){"use strict";e.exports={H264_STREAM_TYPE:27,ADTS_STREAM_TYPE:15,METADATA_STREAM_TYPE:21}},{}],5:[function(t,e,a){"use strict";var n=function(){this.init=function(){var t={};this.on=function(e,a){t[e]||(t[e]=[]),t[e]=t[e].concat(a)},this.off=function(e,a){var n;return!!t[e]&&(n=t[e].indexOf(a),t[e]=t[e].slice(),t[e].splice(n,1),n>-1)},this.trigger=function(e){var a,n,i,o;if(a=t[e])if(2===arguments.length)for(i=a.length,n=0;n<i;++n)a[n].call(this,arguments[1]);else{for(o=[],n=arguments.length,n=1;n<arguments.length;++n)o.push(arguments[n]);for(i=a.length,n=0;n<i;++n)a[n].apply(this,o)}},this.dispose=function(){t={}}}};n.prototype.pipe=function(t){return this.on("data",function(e){t.push(e)}),this.on("done",function(e){t.flush(e)}),t},n.prototype.push=function(t){this.trigger("data",t)},n.prototype.flush=function(t){this.trigger("done",t)},e.exports=n},{}],6:[function(t,e,a){(function(e){"use strict";function n(t){return t&&t.__esModule?t:{default:t}}a.__esModule=!0;var 
i="undefined"!=typeof window?window.videojs:"undefined"!=typeof e?e.videojs:null,o=n(i),r=t(1),s=n(r),d=t(2),p=t(3),u=n(p),l=function(t){Object.defineProperties(t.frame,{id:{get:function(){return o.default.log.warn("cue.frame.id is deprecated. Use cue.value.key instead."),t.value.key}},value:{get:function(){return o.default.log.warn("cue.frame.value is deprecated. Use cue.value.data instead."),t.value.data}},privateData:{get:function(){return o.default.log.warn("cue.frame.privateData is deprecated. Use cue.value.data instead."),t.value.data}}})},f=function(t,e,a){for(var n=t.remoteTextTracks()||[],i=0;i<n.length;i++){var o=n[i];o.kind===e&&o.label===a&&t.removeRemoteTextTrack(o)}},c=function(t){for(var e=new Uint8Array(t.length),a=0;a<t.length;a++)e[a]=t.charCodeAt(a);return e},h=function(t,e){var a=[],n=void 0,i=void 0,o=void 0,r=void 0;if(!(64&e[0]))return a;for(i=31&e[0],n=0;n<i;n++)o=3*n,r={type:3&e[o+2],pts:t},4&e[o+2]&&(r.ccData=e[o+3]<<8|e[o+4],a.push(r));return a},y=function(t,e,a){var n=void 0,i=void 0;if(a&&a.cues)for(n=a.cues.length;n--;)i=a.cues[n],i.startTime<=e&&i.endTime>=t&&a.removeCue(i)},g={},v=/^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;g.canPlayType=function(t){return v.test(t)?"maybe":""},g.canHandleSource=function(t,e){return"maybe"===g.canPlayType(t.type)},g.handleSource=function(t,e,a){this.tech=e;var n=new d.Cea608Stream,i=new u.default,o=[],r=void 0,p=void 0;this.onSeeked=function(){y(0,1/0,p);var t=e.buffered();1===t.length?(y(0,t.start(0),r),y(t.end(0),1/0,r)):y(0,1/0,r)},this.onId3updated=function(t,e){var a=s.default.atob(e[0]),n=c(a),o={type:"timed-metadata",dataAlignmentIndicator:!0,data:n};i.push(o)},i.on("data",function(t){p||(p=e.addRemoteTextTrack({kind:"metadata",label:"Timed Metadata"},!0).track,p.inBandMetadataTrackDispatchType="");var a=e.currentTime();if(t.frames.forEach(function(t){var e=new s.default.VTTCue(a,a+.1,t.value||t.url||t.data||"");e.frame=t,e.value=t,l(e),p.addCue(e)}),p.cues&&p.cues.length){var 
n=p.cues,i=[],o=e.duration();(isNaN(o)||Math.abs(o)===1/0)&&(o=Number.MAX_VALUE);for(var r=0;r<n.length;r++)i.push(n[r]);i.sort(function(t,e){return t.startTime-e.startTime});for(var d=0;d<i.length-1;d++)i[d].endTime!==i[d+1].startTime&&(i[d].endTime=i[d+1].startTime);i[i.length-1].endTime=o}}),n.on("data",function(t){t&&(r||(f(e,"captions","cc1"),r=e.addRemoteTextTrack({kind:"captions",label:"cc1"},!0).track),r.addCue(new s.default.VTTCue(t.startPts/9e4,t.endPts/9e4,t.text)))}),this.onCaptiondata=function(t,e){var a=e[0].map(function(t){return{pts:9e4*t.pos,bytes:c(s.default.atob(t.data))}});a.forEach(function(t){o=o.concat(h(t.pts,t.bytes))}),o.length&&(o.forEach(function(t,e){t.presortIndex=e}),o.sort(function(t,e){return t.pts===e.pts?t.presortIndex-e.presortIndex:t.pts-e.pts}),o.forEach(n.push,n),o.length=0,n.flush())},e.on("seeked",this.onSeeked),e.on("id3updated",this.onId3updated),e.on("captiondata",this.onCaptiondata),e.setSrc(t.src)},g.dispose=function(){this.tech.off("seeked",this.onSeeked),this.tech.off("id3updated",this.onId3updated),this.tech.off("captiondata",this.onCaptiondata)},o.default.getTech("Flash").registerSourceHandler(g,0),o.default.options.flash.swf="https://players.brightcove.net/videojs-flashls/video-js.swf",g.VERSION="1.1.0",a.default=g}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}]},{},[6]);

@@ -9,4 +9,136 @@ 'use strict';

var _window = require('global/window');
var _window2 = _interopRequireDefault(_window);
var _captionStream = require('mux.js/lib/m2ts/caption-stream');
var _metadataStream = require('mux.js/lib/m2ts/metadata-stream');
var _metadataStream2 = _interopRequireDefault(_metadataStream);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Define properties on a cue for backwards compatability,
* but warn the user that the way that they are using it
* is depricated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
var deprecateOldCue = function deprecateOldCue(cue) {
Object.defineProperties(cue.frame, {
id: {
get: function get() {
_video2.default.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
return cue.value.key;
}
},
value: {
get: function get() {
_video2.default.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
},
privateData: {
get: function get() {
_video2.default.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
}
});
};
/**
* Remove text track from tech
*/
var removeExistingTrack = function removeExistingTrack(tech, kind, label) {
var tracks = tech.remoteTextTracks() || [];
for (var i = 0; i < tracks.length; i++) {
var track = tracks[i];
if (track.kind === kind && track.label === label) {
tech.removeRemoteTextTrack(track);
}
}
};
/**
* convert a string to a byte array of char codes
*/
var stringToByteArray = function stringToByteArray(data) {
var bytes = new Uint8Array(data.length);
for (var i = 0; i < data.length; i++) {
bytes[i] = data.charCodeAt(i);
}
return bytes;
};
// see CEA-708-D, section 4.4
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
var results = [];
var i = void 0;
var count = void 0;
var offset = void 0;
var data = void 0;
// if this is just filler, return immediately
if (!(userData[0] & 0x40)) {
return results;
}
// parse out the cc_data_1 and cc_data_2 fields
count = userData[0] & 0x1f;
for (i = 0; i < count; i++) {
offset = i * 3;
data = {
type: userData[offset + 2] & 0x03,
pts: pts
};
// capture cc data when cc_valid is 1
if (userData[offset + 2] & 0x04) {
data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
results.push(data);
}
}
return results;
};
/**
* Remove cues from a track on video.js.
*
* @param {Double} start start of where we should remove the cue
* @param {Double} end end of where the we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
var i = void 0;
var cue = void 0;
if (!track) {
return;
}
if (!track.cues) {
return;
}
i = track.cues.length;
while (i--) {
cue = track.cues[i];
// Remove any overlapping cue
if (cue.startTime <= end && cue.endTime >= start) {
track.removeCue(cue);
}
}
};
/*

@@ -67,9 +199,143 @@ * Registers the SWF as a handler for HLS video.

FlashlsSourceHandler.handleSource = function (source, tech, options) {
this.tech = tech;
var cea608Stream = new _captionStream.Cea608Stream();
var metadataStream = new _metadataStream2.default();
var captionPackets = [];
var inbandTextTrack = void 0;
var metadataTrack = void 0;
this.onSeeked = function () {
removeCuesFromTrack(0, Infinity, metadataTrack);
var buffered = tech.buffered();
if (buffered.length === 1) {
removeCuesFromTrack(0, buffered.start(0), inbandTextTrack);
removeCuesFromTrack(buffered.end(0), Infinity, inbandTextTrack);
} else {
removeCuesFromTrack(0, Infinity, inbandTextTrack);
}
};
this.onId3updated = function (event, data) {
var id3tag = _window2.default.atob(data[0]);
var bytes = stringToByteArray(id3tag);
var chunk = {
type: 'timed-metadata',
dataAlignmentIndicator: true,
data: bytes
};
metadataStream.push(chunk);
};
metadataStream.on('data', function (tag) {
if (!metadataTrack) {
metadataTrack = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, true).track;
metadataTrack.inBandMetadataTrackDispatchType = '';
}
var time = tech.currentTime();
tag.frames.forEach(function (frame) {
var cue = new _window2.default.VTTCue(time, time + 0.1, frame.value || frame.url || frame.data || '');
cue.frame = frame;
cue.value = frame;
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
if (metadataTrack.cues && metadataTrack.cues.length) {
var cues = metadataTrack.cues;
var cuesArray = [];
var duration = tech.duration();
if (isNaN(duration) || Math.abs(duration) === Infinity) {
duration = Number.MAX_VALUE;
}
for (var i = 0; i < cues.length; i++) {
cuesArray.push(cues[i]);
}
cuesArray.sort(function (a, b) {
return a.startTime - b.startTime;
});
for (var _i = 0; _i < cuesArray.length - 1; _i++) {
if (cuesArray[_i].endTime !== cuesArray[_i + 1].startTime) {
cuesArray[_i].endTime = cuesArray[_i + 1].startTime;
}
}
cuesArray[cuesArray.length - 1].endTime = duration;
}
});
cea608Stream.on('data', function (caption) {
if (caption) {
if (!inbandTextTrack) {
removeExistingTrack(tech, 'captions', 'cc1');
inbandTextTrack = tech.addRemoteTextTrack({
kind: 'captions',
label: 'cc1'
}, true).track;
}
inbandTextTrack.addCue(new _window2.default.VTTCue(caption.startPts / 90000, caption.endPts / 90000, caption.text));
}
});
this.onCaptiondata = function (event, data) {
var captions = data[0].map(function (d) {
return {
pts: d.pos * 90000,
bytes: stringToByteArray(_window2.default.atob(d.data))
};
});
captions.forEach(function (caption) {
captionPackets = captionPackets.concat(parseCaptionPackets(caption.pts, caption.bytes));
});
if (captionPackets.length) {
// In Chrome, the Array#sort function is not stable so add a
// presortIndex that we can use to ensure we get a stable-sort
captionPackets.forEach(function (elem, idx) {
elem.presortIndex = idx;
});
// sort caption byte-pairs based on their PTS values
captionPackets.sort(function (a, b) {
if (a.pts === b.pts) {
return a.presortIndex - b.presortIndex;
}
return a.pts - b.pts;
});
// Push each caption into Cea608Stream
captionPackets.forEach(cea608Stream.push, cea608Stream);
captionPackets.length = 0;
cea608Stream.flush();
}
};
tech.on('seeked', this.onSeeked);
tech.on('id3updated', this.onId3updated);
tech.on('captiondata', this.onCaptiondata);
tech.setSrc(source.src);
};
/**
* No extra cleanup is necessary on dispose.
*/
FlashlsSourceHandler.dispose = function () {};
FlashlsSourceHandler.dispose = function () {
this.tech.off('seeked', this.onSeeked);
this.tech.off('id3updated', this.onId3updated);
this.tech.off('captiondata', this.onCaptiondata);
};

@@ -76,0 +342,0 @@ // Register the source handler and make sure it takes precedence over

{
"name": "@brightcove/videojs-flashls-source-handler",
"version": "1.0.0",
"version": "1.1.0",
"description": "A source handler to integrate flashls with video.js",

@@ -75,3 +75,4 @@ "main": "es5/index.js",

"browserify-versionify": "^1.0.6",
"video.js": "^5.10.1"
"video.js": "^5.10.1",
"mux.js": "^3.0.4"
},

@@ -78,0 +79,0 @@ "devDependencies": {

import videojs from 'video.js';
import window from 'global/window';
import { Cea608Stream } from 'mux.js/lib/m2ts/caption-stream';
import MetadataStream from 'mux.js/lib/m2ts/metadata-stream';
/**
* Define properties on a cue for backwards compatability,
* but warn the user that the way that they are using it
* is depricated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
const deprecateOldCue = function(cue) {
Object.defineProperties(cue.frame, {
id: {
get() {
videojs.log.warn(
'cue.frame.id is deprecated. Use cue.value.key instead.'
);
return cue.value.key;
}
},
value: {
get() {
videojs.log.warn(
'cue.frame.value is deprecated. Use cue.value.data instead.'
);
return cue.value.data;
}
},
privateData: {
get() {
videojs.log.warn(
'cue.frame.privateData is deprecated. Use cue.value.data instead.'
);
return cue.value.data;
}
}
});
};
/**
* Remove text track from tech
*/
const removeExistingTrack = function(tech, kind, label) {
const tracks = tech.remoteTextTracks() || [];
for (let i = 0; i < tracks.length; i++) {
const track = tracks[i];
if (track.kind === kind && track.label === label) {
tech.removeRemoteTextTrack(track);
}
}
};
/**
* convert a string to a byte array of char codes
*/
const stringToByteArray = function(data) {
const bytes = new Uint8Array(data.length);
for (let i = 0; i < data.length; i++) {
bytes[i] = data.charCodeAt(i);
}
return bytes;
};
// see CEA-708-D, section 4.4
const parseCaptionPackets = function(pts, userData) {
let results = [];
let i;
let count;
let offset;
let data;
// if this is just filler, return immediately
if (!(userData[0] & 0x40)) {
return results;
}
// parse out the cc_data_1 and cc_data_2 fields
count = userData[0] & 0x1f;
for (i = 0; i < count; i++) {
offset = i * 3;
data = {
type: userData[offset + 2] & 0x03,
pts
};
// capture cc data when cc_valid is 1
if (userData[offset + 2] & 0x04) {
data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
results.push(data);
}
}
return results;
};
/**
* Remove cues from a track on video.js.
*
* @param {Double} start start of where we should remove the cue
* @param {Double} end end of where the we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
const removeCuesFromTrack = function(start, end, track) {
let i;
let cue;
if (!track) {
return;
}
if (!track.cues) {
return;
}
i = track.cues.length;
while (i--) {
cue = track.cues[i];
// Remove any overlapping cue
if (cue.startTime <= end && cue.endTime >= start) {
track.removeCue(cue);
}
}
};
/*

@@ -58,9 +189,148 @@ * Registers the SWF as a handler for HLS video.

FlashlsSourceHandler.handleSource = function(source, tech, options) {
this.tech = tech;
const cea608Stream = new Cea608Stream();
const metadataStream = new MetadataStream();
let captionPackets = [];
let inbandTextTrack;
let metadataTrack;
this.onSeeked = () => {
removeCuesFromTrack(0, Infinity, metadataTrack);
let buffered = tech.buffered();
if (buffered.length === 1) {
removeCuesFromTrack(0, buffered.start(0), inbandTextTrack);
removeCuesFromTrack(buffered.end(0), Infinity, inbandTextTrack);
} else {
removeCuesFromTrack(0, Infinity, inbandTextTrack);
}
};
this.onId3updated = (event, data) => {
const id3tag = window.atob(data[0]);
const bytes = stringToByteArray(id3tag);
const chunk = {
type: 'timed-metadata',
dataAlignmentIndicator: true,
data: bytes
};
metadataStream.push(chunk);
};
metadataStream.on('data', (tag) => {
if (!metadataTrack) {
metadataTrack = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, true).track;
metadataTrack.inBandMetadataTrackDispatchType = '';
}
const time = tech.currentTime();
tag.frames.forEach((frame) => {
const cue = new window.VTTCue(
time,
time + 0.1,
frame.value || frame.url || frame.data || '');
cue.frame = frame;
cue.value = frame;
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
if (metadataTrack.cues && metadataTrack.cues.length) {
const cues = metadataTrack.cues;
const cuesArray = [];
let duration = tech.duration();
if (isNaN(duration) || Math.abs(duration) === Infinity) {
duration = Number.MAX_VALUE;
}
for (let i = 0; i < cues.length; i++) {
cuesArray.push(cues[i]);
}
cuesArray.sort((a, b) => a.startTime - b.startTime);
for (let i = 0; i < cuesArray.length - 1; i++) {
if (cuesArray[i].endTime !== cuesArray[i + 1].startTime) {
cuesArray[i].endTime = cuesArray[i + 1].startTime;
}
}
cuesArray[cuesArray.length - 1].endTime = duration;
}
});
cea608Stream.on('data', (caption) => {
if (caption) {
if (!inbandTextTrack) {
removeExistingTrack(tech, 'captions', 'cc1');
inbandTextTrack = tech.addRemoteTextTrack({
kind: 'captions',
label: 'cc1'
}, true).track;
}
inbandTextTrack.addCue(
new window.VTTCue(caption.startPts / 90000,
caption.endPts / 90000,
caption.text));
}
});
this.onCaptiondata = (event, data) => {
let captions = data[0].map((d) => {
return {
pts: d.pos * 90000,
bytes: stringToByteArray(window.atob(d.data))
};
});
captions.forEach((caption) => {
captionPackets = captionPackets.concat(
parseCaptionPackets(caption.pts, caption.bytes));
});
if (captionPackets.length) {
// In Chrome, the Array#sort function is not stable so add a
// presortIndex that we can use to ensure we get a stable-sort
captionPackets.forEach((elem, idx) => {
elem.presortIndex = idx;
});
// sort caption byte-pairs based on their PTS values
captionPackets.sort((a, b) => {
if (a.pts === b.pts) {
return a.presortIndex - b.presortIndex;
}
return a.pts - b.pts;
});
// Push each caption into Cea608Stream
captionPackets.forEach(cea608Stream.push, cea608Stream);
captionPackets.length = 0;
cea608Stream.flush();
}
};
tech.on('seeked', this.onSeeked);
tech.on('id3updated', this.onId3updated);
tech.on('captiondata', this.onCaptiondata);
tech.setSrc(source.src);
};
/**
* No extra cleanup is necessary on dispose.
*/
FlashlsSourceHandler.dispose = function() {};
FlashlsSourceHandler.dispose = function() {
this.tech.off('seeked', this.onSeeked);
this.tech.off('id3updated', this.onId3updated);
this.tech.off('captiondata', this.onCaptiondata);
};

@@ -67,0 +337,0 @@ // Register the source handler and make sure it takes precedence over

@@ -24,4 +24,861 @@ (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){

(function (global){
if (typeof window !== "undefined") {
module.exports = window;
} else if (typeof global !== "undefined") {
module.exports = global;
} else if (typeof self !== "undefined"){
module.exports = self;
} else {
module.exports = {};
}
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],4:[function(require,module,exports){
/**
* mux.js
*
* Copyright (c) 2015 Brightcove
* All rights reserved.
*
* Reads in-band caption information from a video elementary
* stream. Captions must follow the CEA-708 standard for injection
* into an MPEG-2 transport streams.
* @see https://en.wikipedia.org/wiki/CEA-708
*/
'use strict';
// -----------------
// Link To Transport
// -----------------
// Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEAS-708 caption content is always transmitted with
// payload type 0x04.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
RBSP_TRAILING_BITS = 128,
Stream = require('../utils/stream');
/**
* Parse a supplemental enhancement information (SEI) NAL unit.
* Stops parsing once a message of type ITU T T35 has been found.
*
* @param bytes {Uint8Array} the bytes of a SEI NAL unit
* @return {object} the parsed SEI payload
* @see Rec. ITU-T H.264, 7.3.2.3.1
*/
var parseSei = function(bytes) {
var
i = 0,
result = {
payloadType: -1,
payloadSize: 0
},
payloadType = 0,
payloadSize = 0;
// go through the sei_rbsp parsing each each individual sei_message
while (i < bytes.byteLength) {
// stop once we have hit the end of the sei_rbsp
if (bytes[i] === RBSP_TRAILING_BITS) {
break;
}
// Parse payload type
while (bytes[i] === 0xFF) {
payloadType += 255;
i++;
}
payloadType += bytes[i++];
// Parse payload size
while (bytes[i] === 0xFF) {
payloadSize += 255;
i++;
}
payloadSize += bytes[i++];
// this sei_message is a 608/708 caption so save it and break
// there can only ever be one caption message in a frame's sei
if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
result.payloadType = payloadType;
result.payloadSize = payloadSize;
result.payload = bytes.subarray(i, i + payloadSize);
break;
}
// skip the payload and parse the next message
i += payloadSize;
payloadType = 0;
payloadSize = 0;
}
return result;
};
// see ANSI/SCTE 128-1 (2013), section 8.1
var parseUserData = function(sei) {
// itu_t_t35_contry_code must be 181 (United States) for
// captions
if (sei.payload[0] !== 181) {
return null;
}
// itu_t_t35_provider_code should be 49 (ATSC) for captions
if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
return null;
}
// the user_identifier should be "GA94" to indicate ATSC1 data
if (String.fromCharCode(sei.payload[3],
sei.payload[4],
sei.payload[5],
sei.payload[6]) !== 'GA94') {
return null;
}
// finally, user_data_type_code should be 0x03 for caption data
if (sei.payload[7] !== 0x03) {
return null;
}
// return the user_data_type_structure and strip the trailing
// marker bits
return sei.payload.subarray(8, sei.payload.length - 1);
};
// see CEA-708-D, section 4.4
var parseCaptionPackets = function(pts, userData) {
var results = [], i, count, offset, data;
// if this is just filler, return immediately
if (!(userData[0] & 0x40)) {
return results;
}
// parse out the cc_data_1 and cc_data_2 fields
count = userData[0] & 0x1f;
for (i = 0; i < count; i++) {
offset = i * 3;
data = {
type: userData[offset + 2] & 0x03,
pts: pts
};
// capture cc data when cc_valid is 1
if (userData[offset + 2] & 0x04) {
data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
results.push(data);
}
}
return results;
};
var CaptionStream = function() {
CaptionStream.prototype.init.call(this);
this.captionPackets_ = [];
this.field1_ = new Cea608Stream(); // eslint-disable-line no-use-before-define
// forward data and done events from field1_ to this CaptionStream
this.field1_.on('data', this.trigger.bind(this, 'data'));
this.field1_.on('done', this.trigger.bind(this, 'done'));
};
CaptionStream.prototype = new Stream();
CaptionStream.prototype.push = function(event) {
var sei, userData;
// only examine SEI NALs
if (event.nalUnitType !== 'sei_rbsp') {
return;
}
// parse the sei
sei = parseSei(event.escapedRBSP);
// ignore everything but user_data_registered_itu_t_t35
if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
return;
}
// parse out the user data payload
userData = parseUserData(sei);
// ignore unrecognized userData
if (!userData) {
return;
}
// parse out CC data packets and save them for later
this.captionPackets_ = this.captionPackets_.concat(parseCaptionPackets(event.pts, userData));
};
CaptionStream.prototype.flush = function() {
// make sure we actually parsed captions before proceeding
if (!this.captionPackets_.length) {
this.field1_.flush();
return;
}
// In Chrome, the Array#sort function is not stable so add a
// presortIndex that we can use to ensure we get a stable-sort
this.captionPackets_.forEach(function(elem, idx) {
elem.presortIndex = idx;
});
// sort caption byte-pairs based on their PTS values
this.captionPackets_.sort(function(a, b) {
if (a.pts === b.pts) {
return a.presortIndex - b.presortIndex;
}
return a.pts - b.pts;
});
// Push each caption into Cea608Stream
this.captionPackets_.forEach(this.field1_.push, this.field1_);
this.captionPackets_.length = 0;
this.field1_.flush();
return;
};
// ----------------------
// Session to Application
// ----------------------
var BASIC_CHARACTER_TRANSLATION = {
0x2a: 0xe1,
0x5c: 0xe9,
0x5e: 0xed,
0x5f: 0xf3,
0x60: 0xfa,
0x7b: 0xe7,
0x7c: 0xf7,
0x7d: 0xd1,
0x7e: 0xf1,
0x7f: 0x2588
};
var getCharFromCode = function(code) {
if (code === null) {
return '';
}
code = BASIC_CHARACTER_TRANSLATION[code] || code;
return String.fromCharCode(code);
};
// Constants for the byte codes recognized by Cea608Stream. This
// list is not exhaustive. For a more comprehensive listing and
// semantics see
// http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
var PADDING = 0x0000,

    // Pop-on Mode: captions are composed off-screen and then displayed
    // all at once when END_OF_CAPTION arrives
    RESUME_CAPTION_LOADING = 0x1420,
    END_OF_CAPTION = 0x142f,

    // Roll-up Mode: captions scroll upward within a 2-4 row window
    ROLL_UP_2_ROWS = 0x1425,
    ROLL_UP_3_ROWS = 0x1426,
    ROLL_UP_4_ROWS = 0x1427,
    CARRIAGE_RETURN = 0x142d,

    // Erasure
    BACKSPACE = 0x1421,
    ERASE_DISPLAYED_MEMORY = 0x142c,
    ERASE_NON_DISPLAYED_MEMORY = 0x142e;

// the index of the last row in a CEA-608 display buffer
var BOTTOM_ROW = 14;
// CEA-608 captions are rendered onto a 34x15 matrix of character
// cells. The "bottom" row is the last element in the outer array.
// Allocate a fresh display buffer: one empty string per row, with the
// "bottom" row stored as the last element of the array.
var createDisplayBuffer = function() {
  var rows = [];
  for (var r = 0; r <= BOTTOM_ROW; r++) {
    rows.push('');
  }
  return rows;
};
/**
 * Decodes a stream of CEA-608 closed-caption byte pairs. Understands the
 * pop-on and roll-up captioning modes, maintains the displayed and
 * non-displayed row buffers, and emits caption text through
 * flushDisplayed() as 'data' events.
 */
var Cea608Stream = function() {
  Cea608Stream.prototype.init.call(this);

  // the active captioning mode, either 'popOn' or 'rollUp'; printable
  // characters are routed to the method of the same name
  this.mode_ = 'popOn';
  // When in roll-up mode, the index of the last row that will
  // actually display captions. If a caption is shifted to a row
  // with a lower index than this, it is cleared from the display
  // buffer
  this.topRow_ = 0;
  // PTS at which the currently-displayed caption started showing
  this.startPts_ = 0;
  // on-screen text, one string per row
  this.displayed_ = createDisplayBuffer();
  // off-screen composition buffer used by pop-on mode
  this.nonDisplayed_ = createDisplayBuffer();
  // last control code seen, used to drop spec-mandated retransmissions
  this.lastControlCode_ = null;

  /**
   * Decode one caption packet.
   * @param packet {object} has `type` (CC channel), `pts`, and a 16-bit
   * `ccData` byte pair
   */
  this.push = function(packet) {
    // Ignore other channels
    if (packet.type !== 0) {
      return;
    }
    var data, swap, char0, char1;

    // remove the parity bits
    data = packet.ccData & 0x7f7f;

    // ignore duplicate control codes; CEA-608 control codes are sent
    // twice for robustness, and the second copy must be dropped
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }

    // Store control codes (high nibble 0x1X) so the duplicate check
    // above can match the retransmission
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else {
      this.lastControlCode_ = null;
    }

    switch (data) {
    case PADDING:
      break;
    case RESUME_CAPTION_LOADING:
      this.mode_ = 'popOn';
      break;
    case END_OF_CAPTION:
      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);

      // flip memory: the composed caption becomes visible and the old
      // display buffer becomes the new composition buffer
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;

      // start measuring the time to display the caption
      this.startPts_ = packet.pts;
      break;
    case ROLL_UP_2_ROWS:
      this.topRow_ = BOTTOM_ROW - 1;
      this.mode_ = 'rollUp';
      break;
    case ROLL_UP_3_ROWS:
      this.topRow_ = BOTTOM_ROW - 2;
      this.mode_ = 'rollUp';
      break;
    case ROLL_UP_4_ROWS:
      this.topRow_ = BOTTOM_ROW - 3;
      this.mode_ = 'rollUp';
      break;
    case CARRIAGE_RETURN:
      // emit the current window, then scroll it up one row
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;
      break;
    case BACKSPACE:
      // delete the last buffered character; pop-on edits the off-screen
      // buffer while roll-up edits the visible bottom row
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[BOTTOM_ROW] = this.nonDisplayed_[BOTTOM_ROW].slice(0, -1);
      } else {
        this.displayed_[BOTTOM_ROW] = this.displayed_[BOTTOM_ROW].slice(0, -1);
      }
      break;
    case ERASE_DISPLAYED_MEMORY:
      // emit whatever is on screen before clearing it
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
      break;
    case ERASE_NON_DISPLAYED_MEMORY:
      this.nonDisplayed_ = createDisplayBuffer();
      break;
    default:
      // not a recognized control code; treat the bytes as characters
      char0 = data >>> 8;
      char1 = data & 0xff;

      // Look for a Channel 1 Preamble Address Code
      if (char0 >= 0x10 && char0 <= 0x17 &&
          char1 >= 0x40 && char1 <= 0x7F &&
          (char0 !== 0x10 || char1 < 0x60)) {
        // Follow Safari's lead and replace the PAC with a space
        char0 = 0x20;
        // we only want one space so make the second character null
        // which will become '' in getCharFromCode
        char1 = null;
      }

      // Look for special character sets
      if ((char0 === 0x11 || char0 === 0x19) &&
          (char1 >= 0x30 && char1 <= 0x3F)) {
        // Put in eighth note and space
        char0 = 0x266A;
        char1 = '';
      }

      // ignore unsupported control codes
      if ((char0 & 0xf0) === 0x10) {
        return;
      }

      // remove null chars
      if (char0 === 0x00) {
        char0 = null;
      }
      if (char1 === 0x00) {
        char1 = null;
      }

      // character handling is dependent on the current mode
      this[this.mode_](packet.pts, char0, char1);
      break;
    }
  };
};
Cea608Stream.prototype = new Stream();

// Trigger a cue point that captures the current state of the
// display buffer
Cea608Stream.prototype.flushDisplayed = function(pts) {
  var rows = [];
  var i;

  for (i = 0; i < this.displayed_.length; i++) {
    // trim surrounding spaces and drop rows that hold no text
    var rowText = this.displayed_[i].trim();
    if (rowText.length) {
      rows.push(rowText);
    }
  }

  // only emit a cue when there is something to show
  if (rows.length) {
    this.trigger('data', {
      startPts: this.startPts_,
      endPts: pts,
      // combine all text rows to display in one cue
      text: rows.join('\n')
    });
  }
};
// Mode Implementations
// Pop-on mode: characters accumulate off-screen in the non-displayed
// buffer until an END_OF_CAPTION command swaps them onto the display.
Cea608Stream.prototype.popOn = function(pts, char0, char1) {
  this.nonDisplayed_[BOTTOM_ROW] += getCharFromCode(char0) + getCharFromCode(char1);
};
// Roll-up mode: characters append directly to the visible bottom row.
Cea608Stream.prototype.rollUp = function(pts, char0, char1) {
  var row = this.displayed_[BOTTOM_ROW];

  if (row === '') {
    // starting to buffer a fresh line of display input: emit what is
    // currently visible and restart the display timer
    this.flushDisplayed(pts);
    this.startPts_ = pts;
  }

  this.displayed_[BOTTOM_ROW] = row + getCharFromCode(char0) + getCharFromCode(char1);
};
// Scroll the roll-up window: rows above the active window are blanked,
// every row in the window moves up one position, and the bottom row is
// emptied to receive new text.
Cea608Stream.prototype.shiftRowsUp_ = function() {
  var r = 0;

  // clear out inactive rows above the roll-up window
  while (r < this.topRow_) {
    this.displayed_[r] = '';
    r++;
  }
  // shift displayed rows up by one
  while (r < BOTTOM_ROW) {
    this.displayed_[r] = this.displayed_[r + 1];
    r++;
  }
  // clear out the bottom row
  this.displayed_[BOTTOM_ROW] = '';
};
// exports: CaptionStream extracts 708 packets from SEI NALs and
// Cea608Stream decodes the byte pairs into caption text
module.exports = {
  CaptionStream: CaptionStream,
  Cea608Stream: Cea608Stream
};
},{"../utils/stream":7}],5:[function(require,module,exports){
/**
* Accepts program elementary stream (PES) data events and parses out
* ID3 metadata from them, if present.
* @see http://id3.org/id3v2.3.0
*/
'use strict';
var
  Stream = require('../utils/stream'),
  StreamTypes = require('./stream-types'),

  // return a percent-encoded representation of the specified byte range
  // @see http://en.wikipedia.org/wiki/Percent-encoding
  percentEncode = function(bytes, start, end) {
    var i, result = '';
    for (i = start; i < end; i++) {
      result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
    }
    return result;
  },

  // return the string representation of the specified byte range,
  // interpreted as UTF-8.
  parseUtf8 = function(bytes, start, end) {
    return decodeURIComponent(percentEncode(bytes, start, end));
  },

  // return the string representation of the specified byte range,
  // interpreted as ISO-8859-1. unescape maps %XX escapes directly to
  // code points, which is exactly the Latin-1 interpretation.
  parseIso88591 = function(bytes, start, end) {
    return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  },

  // decode an ID3 "syncsafe" integer: four bytes contributing 7
  // significant bits each (the high bit of every byte is always zero)
  parseSyncSafeInteger = function(data) {
    return (data[0] << 21) |
           (data[1] << 14) |
           (data[2] << 7) |
           (data[3]);
  },

  // frame-body parsers keyed by ID3v2.3 frame id; each mutates the
  // frame object in place, adding decoded fields
  tagParsers = {
    // user-defined text information frame
    TXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the text fields
          tag.description = parseUtf8(tag.data, 1, i);
          // do not include the null terminator in the tag value
          tag.value = parseUtf8(tag.data, i + 1, tag.data.length - 1);
          break;
        }
      }
      tag.data = tag.value;
    },
    // user-defined URL link frame
    WXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the description and URL fields
          tag.description = parseUtf8(tag.data, 1, i);
          tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
          break;
        }
      }
    },
    // private frame: a null-terminated owner identifier followed by
    // arbitrary binary data
    PRIV: function(tag) {
      var i;

      for (i = 0; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the owner identifier (Latin-1, up to the terminator)
          tag.owner = parseIso88591(tag.data, 0, i);
          break;
        }
      }
      tag.privateData = tag.data.subarray(i + 1);
      tag.data = tag.privateData;
    }
  },
  MetadataStream;
/**
 * Accepts 'timed-metadata' PES chunks, reassembles complete ID3v2.3 tags
 * from them, parses the tag frames, and emits each parsed tag as a
 * 'data' event.
 *
 * @param options {object} optional settings:
 *   - debug: log skipped/unparseable packets to the console
 *   - descriptor: bytes of the MP2T program-level descriptor, folded
 *     into the in-band metadata track dispatch type
 */
MetadataStream = function(options) {
  var
    settings = {
      debug: !!(options && options.debug),

      // the bytes of the program-level descriptor field in MP2T
      // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
      // program element descriptors"
      descriptor: options && options.descriptor
    },
    // the total size in bytes of the ID3 tag being parsed
    tagSize = 0,
    // tag data that is not complete enough to be parsed
    buffer = [],
    // the total number of bytes currently in the buffer
    bufferSize = 0,
    i;

  MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  /**
   * Buffer one timed-metadata chunk; once a full ID3 tag has arrived,
   * parse its frames and emit the tag via 'data'.
   */
  this.push = function(chunk) {
    // note: this `i` intentionally shadows the constructor-scope `i`
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data (a fresh buffer must
    // begin with the "ID3" file identifier)
    if (buffer.length === 0 &&
        (chunk.data.length < 10 ||
         chunk.data[0] !== 'I'.charCodeAt(0) ||
         chunk.data[1] !== 'D'.charCodeAt(0) ||
         chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far
    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed; copy buffered
    // chunks into one contiguous Uint8Array, consuming the buffer
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      // four-character frame id, e.g. 'TXXX' or 'PRIV'
      frameHeader = String.fromCharCode(tag.data[frameStart],
                                        tag.data[frameStart + 1],
                                        tag.data[frameStart + 2],
                                        tag.data[frameStart + 3]);

      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var
            d = frame.data,
            // 33-bit MPEG timestamp split across bytes 3-7 of the body
            size = ((d[3] & 0x01)  << 30) |
                   (d[4]  << 22) |
                   (d[5] << 14) |
                   (d[6] << 6) |
                   (d[7] >>> 2);

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }
      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);
    this.trigger('data', tag);
  };
};
MetadataStream.prototype = new Stream();
module.exports = MetadataStream;
},{"../utils/stream":7,"./stream-types":6}],6:[function(require,module,exports){
'use strict';
// MPEG-2 transport stream stream_type values (ISO/IEC 13818-1) for the
// elementary streams this library recognizes
module.exports = {
  H264_STREAM_TYPE: 0x1B,     // AVC / H.264 video
  ADTS_STREAM_TYPE: 0x0F,     // AAC audio in ADTS framing
  METADATA_STREAM_TYPE: 0x15  // timed ID3 metadata
};
},{}],7:[function(require,module,exports){
/**
* mux.js
*
* Copyright (c) 2014 Brightcove
* All rights reserved.
*
* A lightweight readable stream implementation that handles event dispatching.
* Objects that inherit from streams should call init in their constructors.
*/
'use strict';
/**
 * A lightweight readable stream implementation that handles event
 * dispatching. Objects that inherit from streams should call init in
 * their constructors.
 */
var Stream = function() {
  this.init = function() {
    // listener arrays keyed by event type; private to this instance
    var listeners = {};
    /**
     * Add a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} the callback to be invoked when an event of
     * the specified type occurs
     */
    this.on = function(type, listener) {
      if (!listeners[type]) {
        listeners[type] = [];
      }
      // concat copies the array so a trigger() mid-iteration is unaffected
      listeners[type] = listeners[type].concat(listener);
    };
    /**
     * Remove a listener for a specified event type.
     * @param type {string} the event name
     * @param listener {function} a function previously registered for this
     * type of event through `on`
     * @return {boolean} true if the listener was found and removed
     */
    this.off = function(type, listener) {
      var index;
      if (!listeners[type]) {
        return false;
      }
      index = listeners[type].indexOf(listener);
      if (index === -1) {
        // Previously an unregistered listener fell through to
        // splice(-1, 1), which removed the *last* registered listener
        // instead of being a no-op.
        return false;
      }
      // copy-on-write so a trigger() mid-iteration is unaffected
      listeners[type] = listeners[type].slice();
      listeners[type].splice(index, 1);
      return true;
    };
    /**
     * Trigger an event of the specified type on this stream. Any additional
     * arguments to this function are passed as parameters to event listeners.
     * @param type {string} the event name
     */
    this.trigger = function(type) {
      var callbacks, i, length, args;
      callbacks = listeners[type];
      if (!callbacks) {
        return;
      }
      // Slicing the arguments on every invocation of this method
      // can add a significant amount of overhead. Avoid the
      // intermediate object creation for the common case of a
      // single callback argument
      if (arguments.length === 2) {
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].call(this, arguments[1]);
        }
      } else {
        args = [];
        for (i = 1; i < arguments.length; ++i) {
          args.push(arguments[i]);
        }
        length = callbacks.length;
        for (i = 0; i < length; ++i) {
          callbacks[i].apply(this, args);
        }
      }
    };
    /**
     * Destroys the stream and cleans up.
     */
    this.dispose = function() {
      listeners = {};
    };
  };
};
/**
* Forwards all `data` events on this stream to the destination stream. The
* destination stream should provide a method `push` to receive the data
* events as they arrive.
* @param destination {stream} the stream that will receive all `data` events
* @param autoFlush {boolean} if false, we will not call `flush` on the destination
* when the current stream emits a 'done' event
* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
*/
Stream.prototype.pipe = function(destination) {
  // forward every 'data' payload to the destination's push()
  var forwardData = function(data) {
    destination.push(data);
  };
  // forward 'done' so the destination flushes when this stream finishes
  var forwardDone = function(flushSource) {
    destination.flush(flushSource);
  };

  this.on('data', forwardData);
  this.on('done', forwardDone);
  return destination;
};
// Default stream functions that are expected to be overridden to perform
// actual work. These are provided by the prototype as a sort of no-op
// implementation so that we don't have to check for their existence in the
// `pipe` function above.
Stream.prototype.push = function(data) {
  // pass-through: re-emit incoming data untouched
  this.trigger('data', data);
};
Stream.prototype.flush = function(flushSource) {
  // signal downstream consumers that this stream has finished
  this.trigger('done', flushSource);
};
module.exports = Stream;
},{}],8:[function(require,module,exports){
(function (global){
'use strict';
exports.__esModule = true;

@@ -33,4 +890,136 @@

var _window = require('global/window');
var _window2 = _interopRequireDefault(_window);
var _captionStream = require('mux.js/lib/m2ts/caption-stream');
var _metadataStream = require('mux.js/lib/m2ts/metadata-stream');
var _metadataStream2 = _interopRequireDefault(_metadataStream);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Define properties on a cue for backwards compatibility,
* but warn the user that the way that they are using it
* is deprecated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
var deprecateOldCue = function deprecateOldCue(cue) {
  // install a read-only getter on cue.frame that logs a deprecation
  // warning and forwards to the replacement value
  var defineDeprecated = function(prop, message, getValue) {
    Object.defineProperty(cue.frame, prop, {
      get: function() {
        _video2.default.log.warn(message);
        return getValue();
      }
    });
  };

  defineDeprecated('id', 'cue.frame.id is deprecated. Use cue.value.key instead.', function() {
    return cue.value.key;
  });
  defineDeprecated('value', 'cue.frame.value is deprecated. Use cue.value.data instead.', function() {
    return cue.value.data;
  });
  defineDeprecated('privateData', 'cue.frame.privateData is deprecated. Use cue.value.data instead.', function() {
    return cue.value.data;
  });
};
/**
* Remove text track from tech
*/
var removeExistingTrack = function removeExistingTrack(tech, kind, label) {
  var trackList = tech.remoteTextTracks() || [];
  var index = 0;

  while (index < trackList.length) {
    var candidate = trackList[index];

    // a track matches when both its kind and its label are identical
    if (candidate.kind === kind && candidate.label === label) {
      tech.removeRemoteTextTrack(candidate);
    }
    index++;
  }
};
/**
* convert a string to a byte array of char codes
*/
var stringToByteArray = function stringToByteArray(data) {
  var byteCount = data.length;
  var result = new Uint8Array(byteCount);
  var index = byteCount;

  // fill from the end; order is irrelevant since every slot is written
  while (index--) {
    result[index] = data.charCodeAt(index);
  }
  return result;
};
// see CEA-708-D, section 4.4
// see CEA-708-D, section 4.4
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
  var packets = [];

  // bit 6 of the first byte is process_cc_data_flag; when it is unset
  // the payload is filler and carries no caption data
  if ((userData[0] & 0x40) === 0) {
    return packets;
  }

  // the low five bits hold cc_count, the number of byte-pair constructs
  var ccCount = userData[0] & 0x1f;

  for (var packetIndex = 0; packetIndex < ccCount; packetIndex++) {
    var base = packetIndex * 3;
    var header = userData[base + 2];

    // only keep byte pairs whose cc_valid flag (bit 2) is set
    if (header & 0x04) {
      packets.push({
        type: header & 0x03,
        pts: pts,
        ccData: (userData[base + 3] << 8) | userData[base + 4]
      });
    }
  }
  return packets;
};
/**
* Remove cues from a track on video.js.
*
* @param {Double} start start of where we should remove the cue
* @param {Double} end end of where the we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
var removeCuesFromTrack = function removeCuesFromTrack(start, end, track) {
  // tolerate a missing track or a track with no cue list
  if (!track || !track.cues) {
    return;
  }

  // iterate backwards so removals don't disturb unvisited indices
  for (var i = track.cues.length - 1; i >= 0; i--) {
    var cue = track.cues[i];

    // remove any cue overlapping the [start, end] interval
    if (cue.startTime <= end && cue.endTime >= start) {
      track.removeCue(cue);
    }
  }
};
/*

@@ -91,9 +1080,143 @@ * Registers the SWF as a handler for HLS video.

/**
 * Source handler entry point: wires the Flash tech's 'seeked',
 * 'id3updated', and 'captiondata' events into video.js metadata and
 * caption text tracks, then hands the source URL to the SWF.
 *
 * @param {Object} source the source object; `source.src` is the URL to play
 * @param {Object} tech the flashls tech instance
 * @param {Object} [options] source-handler options (unused here)
 */
FlashlsSourceHandler.handleSource = function (source, tech, options) {
  this.tech = tech;

  // decodes CEA-608 byte pairs into caption cues
  var cea608Stream = new _captionStream.Cea608Stream();
  // reassembles and parses ID3 tags from timed-metadata chunks
  var metadataStream = new _metadataStream2.default();
  // CC packets collected from 'captiondata' events, drained on each event
  var captionPackets = [];
  // text tracks, created lazily on first data
  var inbandTextTrack = void 0;
  var metadataTrack = void 0;

  // after a seek, drop cues that no longer correspond to buffered media
  this.onSeeked = function () {
    removeCuesFromTrack(0, Infinity, metadataTrack);

    var buffered = tech.buffered();

    if (buffered.length === 1) {
      // keep caption cues only inside the single buffered range
      removeCuesFromTrack(0, buffered.start(0), inbandTextTrack);
      removeCuesFromTrack(buffered.end(0), Infinity, inbandTextTrack);
    } else {
      removeCuesFromTrack(0, Infinity, inbandTextTrack);
    }
  };

  // the SWF delivers ID3 tags base64-encoded; decode to bytes and feed
  // the metadata parser as a fresh, aligned timed-metadata chunk
  this.onId3updated = function (event, data) {
    var id3tag = _window2.default.atob(data[0]);
    var bytes = stringToByteArray(id3tag);
    var chunk = {
      type: 'timed-metadata',
      dataAlignmentIndicator: true,
      data: bytes
    };

    metadataStream.push(chunk);
  };

  metadataStream.on('data', function (tag) {
    // create the metadata track on first use
    if (!metadataTrack) {
      metadataTrack = tech.addRemoteTextTrack({
        kind: 'metadata',
        label: 'Timed Metadata'
      }, true).track;

      metadataTrack.inBandMetadataTrackDispatchType = '';
    }

    // cues are anchored at the current playback position; the flash
    // layer does not expose a PTS for the tag
    var time = tech.currentTime();

    tag.frames.forEach(function (frame) {
      var cue = new _window2.default.VTTCue(time, time + 0.1, frame.value || frame.url || frame.data || '');

      cue.frame = frame;
      cue.value = frame;

      deprecateOldCue(cue);
      metadataTrack.addCue(cue);
    });

    if (metadataTrack.cues && metadataTrack.cues.length) {
      // extend each cue until the start of the next one so metadata
      // stays "active" between tags; the last cue runs to the duration
      var cues = metadataTrack.cues;
      var cuesArray = [];
      var duration = tech.duration();

      // live/unknown durations come back NaN or Infinity; cap instead
      if (isNaN(duration) || Math.abs(duration) === Infinity) {
        duration = Number.MAX_VALUE;
      }

      for (var i = 0; i < cues.length; i++) {
        cuesArray.push(cues[i]);
      }

      cuesArray.sort(function (a, b) {
        return a.startTime - b.startTime;
      });

      for (var _i = 0; _i < cuesArray.length - 1; _i++) {
        if (cuesArray[_i].endTime !== cuesArray[_i + 1].startTime) {
          cuesArray[_i].endTime = cuesArray[_i + 1].startTime;
        }
      }
      cuesArray[cuesArray.length - 1].endTime = duration;
    }
  });

  cea608Stream.on('data', function (caption) {
    if (caption) {
      // create the caption track on first use, replacing any stale
      // 'cc1' track left over from a previous source
      if (!inbandTextTrack) {
        removeExistingTrack(tech, 'captions', 'cc1');
        inbandTextTrack = tech.addRemoteTextTrack({
          kind: 'captions',
          label: 'cc1'
        }, true).track;
      }

      // caption PTS values are 90kHz clock ticks; convert to seconds
      inbandTextTrack.addCue(new _window2.default.VTTCue(caption.startPts / 90000, caption.endPts / 90000, caption.text));
    }
  });

  // the SWF delivers CEA-608 packets base64-encoded with positions in
  // seconds; convert to byte arrays with 90kHz PTS values
  this.onCaptiondata = function (event, data) {
    var captions = data[0].map(function (d) {
      return {
        pts: d.pos * 90000,
        bytes: stringToByteArray(_window2.default.atob(d.data))
      };
    });

    captions.forEach(function (caption) {
      captionPackets = captionPackets.concat(parseCaptionPackets(caption.pts, caption.bytes));
    });

    if (captionPackets.length) {
      // In Chrome, the Array#sort function is not stable so add a
      // presortIndex that we can use to ensure we get a stable-sort
      captionPackets.forEach(function (elem, idx) {
        elem.presortIndex = idx;
      });

      // sort caption byte-pairs based on their PTS values
      captionPackets.sort(function (a, b) {
        if (a.pts === b.pts) {
          return a.presortIndex - b.presortIndex;
        }

        return a.pts - b.pts;
      });

      // Push each caption into Cea608Stream
      captionPackets.forEach(cea608Stream.push, cea608Stream);
      captionPackets.length = 0;
      cea608Stream.flush();
    }
  };

  tech.on('seeked', this.onSeeked);
  tech.on('id3updated', this.onId3updated);
  tech.on('captiondata', this.onCaptiondata);

  tech.setSrc(source.src);
};
/**
* No extra cleanup is necessary on dispose.
*/
FlashlsSourceHandler.dispose = function () {};
/**
 * Detach the event listeners installed by handleSource. Guards against
 * being called before handleSource has run, in which case this.tech is
 * unset and the unconditional `this.tech.off(...)` calls would throw.
 */
FlashlsSourceHandler.dispose = function () {
  if (this.tech) {
    this.tech.off('seeked', this.onSeeked);
    this.tech.off('id3updated', this.onId3updated);
    this.tech.off('captiondata', this.onCaptiondata);
  }
};

@@ -108,3 +1231,3 @@ // Register the source handler and make sure it takes precedence over

// Include the version number.
FlashlsSourceHandler.VERSION = '1.0.0';
FlashlsSourceHandler.VERSION = '1.1.0';

@@ -114,3 +1237,3 @@ exports.default = FlashlsSourceHandler;

}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],4:[function(require,module,exports){
},{"global/window":3,"mux.js/lib/m2ts/caption-stream":4,"mux.js/lib/m2ts/metadata-stream":5}],9:[function(require,module,exports){
(function (global){

@@ -171,2 +1294,2 @@ 'use strict';

}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"../src/index":3,"global/document":2}]},{},[4]);
},{"../src/index":8,"global/document":2}]},{},[9]);

@@ -17,3 +17,3 @@ module.exports = function(config) {

// this will detect all browsers that are available for testing
if (!config.browsers.length) {
if (!browsers.length) {
detectBrowsers.enabled = true;

@@ -45,2 +45,8 @@ }

browserStack: {
project: process.env.npm_package_name,
name: process.env.TEAMCITY_PROJECT_NAME + process.env.BUILD_NUMBER,
pollingTimeout: 30000
},
detectBrowsers: detectBrowsers,

@@ -47,0 +53,0 @@ reporters: reporters,

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc