videocontext - npm Package Compare versions

Comparing version 0.52.1 to 0.52.2


.eslintrc.json

@@ -5,7 +5,8 @@ {

"es6": true,
"mocha": true
"jest": true
},
"extends": "eslint:recommended",
"parserOptions": {
"sourceType": "module"
"sourceType": "module",
"ecmaVersion": 2017
},

@@ -23,3 +24,6 @@ "rules": {

"error",
"double"
"double",
{
"allowTemplateLiterals": true
}
],

@@ -30,4 +34,4 @@ "semi": [

],
"no-console":0
"no-console": 0
}
}

jsdoc.json

@@ -9,3 +9,3 @@ {

"allowUnknownTags": true,
"dictionaries": ["jsdoc","closure"]
"dictionaries": ["jsdoc", "closure"]
},

@@ -25,2 +25,2 @@ "source": {

}
}
}
package.json

{
"name": "videocontext",
"version": "0.52.1",
"version": "0.52.2",
"description": "A WebGL & HTML5 graph based video composition library",
"repository": {
"type": "git",
"url": "git+https://github.com/bbc/VideoContext.git"
},
"license": "Apache-2.0",
"main": "dist/videocontext.js",
"keywords": [

@@ -20,37 +26,48 @@ "video",

"scripts": {
"doc": "node ./node_modules/jsdoc/jsdoc.js -c jsdoc.json && cp dist/videocontext.* docs/dist/",
"build": "npm run test && ./node_modules/webpack/bin/webpack.js --config webpack.config.js && npm run doc",
"test": "node ./node_modules/mocha/bin/mocha --compilers js:babel-core/register",
"test-watch": "node ./node_modules/mocha/bin/mocha --compilers js:babel-core/register --watch",
"dev": "./node_modules/webpack/bin/webpack.js --watch --config webpack.config.js & ./node_modules/reload/bin/reload -b"
"dev": "webpack --config webpack.config.js --watch",
"build": "webpack --config webpack.config.js",
"test": "bash ./test.sh",
"test-unit": "jest test/unit/*.js",
"test-integration": "jest test/unit/*.js",
"test-watch": "jest --watch",
"test-coverage": "jest test/unit/*.js --coverage --collectCoverageFrom 'src/**/*.js'",
"test-regression": "jest test/regression/*.js",
"docs": "bash ./build-docs.sh",
"docs-commit": "npm run docs && git add docs && git commit -m 'update docs in preparation for release'",
"format": "prettier --write '{src,test}/**/*.js'",
"lint": "eslint '{src,test}/**/*.js'",
"precommit": "npm run lint && npm run format && npm run test-unit",
"release:major": "npm run docs-commit && changelog -M && git add CHANGELOG.md && git commit -m 'updated CHANGELOG.md' && npm version --new-version major",
"release:minor": "npm run docs-commit && changelog -m && git add CHANGELOG.md && git commit -m 'updated CHANGELOG.md' && npm version --new-version minor",
"release:patch": "npm run docs-commit && changelog -p && git add CHANGELOG.md && git commit -m 'updated CHANGELOG.md' && npm version --new-version patch",
"postversion": "git push origin --follow-tags"
},
"dependencies": {},
"dependencies": {
"regenerator-runtime": "^0.11.1"
},
"devDependencies": {
"babel-core": "^5.6.5",
"babel-loader": "^5.1.4",
"chai": "3.4.1",
"babel-cli": "^6.26.0",
"babel-core": "^6.26.3",
"babel-jest": "^23.2.0",
"babel-loader": "^7.1.4",
"babel-plugin-add-module-exports": "^0.2.1",
"babel-plugin-transform-es2015-modules-umd": "^6.24.1",
"babel-preset-env": "^1.7.0",
"eslint": "^3.9.1",
"eslint-loader": "^1.4.0",
"http-server": "^0.8.5",
"eslint-loader": "^2.0.0",
"generate-changelog": "^1.7.1",
"http-server": "^0.11.1",
"husky": "^0.14.3",
"jest": "^23.1.0",
"jest-cli": "^23.1.0",
"jest-image-snapshot": "^2.4.2",
"jsdoc": "^3.4.3",
"mocha": "2.3.4",
"node-libs-browser": "^0.5.2",
"reload": "^1.0.1",
"path": "^0.12.7",
"prettier": "^1.13.2",
"puppeteer": "^1.4.0",
"sinon": "^4.2.1",
"webgl-mock": "^0.1.6",
"webpack": "^1.9.11"
},
"main": "dist/videocontext.js",
"browserify": {
"transform": [
[
"babelify",
{
"presets": [
"es2015"
]
}
]
]
"webgl-mock": "^0.1.7",
"webpack": "^4.10.2",
"webpack-cli": "^3.0.1"
}
}
README.md

# VideoContext
![build status](https://travis-ci.org/bbc/VideoContext.svg?branch=master)
The VideoContext is an experimental HTML5/WebGL media processing and sequencing library for creating interactive and responsive videos on the web.
It consists of two main components: a graph based, shader accelerated processing pipeline, and a media playback sequencing time-line.
The design is heavily inspired by the WebAudioAPI, so it should feel familiar to people who've had previous experience in the WebAudio world.
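To make the graph model concrete, here is a minimal, hedged usage sketch (the canvas id and video path are placeholders; the calls mirror the examples later in this README):

```javascript
// Play a four-second clip through the graph to a WebGL canvas.
var canvas = document.getElementById("canvas"); // placeholder element id
var ctx = new VideoContext(canvas);

var videoNode = ctx.video("./video1.mp4"); // placeholder source path
videoNode.connect(ctx.destination); // wire the source to the output
videoNode.start(0); // begin at t=0 on the context timeline
videoNode.stop(4); // end at t=4
ctx.play();
```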

@@ -182,3 +184,3 @@

//Give a sepia tint to the monochrome output (note how shader description properties are automatically bound to the JavaScript object).
sepiaEffect.outputMix = [1.25,1.18,0.9];
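For context, a hedged sketch of the set-up around that line (the monochrome definition and its automatically bound properties are the ones documented elsewhere in this package):

```javascript
// Create a monochrome effect; the "inputMix"/"outputMix" uniforms from its
// definition are exposed directly as JavaScript properties on the node.
var sepiaEffect = ctx.effect(VideoContext.DEFINITIONS.MONOCHROME);
sepiaEffect.outputMix = [1.25, 1.18, 0.9]; // warm the output toward sepia

videoNode.connect(sepiaEffect);
sepiaEffect.connect(ctx.destination);
```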

@@ -263,3 +265,3 @@ //Set-up the processing chain.

//Setup the transition. This will change the "mix" property of the cross-fade node from 0.0 to 1.0.
//Transition mix value from 0.0 to 1.0 at time=8 over a period of 2 seconds to time=10.

@@ -274,3 +276,3 @@ crossfadeEffect.transition(8.0, 10.0, 0.0, 1.0, "mix");

// NOTE: There are multiple ways to connect a node to a specific input of a processing node; the
// following are all equivalent.
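Concretely, a sketch assuming the two-input cross-fade definition shipped in this release (ports named u_image_a and u_image_b):

```javascript
// All three calls route videoNode1 into the cross-fade's first input.
videoNode1.connect(crossfadeEffect); // next available port
videoNode1.connect(crossfadeEffect, 0); // port by index
videoNode1.connect(crossfadeEffect, "u_image_a"); // port by name
```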

@@ -333,3 +335,3 @@ //

inputs:["u_image"]
};
```

@@ -387,4 +389,4 @@

},
inputs:["u_image"] //the names of the uniform sampler2D's in the fragment shader which represent the texture inputs to the effect.
};
inputs:["u_image"] //the names of the uniform sampler2D's in the fragment shader which represent the texture inputs to the effect.
};
```
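A definition shaped like this can then be instantiated through the context. A minimal sketch, assuming the object above is bound to a hypothetical customEffectDefinition variable:

```javascript
// Instantiate an effect node from a hand-written definition (hypothetical name).
var customEffect = ctx.effect(customEffectDefinition);
videoNode.connect(customEffect);
customEffect.connect(ctx.destination);
```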

@@ -408,2 +410,2 @@

The library is written in ES6 and cross-compiled using Babel.
let aaf_video_crop = {
"title":"AAF Video Crop Effect",
"description": "A crop effect based on the AAF spec.",
"vertexShader" : "\
title: "AAF Video Crop Effect",
description: "A crop effect based on the AAF spec.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -29,11 +31,11 @@ uniform sampler2D u_image;\

}",
"properties":{
"cropLeft":{"type":"uniform", "value":-1.0},
"cropRight":{"type":"uniform", "value":1.0},
"cropTop":{"type":"uniform", "value": -1.0},
"cropBottom":{"type":"uniform", "value": 1.0}
properties: {
cropLeft: { type: "uniform", value: -1.0 },
cropRight: { type: "uniform", value: 1.0 },
cropTop: { type: "uniform", value: -1.0 },
cropBottom: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default aaf_video_crop;
let aaf_video_flip = {
"title":"AAF Video Scale Effect",
"description": "A flip effect based on the AAF spec. Mirrors the image in the x-axis",
"vertexShader" : "\
title: "AAF Video Scale Effect",
description: "A flip effect based on the AAF spec. Mirrors the image in the x-axis",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -22,7 +24,6 @@ uniform sampler2D u_image;\

}",
"properties":{
},
"inputs":["u_image"]
properties: {},
inputs: ["u_image"]
};
export default aaf_video_flip;
let aaf_video_flop = {
"title":"AAF Video Flop Effect",
"description": "A flop effect based on the AAF spec. Mirrors the image in the y-axis",
"vertexShader" : "\
title: "AAF Video Flop Effect",
description: "A flop effect based on the AAF spec. Mirrors the image in the y-axis",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -22,7 +24,6 @@ uniform sampler2D u_image;\

}",
"properties":{
},
"inputs":["u_image"]
properties: {},
inputs: ["u_image"]
};
export default aaf_video_flop;
let aaf_video_position = {
"title":"AAF Video Position Effect",
"description": "A position effect based on the AAF spec.",
"vertexShader" : "\
title: "AAF Video Position Effect",
description: "A position effect based on the AAF spec.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -28,9 +30,9 @@ uniform sampler2D u_image;\

}",
"properties":{
"positionOffsetX":{"type":"uniform", "value":0.0},
"positionOffsetY":{"type":"uniform", "value":0.0}
properties: {
positionOffsetX: { type: "uniform", value: 0.0 },
positionOffsetY: { type: "uniform", value: 0.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default aaf_video_position;

@@ -1,5 +0,6 @@

- let aaf_video_scale = {
- "title":"AAF Video Scale Effect",
- "description": "A scale effect based on the AAF spec.",
- "vertexShader" : "\
+ let aaf_video_scale = {
+ title: "AAF Video Scale Effect",
+ description: "A scale effect based on the AAF spec.",
+ vertexShader:
+ "\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -28,9 +30,9 @@ uniform sampler2D u_image;\

}",
"properties":{
"scaleX":{"type":"uniform", "value":1.0},
"scaleY":{"type":"uniform", "value":1.0}
properties: {
scaleX: { type: "uniform", value: 1.0 },
scaleY: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default aaf_video_scale;
let colorThreshold = {
"title":"Color Threshold",
"description": "Turns all pixels with a greater value than the specified threshold transparent.",
"vertexShader" : "\
title: "Color Threshold",
description: "Turns all pixels with a greater value than the specified threshold transparent.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -27,9 +29,9 @@ uniform sampler2D u_image;\

}",
"properties":{
"a":{"type":"uniform", "value":0.0},
"colorAlphaThreshold":{"type":"uniform", "value":[0.0,0.55,0.0]}
properties: {
a: { type: "uniform", value: 0.0 },
colorAlphaThreshold: { type: "uniform", value: [0.0, 0.55, 0.0] }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default colorThreshold;
let combine = {
"title":"Combine",
"description": "A basic effect which renders the input to the output, Typically used as a combine node for layering up media with alpha transparency.",
"vertexShader" : "\
title: "Combine",
description:
"A basic effect which renders the input to the output, Typically used as a combine node for layering up media with alpha transparency.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +14,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -23,8 +26,8 @@ uniform sampler2D u_image;\

}",
"properties":{
"a":{"type":"uniform", "value":0.0}
properties: {
a: { type: "uniform", value: 0.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default combine;
let crop = {
"title":"Primer Simple Crop",
"description": "A simple crop processors for primer",
"vertexShader" : "\
title: "Primer Simple Crop",
description: "A simple crop processors for primer",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -30,11 +32,11 @@ uniform sampler2D u_image;\

}",
"properties":{
"x":{type:"uniform", value:0.0},
"y":{type:"uniform", value:0.0},
"width":{type:"uniform", value:1.0},
"height":{type:"uniform", value:1.0},
properties: {
x: { type: "uniform", value: 0.0 },
y: { type: "uniform", value: 0.0 },
width: { type: "uniform", value: 1.0 },
height: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default crop;
let crossfade = {
"title":"Cross-Fade",
"description": "A cross-fade effect. Typically used as a transistion.",
"vertexShader" : "\
title: "Cross-Fade",
description: "A cross-fade effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -33,8 +35,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default crossfade;

@@ -27,14 +27,14 @@ import aaf_video_scale from "./aaf_video_scale.js";

DREAMFADE: dreamfade,
- HORIZONTAL_WIPE:horizontalWipe,
- VERTICAL_WIPE:verticalWipe,
- RANDOM_DISSOLVE:randomDissolve,
- STATIC_DISSOLVE:staticDissolve,
- STATIC_EFFECT:staticEffect,
+ HORIZONTAL_WIPE: horizontalWipe,
+ VERTICAL_WIPE: verticalWipe,
+ RANDOM_DISSOLVE: randomDissolve,
+ STATIC_DISSOLVE: staticDissolve,
+ STATIC_EFFECT: staticEffect,
TO_COLOR_AND_BACK: toColorAndBackFade,
- STAR_WIPE:starWipe,
+ STAR_WIPE: starWipe,
COMBINE: combine,
COLORTHRESHOLD: colorThreshold,
MONOCHROME: monochrome,
- HORIZONTAL_BLUR:horizontalBlur,
- VERTICAL_BLUR:verticalBlur,
+ HORIZONTAL_BLUR: horizontalBlur,
+ VERTICAL_BLUR: verticalBlur,
AAF_VIDEO_CROP: aaf_video_crop,

@@ -41,0 +41,0 @@ AAF_VIDEO_POSITION: aaf_video_position,

let dreamfade = {
"title":"Dream-Fade",
"description": "A wobbly dream effect. Typically used as a transistion.",
"vertexShader" : "\
title: "Dream-Fade",
description: "A wobbly dream effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -35,8 +37,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default dreamfade;
let horizontal_blur = {
"title":"Horizontal Blur",
"description": "A horizontal blur effect. Adpated from http://xissburg.com/faster-gaussian-blur-in-glsl/",
"vertexShader" : "\
title: "Horizontal Blur",
description:
"A horizontal blur effect. Adpated from http://xissburg.com/faster-gaussian-blur-in-glsl/",
vertexShader:
"\
attribute vec2 a_position;\

@@ -28,3 +30,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -52,8 +55,8 @@ uniform sampler2D u_image;\

}",
"properties":{
"blurAmount":{"type":"uniform", "value":1.0}
properties: {
blurAmount: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default horizontal_blur;
let horizontal_wipe = {
"title":"Horizontal Wipe",
"description": "A horizontal wipe effect. Typically used as a transistion.",
"vertexShader" : "\
title: "Horizontal Wipe",
description: "A horizontal wipe effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -29,8 +31,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default horizontal_wipe;
let monochrome = {
"title":"Monochrome",
"description": "Change images to a single chroma (e.g can be used to make a black & white filter). Input color mix and output color mix can be adjusted.",
"vertexShader" : "\
title: "Monochrome",
description:
"Change images to a single chroma (e.g can be used to make a black & white filter). Input color mix and output color mix can be adjusted.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +14,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -28,9 +31,9 @@ uniform sampler2D u_image;\

}",
"properties":{
"inputMix":{"type":"uniform", "value":[0.4,0.6,0.2]},
"outputMix":{"type":"uniform", "value":[1.0,1.0,1.0]}
properties: {
inputMix: { type: "uniform", value: [0.4, 0.6, 0.2] },
outputMix: { type: "uniform", value: [1.0, 1.0, 1.0] }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default monochrome;
const opacity = {
"title": "Opacity",
"description": "Sets the opacity of an input.",
"vertexShader": `
title: "Opacity",
description: "Sets the opacity of an input.",
vertexShader: `
attribute vec2 a_position;

@@ -12,3 +12,3 @@ attribute vec2 a_texCoord;

}`,
"fragmentShader": `
fragmentShader: `
precision mediump float;

@@ -24,8 +24,8 @@ uniform sampler2D u_image;

}`,
"properties": {
"opacity": {"type": "uniform", "value": 0.7},
properties: {
opacity: { type: "uniform", value: 0.7 }
},
"inputs": ["u_image"],
inputs: ["u_image"]
};
export default opacity;
let randomDissolve = {
"title":"Random Dissolve",
"description": "A random dissolve effect. Typically used as a transistion.",
"vertexShader" : "\
title: "Random Dissolve",
description: "A random dissolve effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -32,8 +34,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default randomDissolve;
let starWipe = {
"title":"Star Wipe Fade",
"description": "A classic star wipe transistion. Typically used as a transistion.",
"vertexShader" : "\
title: "Star Wipe Fade",
description: "A classic star wipe transistion. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -93,8 +95,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":1.0}
properties: {
mix: { type: "uniform", value: 1.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default starWipe;
let staticDissolve = {
"title":"Static Dissolve",
"description": "A static dissolve effect. Typically used as a transistion.",
"vertexShader" : "\
title: "Static Dissolve",
description: "A static dissolve effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -33,8 +35,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default staticDissolve;
let staticEffect = {
"title":"Static",
"description": "A static effect to add pseudo random noise to a video",
"vertexShader" : "\
title: "Static",
description: "A static effect to add pseudo random noise to a video",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -30,9 +32,9 @@ uniform sampler2D u_image;\

}",
"properties":{
"weight":{"type":"uniform", "value":[1.0,1.0,1.0]},
"amount":{"type":"uniform", "value":1.0}
properties: {
weight: { type: "uniform", value: [1.0, 1.0, 1.0] },
amount: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default staticEffect;
let toColorAndBackFade = {
"title":"To Color And Back Fade",
"description": "A fade to black and back effect. Setting mix to 0.5 is a fully solid color frame. Typically used as a transistion.",
"vertexShader" : "\
title: "To Color And Back Fade",
description:
"A fade to black and back effect. Setting mix to 0.5 is a fully solid color frame. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +14,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -31,8 +34,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0},
"color":{"type":"uniform", "value":[0.0,0.0,0.0,0.0]}
properties: {
mix: { type: "uniform", value: 0.0 },
color: { type: "uniform", value: [0.0, 0.0, 0.0, 0.0] }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default toColorAndBackFade;
let verticalBlur = {
"title":"Vertical Blur",
"description": "A vertical blur effect. Adpated from http://xissburg.com/faster-gaussian-blur-in-glsl/",
"vertexShader" : "\
title: "Vertical Blur",
description:
"A vertical blur effect. Adpated from http://xissburg.com/faster-gaussian-blur-in-glsl/",
vertexShader:
"\
attribute vec2 a_position;\

@@ -28,3 +30,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -52,8 +55,8 @@ uniform sampler2D u_image;\

}",
"properties":{
"blurAmount":{"type":"uniform", "value":1.0}
properties: {
blurAmount: { type: "uniform", value: 1.0 }
},
"inputs":["u_image"]
inputs: ["u_image"]
};
export default verticalBlur;
let verticalWipe = {
"title":"vertical Wipe",
"description": "A vertical wipe effect. Typically used as a transistion.",
"vertexShader" : "\
title: "vertical Wipe",
description: "A vertical wipe effect. Typically used as a transistion.",
vertexShader:
"\
attribute vec2 a_position;\

@@ -12,3 +13,4 @@ attribute vec2 a_texCoord;\

}",
"fragmentShader" : "\
fragmentShader:
"\
precision mediump float;\

@@ -29,8 +31,8 @@ uniform sampler2D u_image_a;\

}",
"properties":{
"mix":{"type":"uniform", "value":0.0}
properties: {
mix: { type: "uniform", value: 0.0 }
},
"inputs":["u_image_a","u_image_b"]
inputs: ["u_image_a", "u_image_b"]
};
export default verticalWipe;

@@ -6,11 +6,11 @@ //Matthew Shotton, R&D User Experience,© BBC 2015

/**
* Initialise an instance of a DestinationNode.
*
* There should only be a single instance of a DestinationNode per VideoContext instance. A VideoContext's destination can be accessed like so: videoContext.destination.
*
* You should not instantiate this directly.
*/
- constructor(gl, renderGraph){
- let vertexShader = "\
+ constructor(gl, renderGraph) {
+ let vertexShader =
+ "\
attribute vec2 a_position;\

@@ -24,3 +24,4 @@ attribute vec2 a_texCoord;\

- let fragmentShader = "\
+ let fragmentShader =
+ "\
precision mediump float;\

@@ -34,3 +35,8 @@ uniform sampler2D u_image;\

- let deffinition = {fragmentShader:fragmentShader, vertexShader:vertexShader, properties:{}, inputs:["u_image"]};
+ let deffinition = {
+ fragmentShader: fragmentShader,
+ vertexShader: vertexShader,
+ properties: {},
+ inputs: ["u_image"]
+ };

@@ -41,4 +47,4 @@ super(gl, renderGraph, deffinition, deffinition.inputs, false);

- _render(){
+ _render() {
let gl = this._gl;

@@ -51,3 +57,3 @@ gl.bindFramebuffer(gl.FRAMEBUFFER, null);

- this.inputs.forEach((node)=>{
+ this.inputs.forEach(node => {
super._render();

@@ -58,3 +64,3 @@ //map the input textures input the node

- for(let mapping of this._inputTextureUnitMapping ){
+ for (let mapping of this._inputTextureUnitMapping) {
gl.activeTexture(mapping.textureUnit);

@@ -61,0 +67,0 @@ let textureLocation = gl.getUniformLocation(this._program, mapping.name);

//Matthew Shotton, R&D User Experience,© BBC 2015
- export function ConnectException(message){
+ export function ConnectException(message) {
this.message = message;

@@ -7,5 +7,5 @@ this.name = "ConnectionException";

- export function RenderException(message){
+ export function RenderException(message) {
this.message = message;
this.name = "RenderException";
}
//Matthew Shotton, R&D User Experience,© BBC 2015
class GraphNode {
/**
* Base class from which all processing and source nodes are derived.
*/
- constructor(gl, renderGraph, inputNames, limitConnections=false){
+ constructor(gl, renderGraph, inputNames, limitConnections = false) {
this._renderGraph = renderGraph;

@@ -15,3 +15,3 @@ this._limitConnections = limitConnections;

this._renderGraph = renderGraph;
- this._rendered =false;
+ this._rendered = false;
this._displayName = "GraphNode";

@@ -21,7 +21,8 @@ }

/**
* Get a string representation of the class name.
*
* @return String A string of the class name.
*/
- get displayName(){
+ get displayName() {
return this._displayName;

@@ -31,7 +32,8 @@ }

/**
* Get the names of the inputs to this node.
*
* @return {String[]} An array of the names of the inputs of the node.
*/
- get inputNames(){
+ get inputNames() {
return this._inputNames.slice();

@@ -41,8 +43,8 @@ }

/**
* The maximum number of connections that can be made to this node. If there is no limit this will return Infinity.
*
* @return {number} The number of connections which can be made to this node.
*/
- get maximumConnections(){
- if (this._limitConnections ===false) return Infinity;
+ get maximumConnections() {
+ if (this._limitConnections === false) return Infinity;
return this._inputNames.length;

@@ -52,18 +54,20 @@ }

/**
* Get an array of all the nodes which connect to this node.
*
* @return {GraphNode[]} An array of nodes which connect to this node.
*/
- get inputs(){
+ get inputs() {
let result = this._renderGraph.getInputsForNode(this);
- result = result.filter(function(n){return n !== undefined;});
+ result = result.filter(function(n) {
+ return n !== undefined;
+ });
return result;
}
/**
* Get an array of all the nodes which this node outputs to.
*
* @return {GraphNode[]} An array of nodes which this node connects to.
*/
- get outputs(){
+ get outputs() {
return this._renderGraph.getOutputsForNode(this);

@@ -73,32 +77,31 @@ }

/**
* Get whether the node has been destroyed or not.
*
* @return {boolean} A true/false value of whether the node has been destroyed or not.
*/
- get destroyed(){
+ get destroyed() {
return this._destroyed;
}
/**
* Connect this node to the targetNode
*
* @param {GraphNode} targetNode - the node to connect.
* @param {(number| String)} [targetPort] - the port on the targetNode to connect to, this can be an index, a string identifier, or undefined (in which case the next available port will be connected to).
*
*/
- connect(targetNode, targetPort){
- return (this._renderGraph.registerConnection(this, targetNode, targetPort));
+ connect(targetNode, targetPort) {
+ return this._renderGraph.registerConnection(this, targetNode, targetPort);
}
/**
* Disconnect this node from the targetNode. If targetNode is undefined remove all out-bound connections.
*
* @param {GraphNode} [targetNode] - the node to disconnect from. If undefined, disconnect from all nodes.
*
*/
- disconnect(targetNode){
- if (targetNode === undefined){
+ disconnect(targetNode) {
+ if (targetNode === undefined) {
let toRemove = this._renderGraph.getOutputsForNode(this);
- toRemove.forEach((target) => this._renderGraph.unregisterConnection(this, target));
+ toRemove.forEach(target => this._renderGraph.unregisterConnection(this, target));
if (toRemove.length > 0) return true;

@@ -111,7 +114,7 @@ return false;

/**
* Destroy this node, removing it from the graph.
*/
- destroy(){
+ destroy() {
this.disconnect();
- for (let input of this.inputs){
+ for (let input of this.inputs) {
input.disconnect(this);

@@ -118,0 +121,0 @@ }

@@ -5,9 +5,19 @@ //Matthew Shotton, R&D User Experience,© BBC 2015

- class CompositingNode extends ProcessingNode{
+ class CompositingNode extends ProcessingNode {
/**
* Initialise an instance of a Compositing Node. You should not instantiate this directly, but use VideoContext.createCompositingNode().
*/
- constructor(gl, renderGraph, definition){
+ constructor(gl, renderGraph, definition) {
let placeholderTexture = createElementTexture(gl);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0,0,0,0]));
+ gl.texImage2D(
+ gl.TEXTURE_2D,
+ 0,
+ gl.RGBA,
+ 1,
+ 1,
+ 0,
+ gl.RGBA,
+ gl.UNSIGNED_BYTE,
+ new Uint8Array([0, 0, 0, 0])
+ );
super(gl, renderGraph, definition, definition.inputs, false);

@@ -18,6 +28,12 @@ this._placeholderTexture = placeholderTexture;

- _render(){
+ _render() {
let gl = this._gl;
gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebuffer);
- gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this._texture,0);
+ gl.framebufferTexture2D(
+ gl.FRAMEBUFFER,
+ gl.COLOR_ATTACHMENT0,
+ gl.TEXTURE_2D,
+ this._texture,
+ 0
+ );
gl.clearColor(0, 0, 0, 0); // transparent black

@@ -27,4 +43,4 @@ gl.clear(gl.COLOR_BUFFER_BIT);

- this.inputs.forEach((node) => {
- if (node === undefined)return;
+ this.inputs.forEach(node => {
+ if (node === undefined) return;
super._render();

@@ -36,3 +52,3 @@

- for(let mapping of this._inputTextureUnitMapping ){
+ for (let mapping of this._inputTextureUnitMapping) {
gl.activeTexture(mapping.textureUnit);

@@ -47,3 +63,3 @@ let textureLocation = gl.getUniformLocation(this._program, mapping.name);

});
gl.bindFramebuffer(gl.FRAMEBUFFER, null);

@@ -50,0 +66,0 @@ }

@@ -5,20 +5,36 @@ //Matthew Shotton, R&D User Experience,© BBC 2015

- class EffectNode extends ProcessingNode{
+ class EffectNode extends ProcessingNode {
/**
* Initialise an instance of an EffectNode. You should not instantiate this directly, but use VideoContext.createEffectNode().
*/
- constructor(gl, renderGraph, definition){
+ constructor(gl, renderGraph, definition) {
let placeholderTexture = createElementTexture(gl);
- gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0,0,0,0]));
+ gl.texImage2D(
+ gl.TEXTURE_2D,
+ 0,
+ gl.RGBA,
+ 1,
+ 1,
+ 0,
+ gl.RGBA,
+ gl.UNSIGNED_BYTE,
+ new Uint8Array([0, 0, 0, 0])
+ );
super(gl, renderGraph, definition, definition.inputs, true);
this._placeholderTexture = placeholderTexture;
this._displayName = "EffectNode";
}
- _render(){
+ _render() {
let gl = this._gl;
gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebuffer);
- gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this._texture,0);
+ gl.framebufferTexture2D(
+ gl.FRAMEBUFFER,
+ gl.COLOR_ATTACHMENT0,
+ gl.TEXTURE_2D,
+ this._texture,
+ 0
+ );
gl.clearColor(0, 0, 0, 0); // transparent black

@@ -33,6 +49,6 @@ gl.clear(gl.COLOR_BUFFER_BIT);

for (var i = 0; i < this._inputTextureUnitMapping.length; i++) {
let inputTexture = this._placeholderTexture;
let textureUnit = this._inputTextureUnitMapping[i].textureUnit;
let textureName = this._inputTextureUnitMapping[i].name;
- if (i < inputs.length && inputs[i] !== undefined){
+ if (i < inputs.length && inputs[i] !== undefined) {
inputTexture = inputs[i]._texture;

@@ -39,0 +55,0 @@ }

//Matthew Shotton, R&D User Experience,© BBC 2015
import GraphNode from "../graphnode";
- import { compileShader, createShaderProgram, createElementTexture, updateTexture } from "../utils.js";
+ import {
+ compileShader,
+ createShaderProgram,
+ createElementTexture,
+ updateTexture
+ } from "../utils.js";
import { RenderException } from "../exceptions.js";
- class ProcessingNode extends GraphNode{
+ class ProcessingNode extends GraphNode {
/**
* Initialise an instance of a ProcessingNode.
*
* This class is not used directly, but is extended to create CompositingNodes, TransitionNodes, and EffectNodes.
*/
- constructor(gl, renderGraph, definition, inputNames, limitConnections){
+ constructor(gl, renderGraph, definition, inputNames, limitConnections) {
super(gl, renderGraph, inputNames, limitConnections);

@@ -17,15 +22,18 @@ this._vertexShader = compileShader(gl, definition.vertexShader, gl.VERTEX_SHADER);

this._definition = definition;
- this._properties = {};//definition.properties;
+ this._properties = {}; //definition.properties;
//copy definition properties
- for(let propertyName in definition.properties){
+ for (let propertyName in definition.properties) {
let propertyValue = definition.properties[propertyName].value;
//if an array then shallow copy it
- if(Object.prototype.toString.call(propertyValue) === "[object Array]"){
+ if (Object.prototype.toString.call(propertyValue) === "[object Array]") {
propertyValue = definition.properties[propertyName].value.slice();
}
let propertyType = definition.properties[propertyName].type;
- this._properties[propertyName] = {type:propertyType, value:propertyValue};
+ this._properties[propertyName] = {
+ type: propertyType,
+ value: propertyValue
+ };
}
- this._inputTextureUnitMapping =[];
+ this._inputTextureUnitMapping = [];
this._maxTextureUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);

@@ -36,3 +44,13 @@ this._boundTextureUnits = 0;

this._texture = createElementTexture(gl);
- gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
+ gl.texImage2D(
+ gl.TEXTURE_2D,
+ 0,
+ gl.RGBA,
+ gl.canvas.width,
+ gl.canvas.height,
+ 0,
+ gl.RGBA,
+ gl.UNSIGNED_BYTE,
+ null
+ );
//compile the shader

@@ -44,10 +62,20 @@ this._program = createShaderProgram(gl, this._vertexShader, this._fragmentShader);

gl.bindFramebuffer(gl.FRAMEBUFFER, this._framebuffer);
- gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this._texture,0);
+ gl.framebufferTexture2D(
+ gl.FRAMEBUFFER,
+ gl.COLOR_ATTACHMENT0,
+ gl.TEXTURE_2D,
+ this._texture,
+ 0
+ );
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
//create properties on this object for the passed properties
- for (let propertyName in this._properties){
+ for (let propertyName in this._properties) {
Object.defineProperty(this, propertyName, {
- get:function(){return this._properties[propertyName].value;},
- set:function(passedValue){this._properties[propertyName].value = passedValue;}
+ get: function() {
+ return this._properties[propertyName].value;
+ },
+ set: function(passedValue) {
+ this._properties[propertyName].value = passedValue;
+ }
});

@@ -57,11 +85,13 @@ }

//create textures for any texture properties
- for (let propertyName in this._properties){
+ for (let propertyName in this._properties) {
let propertyValue = this._properties[propertyName].value;
- if (propertyValue instanceof Image){
+ if (propertyValue instanceof Image) {
this._properties[propertyName].texture = createElementTexture(gl);
this._properties[propertyName].texutreUnit = gl.TEXTURE0 + this._boundTextureUnits;
this._boundTextureUnits += 1;
- this._parameterTextureCount +=1;
- if (this._boundTextureUnits > this._maxTextureUnits){
- throw new RenderException("Trying to bind more than available textures units to shader");
+ this._parameterTextureCount += 1;
+ if (this._boundTextureUnits > this._maxTextureUnits) {
+ throw new RenderException(
+ "Trying to bind more than available textures units to shader"
+ );
}

@@ -72,16 +102,23 @@ }

//calculate texture units for input textures
- for(let inputName of definition.inputs){
- this._inputTextureUnitMapping.push({name:inputName, textureUnit:gl.TEXTURE0 + this._boundTextureUnits});
+ for (let inputName of definition.inputs) {
+ this._inputTextureUnitMapping.push({
+ name: inputName,
+ textureUnit: gl.TEXTURE0 + this._boundTextureUnits
+ });
this._boundTextureUnits += 1;
this._inputTextureCount += 1;
- if (this._boundTextureUnits > this._maxTextureUnits){
- throw new RenderException("Trying to bind more than available textures units to shader");
+ if (this._boundTextureUnits > this._maxTextureUnits) {
+ throw new RenderException(
+ "Trying to bind more than available textures units to shader"
+ );
}
}
//find the locations of the properties in the compiled shader
- for (let propertyName in this._properties){
- if (this._properties[propertyName].type === "uniform"){
- this._properties[propertyName].location = this._gl.getUniformLocation(this._program, propertyName);
+ for (let propertyName in this._properties) {
+ if (this._properties[propertyName].type === "uniform") {
+ this._properties[propertyName].location = this._gl.getUniformLocation(
+ this._program,
+ propertyName
+ );
}

@@ -92,3 +129,2 @@ }

//Other setup

@@ -102,10 +138,5 @@ let positionLocation = gl.getAttribLocation(this._program, "a_position");

gl.ARRAY_BUFFER,
- new Float32Array([
- 1.0, 1.0,
- 0.0, 1.0,
- 1.0, 0.0,
- 1.0, 0.0,
- 0.0, 1.0,
- 0.0, 0.0]),
- gl.STATIC_DRAW);
+ new Float32Array([1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0]),
+ gl.STATIC_DRAW
+ );
let texCoordLocation = gl.getAttribLocation(this._program, "a_texCoord");

@@ -115,16 +146,15 @@ gl.enableVertexAttribArray(texCoordLocation);

this._displayName = "ProcessingNode";
}
/**
* Sets the passed processing node property to the passed value.
* @param {string} name - The name of the processing node parameter to modify.
* @param {Object} value - The value to set it to.
*
* @example
* var ctx = new VideoContext();
* var monoNode = ctx.effect(VideoContext.DEFINITIONS.MONOCHROME);
* monoNode.setProperty("inputMix", [1.0,0.0,0.0]); //Just use red channel
*/
- setProperty(name, value){
+ setProperty(name, value) {
this._properties[name].value = value;

@@ -134,12 +164,12 @@ }

/**
* Gets the value of the passed processing node property.
* @param {string} name - The name of the processing node parameter to get.
*
* @example
* var ctx = new VideoContext();
* var monoNode = ctx.effect(VideoContext.DEFINITIONS.MONOCHROME);
* console.log(monoNode.getProperty("inputMix")); //Will output [0.4,0.6,0.2], the default value from the effect definition.
*
*/
- getProperty(name){
+ getProperty(name) {
return this._properties[name].value;

@@ -149,10 +179,10 @@ }

/**
* Destroy and clean-up the node.
*/
- destroy(){
+ destroy() {
super.destroy();
//destroy textures for any texture properties
- for (let propertyName in this._properties){
+ for (let propertyName in this._properties) {
let propertyValue = this._properties[propertyName].value;
- if (propertyValue instanceof Image){
+ if (propertyValue instanceof Image) {
this._gl.deleteTexture(this._properties[propertyName].texture);

@@ -177,11 +207,11 @@ this._texture = undefined;

- _update(currentTime){
+ _update(currentTime) {
this._currentTime = currentTime;
}
- _seek(currentTime){
+ _seek(currentTime) {
this._currentTime = currentTime;
}
- _render(){
+ _render() {
this._rendered = true;

@@ -199,3 +229,3 @@ let gl = this._gl;

- for (let propertyName in this._properties){
+ for (let propertyName in this._properties) {
let propertyValue = this._properties[propertyName].value;

@@ -206,19 +236,23 @@ let propertyType = this._properties[propertyName].type;

if (typeof propertyValue === "number"){
if (typeof propertyValue === "number") {
gl.uniform1f(propertyLocation, propertyValue);
}
else if( Object.prototype.toString.call(propertyValue) === "[object Array]"){
if(propertyValue.length === 1){
} else if (Object.prototype.toString.call(propertyValue) === "[object Array]") {
if (propertyValue.length === 1) {
gl.uniform1fv(propertyLocation, propertyValue);
} else if(propertyValue.length === 2){
} else if (propertyValue.length === 2) {
gl.uniform2fv(propertyLocation, propertyValue);
} else if(propertyValue.length === 3){
} else if (propertyValue.length === 3) {
gl.uniform3fv(propertyLocation, propertyValue);
} else if(propertyValue.length === 4){
} else if (propertyValue.length === 4) {
gl.uniform4fv(propertyLocation, propertyValue);
} else{
console.debug("Shader parameter", propertyName, "is too long an array:", propertyValue);
} else {
console.debug(
"Shader parameter",
propertyName,
"is too long an array:",
propertyValue
);
}
} else if(propertyValue instanceof Image){
let texture = this._properties[propertyName].texture;
} else if (propertyValue instanceof Image) {
let texture = this._properties[propertyName].texture;
let textureUnit = this._properties[propertyName].texutreUnit;

@@ -231,4 +265,3 @@ updateTexture(gl, texture, propertyValue);

gl.bindTexture(gl.TEXTURE_2D, texture);
- }
- else{
+ } else {
//TODO - add tests for textures

@@ -240,3 +273,2 @@ /*gl.activeTexture(gl.TEXTURE0 + textureOffset);

}
}

@@ -243,0 +275,0 @@ }

//Matthew Shotton, R&D User Experience,© BBC 2015
import EffectNode from "./effectnode";
- class TransitionNode extends EffectNode{
+ class TransitionNode extends EffectNode {
/**
* Initialise an instance of a TransitionNode. You should not instantiate this directly, but use VideoContext.createTransitionNode().
*/
- constructor(gl, renderGraph, definition){
+ constructor(gl, renderGraph, definition) {
super(gl, renderGraph, definition);
this._transitions = {};
//save a version of the original property values
this._initialPropertyValues = {};
- for (let propertyName in this._properties){
+ for (let propertyName in this._properties) {
this._initialPropertyValues[propertyName] = this._properties[propertyName].value;

@@ -20,9 +20,13 @@ }

- _doesTransitionFitOnTimeline(testTransition){
+ _doesTransitionFitOnTimeline(testTransition) {
if (this._transitions[testTransition.property] === undefined) return true;
for (let transition of this._transitions[testTransition.property]) {
- if (testTransition.start > transition.start && testTransition.start < transition.end)return false;
- if (testTransition.end > transition.start && testTransition.end < transition.end)return false;
- if(transition.start > testTransition.start && transition.start < testTransition.end) return false;
- if(transition.end > testTransition.start && transition.end < testTransition.end) return false;
+ if (testTransition.start > transition.start && testTransition.start < transition.end)
+ return false;
+ if (testTransition.end > transition.start && testTransition.end < transition.end)
+ return false;
+ if (transition.start > testTransition.start && transition.start < testTransition.end)
+ return false;
+ if (transition.end > testTransition.start && transition.end < testTransition.end)
+ return false;
}

@@ -32,7 +36,8 @@ return true;

- _insertTransitionInTimeline(transition){
- if (this._transitions[transition.property] === undefined) this._transitions[transition.property] = [];
+ _insertTransitionInTimeline(transition) {
+ if (this._transitions[transition.property] === undefined)
+ this._transitions[transition.property] = [];
this._transitions[transition.property].push(transition);
- this._transitions[transition.property].sort(function(a,b){
+ this._transitions[transition.property].sort(function(a, b) {
return a.start - b.start;

@@ -43,15 +48,21 @@ });

/**
* Create a transition on the timeline.
*
* @param {number} startTime - The time at which the transition should start (relative to currentTime of video context).
* @param {number} endTime - The time at which the transition should be completed by (relative to currentTime of video context).
* @param {number} currentValue - The value to start the transition at.
* @param {number} targetValue - The value to transition to by endTime.
* @param {String} propertyName - The name of the property to apply the transition to, if undefined default to "mix".
*
* @return {Boolean} returns True if a transition is successfully added, false otherwise.
*/
- transition(startTime, endTime, currentValue, targetValue, propertyName="mix"){
- let transition = {start:startTime + this._currentTime, end:endTime + this._currentTime, current:currentValue, target:targetValue, property:propertyName};
- if (!this._doesTransitionFitOnTimeline(transition))return false;
+ transition(startTime, endTime, currentValue, targetValue, propertyName = "mix") {
+ let transition = {
+ start: startTime + this._currentTime,
+ end: endTime + this._currentTime,
+ current: currentValue,
+ target: targetValue,
+ property: propertyName
+ };
+ if (!this._doesTransitionFitOnTimeline(transition)) return false;
this._insertTransitionInTimeline(transition);

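To make the relative timing concrete, a hedged usage sketch contrasting transition() with the absolute-time transitionAt() defined in the next hunk (crossfadeNode is illustrative):

```javascript
// transition(): times are offsets from the context's currentTime when called.
crossfadeNode.transition(8.0, 10.0, 0.0, 1.0, "mix"); // runs 8s-10s from now

// transitionAt(): times are absolute positions on the context timeline.
crossfadeNode.transitionAt(8.0, 10.0, 0.0, 1.0, "mix"); // runs at t=8s-10s
```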
@@ -61,17 +72,22 @@ return true;

/**
* Create a transition on the timeline at an absolute time.
*
* @param {number} startTime - The time at which the transition should start (relative to time 0).
* @param {number} endTime - The time at which the transition should be completed by (relative to time 0).
* @param {number} currentValue - The value to start the transition at.
* @param {number} targetValue - The value to transition to by endTime.
* @param {String} propertyName - The name of the property to apply the transition to, if undefined default to "mix".
*
* @return {Boolean} returns True if a transition is successfully added, false otherwise.
*/
- transitionAt(startTime, endTime, currentValue, targetValue, propertyName="mix"){
- let transition = {start:startTime, end:endTime, current:currentValue, target:targetValue, property:propertyName};
- if (!this._doesTransitionFitOnTimeline(transition))return false;
+ transitionAt(startTime, endTime, currentValue, targetValue, propertyName = "mix") {
+ let transition = {
+ start: startTime,
+ end: endTime,
+ current: currentValue,
+ target: targetValue,
+ property: propertyName
+ };
+ if (!this._doesTransitionFitOnTimeline(transition)) return false;
this._insertTransitionInTimeline(transition);

@@ -82,31 +98,31 @@ return true;

/**
* Clear all transitions on the passed property. If no property is defined clear all transitions on the node.
*
* @param {String} propertyName - The name of the property to clear transitions on, if undefined clear all transitions on the node.
*/
- clearTransitions(propertyName){
- if (propertyName === undefined){
+ clearTransitions(propertyName) {
+ if (propertyName === undefined) {
this._transitions = {};
- }else{
+ } else {
this._transitions[propertyName] = [];
}
}
/**
* Clear a transition on the passed property that the specified time lies within.
*
* @param {String} propertyName - The name of the property to clear a transition on.
* @param {number} time - A time which lies within the property you're trying to clear.
*
* @return {Boolean} returns True if a transition is removed, false otherwise.
*/
- clearTransition(propertyName, time){
+ clearTransition(propertyName, time) {
let transitionIndex = undefined;
for (var i = 0; i < this._transitions[propertyName].length; i++) {
let transition = this._transitions[propertyName][i];
- if (time > transition.start && time < transition.end){
+ if (time > transition.start && time < transition.end) {
transitionIndex = i;
}
}
- if(transitionIndex !== undefined){
+ if (transitionIndex !== undefined) {
this._transitions[propertyName].splice(transitionIndex, 1);

@@ -118,7 +134,7 @@ return true;

- _update(currentTime){
+ _update(currentTime) {
super._update(currentTime);
- for (let propertyName in this._transitions){
+ for (let propertyName in this._transitions) {
let value = this[propertyName];
- if (this._transitions[propertyName].length > 0){
+ if (this._transitions[propertyName].length > 0) {
value = this._transitions[propertyName][0].current;

@@ -130,3 +146,3 @@ }

let transition = this._transitions[propertyName][i];
- if (currentTime > transition.end){
+ if (currentTime > transition.end) {
value = transition.target;

@@ -136,7 +152,9 @@ continue;

- if (currentTime > transition.start && currentTime < transition.end){
+ if (currentTime > transition.start && currentTime < transition.end) {
let difference = transition.target - transition.current;
- let progress = (this._currentTime - transition.start)/(transition.end - transition.start);
+ let progress =
+ (this._currentTime - transition.start) /
+ (transition.end - transition.start);
transitionActive = true;
- this[propertyName] = transition.current + (difference * progress);
+ this[propertyName] = transition.current + difference * progress;
break;

@@ -146,3 +164,3 @@ }

- if(!transitionActive)this[propertyName] = value;
+ if (!transitionActive) this[propertyName] = value;
}

@@ -149,0 +167,0 @@ }

//Matthew Shotton, R&D User Experience,© BBC 2015
import { ConnectException } from "./exceptions.js";
class RenderGraph {
/**
* Manages the rendering graph.
*/
- constructor(){
+ constructor() {
this.connections = [];

@@ -14,11 +13,11 @@ }

/**
* Get a list of nodes which are connected to the output of the passed node.
*
* @param {GraphNode} node - the node to get the outputs for.
* @return {GraphNode[]} An array of the nodes which are connected to the output.
*/
- getOutputsForNode(node){
+ getOutputsForNode(node) {
let results = [];
- this.connections.forEach(function(connection){
- if (connection.source === node){
+ this.connections.forEach(function(connection) {
+ if (connection.source === node) {
results.push(connection.destination);

@@ -29,13 +28,13 @@ }

}
/**
* Get a list of nodes which are connected, by input name, to the given node. Array contains objects of the form: {"source":sourceNode, "type":"name", "name":inputName, "destination":destinationNode}.
*
* @param {GraphNode} node - the node to get the named inputs for.
* @return {Object[]} An array of objects representing the nodes and connection type, which are connected to the named inputs for the node.
*/
- getNamedInputsForNode(node){
+ getNamedInputsForNode(node) {
let results = [];
- this.connections.forEach(function(connection){
- if (connection.destination === node && connection.type === "name"){
+ this.connections.forEach(function(connection) {
+ if (connection.destination === node && connection.type === "name") {
results.push(connection);

@@ -48,28 +47,28 @@ }

/**
* Get a list of nodes which are connected, by z-index name, to the given node. Array contains objects of the form: {"source":sourceNode, "type":"zIndex", "zIndex":0, "destination":destinationNode}.
*
* @param {GraphNode} node - the node to get the z-index referenced inputs for.
* @return {Object[]} An array of objects representing the nodes and connection type, which are connected by z-Index for the node.
*/
- getZIndexInputsForNode(node){
+ getZIndexInputsForNode(node) {
let results = [];
- this.connections.forEach(function(connection){
- if (connection.destination === node && connection.type === "zIndex"){
+ this.connections.forEach(function(connection) {
+ if (connection.destination === node && connection.type === "zIndex") {
results.push(connection);
}
});
- results.sort(function(a,b){
+ results.sort(function(a, b) {
return a.zIndex - b.zIndex;
});
return results;
}
/**
* Get a list of nodes which are connected as inputs to the given node. The length of the return array is always equal to the number of inputs for the node, with undefined taking the place of any inputs not connected.
*
* @param {GraphNode} node - the node to get the inputs for.
* @return {GraphNode[]} An array of GraphNodes which are connected to the node.
*/
- getInputsForNode(node){
+ getInputsForNode(node) {
let inputNames = node.inputNames;
let results = [];

@@ -79,8 +78,8 @@ let namedInputs = this.getNamedInputsForNode(node);

if(node._limitConnections === true){
if (node._limitConnections === true) {
for (let i = 0; i < inputNames.length; i++) {
results[i] = undefined;
}
for(let connection of namedInputs){
for (let connection of namedInputs) {
let index = inputNames.indexOf(connection.name);

@@ -91,3 +90,3 @@ results[index] = connection.source;

for (let i = 0; i < results.length; i++) {
if (results[i] === undefined && indexedInputs[indexedInputsIndex]!== undefined){
if (results[i] === undefined && indexedInputs[indexedInputsIndex] !== undefined) {
results[i] = indexedInputs[indexedInputsIndex].source;

@@ -97,7 +96,7 @@ indexedInputsIndex += 1;

}
}else{
for(let connection of namedInputs){
} else {
for (let connection of namedInputs) {
results.push(connection.source);
}
for(let connection of indexedInputs){
for (let connection of indexedInputs) {
results.push(connection.source);

@@ -110,11 +109,11 @@ }

/**
* Check if a named input on a node is available to connect to.
* @param {GraphNode} node - the node to check.
* @param {String} inputName - the named input to check.
*/
isInputAvailable(node, inputName){
* Check if a named input on a node is available to connect to.
* @param {GraphNode} node - the node to check.
* @param {String} inputName - the named input to check.
*/
isInputAvailable(node, inputName) {
if (node._inputNames.indexOf(inputName) === -1) return false;
for(let connection of this.connections){
if (connection.type === "name"){
if (connection.destination === node && connection.name === inputName){
for (let connection of this.connections) {
if (connection.type === "name") {
if (connection.destination === node && connection.name === inputName) {
return false;

@@ -128,19 +127,24 @@ }

/**
* Register a connection between two nodes.
*
* @param {GraphNode} sourceNode - the node to connect from.
* @param {GraphNode} destinationNode - the node to connect to.
* @param {(String | number)} [target] - the target port of the connection; this can be a string specifying a named port, a number specifying a port by index, or undefined, in which case the next available port will be connected to.
* @return {boolean} Will return true if the connection succeeds, otherwise a ConnectException is thrown.
*/
registerConnection(sourceNode, destinationNode, target){
if (destinationNode.inputs.length >= destinationNode.inputNames.length && destinationNode._limitConnections === true){
* Register a connection between two nodes.
*
* @param {GraphNode} sourceNode - the node to connect from.
* @param {GraphNode} destinationNode - the node to connect to.
* @param {(String | number)} [target] - the target port of the connection; this can be a string specifying a named port, a number specifying a port by index, or undefined, in which case the next available port will be connected to.
* @return {boolean} Will return true if the connection succeeds, otherwise a ConnectException is thrown.
*/
registerConnection(sourceNode, destinationNode, target) {
if (
destinationNode.inputs.length >= destinationNode.inputNames.length &&
destinationNode._limitConnections === true
) {
throw new ConnectException("Node has reached max number of inputs, can't connect");
}
if (destinationNode._limitConnections === false){
if (destinationNode._limitConnections === false) {
//check if connection is already made, if so raise a warning
const inputs = this.getInputsForNode(destinationNode);
if (inputs.includes(sourceNode)){
console.debug("WARNING - node connected mutliple times, removing previous connection");
if (inputs.includes(sourceNode)) {
console.debug(
"WARNING - node connected mutliple times, removing previous connection"
);
this.unregisterConnection(sourceNode, destinationNode);

@@ -150,36 +154,50 @@ }

if (typeof target === "number"){
if (typeof target === "number") {
//target is a specific z-index
this.connections.push({"source":sourceNode, "type":"zIndex", "zIndex":target, "destination":destinationNode});
} else if (typeof target === "string" && destinationNode._limitConnections){
this.connections.push({
source: sourceNode,
type: "zIndex",
zIndex: target,
destination: destinationNode
});
} else if (typeof target === "string" && destinationNode._limitConnections) {
//target is a named port
//make sure named port is free
if (this.isInputAvailable(destinationNode, target)){
this.connections.push({"source":sourceNode, "type":"name", "name":target, "destination":destinationNode});
}else{
throw new ConnectException("Port "+target+" is already connected to");
if (this.isInputAvailable(destinationNode, target)) {
this.connections.push({
source: sourceNode,
type: "name",
name: target,
destination: destinationNode
});
} else {
throw new ConnectException("Port " + target + " is already connected to");
}
} else{
} else {
//target is undefined so just make it a high zIndex
let indexedConns = this.getZIndexInputsForNode(destinationNode);
let index = 0;
if (indexedConns.length > 0)index = indexedConns[indexedConns.length-1].zIndex +1;
this.connections.push({"source":sourceNode, "type":"zIndex", "zIndex":index, "destination":destinationNode});
if (indexedConns.length > 0) index = indexedConns[indexedConns.length - 1].zIndex + 1;
this.connections.push({
source: sourceNode,
type: "zIndex",
zIndex: index,
destination: destinationNode
});
}
return true;
}
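//Illustrative sketch (not part of the source) of the three target forms
//accepted above; `graph`, `nodeA`, `compositor` and `effectNode` are
//hypothetical instances, and "u_image" a hypothetical named port.
//
//graph.registerConnection(nodeA, compositor);            //undefined: next free z-index
//graph.registerConnection(nodeA, compositor, 3);         //number: an explicit z-index
//graph.registerConnection(nodeA, effectNode, "u_image"); //string: a named port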
/**
* Remove a connection between two nodes.
* @param {GraphNode} sourceNode - the node to unregister the connection from.
* @param {GraphNode} destinationNode - the node to unregister the connection to.
* @return {boolean} Will return true if removing the connection succeeds, or false if there was no connection to remove.
*/
unregisterConnection(sourceNode, destinationNode){
* Remove a connection between two nodes.
* @param {GraphNode} sourceNode - the node to unregister the connection from.
* @param {GraphNode} destinationNode - the node to unregister the connection to.
* @return {boolean} Will return true if removing the connection succeeds, or false if there was no connection to remove.
*/
unregisterConnection(sourceNode, destinationNode) {
let toRemove = [];
this.connections.forEach(function(connection){
if (connection.source === sourceNode && connection.destination === destinationNode){
this.connections.forEach(function(connection) {
if (connection.source === sourceNode && connection.destination === destinationNode) {
toRemove.push(connection);

@@ -191,3 +209,3 @@ }

toRemove.forEach((removeNode) => {
toRemove.forEach(removeNode => {
let index = this.connections.indexOf(removeNode);

@@ -200,6 +218,6 @@ this.connections.splice(index, 1);

static outputEdgesFor(node, connections){
static outputEdgesFor(node, connections) {
let results = [];
for(let conn of connections){
if (conn.source === node){
for (let conn of connections) {
if (conn.source === node) {
results.push(conn);

@@ -211,6 +229,6 @@ }

static inputEdgesFor(node, connections){
static inputEdgesFor(node, connections) {
let results = [];
for(let conn of connections){
if (conn.destination === node){
for (let conn of connections) {
if (conn.destination === node) {
results.push(conn);

@@ -222,10 +240,10 @@ }

static getInputlessNodes(connections){
static getInputlessNodes(connections) {
let inputLess = [];
for (let conn of connections){
for (let conn of connections) {
inputLess.push(conn.source);
}
for (let conn of connections){
for (let conn of connections) {
let index = inputLess.indexOf(conn.destination);
if (index !== -1){
if (index !== -1) {
inputLess.splice(index, 1);

@@ -232,0 +250,0 @@ }

@@ -6,5 +6,5 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

/**
* Initialise an instance of an AudioNode.
* This should not be called directly, but created through a call to videoContext.audio();
*/
* Initialise an instance of an AudioNode.
* This should not be called directly, but created through a call to videoContext.audio();
*/
constructor() {

@@ -11,0 +11,0 @@ super(...arguments);

@@ -6,6 +6,6 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

/**
* Initialise an instance of a CanvasNode.
* This should not be called directly, but created through a call to videoContext.createCanvasNode();
*/
constructor(canvas, gl, renderGraph, currentTime, preloadTime = 4){
* Initialise an instance of a CanvasNode.
* This should not be called directly, but created through a call to videoContext.createCanvasNode();
*/
constructor(canvas, gl, renderGraph, currentTime, preloadTime = 4) {
super(canvas, gl, renderGraph, currentTime);

@@ -16,3 +16,3 @@ this._preloadTime = preloadTime;

_load(){
_load() {
super._load();

@@ -23,3 +23,3 @@ this._ready = true;

_unload(){
_unload() {
super._unload();

@@ -29,9 +29,12 @@ this._ready = false;

_seek(time){
_seek(time) {
super._seek(time);
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused){
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused) {
if (this._element === undefined) this._load();
this._ready = false;
}
if((this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) && this._element !== undefined){
if (
(this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) &&
this._element !== undefined
) {
this._unload();

@@ -41,21 +44,23 @@ }

_update(currentTime){
_update(currentTime) {
//if (!super._update(currentTime)) return false;
super._update(currentTime);
if (this._startTime - this._currentTime <= this._preloadTime && this._state !== SOURCENODESTATE.waiting && this._state !== SOURCENODESTATE.ended)this._load();
if (
this._startTime - this._currentTime <= this._preloadTime &&
this._state !== SOURCENODESTATE.waiting &&
this._state !== SOURCENODESTATE.ended
)
this._load();
if (this._state === SOURCENODESTATE.playing){
if (this._state === SOURCENODESTATE.playing) {
return true;
} else if (this._state === SOURCENODESTATE.paused){
} else if (this._state === SOURCENODESTATE.paused) {
return true;
}
else if (this._state === SOURCENODESTATE.ended && this._element !== undefined){
} else if (this._state === SOURCENODESTATE.ended && this._element !== undefined) {
this._unload();
return false;
}
}
}
export default CanvasNode;
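//Usage sketch, assuming a VideoContext instance `ctx` and a canvas element
//`canvasElement` (createCanvasNode is the factory named in the doc comment
//above; its exact argument list beyond the canvas is assumed here).
//
//var canvasNode = ctx.createCanvasNode(canvasElement);
//canvasNode.start(0);
//canvasNode.stop(4);
//canvasNode.connect(ctx.destination);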

@@ -6,6 +6,6 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

/**
* Initialise an instance of an ImageNode.
* This should not be called directly, but created through a call to videoContext.createImageNode();
*/
constructor(src, gl, renderGraph, currentTime, preloadTime = 4, attributes = {}){
* Initialise an instance of an ImageNode.
* This should not be called directly, but created through a call to videoContext.createImageNode();
*/
constructor(src, gl, renderGraph, currentTime, preloadTime = 4, attributes = {}) {
super(src, gl, renderGraph, currentTime);

@@ -18,9 +18,8 @@ this._preloadTime = preloadTime;

get elementURL(){
get elementURL() {
return this._elementURL;
}
_load(){
if (this._element !== undefined){
_load() {
if (this._element !== undefined) {
for (var key in this._attributes) {

@@ -31,3 +30,3 @@ this._element[key] = this._attributes[key];

}
if (this._isResponsibleForElementLifeCycle){
if (this._isResponsibleForElementLifeCycle) {
super._load();

@@ -58,5 +57,5 @@ this._element = new Image();

_unload(){
_unload() {
super._unload();
if (this._isResponsibleForElementLifeCycle){
if (this._isResponsibleForElementLifeCycle) {
this._element.src = "";

@@ -70,8 +69,11 @@ this._element.onerror = undefined;

_seek(time){
_seek(time) {
super._seek(time);
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused){
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused) {
if (this._element === undefined) this._load();
}
if((this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) && this._element !== undefined){
if (
(this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) &&
this._element !== undefined
) {
this._unload();

@@ -81,26 +83,28 @@ }

_update(currentTime){
_update(currentTime) {
//if (!super._update(currentTime)) return false;
if (this._textureUploaded){
if (this._textureUploaded) {
super._update(currentTime, false);
}else{
} else {
super._update(currentTime);
}
if (this._startTime - this._currentTime <= this._preloadTime && this._state !== SOURCENODESTATE.waiting && this._state !== SOURCENODESTATE.ended)this._load();
if (
this._startTime - this._currentTime <= this._preloadTime &&
this._state !== SOURCENODESTATE.waiting &&
this._state !== SOURCENODESTATE.ended
)
this._load();
if (this._state === SOURCENODESTATE.playing){
if (this._state === SOURCENODESTATE.playing) {
return true;
} else if (this._state === SOURCENODESTATE.paused){
} else if (this._state === SOURCENODESTATE.paused) {
return true;
}
else if (this._state === SOURCENODESTATE.ended && this._element !== undefined){
} else if (this._state === SOURCENODESTATE.ended && this._element !== undefined) {
this._unload();
return false;
}
}
}
export default ImageNode;
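//Usage sketch, assuming a VideoContext instance `ctx` (createImageNode is
//the factory named in the doc comment above). The attributes map passed to
//the constructor is copied onto the underlying Image element in _load().
//
//var imageNode = ctx.createImageNode("image.png");
//imageNode.startAt(2);
//imageNode.stopAt(6);
//imageNode.connect(ctx.destination);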

@@ -6,6 +6,16 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

/**
* Initialise an instance of a MediaNode.
* This should not be called directly, but extended by other Node Types which use a `HTMLMediaElement`.
*/
constructor(src, gl, renderGraph, currentTime, globalPlaybackRate=1.0, sourceOffset=0, preloadTime = 4, mediaElementCache=undefined, attributes = {}){
* Initialise an instance of a MediaNode.
* This should not be called directly, but extended by other Node Types which use a `HTMLMediaElement`.
*/
constructor(
src,
gl,
renderGraph,
currentTime,
globalPlaybackRate = 1.0,
sourceOffset = 0,
preloadTime = 4,
mediaElementCache = undefined,
attributes = {}
) {
super(src, gl, renderGraph, currentTime);

@@ -22,3 +32,3 @@ this._preloadTime = preloadTime;

this._isElementPlaying = false;
if (this._attributes.loop){
if (this._attributes.loop) {
this._loopElement = this._attributes.loop;

@@ -28,3 +38,3 @@ }

set playbackRate(playbackRate){
set playbackRate(playbackRate) {
this._playbackRate = playbackRate;

@@ -34,9 +44,9 @@ this._playbackRateUpdated = true;

set stretchPaused(stretchPaused){
set stretchPaused(stretchPaused) {
super.stretchPaused = stretchPaused;
if(this._element){
if (this._stretchPaused){
if (this._element) {
if (this._stretchPaused) {
this._element.pause();
} else{
if(this._state === SOURCENODESTATE.playing){
} else {
if (this._state === SOURCENODESTATE.playing) {
this._element.play();

@@ -48,23 +58,22 @@ }

get stretchPaused(){
get stretchPaused() {
return this._stretchPaused;
}
get playbackRate(){
get playbackRate() {
return this._playbackRate;
}
get elementURL(){
get elementURL() {
return this._elementURL;
}
set volume(volume){
set volume(volume) {
this._volume = volume;
if(this._element !== undefined) this._element.volume = this._volume;
if (this._element !== undefined) this._element.volume = this._volume;
}
_load(){
_load() {
super._load();
if (this._element !== undefined){
if (this._element !== undefined) {
for (var key in this._attributes) {

@@ -74,5 +83,5 @@ this._element[key] = this._attributes[key];

if (this._element.readyState > 3 && !this._element.seeking){
if(this._loopElement === false){
if (this._stopTime === Infinity || this._stopTime == undefined){
if (this._element.readyState > 3 && !this._element.seeking) {
if (this._loopElement === false) {
if (this._stopTime === Infinity || this._stopTime == undefined) {
this._stopTime = this._startTime + this._element.duration;

@@ -82,12 +91,10 @@ this._triggerCallbacks("durationchange", this.duration);

}
if(this._ready !== true){
if (this._ready !== true) {
this._triggerCallbacks("loaded");
this._playbackRateUpdated = true;
}
this._ready = true;
} else{
if(this._state !== SOURCENODESTATE.error){
} else {
if (this._state !== SOURCENODESTATE.error) {
this._ready = false;

@@ -98,4 +105,4 @@ }

}
if (this._isResponsibleForElementLifeCycle){
if (this._mediaElementCache){
if (this._isResponsibleForElementLifeCycle) {
if (this._mediaElementCache) {
this._element = this._mediaElementCache.get();

@@ -110,3 +117,3 @@ } else {

this._element.volume = this._volume;
if (window.MediaStream !== undefined && this._elementURL instanceof MediaStream){
if (window.MediaStream !== undefined && this._elementURL instanceof MediaStream) {
this._element.srcObject = this._elementURL;

@@ -121,5 +128,6 @@ } else {

}
if (this._element){
if (this._element) {
let currentTimeOffset = 0;
if (this._currentTime > this._startTime) currentTimeOffset = this._currentTime - this._startTime;
if (this._currentTime > this._startTime)
currentTimeOffset = this._currentTime - this._startTime;
this._element.currentTime = this._sourceOffset + currentTimeOffset;

@@ -134,3 +142,3 @@ this._element.onerror = () => {

};
}else{
} else {
//If the element doesn't exist for whatever reason enter the error state.

@@ -143,12 +151,12 @@ this._state = SOURCENODESTATE.error;

_unload(){
_unload() {
super._unload();
if (this._isResponsibleForElementLifeCycle && this._element !== undefined){
if (this._isResponsibleForElementLifeCycle && this._element !== undefined) {
this._element.src = "";
this._element.srcObject = undefined;
for (let key in this._attributes){
for (let key in this._attributes) {
this._element.removeAttribute(key);
}
this._element = undefined;
if(!this._mediaElementCache) delete this._element;
if (!this._mediaElementCache) delete this._element;
}

@@ -159,5 +167,5 @@ this._ready = false;

_seek(time){
_seek(time) {
super._seek(time);
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused){
if (this.state === SOURCENODESTATE.playing || this.state === SOURCENODESTATE.paused) {
if (this._element === undefined) this._load();

@@ -168,3 +176,6 @@ let relativeTime = this._currentTime - this._startTime + this._sourceOffset;

}
if((this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) && this._element !== undefined){
if (
(this._state === SOURCENODESTATE.sequenced || this._state === SOURCENODESTATE.ended) &&
this._element !== undefined
) {
this._unload();

@@ -174,8 +185,8 @@ }

_update(currentTime, triggerTextureUpdate=true){
_update(currentTime, triggerTextureUpdate = true) {
//if (!super._update(currentTime)) return false;
super._update(currentTime, triggerTextureUpdate);
//check if the media has ended
if(this._element !== undefined){
if (this._element.ended){
if (this._element !== undefined) {
if (this._element.ended) {
this._state = SOURCENODESTATE.ended;

@@ -186,13 +197,17 @@ this._triggerCallbacks("ended");

if (this._startTime - this._currentTime <= this._preloadTime && this._state !== SOURCENODESTATE.waiting && this._state !== SOURCENODESTATE.ended)this._load();
if (
this._startTime - this._currentTime <= this._preloadTime &&
this._state !== SOURCENODESTATE.waiting &&
this._state !== SOURCENODESTATE.ended
)
this._load();
if (this._state === SOURCENODESTATE.playing){
if (this._playbackRateUpdated)
{
if (this._state === SOURCENODESTATE.playing) {
if (this._playbackRateUpdated) {
this._element.playbackRate = this._globalPlaybackRate * this._playbackRate;
this._playbackRateUpdated = false;
}
if (!this._isElementPlaying){
if (!this._isElementPlaying) {
this._element.play();
if (this._stretchPaused){
if (this._stretchPaused) {
this._element.pause();

@@ -203,10 +218,9 @@ }

return true;
} else if (this._state === SOURCENODESTATE.paused){
} else if (this._state === SOURCENODESTATE.paused) {
this._element.pause();
this._isElementPlaying = false;
return true;
}
else if (this._state === SOURCENODESTATE.ended && this._element !== undefined){
} else if (this._state === SOURCENODESTATE.ended && this._element !== undefined) {
this._element.pause();
if (this._isElementPlaying){
if (this._isElementPlaying) {
this._unload();

@@ -218,3 +232,3 @@ }

clearTimelineState(){
clearTimelineState() {
super.clearTimelineState();

@@ -228,9 +242,8 @@ if (this._element !== undefined) {

destroy(){
destroy() {
if (this._element) this._element.pause();
super.destroy();
}
}
export default MediaNode;
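//Usage sketch: MediaNode is a base class, so `videoNode` is assumed to be
//an instance of a subclass such as VideoNode. playbackRate is multiplied by
//the context's global rate; volume is forwarded to the element once loaded.
//
//videoNode.playbackRate = 2.0; //play the underlying media at double speed
//videoNode.volume = 0.5;       //half volume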

@@ -5,11 +5,18 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

let STATE = {"waiting":0, "sequenced":1, "playing":2, "paused":3, "ended":4, "error":5};
let STATE = {
waiting: 0,
sequenced: 1,
playing: 2,
paused: 3,
ended: 4,
error: 5
};
class SourceNode extends GraphNode{
class SourceNode extends GraphNode {
/**
* Initialise an instance of a SourceNode.
* This is the base class for other Nodes which generate media to be passed into the processing pipeline.
*/
constructor(src, gl, renderGraph, currentTime){
super(gl,renderGraph, [], true);
* Initialise an instance of a SourceNode.
* This is the base class for other Nodes which generate media to be passed into the processing pipeline.
*/
constructor(src, gl, renderGraph, currentTime) {
super(gl, renderGraph, [], true);
this._element = undefined;

@@ -19,6 +26,9 @@ this._elementURL = undefined;

if (typeof src === "string" || (window.MediaStream !== undefined && src instanceof MediaStream)){
if (
typeof src === "string" ||
(window.MediaStream !== undefined && src instanceof MediaStream)
) {
//create the node from the passed URL or MediaStream
this._elementURL = src;
}else{
} else {
//use the passed element to create the SourceNode

@@ -37,79 +47,86 @@ this._element = src;

this._texture = createElementTexture(gl);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0,0,0,0]));
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGBA,
1,
1,
0,
gl.RGBA,
gl.UNSIGNED_BYTE,
new Uint8Array([0, 0, 0, 0])
);
this._callbacks = [];
this._renderPaused = false;
this._displayName = "SourceNode";
}
/**
* Returns the state of the node.
* 0 - Waiting, start() has not been called on it yet.
* 1 - Sequenced, start() has been called but it is not playing yet.
* 2 - Playing, the node is playing.
* 3 - Paused, the node is paused.
* 4 - Ended, playback of the node has finished.
* 5 - Error, the node has encountered an error and cannot play.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* console.log(videoNode.state); //will output 0 (for waiting)
* videoNode.start(5);
* console.log(videoNode.state); //will output 1 (for sequenced)
* videoNode.stop(10);
* ctx.play();
* console.log(videoNode.state); //will output 2 (for playing)
* ctx.pause();
* console.log(videoNode.state); //will output 3 (for paused)
*/
get state(){
* Returns the state of the node.
* 0 - Waiting, start() has not been called on it yet.
* 1 - Sequenced, start() has been called but it is not playing yet.
* 2 - Playing, the node is playing.
* 3 - Paused, the node is paused.
* 4 - Ended, playback of the node has finished.
* 5 - Error, the node has encountered an error and cannot play.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* console.log(videoNode.state); //will output 0 (for waiting)
* videoNode.start(5);
* console.log(videoNode.state); //will output 1 (for sequenced)
* videoNode.stop(10);
* ctx.play();
* console.log(videoNode.state); //will output 2 (for playing)
* ctx.pause();
* console.log(videoNode.state); //will output 3 (for paused)
*/
get state() {
return this._state;
}
/**
* Returns the underlying DOM element which represents this source node.
* Note: If a source node is created with a url rather than passing in an existing element then this will return undefined until the source node preloads the element.
*
* @return {Element} The underlying DOM element representing the media for the node. If the lifecycle of the video is owned by the node itself, this can return undefined if the element hasn't been loaded yet.
*
* @example
* //Accessing the Element on a VideoNode created via a URL
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* videoNode.start(0);
* videoNode.stop(5);
* //When the node starts playing the element should exist, so set its volume to 0
* videoNode.registerCallback("play", function(){videoNode.element.volume = 0;});
*
*
* @example
* //Accessing the Element on a VideoNode created via an already existing element
* var ctx = new VideoContext();
* var videoElement = document.createElement("video");
* var videoNode = ctx.createVideoSourceNode(videoElement);
* videoNode.start(0);
* videoNode.stop(5);
* //The element can be accessed at any time because its lifecycle is managed outside of the VideoContext
* videoNode.element.volume = 0;
*
*/
get element(){
* Returns the underlying DOM element which represents this source node.
* Note: If a source node is created with a url rather than passing in an existing element then this will return undefined until the source node preloads the element.
*
* @return {Element} The underlying DOM element representing the media for the node. If the lifecycle of the video is owned by the node itself, this can return undefined if the element hasn't been loaded yet.
*
* @example
* //Accessing the Element on a VideoNode created via a URL
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* videoNode.start(0);
* videoNode.stop(5);
* //When the node starts playing the element should exist, so set its volume to 0
* videoNode.registerCallback("play", function(){videoNode.element.volume = 0;});
*
*
* @example
* //Accessing the Element on a VideoNode created via an already existing element
* var ctx = new VideoContext();
* var videoElement = document.createElement("video");
* var videoNode = ctx.createVideoSourceNode(videoElement);
* videoNode.start(0);
* videoNode.stop(5);
* //The element can be accessed at any time because its lifecycle is managed outside of the VideoContext
* videoNode.element.volume = 0;
*
*/
get element() {
return this._element;
}
/**
* Returns the duration of the node on a timeline. If no start time is set this will return undefined; if no stop time is set it will return Infinity.
*
* @return {number} The duration of the node in seconds.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* videoNode.start(5);
* videoNode.stop(10);
* console.log(videoNode.duration); //will output 5
*/
get duration(){
* Returns the duration of the node on a timeline. If no start time is set this will return undefined; if no stop time is set it will return Infinity.
*
* @return {number} The duration of the node in seconds.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
* videoNode.start(5);
* videoNode.stop(10);
* console.log(videoNode.duration); //will output 5
*/
get duration() {
if (isNaN(this._startTime)) return undefined;

@@ -120,12 +137,12 @@ if (this._stopTime === Infinity) return Infinity;

set stretchPaused(stretchPaused){
set stretchPaused(stretchPaused) {
this._stretchPaused = stretchPaused;
}
get stretchPaused(){
get stretchPaused() {
return this._stretchPaused;
}
_load(){
if (!this._loadCalled){
_load() {
if (!this._loadCalled) {
this._triggerCallbacks("load");

@@ -136,3 +153,3 @@ this._loadCalled = true;

_unload (){
_unload() {
this._triggerCallbacks("destroy");

@@ -143,45 +160,45 @@ this._loadCalled = false;

/**
* Register callbacks against one of these events: "load", "destroy", "seek", "pause", "play", "ended", "durationchange", "loaded", "error"
*
* @param {String} type - the type of event to register the callback against.
* @param {function} func - the function to call.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
*
* videoNode.registerCallback("load", function(){"video is loading"});
* videoNode.registerCallback("play", function(){"video is playing"});
* videoNode.registerCallback("ended", function(){"video has eneded"});
*
*/
registerCallback(type, func){
this._callbacks.push({type:type, func:func});
* Register callbacks against one of these events: "load", "destroy", "seek", "pause", "play", "ended", "durationchange", "loaded", "error"
*
* @param {String} type - the type of event to register the callback against.
* @param {function} func - the function to call.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
*
* videoNode.registerCallback("load", function(){"video is loading"});
* videoNode.registerCallback("play", function(){"video is playing"});
* videoNode.registerCallback("ended", function(){"video has eneded"});
*
*/
registerCallback(type, func) {
this._callbacks.push({ type: type, func: func });
}
/**
* Remove callback.
*
* @param {function} [func] - the callback to remove, if undefined will remove all callbacks for this node.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
*
* videoNode.registerCallback("load", function(){"video is loading"});
* videoNode.registerCallback("play", function(){"video is playing"});
* videoNode.registerCallback("ended", function(){"video has eneded"});
* videoNode.unregisterCallback(); //remove all of the three callbacks.
*
*/
unregisterCallback(func){
* Remove callback.
*
* @param {function} [func] - the callback to remove, if undefined will remove all callbacks for this node.
*
* @example
* var ctx = new VideoContext();
* var videoNode = ctx.createVideoSourceNode('video.mp4');
*
* videoNode.registerCallback("load", function(){"video is loading"});
* videoNode.registerCallback("play", function(){"video is playing"});
* videoNode.registerCallback("ended", function(){"video has eneded"});
* videoNode.unregisterCallback(); //remove all of the three callbacks.
*
*/
unregisterCallback(func) {
let toRemove = [];
for(let callback of this._callbacks){
if (func === undefined){
for (let callback of this._callbacks) {
if (func === undefined) {
toRemove.push(callback);
} else if (callback.func === func){
} else if (callback.func === func) {
toRemove.push(callback);
}
}
for(let callback of toRemove){
for (let callback of toRemove) {
let index = this._callbacks.indexOf(callback);

@@ -192,8 +209,8 @@ this._callbacks.splice(index, 1);

_triggerCallbacks(type, data){
for(let callback of this._callbacks){
if (callback.type === type){
if (data!== undefined){
_triggerCallbacks(type, data) {
for (let callback of this._callbacks) {
if (callback.type === type) {
if (data !== undefined) {
callback.func(this, data);
}else{
} else {
callback.func(this);

@@ -206,9 +223,9 @@ }

/**
* Start playback at VideoContext.currentTime plus passed time. If passed time is negative, will play as soon as possible.
*
* @param {number} time - the time from the currentTime of the VideoContext at which to start playing; if negative, will play as soon as possible.
* @return {boolean} Will return true if sequencing has succeeded, or false if it is already sequenced.
*/
start(time){
if (this._state !== STATE.waiting){
* Start playback at VideoContext.currentTime plus passed time. If passed time is negative, will play as soon as possible.
*
* @param {number} time - the time from the currentTime of the VideoContext at which to start playing; if negative, will play as soon as possible.
* @return {boolean} Will return true if sequencing has succeeded, or false if it is already sequenced.
*/
start(time) {
if (this._state !== STATE.waiting) {
console.debug("SourceNode is has already been sequenced. Can't sequence twice.");

@@ -224,9 +241,9 @@ return false;

/**
* Start playback at an absolute time on the VideoContext's timeline.
*
* @param {number} time - the time on the VideoContext's timeline to start playing.
* @return {boolean} Will return true if sequencing has succeeded, or false if it is already sequenced.
*/
startAt(time){
if (this._state !== STATE.waiting){
* Start playback at an absolute time on the VideoContext's timeline.
*
* @param {number} time - the time on the VideoContext's timeline to start playing.
* @return {boolean} Will return true if sequencing has succeeded, or false if it is already sequenced.
*/
startAt(time) {
if (this._state !== STATE.waiting) {
console.debug("SourceNode is has already been sequenced. Can't sequence twice.");

@@ -240,23 +257,21 @@ return false;

get startTime(){
get startTime() {
return this._startTime;
}
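//Illustrative sketch (`ctx` and `videoNode` assumed): start() is relative
//to the context's currentTime, startAt() is absolute on its timeline.
//
//videoNode.start(2);                     //2 seconds from now
//videoNode.startAt(ctx.currentTime + 2); //the same instant, expressed absolutely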
/**
* Stop playback at VideoContext.currentTime plus passed time. If passed time is negative, will stop as soon as possible.
*
* @param {number} time - the time from the currentTime of the VideoContext at which to stop playback.
* @return {boolean} Will return true if sequencing has succeeded, or false if playback has already ended, start hasn't been called yet, or the time is before the start time.
*/
stop(time){
if (this._state === STATE.ended){
* Stop playback at VideoContext.currentTime plus passed time. If passed time is negative, will stop as soon as possible.
*
* @param {number} time - the time from the currentTime of the VideoContext at which to stop playback.
* @return {boolean} Will return true if sequencing has succeeded, or false if playback has already ended, start hasn't been called yet, or the time is before the start time.
*/
stop(time) {
if (this._state === STATE.ended) {
console.debug("SourceNode has already ended. Cannot call stop.");
return false;
} else if (this._state === STATE.waiting){
} else if (this._state === STATE.waiting) {
console.debug("SourceNode must have start called before stop is called");
return false;
}
if (this._currentTime + time <= this._startTime){
if (this._currentTime + time <= this._startTime) {
console.debug("SourceNode must have a stop time after it's start time, not before.");

@@ -272,16 +287,16 @@ return false;

/**
* Stop playback at an absolute time on the VideoContext's timeline.
*
* @param {number} time - the time on the VideoContext's timeline to stop playing.
* @return {boolean} Will return true if sequencing has succeeded, or false if playback has already ended, start hasn't been called yet, or the time is before the start time.
*/
stopAt(time){
if (this._state === STATE.ended){
* Stop playback at an absolute time on the VideoContext's timeline.
*
* @param {number} time - the time on the VideoContext's timeline to stop playing.
* @return {boolean} Will return true if sequencing has succeeded, or false if playback has already ended, start hasn't been called yet, or the time is before the start time.
*/
stopAt(time) {
if (this._state === STATE.ended) {
console.debug("SourceNode has already ended. Cannot call stop.");
return false;
} else if (this._state === STATE.waiting){
} else if (this._state === STATE.waiting) {
console.debug("SourceNode must have start called before stop is called");
return false;
}
if (time <= this._startTime){
if (time <= this._startTime) {
console.debug("SourceNode must have a stop time after it's start time, not before.");

@@ -296,9 +311,7 @@ return false;

get stopTime(){
get stopTime() {
return this._stopTime;
}
_seek(time){
_seek(time) {
this._renderPaused = false;

@@ -309,10 +322,10 @@

if (this._state === STATE.waiting) return;
if (time < this._startTime){
if (time < this._startTime) {
clearTexture(this._gl, this._texture);
this._state = STATE.sequenced;
}
if (time >= this._startTime && this._state !== STATE.paused){
if (time >= this._startTime && this._state !== STATE.paused) {
this._state = STATE.playing;
}
if (time >= this._stopTime){
if (time >= this._stopTime) {
clearTexture(this._gl, this._texture);

@@ -326,4 +339,4 @@ this._triggerCallbacks("ended");

_pause(){
if(this._state === STATE.playing || (this._currentTime === 0 && this._startTime === 0)){
_pause() {
if (this._state === STATE.playing || (this._currentTime === 0 && this._startTime === 0)) {
this._triggerCallbacks("pause");

@@ -334,5 +347,4 @@ this._state = STATE.paused;

}
_play(){
if(this._state === STATE.paused){
_play() {
if (this._state === STATE.paused) {
this._triggerCallbacks("play");

@@ -343,4 +355,8 @@ this._state = STATE.playing;

_isReady(){
if (this._state === STATE.playing || this._state === STATE.paused || this._state === STATE.error){
_isReady() {
if (
this._state === STATE.playing ||
this._state === STATE.paused ||
this._state === STATE.error
) {
return this._ready;

@@ -351,3 +367,3 @@ }

_update(currentTime, triggerTextureUpdate=true){
_update(currentTime, triggerTextureUpdate = true) {
this._rendered = true;

@@ -360,8 +376,12 @@ let timeDelta = currentTime - this._currentTime;

//update the state
if (this._state === STATE.waiting || this._state === STATE.ended || this._state === STATE.error) return false;
if (
this._state === STATE.waiting ||
this._state === STATE.ended ||
this._state === STATE.error
)
return false;
this._triggerCallbacks("render", currentTime);
if (currentTime < this._startTime){
if (currentTime < this._startTime) {
clearTexture(this._gl, this._texture);

@@ -371,4 +391,7 @@ this._state = STATE.sequenced;

if (currentTime >= this._startTime && this._state !== STATE.paused && this._state !== STATE.error){
if (
currentTime >= this._startTime &&
this._state !== STATE.paused &&
this._state !== STATE.error
) {
if (this._state !== STATE.playing) this._triggerCallbacks("play");

@@ -378,3 +401,3 @@ this._state = STATE.playing;

if (currentTime >= this._stopTime){
if (currentTime >= this._stopTime) {
clearTexture(this._gl, this._texture);

@@ -389,8 +412,8 @@ this._triggerCallbacks("ended");

if (!this._renderPaused && this._state === STATE.paused) {
if(triggerTextureUpdate)updateTexture(this._gl, this._texture, this._element);
if (triggerTextureUpdate) updateTexture(this._gl, this._texture, this._element);
this._renderPaused = true;
}
if(this._state === STATE.playing){
if(triggerTextureUpdate)updateTexture(this._gl, this._texture, this._element);
if(this._stretchPaused){
if (this._state === STATE.playing) {
if (triggerTextureUpdate) updateTexture(this._gl, this._texture, this._element);
if (this._stretchPaused) {
this._stopTime += timeDelta;

@@ -404,5 +427,5 @@ }

/**
* Clear any timeline state the node currently has; this puts the node in the "waiting" state, as if neither start nor stop had been called.
*/
clearTimelineState(){
* Clear any timeline state the node currently has; this puts the node in the "waiting" state, as if neither start nor stop had been called.
*/
clearTimelineState() {
this._startTime = NaN;

@@ -414,5 +437,5 @@ this._stopTime = Infinity;

/**
* Destroy and clean-up the node.
*/
destroy(){
* Destroy and clean-up the node.
*/
destroy() {
this._unload();

@@ -434,4 +457,4 @@ super.destroy();

export {STATE as SOURCENODESTATE};
export { STATE as SOURCENODESTATE };
export default SourceNode;

@@ -6,5 +6,5 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

/**
* Initialise an instance of a VideoNode.
* This should not be called directly, but created through a call to videoContext.createVideoNode();
*/
* Initialise an instance of a VideoNode.
* This should not be called directly, but created through a call to videoContext.createVideoNode();
*/
constructor() {

@@ -11,0 +11,0 @@ super(...arguments);

@@ -35,3 +35,3 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

*/
export function createShaderProgram(gl, vertexShader, fragmentShader){
export function createShaderProgram(gl, vertexShader, fragmentShader) {
let program = gl.createProgram();

@@ -43,4 +43,10 @@

if (!gl.getProgramParameter(program, gl.LINK_STATUS)){
throw {"error":4,"msg":"Can't link shader program for track", toString:function(){return this.msg;}};
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw {
error: 4,
msg: "Can't link shader program for track",
toString: function() {
return this.msg;
}
};
}

@@ -50,3 +56,3 @@ return program;

export function createElementTexture(gl){
export function createElementTexture(gl) {
let texture = gl.createTexture();

@@ -66,4 +72,4 @@ gl.bindTexture(gl.TEXTURE_2D, texture);

export function updateTexture(gl, texture, element){
if(element.readyState !== undefined && element.readyState === 0) return;
export function updateTexture(gl, texture, element) {
if (element.readyState !== undefined && element.readyState === 0) return;
gl.bindTexture(gl.TEXTURE_2D, texture);

@@ -74,19 +80,140 @@ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

export function clearTexture(gl, texture){
export function clearTexture(gl, texture) {
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0,0,0,0]));
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGBA,
1,
1,
0,
gl.RGBA,
gl.UNSIGNED_BYTE,
new Uint8Array([0, 0, 0, 0])
);
}
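//Illustrative sketch of the texture helpers above (`gl` is assumed to be a
//WebGL rendering context and `videoElement` a playing HTMLVideoElement).
//
//const texture = createElementTexture(gl); //1x1 placeholder texture
//updateTexture(gl, texture, videoElement); //upload the element's current frame
//clearTexture(gl, texture);                //reset to a single transparent pixel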
export function generateRandomId() {
const appearanceAdjective = [
"adorable",
"alert",
"average",
"beautiful",
"blonde",
"bloody",
"blushing",
"bright",
"clean",
"clear",
"cloudy",
"colourful",
"concerned",
"crowded",
"curious",
"cute",
"dark",
"dirty",
"drab",
"distinct",
"dull",
"elegant",
"fancy",
"filthy",
"glamorous",
"gleaming",
"graceful",
"grotesque",
"homely",
"light",
"misty",
"motionless",
"muddy",
"plain",
"poised",
"quaint",
"scary",
"shiny",
"smoggy",
"sparkling",
"spotless",
"stormy",
"strange",
"ugly",
"unsightly",
"unusual"
];
const conditionAdjective = [
"alive",
"brainy",
"broken",
"busy",
"careful",
"cautious",
"clever",
"crazy",
"damaged",
"dead",
"difficult",
"easy",
"fake",
"false",
"famous",
"forward",
"fragile",
"guilty",
"helpful",
"helpless",
"important",
"impossible",
"infamous",
"innocent",
"inquisitive",
"mad",
"modern",
"open",
"outgoing",
"outstanding",
"poor",
"powerful",
"puzzled",
"real",
"rich",
"right",
"robust",
"sane",
"scary",
"shy",
"sleepy",
"stupid",
"super",
"tame",
"thick",
"tired",
"wild",
"wrong"
];
const nounAnimal = [
"manatee",
"gila monster",
"nematode",
"seahorse",
"slug",
"koala bear",
"giant tortoise",
"garden snail",
"starfish",
"sloth",
"american woodcock",
"coral",
"swallowtail butterfly",
"house sparrow",
"sea anemone"
];
export function generateRandomId(){
const appearanceAdjective = ["adorable", "alert", "average", "beautiful", "blonde", "bloody", "blushing", "bright", "clean", "clear", "cloudy", "colourful", "concerned","crowded", "curious", "cute", "dark", "dirty", "drab", "distinct", "dull", "elegant", "fancy", "filthy", "glamorous", "gleaming", "graceful", "grotesque", "homely", "light", "misty", "motionless", "muddy", "plain", "poised", "quaint", "scary", "shiny", "smoggy", "sparkling", "spotless", "stormy", "strange", "ugly", "unsightly", "unusual"];
const conditionAdjective = ["alive", "brainy", "broken", "busy", "careful","cautious", "clever", "crazy", "damaged", "dead", "difficult","easy","fake", "false","famous", "forward", "fragile","guilty", "helpful","helpless","important", "impossible","infamous","innocent", "inquisitive", "mad", "modern", "open", "outgoing", "outstanding","poor", "powerful","puzzled", "real", "rich", "right", "robust","sane", "scary", "shy", "sleepy","stupid", "super", "tame", "thick","tired","wild", "wrong"];
const nounAnimal = ["manatee", "gila monster", "nematode", "seahorse", "slug", "koala bear", "giant tortoise","garden snail", "starfish", "sloth", "american woodcock", "coral", "swallowtail butterfly", "house sparrow", "sea anemone"];
function randomChoice(array){
function randomChoice(array) {
return array[Math.floor(Math.random() * array.length)];
}
function capitalize(word){
function capitalize(word) {
word = word.replace(/\b\w/g, l => l.toUpperCase());

@@ -96,3 +223,8 @@ return word;

let name = randomChoice(appearanceAdjective) + " " + randomChoice(conditionAdjective) + " " + randomChoice(nounAnimal);
let name =
randomChoice(appearanceAdjective) +
" " +
randomChoice(conditionAdjective) +
" " +
randomChoice(nounAnimal);
name = capitalize(name);

@@ -103,5 +235,6 @@ name = name.replace(/ /g, "-");
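//Illustrative result: the capitalised words are joined with dashes, giving
//ids of the form "Adorable-Alive-Manatee" (the actual words are random).
//
//const id = generateRandomId();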

export function exportToJSON(vc) {
console.warn("VideoContext.exportToJSON has been deprecated. Please use VideoContext.snapshot instead.");
console.warn(
"VideoContext.exportToJSON has been deprecated. Please use VideoContext.snapshot instead."
);
return JSON.stringify(snapshotNodes(vc));

@@ -122,3 +255,3 @@ }

state: vc.state,
playbackRate: vc.playbackRate,
playbackRate: vc.playbackRate
};

@@ -128,4 +261,3 @@ }

let warningExportSourceLogged = false;
function snapshotNodes(vc){
function snapshotNodes(vc) {
function qualifyURL(url) {

@@ -137,20 +269,20 @@ var a = document.createElement("a");

function getInputIDs(node, vc){
function getInputIDs(node, vc) {
let inputs = [];
for (let input of node.inputs){
if (input === undefined)continue;
for (let input of node.inputs) {
if (input === undefined) continue;
let inputID;
let inputIndex = node.inputs.indexOf(input);
let index = vc._processingNodes.indexOf(input);
if (index > -1){
if (index > -1) {
inputID = "processor" + index;
} else{
} else {
let index = vc._sourceNodes.indexOf(input);
if(index > -1){
if (index > -1) {
inputID = "source" + index;
} else {
console.log("Warning, can't find input",input);
console.log("Warning, can't find input", input);
}
}
inputs.push({id:inputID, index:inputIndex});
inputs.push({ id: inputID, index: inputIndex });
}

@@ -162,9 +294,8 @@ return inputs;

let sourceNodeStateMapping = [];
for (let state in SOURCENODESTATE){
for (let state in SOURCENODESTATE) {
sourceNodeStateMapping[SOURCENODESTATE[state]] = state;
}
for(let index in vc._sourceNodes){
for (let index in vc._sourceNodes) {
let source = vc._sourceNodes[index];

@@ -174,5 +305,8 @@ let id = "source" + index;

if(!source._isResponsibleForElementLifeCycle){
if (!warningExportSourceLogged){
console.debug("Warning - Trying to export source created from an element not a URL. URL of export will be set to the elements src attribute and may be incorrect", source);
if (!source._isResponsibleForElementLifeCycle) {
if (!warningExportSourceLogged) {
console.debug(
"Warning - Trying to export source created from an element not a URL. URL of export will be set to the elements src attribute and may be incorrect",
source
);
warningExportSourceLogged = true;

@@ -190,7 +324,7 @@ }

stop: source.stopTime,
state: sourceNodeStateMapping[source.state],
state: sourceNodeStateMapping[source.state]
};
if (node.type === "VideoNode") {
node.currentTime = null;
if(source.element && source.element.currentTime) {
if (source.element && source.element.currentTime) {
node.currentTime = source.element.currentTime;

@@ -200,3 +334,3 @@ }

if (source._sourceOffset){
if (source._sourceOffset) {
node.sourceOffset = source._sourceOffset;

@@ -207,3 +341,3 @@ }

for (let index in vc._processingNodes){
for (let index in vc._processingNodes) {
let processor = vc._processingNodes[index];

@@ -215,10 +349,10 @@ let id = "processor" + index;

inputs: getInputIDs(processor, vc),
properties:{}
properties: {}
};
for(let property in node.definition.properties){
for (let property in node.definition.properties) {
node.properties[property] = processor[property];
}
if (node.type === "TransitionNode"){
if (node.type === "TransitionNode") {
node.transitions = processor._transitions;

@@ -231,3 +365,3 @@ }

result["destination"] = {
type:"Destination",
type: "Destination",
inputs: getInputIDs(vc.destination, vc)

@@ -239,6 +373,6 @@ };

export function createControlFormForNode(node, nodeName){
export function createControlFormForNode(node, nodeName) {
let rootDiv = document.createElement("div");
if (nodeName !== undefined){
if (nodeName !== undefined) {
var title = document.createElement("h2");

@@ -249,3 +383,3 @@ title.innerHTML = nodeName;

for(let propertyName in node._properties){
for (let propertyName in node._properties) {
let propertyParagraph = document.createElement("p");

@@ -257,3 +391,3 @@ let propertyTitleHeader = document.createElement("h3");

let propertyValue = node._properties[propertyName].value;
if (typeof propertyValue === "number"){
if (typeof propertyValue === "number") {
let range = document.createElement("input");

@@ -264,3 +398,3 @@ range.setAttribute("type", "range");

range.setAttribute("step", "0.01");
range.setAttribute("value", propertyValue,toString());
range.setAttribute("value", propertyValue, toString());

@@ -272,9 +406,13 @@ let number = document.createElement("input");

number.setAttribute("step", "0.01");
number.setAttribute("value", propertyValue,toString());
number.setAttribute("value", propertyValue, toString());
let mouseDown = false;
range.onmousedown =function(){mouseDown=true;};
range.onmouseup =function(){mouseDown=false;};
range.onmousemove = function(){
if(mouseDown){
range.onmousedown = function() {
mouseDown = true;
};
range.onmouseup = function() {
mouseDown = false;
};
range.onmousemove = function() {
if (mouseDown) {
node[propertyName] = parseFloat(range.value);

@@ -284,7 +422,7 @@ number.value = range.value;

};
range.onchange = function(){
range.onchange = function() {
node[propertyName] = parseFloat(range.value);
number.value = range.value;
};
number.onchange =function(){
number.onchange = function() {
node[propertyName] = parseFloat(number.value);

@@ -295,4 +433,3 @@ range.value = number.value;

propertyParagraph.appendChild(number);
}
else if(Object.prototype.toString.call(propertyValue) === "[object Array]"){
} else if (Object.prototype.toString.call(propertyValue) === "[object Array]") {
for (var i = 0; i < propertyValue.length; i++) {

@@ -304,3 +441,3 @@ let range = document.createElement("input");

range.setAttribute("step", "0.01");
range.setAttribute("value", propertyValue[i],toString());
range.setAttribute("value", propertyValue[i], toString());

@@ -312,10 +449,14 @@ let number = document.createElement("input");

number.setAttribute("step", "0.01");
number.setAttribute("value", propertyValue,toString());
number.setAttribute("value", propertyValue, toString());
let index = i;
let mouseDown = false;
range.onmousedown =function(){mouseDown=true;};
range.onmouseup =function(){mouseDown=false;};
range.onmousemove = function(){
if(mouseDown){
range.onmousedown = function() {
mouseDown = true;
};
range.onmouseup = function() {
mouseDown = false;
};
range.onmousemove = function() {
if (mouseDown) {
node[propertyName][index] = parseFloat(range.value);

@@ -325,3 +466,3 @@ number.value = range.value;

};
range.onchange = function(){
range.onchange = function() {
node[propertyName][index] = parseFloat(range.value);

@@ -331,3 +472,3 @@ number.value = range.value;

number.onchange = function(){
number.onchange = function() {
node[propertyName][index] = parseFloat(number.value);

@@ -346,17 +487,16 @@ range.value = number.value;
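//Usage sketch (assumes `effectNode` is a processing node exposing numeric
//_properties): append the generated form to the page for live sliders.
//
//document.body.appendChild(createControlFormForNode(effectNode, "Effect"));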

function calculateNodeDepthFromDestination(videoContext){
function calculateNodeDepthFromDestination(videoContext) {
let destination = videoContext.destination;
let depthMap= new Map();
let depthMap = new Map();
depthMap.set(destination, 0);
function itterateBackwards(node, depth=0){
for (let n of node.inputs){
function itterateBackwards(node, depth = 0) {
for (let n of node.inputs) {
let d = depth + 1;
if (depthMap.has(n)){
if (d > depthMap.get(n)){
if (depthMap.has(n)) {
if (d > depthMap.get(n)) {
depthMap.set(n, d);
}
} else{
depthMap.set(n,d);
} else {
depthMap.set(n, d);
}

@@ -371,23 +511,21 @@ itterateBackwards(n, depthMap.get(n));

export function visualiseVideoContextGraph(videoContext, canvas){
export function visualiseVideoContextGraph(videoContext, canvas) {
let ctx = canvas.getContext("2d");
let w = canvas.width;
let h = canvas.height;
ctx.clearRect(0,0,w,h);
ctx.clearRect(0, 0, w, h);
let nodeDepths = calculateNodeDepthFromDestination(videoContext);
let depths = nodeDepths.values();
depths = Array.from(depths).sort(function(a, b){return b-a;});
depths = Array.from(depths).sort(function(a, b) {
return b - a;
});
let maxDepth = depths[0];
let xStep = w / (maxDepth+1);
let xStep = w / (maxDepth + 1);
let nodeHeight = (h / videoContext._sourceNodes.length)/3;
let nodeHeight = h / videoContext._sourceNodes.length / 3;
let nodeWidth = nodeHeight * 1.618;
function calculateNodePos(node, nodeDepths, xStep, nodeHeight){
function calculateNodePos(node, nodeDepths, xStep, nodeHeight) {
let depth = nodeDepths.get(node);

@@ -397,10 +535,12 @@ nodeDepths.values();

let count = 0;
for(let nodeDepth of nodeDepths){
for (let nodeDepth of nodeDepths) {
if (nodeDepth[0] === node) break;
if (nodeDepth[1] === depth) count += 1;
}
return {x:(xStep*nodeDepths.get(node)), y:nodeHeight*1.5*count + 50};
return {
x: xStep * nodeDepths.get(node),
y: nodeHeight * 1.5 * count + 50
};
}
// "video":["#572A72", "#3C1255"],

@@ -414,24 +554,24 @@ // "image":["#7D9F35", "#577714"],

let destination = calculateNodePos(conn.destination, nodeDepths, xStep, nodeHeight);
if (source !== undefined && destination !== undefined){
if (source !== undefined && destination !== undefined) {
ctx.beginPath();
//ctx.moveTo(source.x + nodeWidth/2, source.y + nodeHeight/2);
let x1 = source.x + nodeWidth/2;
let y1 = source.y + nodeHeight/2;
let x2 = destination.x + nodeWidth/2;
let y2 = destination.y + nodeHeight/2;
let x1 = source.x + nodeWidth / 2;
let y1 = source.y + nodeHeight / 2;
let x2 = destination.x + nodeWidth / 2;
let y2 = destination.y + nodeHeight / 2;
let dx = x2 - x1;
let dy = y2 - y1;
let angle = Math.PI/2 - Math.atan2(dx,dy);
let angle = Math.PI / 2 - Math.atan2(dx, dy);
let distance = Math.sqrt(Math.pow(x1-x2,2) + Math.pow(y1-y2,2));
let distance = Math.sqrt(Math.pow(x1 - x2, 2) + Math.pow(y1 - y2, 2));
let midX = Math.min(x1, x2) + (Math.max(x1,x2) - Math.min(x1, x2))/2;
let midY = Math.min(y1, y2) + (Math.max(y1,y2) - Math.min(y1, y2))/2;
let midX = Math.min(x1, x2) + (Math.max(x1, x2) - Math.min(x1, x2)) / 2;
let midY = Math.min(y1, y2) + (Math.max(y1, y2) - Math.min(y1, y2)) / 2;
let testX = (Math.cos(angle + Math.PI/2))*distance/1.5 + midX;
let testY = (Math.sin(angle + Math.PI/2))*distance/1.5 + midY;
let testX = (Math.cos(angle + Math.PI / 2) * distance) / 1.5 + midX;
let testY = (Math.sin(angle + Math.PI / 2) * distance) / 1.5 + midY;
// console.log(testX, testY);
ctx.arc(testX, testY, distance/1.2, angle-Math.PI+0.95, angle-0.95);
ctx.arc(testX, testY, distance / 1.2, angle - Math.PI + 0.95, angle - 0.95);

@@ -445,23 +585,22 @@ //ctx.arcTo(source.x + nodeWidth/2 ,source.y + nodeHeight/2,destination.x + nodeWidth/2,destination.y + nodeHeight/2,100);

for(let node of nodeDepths.keys()){
for (let node of nodeDepths.keys()) {
let pos = calculateNodePos(node, nodeDepths, xStep, nodeHeight);
let color = "#AA9639";
let text = "";
if (node.displayName === "CompositingNode"){
if (node.displayName === "CompositingNode") {
color = "#000000";
}
if (node.displayName === "DestinationNode"){
if (node.displayName === "DestinationNode") {
color = "#7D9F35";
text="Output";
text = "Output";
}
if (node.displayName === "VideoNode"){
if (node.displayName === "VideoNode") {
color = "#572A72";
text = "Video";
}
if (node.displayName === "CanvasNode"){
if (node.displayName === "CanvasNode") {
color = "#572A72";
text = "Canvas";
}
if (node.displayName === "ImageNode"){
if (node.displayName === "ImageNode") {
color = "#572A72";

@@ -475,7 +614,6 @@ text = "Image";

ctx.fillStyle = "#000";
ctx.textAlign = "center";
ctx.font = "10px Arial";
ctx.fillText(text,pos.x+nodeWidth/2, pos.y+nodeHeight/2+2.5);
ctx.fillText(text, pos.x + nodeWidth / 2, pos.y + nodeHeight / 2 + 2.5);
ctx.fill();

@@ -487,13 +625,10 @@ }

export function createSigmaGraphDataFromRenderGraph(videoContext){
function idForNode(node){
if (videoContext._sourceNodes.indexOf(node) !== -1){
let id = "source " + node.displayName+ " "+videoContext._sourceNodes.indexOf(node);
export function createSigmaGraphDataFromRenderGraph(videoContext) {
function idForNode(node) {
if (videoContext._sourceNodes.indexOf(node) !== -1) {
let id = "source " + node.displayName + " " + videoContext._sourceNodes.indexOf(node);
return id;
}
let id = "processor " + node.displayName + " "+videoContext._processingNodes.indexOf(node);
let id =
"processor " + node.displayName + " " + videoContext._processingNodes.indexOf(node);
return id;

@@ -503,12 +638,13 @@ }

let graph = {
nodes:[
nodes: [
{
id: idForNode(videoContext.destination),
label:"Destination Node",
x:2.5,
y:0.5,
size:2,
label: "Destination Node",
x: 2.5,
y: 0.5,
size: 2,
node: videoContext.destination
}],
edges:[]
}
],
edges: []
};

@@ -521,8 +657,8 @@

id: idForNode(sourceNode),
label:"Source "+ i.toString(),
x:0,
label: "Source " + i.toString(),
x: 0,
y: y,
size:2,
color:"#572A72",
node:sourceNode
size: 2,
color: "#572A72",
node: sourceNode
});

@@ -534,5 +670,5 @@ }

id: idForNode(processingNode),
x: Math.random() *2.5,
x: Math.random() * 2.5,
y: Math.random(),
size:2,
size: 2,
node: processingNode

@@ -545,15 +681,12 @@ });

graph.edges.push({
"id":"e"+i.toString(),
"source": idForNode(conn.source),
"target": idForNode(conn.destination)
id: "e" + i.toString(),
source: idForNode(conn.source),
target: idForNode(conn.destination)
});
}
return graph;
}
export function importSimpleEDL(ctx, playlist){
export function importSimpleEDL(ctx, playlist) {
// Create a "track" node to connect all the clips to.

@@ -563,14 +696,14 @@ let trackNode = ctx.compositor(DEFINITIONS.COMBINE);

// Create a source node for each of the clips.
for (let clip of playlist){
for (let clip of playlist) {
let node;
if (clip.type === "video"){
if (clip.type === "video") {
node = ctx.video(clip.src, clip.sourceStart);
} else if (clip.type === "image"){
} else if (clip.type === "image") {
node = ctx.image(clip.src, clip.sourceStart);
}else {
console.debug("Clip type \"" + clip.type + "\" not recognised, skipping.");
} else {
console.debug(`Clip type ${clip.type} not recognised, skipping.`);
continue;
}
node.startAt(clip.start);
node.stopAt(clip.start + clip.duration);
node.stopAt(clip.start + clip.duration);
node.connect(trackNode);

@@ -581,3 +714,3 @@ }

export function visualiseVideoContextTimeline(videoContext, canvas, currentTime){
export function visualiseVideoContextTimeline(videoContext, canvas, currentTime) {
let ctx = canvas.getContext("2d");

@@ -589,15 +722,16 @@ let w = canvas.width;

if (currentTime > playlistDuration && !videoContext.endOnLastSourceEnd) playlistDuration = currentTime;
if (currentTime > playlistDuration && !videoContext.endOnLastSourceEnd)
playlistDuration = currentTime;
if (videoContext.duration === Infinity){
if (videoContext.duration === Infinity) {
let total = 0;
for (let i = 0; i < videoContext._sourceNodes.length; i++) {
let sourceNode = videoContext._sourceNodes[i];
if(sourceNode._stopTime !== Infinity) total += sourceNode._stopTime;
if (sourceNode._stopTime !== Infinity) total += sourceNode._stopTime;
}
if (total > videoContext.currentTime){
playlistDuration = total+5;
}else{
playlistDuration = videoContext.currentTime+5;
if (total > videoContext.currentTime) {
playlistDuration = total + 5;
} else {
playlistDuration = videoContext.currentTime + 5;
}

@@ -607,15 +741,14 @@ }

let mediaSourceStyle = {
"video":["#572A72", "#3C1255"],
"image":["#7D9F35", "#577714"],
"canvas":["#AA9639", "#806D15"]
video: ["#572A72", "#3C1255"],
image: ["#7D9F35", "#577714"],
canvas: ["#AA9639", "#806D15"]
};
ctx.clearRect(0,0,w,h);
ctx.clearRect(0, 0, w, h);
ctx.fillStyle = "#999";
for(let node of videoContext._processingNodes){
for (let node of videoContext._processingNodes) {
if (node.displayName !== "TransitionNode") continue;
for(let propertyName in node._transitions){
for(let transition of node._transitions[propertyName]){
for (let propertyName in node._transitions) {
for (let transition of node._transitions[propertyName]) {
let tW = (transition.end - transition.start) * pixelsPerSecond;

@@ -632,7 +765,6 @@ let tH = h;

for (let i = 0; i < videoContext._sourceNodes.length; i++) {
let sourceNode = videoContext._sourceNodes[i];
let duration = sourceNode._stopTime - sourceNode._startTime;
if(duration=== Infinity) duration = videoContext.currentTime;
if (duration === Infinity) duration = videoContext.currentTime;
let start = sourceNode._startTime;

@@ -644,22 +776,16 @@

let msY = trackHeight * i;
ctx.fillStyle = mediaSourceStyle.video[i%mediaSourceStyle.video.length];
ctx.fillStyle = mediaSourceStyle.video[i % mediaSourceStyle.video.length];
ctx.fillRect(msX,msY,msW,msH);
ctx.fillRect(msX, msY, msW, msH);
ctx.fill();
}
if (currentTime !== undefined){
if (currentTime !== undefined) {
ctx.fillStyle = "#000";
ctx.fillRect(currentTime*pixelsPerSecond, 0, 1, h);
ctx.fillRect(currentTime * pixelsPerSecond, 0, 1, h);
}
}
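
A sketch of driving this visualisation from the context's own "update" event, assuming a second <canvas id="timeline"> overlay element:

var timelineCanvas = document.getElementById("timeline"); // assumed overlay canvas
ctx.registerCallback("update", function () {
    // redraw the timeline strip and playhead on every rendered frame
    visualiseVideoContextTimeline(ctx, timelineCanvas, ctx.currentTime);
});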
export class UpdateablesManager{
constructor(){
export class UpdateablesManager {
constructor() {
this._updateables = [];

@@ -671,3 +797,4 @@ this._useWebworker = false;

this._webWorkerString = "\
this._webWorkerString =
"\
var running = false;\

@@ -691,7 +818,9 @@ function tick(){\

_initWebWorker(){
_initWebWorker() {
window.URL = window.URL || window.webkitURL;
let blob = new Blob([this._webWorkerString], {type: "application/javascript"});
let blob = new Blob([this._webWorkerString], {
type: "application/javascript"
});
this._webWorker = new Worker(URL.createObjectURL(blob));
this._webWorker.onmessage = (msg)=>{
this._webWorker.onmessage = msg => {
let time = msg.data;

@@ -702,6 +831,6 @@ this._updateWorkerTime(time);

_lostVisibility(){
_lostVisibility() {
this._previousWorkerTime = Date.now();
this._useWebworker = true;
if (!this._webWorker){
if (!this._webWorker) {
this._initWebWorker();

@@ -712,11 +841,11 @@ }

_gainedVisibility(){
_gainedVisibility() {
this._useWebworker = false;
this._previousRAFTime = undefined;
if(this._webWorker) this._webWorker.postMessage("stop");
if (this._webWorker) this._webWorker.postMessage("stop");
requestAnimationFrame(this._updateRAFTime.bind(this));
}
_init(){
if(!window.Worker)return;
_init() {
if (!window.Worker) return;

@@ -730,9 +859,13 @@ //If the page visibility API is not present, fall back to using "focus" and "blur" event listeners.

//Otherwise we can use the visibility API to handle losing/gaining focus properly
document.addEventListener("visibilitychange", ()=>{
if (document.hidden === true) {
this._lostVisibility();
} else {
this._gainedVisibility();
}
}, false);
document.addEventListener(
"visibilitychange",
() => {
if (document.hidden === true) {
this._lostVisibility();
} else {
this._gainedVisibility();
}
},
false
);
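
For background: browsers throttle requestAnimationFrame in hidden tabs, which is why timing falls back to a Web Worker while visibility is lost. A stripped-down sketch of the same worker-timer pattern (the tick interval is an illustrative guess, not the library's exact worker source):

var workerSrc = "\
    var running = true;\
    function tick(){\
        postMessage(Date.now());\
        if (running) setTimeout(tick, 40);\
    }\
    onmessage = function(msg){ if (msg.data === 'stop') running = false; };\
    tick();";
var blob = new Blob([workerSrc], { type: "application/javascript" });
var worker = new Worker(URL.createObjectURL(blob));
worker.onmessage = function (msg) {
    console.log("worker clock:", msg.data); // timestamps keep arriving while the tab is hidden
};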

@@ -742,3 +875,3 @@ requestAnimationFrame(this._updateRAFTime.bind(this));

_updateWorkerTime(time){
_updateWorkerTime(time) {
let dt = (time - this._previousWorkerTime) / 1000;

@@ -749,20 +882,19 @@ if (dt !== 0) this._update(dt);

_updateRAFTime(time){
if (this._previousRAFTime === undefined)this._previousRAFTime = time;
_updateRAFTime(time) {
if (this._previousRAFTime === undefined) this._previousRAFTime = time;
let dt = (time - this._previousRAFTime) / 1000;
if (dt !== 0) this._update(dt);
this._previousRAFTime = time;
if(!this._useWebworker)requestAnimationFrame(this._updateRAFTime.bind(this));
if (!this._useWebworker) requestAnimationFrame(this._updateRAFTime.bind(this));
}
_update(dt){
for(let i = 0; i < this._updateables.length; i++){
_update(dt) {
for (let i = 0; i < this._updateables.length; i++) {
this._updateables[i]._update(parseFloat(dt));
}
}
register(updateable){
register(updateable) {
this._updateables.push(updateable);
if (this._active === false){
if (this._active === false) {
this._active = true;

@@ -769,0 +901,0 @@ this._init();

@@ -13,3 +13,13 @@ //Matthew Shotton, R&D User Experience, © BBC 2015

import VideoElementCache from "./videoelementcache.js";
import { createSigmaGraphDataFromRenderGraph, visualiseVideoContextTimeline, visualiseVideoContextGraph, createControlFormForNode, UpdateablesManager, exportToJSON, importSimpleEDL, snapshot, generateRandomId } from "./utils.js";
import {
createSigmaGraphDataFromRenderGraph,
visualiseVideoContextTimeline,
visualiseVideoContextGraph,
createControlFormForNode,
UpdateablesManager,
exportToJSON,
importSimpleEDL,
snapshot,
generateRandomId
} from "./utils.js";
import DEFINITIONS from "./Definitions/definitions.js";

@@ -23,40 +33,57 @@

*/
export default class VideoContext{
export default class VideoContext {
/**
* Initialise the VideoContext and render to the specified canvas. A 2nd parameter can be passed to the constructor which is a function that gets called if the VideoContext fails to initialise.
*
* @param {Canvas} canvas - the canvas element to render the output to.
* @param {function} initErrorCallback - a callback for if initialising the canvas failed.
* @param {Object} options - a number of custom options which can be set on the VideoContext, generally best left as default.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, function(){console.error("Sorry, your browser doesn\'t support WebGL");});
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*
*/
constructor(canvas, initErrorCallback, options={"preserveDrawingBuffer":true, "manualUpdate":false, "endOnLastSourceEnd":true, useVideoElementCache:true, videoElementCacheSize:6, webglContextAttributes: {preserveDrawingBuffer: true, alpha: false }}){
* Initialise the VideoContext and render to the specified canvas. A 2nd parameter can be passed to the constructor which is a function that gets called if the VideoContext fails to initialise.
*
* @param {Canvas} canvas - the canvas element to render the output to.
* @param {function} initErrorCallback - a callback for if initialising the canvas failed.
* @param {Object} options - a number of custom options which can be set on the VideoContext, generally best left as default.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, function(){console.error("Sorry, your browser doesn\'t support WebGL");});
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*
*/
constructor(
canvas,
initErrorCallback,
options = {
preserveDrawingBuffer: true,
manualUpdate: false,
endOnLastSourceEnd: true,
useVideoElementCache: true,
videoElementCacheSize: 6,
webglContextAttributes: {
preserveDrawingBuffer: true,
alpha: false
}
}
) {
this._canvas = canvas;
let manualUpdate = false;
this.endOnLastSourceEnd = true;
let webglContextAttributes = {preserveDrawingBuffer: true, alpha: false };
let webglContextAttributes = {
preserveDrawingBuffer: true,
alpha: false
};
if ("manualUpdate" in options) manualUpdate = options.manualUpdate;
if ("endOnLastSourceEnd" in options) this._endOnLastSourceEnd = options.endOnLastSourceEnd;
if ("webglContextAttributes" in options) webglContextAttributes = options.webglContextAttributes;
if ("webglContextAttributes" in options)
webglContextAttributes = options.webglContextAttributes;
if (webglContextAttributes.alpha === undefined) webglContextAttributes.alpha = false;
if (webglContextAttributes.alpha === true){
if (webglContextAttributes.alpha === true) {
console.error("webglContextAttributes.alpha must be false for correct opeation");
}
this._gl = canvas.getContext("experimental-webgl", webglContextAttributes);
if(this._gl === null){
if (this._gl === null) {
console.error("Failed to intialise WebGL.");
if(initErrorCallback)initErrorCallback();
if (initErrorCallback) initErrorCallback();
return;
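
Putting the options argument to work, a sketch of constructing with a larger element cache; every key shown corresponds to one handled above, and the values are illustrative:

var ctx = new VideoContext(
    document.getElementById("canvas"),
    function () { console.error("WebGL initialisation failed"); },
    {
        manualUpdate: false,
        endOnLastSourceEnd: true,
        useVideoElementCache: true,
        videoElementCacheSize: 10, // illustrative; the default is 6
        webglContextAttributes: { preserveDrawingBuffer: true, alpha: false }
    }
);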

@@ -66,20 +93,19 @@ }

// Initialise the video element cache
if(options.useVideoElementCache === undefined) options.useVideoElementCache = true;
if (options.useVideoElementCache === undefined) options.useVideoElementCache = true;
this._useVideoElementCache = options.useVideoElementCache;
if (this._useVideoElementCache){
if (this._useVideoElementCache) {
if (!options.videoElementCacheSize) options.videoElementCacheSize = 5;
this._videoElementCache = new VideoElementCache(options.videoElementCacheSize);
}
// Create a unique ID for this VideoContext which can be used in the debugger.
if(this._canvas.id) {
if (typeof this._canvas.id === "string" || this._canvas.id instanceof String){
if (this._canvas.id) {
if (typeof this._canvas.id === "string" || this._canvas.id instanceof String) {
this._id = canvas.id;
}
}
if(this._id === undefined) this._id = generateRandomId();
if (this._id === undefined) this._id = generateRandomId();
if (window.__VIDEOCONTEXT_REFS__ === undefined) window.__VIDEOCONTEXT_REFS__ = {};
window.__VIDEOCONTEXT_REFS__[this._id] = this;
this._renderGraph = new RenderGraph();

@@ -105,3 +131,3 @@ this._sourceNodes = [];

if(!manualUpdate){
if (!manualUpdate) {
updateablesManager.register(this);

@@ -115,3 +141,3 @@ }

*/
get id(){
get id() {
return this._id;

@@ -123,5 +149,6 @@ }

*/
set id(newID){
set id(newID) {
delete window.__VIDEOCONTEXT_REFS__[this._id];
if (window.__VIDEOCONTEXT_REFS__[newID] !== undefined) console.warn("Warning: setting id to that of an existing VideoContext instance.");
if (window.__VIDEOCONTEXT_REFS__[newID] !== undefined)
console.warn("Warning: setting id to that of an existing VideoContext instance.");
window.__VIDEOCONTEXT_REFS__[newID] = this;

@@ -132,24 +159,27 @@ this._id = newID;

/**
* Register a callback to happen at a specific point in time.
* @param {number} time - the time at which to trigger the callback.
* @param {Function} func - the callback to register.
* @param {number} ordering - the order in which to call the callback if more than one is registered for the same time.
*/
registerTimelineCallback(time, func, ordering= 0){
this._timelineCallbacks.push({"time":time, "func":func, "ordering":ordering});
* Register a callback to happen at a specific point in time.
* @param {number} time - the time at which to trigger the callback.
* @param {Function} func - the callback to register.
* @param {number} ordering - the order in which to call the callback if more than one is registered for the same time.
*/
registerTimelineCallback(time, func, ordering = 0) {
this._timelineCallbacks.push({
time: time,
func: func,
ordering: ordering
});
}
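
Unlike registerCallback below, registerTimelineCallback carries no @example in the source; a short sketch showing how ordering breaks ties between callbacks registered for the same time:

// Both fire when playback crosses t = 5s; the lower ordering runs first.
ctx.registerTimelineCallback(5, function () { console.log("subtitle on"); }, 0);
ctx.registerTimelineCallback(5, function () { console.log("analytics ping"); }, 1);
ctx.play();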
/**
* Unregister a callback which happens at a specific point in time.
* @param {Function} func - the callback to unregister.
*/
unregisterTimelineCallback(func){
* Unregister a callback which happens at a specific point in time.
* @param {Function} func - the callback to unregister.
*/
unregisterTimelineCallback(func) {
let toRemove = [];
for(let callback of this._timelineCallbacks){
if (callback.func === func){
for (let callback of this._timelineCallbacks) {
if (callback.func === func) {
toRemove.push(callback);
}
}
for (let callback of toRemove){
for (let callback of toRemove) {
let index = this._timelineCallbacks.indexOf(callback);

@@ -161,21 +191,21 @@ this._timelineCallbacks.splice(index, 1);

/**
* Register a callback to listen to one of the following events: "stalled", "update", "ended", "content", "nocontent"
*
* "stalled" happens any time playback is stopped due to unavailable data for playing assets (i.e. video still loading).
* "update" is called any time a frame is rendered to the screen. "ended" is called once playback has finished
* (i.e. ctx.currentTime == ctx.duration). "content" is called at the start of a time region where there is content
* playing out of one or more sourceNodes. "nocontent" is called at the start of any time region where the
* VideoContext is still playing, but there are currently no actively playing sources.
*
* @param {String} type - the event to register against ("stalled", "update", or "ended").
* @param {Function} func - the callback to register.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* ctx.registerCallback("stalled", function(){console.log("Playback stalled");});
* ctx.registerCallback("update", function(){console.log("new frame");});
* ctx.registerCallback("ended", function(){console.log("Playback ended");});
*/
registerCallback(type, func){
* Register a callback to listen to one of the following events: "stalled", "update", "ended", "content", "nocontent"
*
* "stalled" happens any time playback is stopped due to unavailable data for playing assets (i.e. video still loading).
* "update" is called any time a frame is rendered to the screen. "ended" is called once playback has finished
* (i.e. ctx.currentTime == ctx.duration). "content" is called at the start of a time region where there is content
* playing out of one or more sourceNodes. "nocontent" is called at the start of any time region where the
* VideoContext is still playing, but there are currently no actively playing sources.
*
* @param {String} type - the event to register against ("stalled", "update", or "ended").
* @param {Function} func - the callback to register.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* ctx.registerCallback("stalled", function(){console.log("Playback stalled");});
* ctx.registerCallback("update", function(){console.log("new frame");});
* ctx.registerCallback("ended", function(){console.log("Playback ended");});
*/
registerCallback(type, func) {
if (!this._callbacks.has(type)) return false;

@@ -186,23 +216,23 @@ this._callbacks.get(type).push(func);

/**
* Remove a previously registered callback
*
* @param {Function} func - the callback to remove.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //the callback
* var updateCallback = function(){console.log("new frame")};
*
* //register the callback
* ctx.registerCallback("update", updateCallback);
* //then unregister it
* ctx.unregisterCallback(updateCallback);
*
*/
unregisterCallback(func){
for(let funcArray of this._callbacks.values()){
* Remove a previously registered callback
*
* @param {Function} func - the callback to remove.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //the callback
* var updateCallback = function(){console.log("new frame")};
*
* //register the callback
* ctx.registerCallback("update", updateCallback);
* //then unregister it
* ctx.unregisterCallback(updateCallback);
*
*/
unregisterCallback(func) {
for (let funcArray of this._callbacks.values()) {
let index = funcArray.indexOf(func);
if (index !== -1){
if (index !== -1) {
funcArray.splice(index, 1);

@@ -215,5 +245,5 @@ return true;

_callCallbacks(type){
_callCallbacks(type) {
let funcArray = this._callbacks.get(type);
for (let func of funcArray){
for (let func of funcArray) {
func(this._currentTime);

@@ -224,8 +254,8 @@ }

/**
* Get the canvas that the VideoContext is using.
*
* @return {HTMLElement} The canvas that the VideoContext is using.
*
*/
get element(){
* Get the canvas that the VideoContext is using.
*
* @return {HTMLElement} The canvas that the VideoContext is using.
*
*/
get element() {
return this._canvas;

@@ -235,14 +265,14 @@ }

/**
* Get the current state.
*
* This will be either
* - VideoContext.STATE.PLAYING: current sources on timeline are active
* - VideoContext.STATE.PAUSED: all sources are paused
* - VideoContext.STATE.STALLED: one or more sources is unable to play
* - VideoContext.STATE.ENDED: all sources have finished playing
* - VideoContext.STATE.BROKEN: the render graph is in a broken state
* @return {number} The number representing the state.
*
*/
get state(){
* Get the current state.
*
* This will be either
* - VideoContext.STATE.PLAYING: current sources on timeline are active
* - VideoContext.STATE.PAUSED: all sources are paused
* - VideoContext.STATE.STALLED: one or more sources is unable to play
* - VideoContext.STATE.ENDED: all sources have finished playing
* - VideoContext.STATE.BROKEN: the render graph is in a broken state
* @return {number} The number representing the state.
*
*/
get state() {
return this._state;

@@ -252,22 +282,23 @@ }

/**
* Set the progress through the internal timeline.
* Setting this can be used as a way to implement a scrubbable timeline.
*
* @param {number} currentTime - this is the currentTime to set the context to.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(20);
* ctx.currentTime = 10; // seek 10 seconds in
* ctx.play();
*
*/
set currentTime(currentTime){
if (currentTime < this.duration && this._state === VideoContext.STATE.ENDED) this._state = VideoContext.STATE.PAUSED;
* Set the progress through the internal timeline.
* Setting this can be used as a way to implement a scrubbable timeline.
*
* @param {number} currentTime - this is the currentTime to set the context to.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(20);
* ctx.currentTime = 10; // seek 10 seconds in
* ctx.play();
*
*/
set currentTime(currentTime) {
if (currentTime < this.duration && this._state === VideoContext.STATE.ENDED)
this._state = VideoContext.STATE.PAUSED;
if (typeof currentTime === "string" || currentTime instanceof String){
if (typeof currentTime === "string" || currentTime instanceof String) {
currentTime = parseFloat(currentTime);

@@ -286,19 +317,19 @@ }

/**
* Get how far through the internal timeline has been played.
*
* Getting this value will give the current playhead position. Can be used for updating timelines.
* @return {number} The time in seconds through the current playlist.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
* setTimeout(function(){console.log(ctx.currentTime);},1000); //should print roughly 1.0
*
*/
get currentTime(){
* Get how far through the internal timeline has been played.
*
* Getting this value will give the current playhead position. Can be used for updating timelines.
* @return {number} The time in seconds through the current playlist.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
* setTimeout(function(){console.log(ctx.currentTime);},1000); //should print roughly 1.0
*
*/
get currentTime() {
return this._currentTime;

@@ -308,24 +339,27 @@ }

/**
* Get the time at which the last node in the current internal timeline finishes playing.
*
* @return {number} The end time in seconds of the last video node to finish playing.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* console.log(ctx.duration); //prints 0
*
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
*
* console.log(ctx.duration); //prints 10
*
* ctx.play();
*/
get duration(){
* Get the time at which the last node in the current internal timeline finishes playing.
*
* @return {number} The end time in seconds of the last video node to finish playing.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* console.log(ctx.duration); //prints 0
*
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
*
* console.log(ctx.duration); //prints 10
*
* ctx.play();
*/
get duration() {
let maxTime = 0;
for (let i = 0; i < this._sourceNodes.length; i++) {
if (this._sourceNodes[i].state !== SOURCENODESTATE.waiting &&this._sourceNodes[i]._stopTime > maxTime){
if (
this._sourceNodes[i].state !== SOURCENODESTATE.waiting &&
this._sourceNodes[i]._stopTime > maxTime
) {
maxTime = this._sourceNodes[i]._stopTime;

@@ -337,19 +371,18 @@ }

/**
* Get the final node in the render graph which represents the canvas to display content on to.
*
* This property is read-only and there can only ever be one destination node. Other nodes can connect to this but you cannot connect this node to anything.
*
* @return {DestinationNode} A graph node representing the canvas to display the content on.
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.start(0);
* videoNode.stop(10);
* videoNode.connect(ctx.destination);
*
*/
get destination(){
* Get the final node in the render graph which represents the canvas to display content on to.
*
* This property is read-only and there can only ever be one destination node. Other nodes can connect to this but you cannot connect this node to anything.
*
* @return {DestinationNode} A graph node representing the canvas to display the content on.
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.start(0);
* videoNode.stop(10);
* videoNode.connect(ctx.destination);
*
*/
get destination() {
return this._destinationNode;

@@ -359,23 +392,23 @@ }

/**
* Set the playback rate of the VideoContext instance.
* This will alter the playback speed of all media elements played through the VideoContext.
*
* @param {number} rate - this is the playback rate.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.start(0);
* videoNode.stop(10);
* videoNode.connect(ctx.destination);
* ctx.playbackRate = 2;
* ctx.play(); // Double playback rate means this will finish playing in 5 seconds.
*/
set playbackRate(rate){
if (rate <= 0){
* Set the playback rate of the VideoContext instance.
* This will alter the playback speed of all media elements played through the VideoContext.
*
* @param {number} rate - this is the playback rate.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.start(0);
* videoNode.stop(10);
* videoNode.connect(ctx.destination);
* ctx.playbackRate = 2;
* ctx.play(); // Double playback rate means this will finish playing in 5 seconds.
*/
set playbackRate(rate) {
if (rate <= 0) {
throw new RangeError("playbackRate must be greater than 0");
}
for (let node of this._sourceNodes) {
if (node.constructor.name === "VideoNode"){
if (node.constructor.name === "VideoNode") {
node._globalPlaybackRate = rate;

@@ -388,12 +421,10 @@ node._playbackRateUpdated = true;

/**
* Return the current playbackRate of the video context.
* @return {number} A value representing the playbackRate. 1.0 by default.
*/
get playbackRate(){
* Return the current playbackRate of the video context.
* @return {number} A value representing the playbackRate. 1.0 by default.
*/
get playbackRate() {
return this._playbackRate;
}
/**

@@ -403,5 +434,5 @@ * Set the volume of all VideoNode's created in the VideoContext.

*/
set volume(vol){
for (let node of this._sourceNodes){
if(node instanceof VideoNode || node instanceof AudioNode){
set volume(vol) {
for (let node of this._sourceNodes) {
if (node instanceof VideoNode || node instanceof AudioNode) {
node.volume = vol;

@@ -414,6 +445,6 @@ }

/**
* Return the current volume of the video context.
* @return {number} A value representing the volume. 1.0 by default.
*/
get volume(){
* Return the current volume of the video context.
* @return {number} A value representing the volume. 1.0 by default.
*/
get volume() {
return this._volume;
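
A one-line usage sketch for the volume setter/getter pair above:

ctx.volume = 0.5;        // halves the volume on every VideoNode/AudioNode source
console.log(ctx.volume); // 0.5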

@@ -423,16 +454,16 @@ }

/**
* Start the VideoContext playing
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*/
play(){
* Start the VideoContext playing
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(10);
* ctx.play();
*/
play() {
console.debug("VideoContext - playing");
//Initialise the video element cache
if (this._videoElementCache)this._videoElementCache.init();
if (this._videoElementCache) this._videoElementCache.init();
// set the state.

@@ -444,15 +475,15 @@ this._state = VideoContext.STATE.PLAYING;

/**
* Pause playback of the VideoContext
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(20);
* ctx.currentTime = 10; // seek 10 seconds in
* ctx.play();
* setTimeout(function(){ctx.pause();}, 1000); //pause playback after roughly one second.
*/
pause(){
* Pause playback of the VideoContext
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
* videoNode.connect(ctx.destination);
* videoNode.start(0);
* videoNode.stop(20);
* ctx.currentTime = 10; // seek 10 seconds in
* ctx.play();
* setTimeout(function(){ctx.pause();}, 1000); //pause playback after roughly one second.
*/
pause() {
console.debug("VideoContext - pausing");

@@ -463,25 +494,34 @@ this._state = VideoContext.STATE.PAUSED;

/**
* Create a new node representing a video source
*
* @param {string|Video} - The URL or video element to create the video from.
* @sourceOffset {number} - Offset into the start of the source video to start playing from.
* @preloadTime {number} - How many seconds before the video is to be played to start loading it.
* @videoElementAttributes {Object} - A dictionary of attributes to map onto the underlying video element.
* @return {VideoNode} A new video node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var videoElement = document.getElementById("video");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video(videoElement);
*/
video(src, sourceOffset=0, preloadTime=4, videoElementAttributes={}){
let videoNode = new VideoNode(src, this._gl, this._renderGraph, this._currentTime, this._playbackRate, sourceOffset, preloadTime, this._videoElementCache, videoElementAttributes);
* Create a new node representing a video source
*
* @param {string|Video} - The URL or video element to create the video from.
* @sourceOffset {number} - Offset into the start of the source video to start playing from.
* @preloadTime {number} - How many seconds before the video is to be played to start loading it.
* @videoElementAttributes {Object} - A dictionary of attributes to map onto the underlying video element.
* @return {VideoNode} A new video node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video("video.mp4");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var videoElement = document.getElementById("video");
* var ctx = new VideoContext(canvasElement);
* var videoNode = ctx.video(videoElement);
*/
video(src, sourceOffset = 0, preloadTime = 4, videoElementAttributes = {}) {
let videoNode = new VideoNode(
src,
this._gl,
this._renderGraph,
this._currentTime,
this._playbackRate,
sourceOffset,
preloadTime,
this._videoElementCache,
videoElementAttributes
);
this._sourceNodes.push(videoNode);

@@ -491,4 +531,14 @@ return videoNode;

audio(src, sourceOffset=0, preloadTime=4, audioElementAttributes={}){
let audioNode = new AudioNode(src, this._gl, this._renderGraph, this._currentTime, this._playbackRate, sourceOffset, preloadTime, this._audioElementCache, audioElementAttributes);
audio(src, sourceOffset = 0, preloadTime = 4, audioElementAttributes = {}) {
let audioNode = new AudioNode(
src,
this._gl,
this._renderGraph,
this._currentTime,
this._playbackRate,
sourceOffset,
preloadTime,
this._audioElementCache,
audioElementAttributes
);
this._sourceNodes.push(audioNode);
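
audio() carries no JSDoc in this diff, but by symmetry with video() above a usage sketch looks like this (the file name is a placeholder):

var audioNode = ctx.audio("music.mp3", 0, 4); // src, sourceOffset, preloadTime
audioNode.start(0);
audioNode.stop(30);
audioNode.connect(ctx.destination);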

@@ -499,30 +549,38 @@ return audioNode;

/**
* @deprecated
*/
createVideoSourceNode(src, sourceOffset=0, preloadTime=4, videoElementAttributes={}){
this._depricate("Warning: createVideoSourceNode will be deprecated in v1.0, please switch to using VideoContext.video()");
* @deprecated
*/
createVideoSourceNode(src, sourceOffset = 0, preloadTime = 4, videoElementAttributes = {}) {
this._depricate(
"Warning: createVideoSourceNode will be deprecated in v1.0, please switch to using VideoContext.video()"
);
return this.video(src, sourceOffset, preloadTime, videoElementAttributes);
}
/**
* Create a new node representing an image source
* @param {string|Image} src - The url or image element to create the image node from.
* @param {number} [preloadTime] - How long before a node is to be displayed to attempt to load it.
* @param {Object} [imageElementAttributes] - Any attributes to be given to the underlying image element.
* @return {ImageNode} A new image node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image("image.png");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var imageElement = document.getElementById("image");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image(imageElement);
*/
image(src, preloadTime=4, imageElementAttributes={}){
let imageNode = new ImageNode(src, this._gl, this._renderGraph, this._currentTime, preloadTime, imageElementAttributes);
* Create a new node representing an image source
* @param {string|Image} src - The url or image element to create the image node from.
* @param {number} [preloadTime] - How long before a node is to be displayed to attempt to load it.
* @param {Object} [imageElementAttributes] - Any attributes to be given to the underlying image element.
* @return {ImageNode} A new image node.
*
* @example
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image("image.png");
*
* @example
* var canvasElement = document.getElementById("canvas");
* var imageElement = document.getElementById("image");
* var ctx = new VideoContext(canvasElement);
* var imageNode = ctx.image(imageElement);
*/
image(src, preloadTime = 4, imageElementAttributes = {}) {
let imageNode = new ImageNode(
src,
this._gl,
this._renderGraph,
this._currentTime,
preloadTime,
imageElementAttributes
);
this._sourceNodes.push(imageNode);

@@ -533,16 +591,17 @@ return imageNode;

/**
* @deprecated
*/
createImageSourceNode(src, sourceOffset=0, preloadTime=4, imageElementAttributes={}){
this._depricate("Warning: createImageSourceNode will be deprecated in v1.0, please switch to using VideoContext.image()");
* @deprecated
*/
createImageSourceNode(src, sourceOffset = 0, preloadTime = 4, imageElementAttributes = {}) {
this._depricate(
"Warning: createImageSourceNode will be deprecated in v1.0, please switch to using VideoContext.image()"
);
return this.image(src, sourceOffset, preloadTime, imageElementAttributes);
}
/**
* Create a new node representing a canvas source
* @param {Canvas} src - The canvas element to create the canvas node from.
* @return {CanvasNode} A new canvas node.
*/
canvas(canvas){
* Create a new node representing a canvas source
* @param {Canvas} src - The canvas element to create the canvas node from.
* @return {CanvasNode} A new canvas node.
*/
canvas(canvas) {
let canvasNode = new CanvasNode(canvas, this._gl, this._renderGraph, this._currentTime);
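
A sketch of feeding a dynamically drawn canvas in as a source; the drawing code is purely illustrative:

var srcCanvas = document.createElement("canvas");
srcCanvas.width = 640;
srcCanvas.height = 360;
var c2d = srcCanvas.getContext("2d");
c2d.fillStyle = "#572A72";
c2d.fillRect(0, 0, srcCanvas.width, srcCanvas.height);

var canvasNode = ctx.canvas(srcCanvas);
canvasNode.start(0);
canvasNode.stop(10);
canvasNode.connect(ctx.destination);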

@@ -554,16 +613,17 @@ this._sourceNodes.push(canvasNode);

/**
* @deprecated
*/
createCanvasSourceNode(canvas, sourceOffset=0, preloadTime=4){
this._depricate("Warning: createCanvasSourceNode will be deprecated in v1.0, please switch to using VideoContext.canvas()");
* @deprecated
*/
createCanvasSourceNode(canvas, sourceOffset = 0, preloadTime = 4) {
this._depricate(
"Warning: createCanvasSourceNode will be deprecated in v1.0, please switch to using VideoContext.canvas()"
);
return this.canvas(canvas, sourceOffset, preloadTime);
}
/**
* Create a new effect node.
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS.
* @return {EffectNode} A new effect node created from the passed definition
*/
effect(definition){
* Create a new effect node.
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS.
* @return {EffectNode} A new effect node created from the passed definition
*/
effect(definition) {
let effectNode = new EffectNode(this._gl, this._renderGraph, definition);
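
A sketch of dropping an effect between a source and the destination, assuming MONOCHROME is among the built-ins exposed on VideoContext.DEFINITIONS:

var monochrome = ctx.effect(VideoContext.DEFINITIONS.MONOCHROME); // assumed built-in definition
var videoNode = ctx.video("video.mp4");
videoNode.start(0);
videoNode.stop(10);
videoNode.connect(monochrome);
monochrome.connect(ctx.destination);
ctx.play();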

@@ -575,6 +635,8 @@ this._processingNodes.push(effectNode);

/**
* @deprecated
*/
createEffectNode(definition){
this._depricate("Warning: createEffectNode will be deprecated in v1.0, please switch to using VideoContext.effect()");
* @deprecated
*/
createEffectNode(definition) {
this._depricate(
"Warning: createEffectNode will be deprecated in v1.0, please switch to using VideoContext.effect()"
);
return this.effect(definition);

@@ -584,64 +646,64 @@ }

/**
* Create a new compositing node.
*
* Compositing nodes are used for operations such as combining multiple video sources into a single track/connection for further processing in the graph.
*
* A compositing node is slightly different to other processing nodes in that it only has one input in its definition but can have unlimited connections made to it.
* The shader in the definition is run for each input in turn, drawing them to the output buffer. This means there can be no interaction between the separate inputs to a compositing node, as they are individually processed in separate shader passes.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS
*
* @return {CompositingNode} A new compositing node created from the passed definition.
*
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple compositing node definition which just renders all the inputs to the output buffer.
* var combineDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image;\
* uniform float a;\
* varying vec2 v_texCoord;\
* varying float v_progress;\
* void main(){\
* vec4 color = texture2D(u_image, v_texCoord);\
* gl_FragColor = color;\
* }",
* properties:{
* "a":{type:"uniform", value:0.0},
* },
* inputs:["u_image"]
* };
* //Create the node, passing in the definition.
* var trackNode = videoCtx.compositor(combineDefinition);
*
* //create two videos which will play at back to back
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(10);
* videoNode2.stop(20);
*
* //Connect the nodes to the combine node. This will give a single connection representing the two videos which can
* //be connected to other effects such as LUTs, chromakeyers, etc.
* videoNode1.connect(trackNode);
* videoNode2.connect(trackNode);
*
* //Don't do anything exciting, just connect it to the output.
* trackNode.connect(ctx.destination);
*
*/
compositor(definition){
* Create a new compositing node.
*
* Compositing nodes are used for operations such as combining multiple video sources into a single track/connection for further processing in the graph.
*
* A compositing node is slightly different to other processing nodes in that it only has one input in its definition but can have unlimited connections made to it.
* The shader in the definition is run for each input in turn, drawing them to the output buffer. This means there can be no interaction between the separate inputs to a compositing node, as they are individually processed in separate shader passes.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the compositing node to create. Builtin definitions can be found by accessing VideoContext.DEFINITIONS
*
* @return {CompositingNode} A new compositing node created from the passed definition.
*
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple compositing node definition which just renders all the inputs to the output buffer.
* var combineDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image;\
* uniform float a;\
* varying vec2 v_texCoord;\
* varying float v_progress;\
* void main(){\
* vec4 color = texture2D(u_image, v_texCoord);\
* gl_FragColor = color;\
* }",
* properties:{
* "a":{type:"uniform", value:0.0},
* },
* inputs:["u_image"]
* };
* //Create the node, passing in the definition.
* var trackNode = videoCtx.compositor(combineDefinition);
*
* //create two videos which will play at back to back
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(10);
* videoNode2.stop(20);
*
* //Connect the nodes to the combine node. This will give a single connection representing the two videos which can
* //be connected to other effects such as LUTs, chromakeyers, etc.
* videoNode1.connect(trackNode);
* videoNode2.connect(trackNode);
*
* //Don't do anything exciting, just connect it to the output.
* trackNode.connect(ctx.destination);
*
*/
compositor(definition) {
let compositingNode = new CompositingNode(this._gl, this._renderGraph, definition);

@@ -653,90 +715,90 @@ this._processingNodes.push(compositingNode);

/**
* @deprecated
*/
createCompositingNode(definition){
this._depricate("Warning: createCompositingNode will be deprecated in v1.0, please switch to using VideoContext.compositor()");
* @deprecated
*/
createCompositingNode(definition) {
this._depricate(
"Warning: createCompositingNode will be deprecated in v1.0, please switch to using VideoContext.compositor()"
);
return this.compositor(definition);
}
/**
* Create a new transition node.
*
* Transition nodes are a type of effect node which have parameters that can be changed as events on the timeline.
*
* For example, a transition node which cross-fades between two videos could have a "mix" property which sets the
* progress through the transition. Rather than having to write your own code to adjust this property at specific
* points in time, a transition node has a "transition" function which takes a startTime, stopTime, targetValue, and a
* propertyName (which will be "mix"). This will linearly interpolate the property from the current value to
* targetValue between the startTime and stopTime.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the transition node to create.
* @return {TransitionNode} A new transition node created from the passed definition.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple cross-fade node definition which cross-fades between two videos based on the mix property.
* var crossfadeDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*a_position-vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image_a;\
* uniform sampler2D u_image_b;\
* uniform float mix;\
* varying vec2 v_texCoord;\
* varying float v_mix;\
* void main(){\
* vec4 color_a = texture2D(u_image_a, v_texCoord);\
* vec4 color_b = texture2D(u_image_b, v_texCoord);\
* color_a[0] *= mix;\
* color_a[1] *= mix;\
* color_a[2] *= mix;\
* color_a[3] *= mix;\
* color_b[0] *= (1.0 - mix);\
* color_b[1] *= (1.0 - mix);\
* color_b[2] *= (1.0 - mix);\
* color_b[3] *= (1.0 - mix);\
* gl_FragColor = color_a + color_b;\
* }",
* properties:{
* "mix":{type:"uniform", value:0.0},
* },
* inputs:["u_image_a","u_image_b"]
* };
*
* //Create the node, passing in the definition.
* var transitionNode = videoCtx.transition(crossfadeDefinition);
*
* //create two videos which will overlap by two seconds
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(8);
* videoNode2.stop(18);
*
* //Connect the nodes to the transition node.
* videoNode1.connect(transitionNode);
* videoNode2.connect(transitionNode);
*
* //Set-up a transition which happens at the crossover point of the playback of the two videos
* transitionNode.transition(8,10,1.0,"mix");
*
* //Connect the transition node to the output
* transitionNode.connect(ctx.destination);
*
* //start playback
* ctx.play();
*/
transition(definition){
* Create a new transition node.
*
* Transition nodes are a type of effect node which have parameters that can be changed as events on the timeline.
*
* For example, a transition node which cross-fades between two videos could have a "mix" property which sets the
* progress through the transition. Rather than having to write your own code to adjust this property at specific
* points in time, a transition node has a "transition" function which takes a startTime, stopTime, targetValue, and a
* propertyName (which will be "mix"). This will linearly interpolate the property from the current value to
* targetValue between the startTime and stopTime.
*
* @param {Object} definition - this is an object defining the shaders, inputs, and properties of the transition node to create.
* @return {TransitionNode} A new transition node created from the passed definition.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement);
*
* //A simple cross-fade node definition which cross-fades between two videos based on the mix property.
* var crossfadeDefinition = {
* vertexShader : "\
* attribute vec2 a_position;\
* attribute vec2 a_texCoord;\
* varying vec2 v_texCoord;\
* void main() {\
* gl_Position = vec4(vec2(2.0,2.0)*a_position-vec2(1.0, 1.0), 0.0, 1.0);\
* v_texCoord = a_texCoord;\
* }",
* fragmentShader : "\
* precision mediump float;\
* uniform sampler2D u_image_a;\
* uniform sampler2D u_image_b;\
* uniform float mix;\
* varying vec2 v_texCoord;\
* varying float v_mix;\
* void main(){\
* vec4 color_a = texture2D(u_image_a, v_texCoord);\
* vec4 color_b = texture2D(u_image_b, v_texCoord);\
* color_a[0] *= mix;\
* color_a[1] *= mix;\
* color_a[2] *= mix;\
* color_a[3] *= mix;\
* color_b[0] *= (1.0 - mix);\
* color_b[1] *= (1.0 - mix);\
* color_b[2] *= (1.0 - mix);\
* color_b[3] *= (1.0 - mix);\
* gl_FragColor = color_a + color_b;\
* }",
* properties:{
* "mix":{type:"uniform", value:0.0},
* },
* inputs:["u_image_a","u_image_b"]
* };
*
* //Create the node, passing in the definition.
* var transitionNode = videoCtx.transition(crossfadeDefinition);
*
* //create two videos which will overlap by two seconds
* var videoNode1 = ctx.video("video1.mp4");
* videoNode1.play(0);
* videoNode1.stop(10);
* var videoNode2 = ctx.video("video2.mp4");
* videoNode2.play(8);
* videoNode2.stop(18);
*
* //Connect the nodes to the transition node.
* videoNode1.connect(transitionNode);
* videoNode2.connect(transitionNode);
*
* //Set-up a transition which happens at the crossover point of the playback of the two videos
* transitionNode.transition(8,10,1.0,"mix");
*
* //Connect the transition node to the output
* transitionNode.connect(ctx.destination);
*
* //start playback
* ctx.play();
*/
transition(definition) {
let transitionNode = new TransitionNode(this._gl, this._renderGraph, definition);

@@ -748,16 +810,15 @@ this._processingNodes.push(transitionNode);

/**
* @deprecated
*/
createTransitionNode(definition){
this._depricate("Warning: createTransitionNode will be deprecated in v1.0, please switch to using VideoContext.transition()");
* @deprecated
*/
createTransitionNode(definition) {
this._depricate(
"Warning: createTransitionNode will be deprecated in v1.0, please switch to using VideoContext.transition()"
);
return this.transition(definition);
}
_isStalled(){
_isStalled() {
for (let i = 0; i < this._sourceNodes.length; i++) {
let sourceNode = this._sourceNodes[i];
if (!sourceNode._isReady()){
if (!sourceNode._isReady()) {
return true;

@@ -769,47 +830,48 @@ }

/**
* This allows manual calling of the update loop of the videoContext.
*
* @param {Number} dt - The difference in seconds between this and the previous calling of update.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, undefined, {"manualUpdate" : true});
*
* var previousTime;
* function update(time){
* if (previousTime === undefined) previousTime = time;
* var dt = (time - previousTime)/1000;
* ctx.update(dt);
* previousTime = time;
* requestAnimationFrame(update);
* }
* update();
*
*/
update(dt){
* This allows manual calling of the update loop of the videoContext.
*
* @param {Number} dt - The difference in seconds between this and the previous calling of update.
* @example
*
* var canvasElement = document.getElementById("canvas");
* var ctx = new VideoContext(canvasElement, undefined, {"manualUpdate" : true});
*
* var previousTime;
* function update(time){
* if (previousTime === undefined) previousTime = time;
* var dt = (time - previousTime)/1000;
* ctx.update(dt);
* previousTime = time;
* requestAnimationFrame(update);
* }
* update();
*
*/
update(dt) {
this._update(dt);
}
_update(dt){
_update(dt) {
//Remove any destroyed nodes
this._sourceNodes = this._sourceNodes.filter(sourceNode=>{
this._sourceNodes = this._sourceNodes.filter(sourceNode => {
if (!sourceNode.destroyed) return sourceNode;
});
this._processingNodes = this._processingNodes.filter(processingNode=>{
this._processingNodes = this._processingNodes.filter(processingNode => {
if (!processingNode.destroyed) return processingNode;
});
if (this._state === VideoContext.STATE.PLAYING || this._state === VideoContext.STATE.STALLED || this._state === VideoContext.STATE.PAUSED) {
if (
this._state === VideoContext.STATE.PLAYING ||
this._state === VideoContext.STATE.STALLED ||
this._state === VideoContext.STATE.PAUSED
) {
this._callCallbacks("update");
if (this._state !== VideoContext.STATE.PAUSED){
if (this._isStalled()){
if (this._state !== VideoContext.STATE.PAUSED) {
if (this._isStalled()) {
this._callCallbacks("stalled");
this._state = VideoContext.STATE.STALLED;
}else{
} else {
this._state = VideoContext.STATE.PLAYING;

@@ -819,9 +881,13 @@ }

if(this._state === VideoContext.STATE.PLAYING){
if (this._state === VideoContext.STATE.PLAYING) {
//Handle timeline callbacks.
let activeCallbacks = new Map();
for(let callback of this._timelineCallbacks){
if (callback.time >= this.currentTime && callback.time < (this._currentTime + dt * this._playbackRate)){
for (let callback of this._timelineCallbacks) {
if (
callback.time >= this.currentTime &&
callback.time < this._currentTime + dt * this._playbackRate
) {
//group the callbacks by time
if(!activeCallbacks.has(callback.time)) activeCallbacks.set(callback.time, []);
if (!activeCallbacks.has(callback.time))
activeCallbacks.set(callback.time, []);
activeCallbacks.get(callback.time).push(callback);

@@ -831,15 +897,14 @@ }

//Sort the groups of callbacks by the times of the groups
let timeIntervals = Array.from(activeCallbacks.keys());
timeIntervals.sort(function(a, b){
timeIntervals.sort(function(a, b) {
return a - b;
});
for (let t of timeIntervals){
for (let t of timeIntervals) {
let callbacks = activeCallbacks.get(t);
callbacks.sort(function(a,b){
callbacks.sort(function(a, b) {
return a.ordering - b.ordering;
});
for(let callback of callbacks){
for (let callback of callbacks) {
callback.func();

@@ -850,3 +915,3 @@ }

this._currentTime += dt * this._playbackRate;
if(this._currentTime > this.duration && this._endOnLastSourceEnd){
if (this._currentTime > this.duration && this._endOnLastSourceEnd) {
//Do an update of the source nodes in case anything in the "ended" callbacks modifies currentTime and sources haven't had a chance to stop.

@@ -866,13 +931,17 @@ for (let i = 0; i < this._sourceNodes.length; i++) {

if(this._state === VideoContext.STATE.STALLED){
if (sourceNode._isReady() && sourceNode._state === SOURCENODESTATE.playing) sourceNode._pause();
if (this._state === VideoContext.STATE.STALLED) {
if (sourceNode._isReady() && sourceNode._state === SOURCENODESTATE.playing)
sourceNode._pause();
}
if(this._state === VideoContext.STATE.PAUSED){
if (this._state === VideoContext.STATE.PAUSED) {
sourceNode._pause();
}
if(this._state === VideoContext.STATE.PLAYING){
if (this._state === VideoContext.STATE.PLAYING) {
sourceNode._play();
}
sourceNode._update(this._currentTime);
if (sourceNode._state === SOURCENODESTATE.paused || sourceNode._state === SOURCENODESTATE.playing){
if (
sourceNode._state === SOURCENODESTATE.paused ||
sourceNode._state === SOURCENODESTATE.playing
) {
sourcesPlaying = true;

@@ -882,7 +951,9 @@ }

if (sourcesPlaying !== this._sourcesPlaying && this._state === VideoContext.STATE.PLAYING){
if (sourcesPlaying === true){
if (
sourcesPlaying !== this._sourcesPlaying &&
this._state === VideoContext.STATE.PLAYING
) {
if (sourcesPlaying === true) {
this._callCallbacks("content");
}else{
} else {
this._callCallbacks("nocontent");

@@ -893,3 +964,2 @@ }

/*

@@ -911,10 +981,9 @@ * Iterate the directed acyclic graph using Kahn's algorithm (KHAAAAAN!).

while (nodes.length > 0) {
let node = nodes.pop();
sortedNodes.push(node);
for (let edge of RenderGraph.outputEdgesFor(node, connections)){
for (let edge of RenderGraph.outputEdgesFor(node, connections)) {
let index = connections.indexOf(edge);
if (index > -1) connections.splice(index, 1);
if (RenderGraph.inputEdgesFor(edge.destination, connections).length === 0){
if (RenderGraph.inputEdgesFor(edge.destination, connections).length === 0) {
nodes.push(edge.destination);

@@ -925,4 +994,4 @@ }

for (let node of sortedNodes){
if (this._sourceNodes.indexOf(node) === -1){
for (let node of sortedNodes) {
if (this._sourceNodes.indexOf(node) === -1) {
node._update(this._currentTime);

@@ -936,12 +1005,12 @@ node._render();

/**
* Destroy all nodes in the graph and reset the timeline. After calling this any created nodes will be unusable.
*/
reset(){
for (let callback of this._callbacks){
* Destroy all nodes in the graph and reset the timeline. After calling this any created nodes will be unusable.
*/
reset() {
for (let callback of this._callbacks) {
this.unregisterCallback(callback);
}
for (let node of this._sourceNodes){
for (let node of this._sourceNodes) {
node.destroy();
}
for (let node of this._processingNodes){
for (let node of this._processingNodes) {
node.destroy();

@@ -965,3 +1034,3 @@ }

_depricate(msg){
_depricate(msg) {
console.log(msg);

@@ -977,3 +1046,3 @@ }

*/
snapshot () {
snapshot() {
return snapshot(this);

@@ -980,0 +1049,0 @@ }

@@ -1,3 +0,3 @@

function stripHash (url){
if (url.port === "" || url.port === undefined){
function stripHash(url) {
if (url.port === "" || url.port === undefined) {
return `${url.protocol}//${url.hostname}${url.pathname}`;

@@ -10,3 +10,2 @@ } else {

class VideoElementCache {
constructor(cache_size = 3) {

@@ -16,3 +15,3 @@ this._elements = [];

for (let i = 0; i < cache_size; i++) {
let element = this._createElement();
let element = this._createElement();
this._elements.push(element);

@@ -22,4 +21,3 @@ }

_createElement(){
_createElement() {
let videoElement = document.createElement("video");

@@ -33,14 +31,16 @@ videoElement.setAttribute("crossorigin", "anonymous");

init(){
if (!this._elementsInitialised){
for(let element of this._elements){
init() {
if (!this._elementsInitialised) {
for (let element of this._elements) {
try {
element.play().then(()=>{
}, (e)=>{
if (e.name !== "NotSupportedError")throw(e);
});
} catch(e) {
element.play().then(
() => {},
e => {
if (e.name !== "NotSupportedError") throw e;
}
);
} catch (e) {
//console.log(e.name);
}
}
}
}

@@ -54,6 +54,14 @@ this._elementsInitialised = true;

// For some reason an uninitialised videoElement has its src attribute set to the window's href. Hence the check below.
if ((element.src === "" || element.src === undefined || element.src === stripHash(window.location)) && element.srcObject == null )return element;
if (
(element.src === "" ||
element.src === undefined ||
element.src === stripHash(window.location)) &&
element.srcObject == null
)
return element;
}
//Fall back to creating a new element if none exists.
console.debug("No available video element in the cache, creating a new one. This may break mobile, make your initial cache larger.");
console.debug(
"No available video element in the cache, creating a new one. This may break mobile, make your initial cache larger."
);
let element = this._createElement();

@@ -65,17 +73,22 @@ this._elements.push(element);

get length(){
get length() {
return this._elements.length;
}
get unused(){
get unused() {
let count = 0;
for (let element of this._elements) {
// For some reason an uninitialised videoElement has its src attribute set to the window's href. Hence the check below.
if ((element.src === "" || element.src === undefined || element.src === stripHash(window.location)) && element.srcObject == null )count += 1;
if (
(element.src === "" ||
element.src === undefined ||
element.src === stripHash(window.location)) &&
element.srcObject == null
)
count += 1;
}
return count;
}
}
export default VideoElementCache;
export default VideoElementCache;
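
The cache exists because mobile browsers only permit media playback initiated by a user gesture; init() "unlocks" each pooled element by attempting play() on it. A sketch of the gesture wiring that makes this work (the button id is a placeholder):

// ctx.play() runs this._videoElementCache.init() internally, so calling it
// inside a click handler unlocks the pooled <video> elements for later use.
document.getElementById("play-button").addEventListener("click", function () {
    ctx.play();
});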

@@ -0,7 +1,11 @@

/* eslint-disable */
module.exports = {
mode: "production",
entry: __dirname + "/src/videocontext.js",
devtool: "source-map",
stats: { warnings: false },
output: {
path: __dirname+'/dist',
filename: "videocontext.commonjs2.js",
path: __dirname + "/dist",
filename: "videocontext.commonjs2.js",
libraryTarget: "commonjs2",

@@ -11,7 +15,14 @@ library: "VideoContext"

module: {
loaders: [
{ test: /\.css$/, loader: "style!css" },
{ test: /\.js$/, exclude: /node_modules/, loaders: ["babel-loader", "eslint-loader"]}
rules: [
{ test: /\.css$/, use: "style!css" },
{
test: /\.js$/,
exclude: /node_modules/,
use: [
{ loader: "babel-loader" },
{ loader: "eslint-loader" }
]
}
]
}
};
};

@@ -0,7 +1,11 @@

/* eslint-disable */
module.exports = {
mode: "production",
entry: __dirname + "/src/videocontext.js",
devtool: "source-map",
stats: { warnings: false },
output: {
path: __dirname+'/dist',
filename: "videocontext.js",
path: __dirname + "/dist",
filename: "videocontext.js",
libraryTarget: "umd",

@@ -11,7 +15,14 @@ library: "VideoContext"

module: {
loaders: [
{ test: /\.css$/, loader: "style!css" },
{ test: /\.js$/, exclude: /node_modules/, loaders: ["babel-loader", "eslint-loader"]}
rules: [
{ test: /\.css$/, use: "style!css" },
{
test: /\.js$/,
exclude: /node_modules/,
use: [
{ loader: "babel-loader" },
{ loader: "eslint-loader" }
]
}
]
}
};
