
kampos - npm Package Compare versions

Comparing version 0.1.0 to 0.2.0

demo/disp-cloud.png

333

demo/index.js

@@ -40,3 +40,4 @@ (function () {

constant = '',
main = ''
main = '',
source = ''
}) => `

@@ -52,3 +53,5 @@ precision mediump float;

void main() {
vec4 pixel = texture2D(u_source, v_texCoord);
vec2 sourceCoord = v_texCoord;
${source}
vec4 pixel = texture2D(u_source, sourceCoord);
vec3 color = pixel.rgb;

@@ -76,8 +79,12 @@ float alpha = pixel.a;

let WEBGL_CONTEXT_SUPPORTED = false;
/**
* Get a webgl context for the given canvas element.
*
* Will return `null` if it cannot get a context.
*
* @private
* @param {HTMLCanvasElement} canvas
* @return {WebGLRenderingContext}
* @return {WebGLRenderingContext|null}
*/

@@ -96,5 +103,11 @@ function getWebGLContext (canvas) {

if ( ! context ) {
if ( context ) {
WEBGL_CONTEXT_SUPPORTED = true;
}
else if ( ! WEBGL_CONTEXT_SUPPORTED ) {
context = canvas.getContext('experimental-webgl', config);
}
else {
return null;
}

@@ -170,3 +183,14 @@ return context;

gl.activeTexture(gl.TEXTURE0 + (i + 1));
gl.bindTexture(gl.TEXTURE_2D, i === -1 ? source.texture : textures[i].texture);
if ( i === -1 ) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
}
else {
const tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
if ( tex.update ) {
gl.texImage2D(gl.TEXTURE_2D, 0,gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
}

@@ -214,4 +238,4 @@ }

const data = _mergeEffectsData(effects);
const vertexSrc = _stringifyShaderSrc(data.vertexSrc, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragmentSrc, fragmentTemplate);
const vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate);

@@ -222,3 +246,3 @@ // compile the GLSL program

if ( error ) {
throw new Error(`${type} error:: ${error}`);
throw new Error(`${type} error:: ${error}\n${fragmentSrc}`);
}

@@ -247,3 +271,3 @@

const merge = shader => Object.keys(config[shader]).forEach(key => {
if ( key === 'constant' || key === 'main' ) {
if ( key === 'constant' || key === 'main' || key === 'source' ) {
result[shader][key] += config[shader][key] + '\n';

@@ -256,4 +280,4 @@ }

merge('vertexSrc');
merge('fragmentSrc');
merge('vertex');
merge('fragment');

@@ -276,8 +300,8 @@ attributes.forEach(attribute => {

Object.assign(result.vertexSrc.varying, varying);
Object.assign(result.fragmentSrc.varying, varying);
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
return result;
}, {
vertexSrc: {
vertex: {
uniform: {},

@@ -289,7 +313,8 @@ attribute: {},

},
fragmentSrc: {
fragment: {
uniform: {},
varying: {},
constant: '',
main: ''
main: '',
source: ''
},

@@ -327,3 +352,2 @@ /*

name: 'u_source',
size: 1,
type: 'i',

@@ -451,3 +475,3 @@ data: [0]

return {texture, width, height};
return {texture, width, height, format};
}

@@ -485,3 +509,3 @@

location,
size: uniform.size,
size: uniform.size || uniform.data.length,
type: uniform.type,

@@ -583,2 +607,3 @@ data: uniform.data

this.pool.push(instance);
instance.playing = true;
}

@@ -597,2 +622,3 @@ }

this.pool.splice(index, 1);
instance.playing = false;
}

@@ -607,2 +633,8 @@ }

* @param {kamposConfig} config
* @example
* import {Ticker, Kampos, effects} from 'kampos';
* const ticker = new Ticker();
* const target = document.querySelector('#canvas');
* const hueSat = effects.hueSaturation();
* const kampos = new Kampos({ticker, target, effects: [hueSat]});
*/

@@ -614,10 +646,34 @@ class Kampos {

constructor (config) {
this.init(config);
if ( ! config || ! config.target ) {
throw new Error('A target canvas was not provided');
}
if ( Kampos.preventContextCreation )
throw new Error('Context creation is prevented');
this._contextCreationError = function () {
Kampos.preventContextCreation = true;
if ( config && config.onContextCreationError ) {
config.onContextCreationError.call(this, config);
}
};
config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
const success = this.init(config);
if ( ! success )
throw new Error('Could not create context');
this._restoreContext = (e) => {
e && e.preventDefault();
this.config.target.removeEventListener('webglcontextrestored', this._restoreContext, true);
this.init();
const success = this.init();
if ( ! success )
return false;
if ( this._source ) {

@@ -629,5 +685,7 @@ this.setSource(this._source);

if (config && config.onContextRestored) {
if ( config && config.onContextRestored ) {
config.onContextRestored.call(this, config);
}
return true;
};

@@ -638,3 +696,3 @@

if (this.gl && this.gl.isContextLost()) {
if ( this.gl && this.gl.isContextLost() ) {

@@ -647,3 +705,3 @@ this.lostContext = true;

if (config && config.onContextLost) {
if ( config && config.onContextLost ) {
config.onContextLost.call(this, config);

@@ -664,2 +722,3 @@ }

* @param {kamposConfig} [config] defaults to `this.config`
* @return {boolean} success whether initialization of the context and program was successful
*/

@@ -670,2 +729,5 @@ init (config) {

if ( Kampos.preventContextCreation )
return false;
this.lostContext = false;

@@ -675,6 +737,16 @@

if (gl.isContextLost()) {
this.restoreContext();
if ( ! gl )
return false;
if ( gl.isContextLost() ) {
const success = this.restoreContext();
if ( ! success )
return false;
// get new context from the fresh clone
gl = core.getWebGLContext(this.config.target);
if ( ! gl )
return false;
}

@@ -694,2 +766,4 @@

}
return true;
}

@@ -701,2 +775,5 @@

* @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap|kamposSource} source
* @example
* const media = document.querySelector('#video');
* kampos.setSource(media);
*/

@@ -707,3 +784,5 @@ setSource (source) {

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -737,3 +816,5 @@

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -746,2 +827,4 @@

* Starts the animation loop.
*
* If using a {@see Ticker} this instance will be added to that {@see Ticker}.
*/

@@ -772,2 +855,4 @@ play () {

* Stops the animation loop.
*
* If using a {@see Ticker} this instance will be removed from that {@see Ticker}.
*/

@@ -809,2 +894,3 @@ stop () {

this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);

@@ -824,4 +910,9 @@ this.config = null;

* This will replace the canvas DOM element with a fresh clone.
*
* @return {boolean} success whether forcing a context restore was successful
*/
restoreContext () {
if ( Kampos.preventContextCreation )
return false;
const canvas = this.config.target;

@@ -839,7 +930,11 @@ const clone = this.config.target.cloneNode(true);

canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
clone.addEventListener('webglcontextlost', this._loseContext, true);
clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (this.lostContext) {
this._restoreContext();
if ( this.lostContext ) {
return this._restoreContext();
}
return true;
}

@@ -849,3 +944,4 @@

this.data && this.data.textures.forEach((texture, i) => {
this.data.textures[i].texture = core.createTexture(this.gl, {
const data = this.data.textures[i];
data.texture = core.createTexture(this.gl, {
width: this.dimensions.width,

@@ -856,2 +952,5 @@ height: this.dimensions.height,

}).texture;
data.format = texture.format;
data.update = texture.update;
});

@@ -861,5 +960,21 @@ }

/**
* @function alphaMask
* @returns {alphaMaskEffect}
* @example alphaMask()
*/
function alphaMask () {
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
*/
return {
vertexSrc: {
vertex: {
attribute: {

@@ -871,3 +986,3 @@ a_alphaMaskTexCoord: 'vec2'

},
fragmentSrc: {
fragment: {
uniform: {

@@ -883,7 +998,13 @@ u_alphaMaskEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get mask () {
return this.textures[0].image;
},
set mask (img) {
this.textures[0].image = img;
},
varying: {

@@ -895,3 +1016,2 @@ v_alphaMaskTexCoord: 'vec2'

name: 'u_alphaMaskEnabled',
size: 1,
type: 'i',

@@ -902,3 +1022,2 @@ data: [1]

name: 'u_mask',
size: 1,
type: 'i',

@@ -928,6 +1047,23 @@ data: [1]

/**
* @function brightnessContrast
* @returns {brightnessContrastEffect}
* @example brightnessContrast()
*/
function brightnessContrast () {
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -954,4 +1090,4 @@ u_brEnabled: 'bool',

},
set brightness (b) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, b));
set brightness (value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
},

@@ -961,16 +1097,16 @@ get contrast () {

},
set contrast (c) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, c));
set contrast (value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
},
get brightnessDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set brightnessDisabled (b) {
return this.uniforms[0].data[0] = +!b;
set brightnessDisabled (toggle) {
this.uniforms[0].data[0] = +!toggle;
},
get contrastDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set contrastDisabled (b) {
return this.uniforms[1].data[0] = +!b;
set contrastDisabled (toggle) {
this.uniforms[1].data[0] = +!toggle;
},

@@ -980,3 +1116,2 @@ uniforms: [

name: 'u_brEnabled',
size: 1,
type: 'i',

@@ -987,29 +1122,12 @@ data: [1]

name: 'u_ctEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is completely black.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_brightness',
size: 1,
type: 'f',
data: [1.0]
},
/**
* 0.0 is completely gray.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_contrast',
size: 1,
type: 'f',

@@ -1022,5 +1140,21 @@ data: [1.0]

/**
* @function hueSaturation
* @returns {hueSaturationEffect}
* @example hueSaturation()
*/
function hueSaturation () {
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertexSrc: {
vertex: {
uniform: {

@@ -1057,3 +1191,3 @@ u_hue: 'float',

},
fragmentSrc: {
fragment: {
uniform: {

@@ -1102,12 +1236,12 @@ u_hueEnabled: 'bool',

get hueDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set hueDisabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set saturationDisabled (b) {
return this.uniforms[1].data[0] = +!b;
this.uniforms[1].data[0] = +!b;
},

@@ -1117,3 +1251,2 @@ uniforms: [

name: 'u_hueEnabled',
size: 1,
type: 'i',

@@ -1124,31 +1257,12 @@ data: [1]

name: 'u_satEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is no change.
* -180.0 is -180deg hue rotation.
* 180.0 is +180deg hue rotation.
*
* @min -180.0
* @max 180.0
* @default 0.0
*/
{
name: 'u_hue',
size: 1,
type: 'f',
data: [0.0]
},
/**
* 1.0 is no change.
* 0.0 is grayscale.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_saturation',
size: 1,
type: 'f',

@@ -1161,6 +1275,21 @@ data: [1.0]

/**
* @function duotone
* @returns {duotoneEffect}
* @example duotone()
*/
function duotone () {
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -1198,6 +1327,6 @@ u_duotoneEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},

@@ -1207,22 +1336,12 @@ uniforms: [

name: 'u_duotoneEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* Light tone
*/
{
name: 'u_light',
size: 4,
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
},
/**
* Dark tone
*
*/
{
name: 'u_dark',
size: 4,
type: 'f',

@@ -1229,0 +1348,0 @@ data: [0.7411764706, 0.0431372549, 0.568627451, 1]

@@ -7,5 +7,21 @@ (function (global, factory) {

/**
* @function alphaMask
* @returns {alphaMaskEffect}
* @example alphaMask()
*/
function alphaMask () {
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
*/
return {
vertexSrc: {
vertex: {
attribute: {

@@ -17,3 +33,3 @@ a_alphaMaskTexCoord: 'vec2'

},
fragmentSrc: {
fragment: {
uniform: {

@@ -29,7 +45,13 @@ u_alphaMaskEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get mask () {
return this.textures[0].image;
},
set mask (img) {
this.textures[0].image = img;
},
varying: {

@@ -41,3 +63,2 @@ v_alphaMaskTexCoord: 'vec2'

name: 'u_alphaMaskEnabled',
size: 1,
type: 'i',

@@ -48,3 +69,2 @@ data: [1]

name: 'u_mask',
size: 1,
type: 'i',

@@ -74,6 +94,23 @@ data: [1]

/**
* @function brightnessContrast
* @returns {brightnessContrastEffect}
* @example brightnessContrast()
*/
function brightnessContrast () {
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -100,4 +137,4 @@ u_brEnabled: 'bool',

},
set brightness (b) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, b));
set brightness (value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
},

@@ -107,16 +144,16 @@ get contrast () {

},
set contrast (c) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, c));
set contrast (value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
},
get brightnessDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set brightnessDisabled (b) {
return this.uniforms[0].data[0] = +!b;
set brightnessDisabled (toggle) {
this.uniforms[0].data[0] = +!toggle;
},
get contrastDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set contrastDisabled (b) {
return this.uniforms[1].data[0] = +!b;
set contrastDisabled (toggle) {
this.uniforms[1].data[0] = +!toggle;
},

@@ -126,3 +163,2 @@ uniforms: [

name: 'u_brEnabled',
size: 1,
type: 'i',

@@ -133,29 +169,12 @@ data: [1]

name: 'u_ctEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is completely black.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_brightness',
size: 1,
type: 'f',
data: [1.0]
},
/**
* 0.0 is completely gray.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_contrast',
size: 1,
type: 'f',

@@ -168,5 +187,21 @@ data: [1.0]

/**
* @function hueSaturation
* @returns {hueSaturationEffect}
* @example hueSaturation()
*/
function hueSaturation () {
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertexSrc: {
vertex: {
uniform: {

@@ -203,3 +238,3 @@ u_hue: 'float',

},
fragmentSrc: {
fragment: {
uniform: {

@@ -248,12 +283,12 @@ u_hueEnabled: 'bool',

get hueDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set hueDisabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set saturationDisabled (b) {
return this.uniforms[1].data[0] = +!b;
this.uniforms[1].data[0] = +!b;
},

@@ -263,3 +298,2 @@ uniforms: [

name: 'u_hueEnabled',
size: 1,
type: 'i',

@@ -270,31 +304,12 @@ data: [1]

name: 'u_satEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is no change.
* -180.0 is -180deg hue rotation.
* 180.0 is +180deg hue rotation.
*
* @min -180.0
* @max 180.0
* @default 0.0
*/
{
name: 'u_hue',
size: 1,
type: 'f',
data: [0.0]
},
/**
* 1.0 is no change.
* 0.0 is grayscale.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_saturation',
size: 1,
type: 'f',

@@ -307,6 +322,21 @@ data: [1.0]

/**
* @function duotone
* @returns {duotoneEffect}
* @example duotone()
*/
function duotone () {
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -344,6 +374,6 @@ u_duotoneEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},

@@ -353,22 +383,12 @@ uniforms: [

name: 'u_duotoneEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* Light tone
*/
{
name: 'u_light',
size: 4,
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
},
/**
* Dark tone
*
*/
{
name: 'u_dark',
size: 4,
type: 'f',

@@ -381,2 +401,374 @@ data: [0.7411764706, 0.0431372549, 0.568627451, 1]

/**
* @function displacement
* @returns {displacementEffect}
* @example displacement()
*/
function displacement () {
/**
* @typedef {Object} displacementEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map
* @property {{x: number, y: number}} scale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.scale = {x: 0.4};
*/
return {
vertex: {
attribute: {
a_displacementMapTexCoord: 'vec2'
},
main: `
v_displacementMapTexCoord = a_displacementMapTexCoord;`
},
fragment: {
uniform: {
u_displacementEnabled: 'bool',
u_dispMap: 'sampler2D',
u_dispScale: 'vec2'
},
source: `
if (u_displacementEnabled) {
vec3 dispMap = texture2D(u_dispMap, v_displacementMapTexCoord).rgb - 0.5;
vec2 dispVec = vec2(v_texCoord.x + u_dispScale.x * dispMap.r, v_texCoord.y + u_dispScale.y * dispMap.g);
sourceCoord = clamp(dispVec, 0.0, 1.0);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
},
get scale () {
const [x, y] = this.uniforms[2].data;
return {x, y};
},
set scale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[2].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[2].data[1] = y;
},
get map () {
return this.textures[0].image;
},
set map (img) {
this.textures[0].image = img;
},
varying: {
v_displacementMapTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_displacementEnabled',
type: 'i',
data: [1]
},
{
name: 'u_dispMap',
type: 'i',
data: [1]
},
{
name: 'u_dispScale',
type: 'f',
data: [0.0, 0.0]
}
],
attributes: [
{
name: 'a_displacementMapTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGB'
}
]
};
}
/**
* @function fadeTransition
* @returns {fadeTransitionEffect}
* @example fadeTransition()
*/
function fade () {
/**
* @typedef {Object} fadeTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {number} progress number between 0.0 and 1.0
* @property {boolean} disabled
*
* @example
* effect.to = document.querySelector('#video-to');
* effect.progress = 0.5;
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2'
},
main: `
v_transitionToTexCoord = a_transitionToTexCoord;`
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D'
},
main: `
if (u_transitionEnabled) {
vec4 targetPixel = texture2D(u_transitionTo, v_transitionToTexCoord);
color = mix(color, targetPixel.rgb, u_transitionProgress);
alpha = mix(alpha, targetPixel.a, u_transitionProgress);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
get to () {
return this.textures[0].image;
},
set to (media) {
this.textures[0].image = media;
},
varying: {
v_transitionToTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
},
{
name: 'u_transitionTo',
type: 'i',
data: [1]
},
{
name: 'u_transitionProgress',
type: 'f',
data: [0]
}
],
attributes: [
{
name: 'a_transitionToTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGBA',
update: true
}
]
};
}
/**
* @function displacementTransition
* @returns {displacementTransitionEffect}
* @example displacementTransition()
*/
function displacementTransition () {
/**
* @typedef {Object} displacementTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map displacement map to use
* @property {number} progress number between 0.0 and 1.0
* @property {{x: number, y: number}} sourceScale
* @property {{x: number, y: number}} toScale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.to = document.querySelector('#video-to');
* effect.sourceScale = {x: 0.4};
* effect.toScale = {x: 0.8};
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2',
a_transitionDispMapTexCoord: 'vec2'
},
main: `
v_transitionToTexCoord = a_transitionToTexCoord;
v_transitionDispMapTexCoord = a_transitionDispMapTexCoord;`
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D',
u_transitionDispMap: 'sampler2D',
u_transitionProgress: 'float',
u_sourceDispScale: 'vec2',
u_toDispScale: 'vec2'
},
source: `
vec3 transDispMap = vec3(1.0);
vec2 transDispVec = vec2(0.0);
if (u_transitionEnabled) {
// read the displacement texture once and create the displacement map
transDispMap = texture2D(u_transitionDispMap, v_transitionDispMapTexCoord).rgb - 0.5;
// prepare the source coordinates for sampling
transDispVec = vec2(u_sourceDispScale.x * transDispMap.r, u_sourceDispScale.y * transDispMap.g);
sourceCoord = clamp(v_texCoord + transDispVec * u_transitionProgress, 0.0, 1.0);
}`,
main: `
if (u_transitionEnabled) {
// prepare the target coordinates for sampling
transDispVec = vec2(u_toDispScale.x * transDispMap.r, u_toDispScale.y * transDispMap.g);
vec2 targetCoord = clamp(v_transitionToTexCoord + transDispVec * (1.0 - u_transitionProgress), 0.0, 1.0);
// sample the target
vec4 targetPixel = texture2D(u_transitionTo, targetCoord);
// mix the results of source and target
color = mix(color, targetPixel.rgb, u_transitionProgress);
alpha = mix(alpha, targetPixel.a, u_transitionProgress);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
get progress () {
return this.uniforms[3].data[0];
},
set progress (p) {
this.uniforms[3].data[0] = p;
},
get sourceScale () {
const [x, y] = this.uniforms[4].data;
return {x, y};
},
set sourceScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[4].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[4].data[1] = y;
},
get toScale () {
const [x, y] = this.uniforms[5].data;
return {x, y};
},
set toScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[5].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[5].data[1] = y;
},
get to () {
return this.textures[0].image;
},
set to (media) {
this.textures[0].image = media;
},
get map () {
return this.textures[1].image;
},
set map (img) {
this.textures[1].image = img;
},
varying: {
v_transitionToTexCoord: 'vec2',
v_transitionDispMapTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
},
{
name: 'u_transitionTo',
type: 'i',
data: [1]
},
{
name: 'u_transitionDispMap',
type: 'i',
data: [2]
},
{
name: 'u_transitionProgress',
type: 'f',
data: [0]
},
{
name: 'u_sourceDispScale',
type: 'f',
data: [0.0, 0.0]
},
{
name: 'u_toDispScale',
type: 'f',
data: [0.0, 0.0]
}
],
attributes: [
{
name: 'a_transitionToTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
},
{
name: 'a_transitionDispMapTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGBA',
update: true
},
{
format: 'RGB'
}
]
};
}
var core = {

@@ -418,3 +810,4 @@ init,

constant = '',
main = ''
main = '',
source = ''
}) => `

@@ -430,3 +823,5 @@ precision mediump float;

void main() {
vec4 pixel = texture2D(u_source, v_texCoord);
vec2 sourceCoord = v_texCoord;
${source}
vec4 pixel = texture2D(u_source, sourceCoord);
vec3 color = pixel.rgb;

@@ -454,8 +849,12 @@ float alpha = pixel.a;

let WEBGL_CONTEXT_SUPPORTED = false;
/**
* Get a webgl context for the given canvas element.
*
* Will return `null` if it cannot get a context.
*
* @private
* @param {HTMLCanvasElement} canvas
* @return {WebGLRenderingContext}
* @return {WebGLRenderingContext|null}
*/

@@ -474,5 +873,11 @@ function getWebGLContext (canvas) {

if ( ! context ) {
if ( context ) {
WEBGL_CONTEXT_SUPPORTED = true;
}
else if ( ! WEBGL_CONTEXT_SUPPORTED ) {
context = canvas.getContext('experimental-webgl', config);
}
else {
return null;
}

@@ -548,3 +953,14 @@ return context;

gl.activeTexture(gl.TEXTURE0 + (i + 1));
gl.bindTexture(gl.TEXTURE_2D, i === -1 ? source.texture : textures[i].texture);
if ( i === -1 ) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
}
else {
const tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
if ( tex.update ) {
gl.texImage2D(gl.TEXTURE_2D, 0,gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
}

@@ -592,4 +1008,4 @@ }

const data = _mergeEffectsData(effects);
const vertexSrc = _stringifyShaderSrc(data.vertexSrc, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragmentSrc, fragmentTemplate);
const vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate);

@@ -600,3 +1016,3 @@ // compile the GLSL program

if ( error ) {
throw new Error(`${type} error:: ${error}`);
throw new Error(`${type} error:: ${error}\n${fragmentSrc}`);
}

@@ -625,3 +1041,3 @@

const merge = shader => Object.keys(config[shader]).forEach(key => {
if ( key === 'constant' || key === 'main' ) {
if ( key === 'constant' || key === 'main' || key === 'source' ) {
result[shader][key] += config[shader][key] + '\n';

@@ -634,4 +1050,4 @@ }

merge('vertexSrc');
merge('fragmentSrc');
merge('vertex');
merge('fragment');

@@ -654,8 +1070,8 @@ attributes.forEach(attribute => {

Object.assign(result.vertexSrc.varying, varying);
Object.assign(result.fragmentSrc.varying, varying);
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
return result;
}, {
vertexSrc: {
vertex: {
uniform: {},

@@ -667,7 +1083,8 @@ attribute: {},

},
fragmentSrc: {
fragment: {
uniform: {},
varying: {},
constant: '',
main: ''
main: '',
source: ''
},

@@ -705,3 +1122,2 @@ /*

name: 'u_source',
size: 1,
type: 'i',

@@ -829,3 +1245,3 @@ data: [0]

return {texture, width, height};
return {texture, width, height, format};
}

@@ -863,3 +1279,3 @@

location,
size: uniform.size,
size: uniform.size || uniform.data.length,
type: uniform.type,

@@ -961,2 +1377,3 @@ data: uniform.data

this.pool.push(instance);
instance.playing = true;
}

@@ -975,2 +1392,3 @@ }

this.pool.splice(index, 1);
instance.playing = false;
}

@@ -985,2 +1403,8 @@ }

* @param {kamposConfig} config
* @example
* import {Ticker, Kampos, effects} from 'kampos';
* const ticker = new Ticker();
* const target = document.querySelector('#canvas');
* const hueSat = effects.hueSaturation();
* const kampos = new Kampos({ticker, target, effects: [hueSat]});
*/

@@ -992,10 +1416,34 @@ class Kampos {

constructor (config) {
this.init(config);
if ( ! config || ! config.target ) {
throw new Error('A target canvas was not provided');
}
if ( Kampos.preventContextCreation )
throw new Error('Context creation is prevented');
this._contextCreationError = function () {
Kampos.preventContextCreation = true;
if ( config && config.onContextCreationError ) {
config.onContextCreationError.call(this, config);
}
};
config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
const success = this.init(config);
if ( ! success )
throw new Error('Could not create context');
this._restoreContext = (e) => {
e && e.preventDefault();
this.config.target.removeEventListener('webglcontextrestored', this._restoreContext, true);
this.init();
const success = this.init();
if ( ! success )
return false;
if ( this._source ) {

@@ -1007,5 +1455,7 @@ this.setSource(this._source);

if (config && config.onContextRestored) {
if ( config && config.onContextRestored ) {
config.onContextRestored.call(this, config);
}
return true;
};

@@ -1016,3 +1466,3 @@

if (this.gl && this.gl.isContextLost()) {
if ( this.gl && this.gl.isContextLost() ) {

@@ -1025,3 +1475,3 @@ this.lostContext = true;

if (config && config.onContextLost) {
if ( config && config.onContextLost ) {
config.onContextLost.call(this, config);

@@ -1042,2 +1492,3 @@ }

* @param {kamposConfig} [config] defaults to `this.config`
* @return {boolean} success whether initialization of the context and program was successful
*/

@@ -1048,2 +1499,5 @@ init (config) {

if ( Kampos.preventContextCreation )
return false;
this.lostContext = false;

@@ -1053,6 +1507,16 @@

if (gl.isContextLost()) {
this.restoreContext();
if ( ! gl )
return false;
if ( gl.isContextLost() ) {
const success = this.restoreContext();
if ( ! success )
return false;
// get new context from the fresh clone
gl = core.getWebGLContext(this.config.target);
if ( ! gl )
return false;
}

@@ -1072,2 +1536,4 @@

}
return true;
}

@@ -1079,2 +1545,5 @@

* @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap|kamposSource} source
* @example
* const media = document.querySelector('#video');
* kampos.setSource(media);
*/

@@ -1085,3 +1554,5 @@ setSource (source) {

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -1115,3 +1586,5 @@

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -1124,2 +1597,4 @@

* Starts the animation loop.
*
* If using a {@see Ticker} this instance will be added to that {@see Ticker}.
*/

@@ -1150,2 +1625,4 @@ play () {

* Stops the animation loop.
*
* If using a {@see Ticker} this instance will be removed from that {@see Ticker}.
*/

@@ -1187,2 +1664,3 @@ stop () {

this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);

@@ -1202,4 +1680,9 @@ this.config = null;

* This will replace the canvas DOM element with a fresh clone.
*
* @return {boolean} success whether forcing a context restore was successful
*/
restoreContext () {
if ( Kampos.preventContextCreation )
return false;
const canvas = this.config.target;

@@ -1217,7 +1700,11 @@ const clone = this.config.target.cloneNode(true);

canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
clone.addEventListener('webglcontextlost', this._loseContext, true);
clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (this.lostContext) {
this._restoreContext();
if ( this.lostContext ) {
return this._restoreContext();
}
return true;
}

@@ -1227,3 +1714,4 @@

this.data && this.data.textures.forEach((texture, i) => {
this.data.textures[i].texture = core.createTexture(this.gl, {
const data = this.data.textures[i];
data.texture = core.createTexture(this.gl, {
width: this.dimensions.width,

@@ -1234,2 +1722,5 @@ height: this.dimensions.height,

}).texture;
data.format = texture.format;
data.update = texture.update;
});

@@ -1244,4 +1735,9 @@ }

hueSaturation,
duotone
duotone,
displacement
},
transitions: {
fade,
displacement: displacementTransition
},
Kampos,

@@ -1248,0 +1744,0 @@ Ticker

{
"name": "kampos",
"version": "0.1.0",
"version": "0.2.0",
"description": "Tiny and fast effects compositor on WebGL",

@@ -18,3 +18,3 @@ "registry": "https://registry.npmjs.org/",

"test": "npm run test:unit && npm run build && npm run test:e2e",
"docs": "documentation build src/kampos.js -f html -o docs -c documentation.yml",
"docs": "documentation build src/index.js -f html -o docs -c documentation.yml",
"check": "npm-check -u"

@@ -21,0 +21,0 @@ },

# kampos
Tiny and fast effects compositor on WebGL
### Tiny and fast effects compositor on WebGL
kampos lets you apply filter effects and beautiful transitions to your site's media,
be it images, video, etc.
Just like [SVG filter effects](https://developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Filter_effects),
only using WebGL, so it works everywhere!
## Features
* Filter effects for images and videos that you can mix and compose.
* As tiny as **~4KB** (minified and gzipped).
* Core engine for creating and running effects.
* Plugins for effects and transitions - available for import.
* Custom plugins? Extremely easy to write using the effects/transitions descriptor DSL (see the sketch below).
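As a rough illustration of that DSL, here is a minimal sketch of a custom effect descriptor, modeled on the descriptor shape the built-in effects use in this release (the `invert` function and `u_invertEnabled` uniform are hypothetical names, not part of the package):
```
function invert () {
    return {
        vertex: {},
        fragment: {
            uniform: {
                u_invertEnabled: 'bool'
            },
            // injected into the fragment shader's main(), where `color` is already defined
            main: `
    if (u_invertEnabled) {
        color = 1.0 - color;
    }`
        },
        get disabled () {
            return !this.uniforms[0].data[0];
        },
        set disabled (b) {
            this.uniforms[0].data[0] = +!b;
        },
        uniforms: [
            {
                name: 'u_invertEnabled',
                type: 'i',
                data: [1]
            }
        ]
    };
}
```
Such a descriptor can then be passed in the `effects` array just like the built-in ones.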
## Usage
Here's a simple example of using kampos:
```
import {Kampos, effects} from 'kampos';
const target = document.querySelector('canvas');
const media = document.querySelector('video');
const hueSaturation = effects.hueSaturation();
hueSaturation.hue = 90;
const kampos = new Kampos({target, effects: [hueSaturation]});
kampos.setSource(media);
kampos.play();
```
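Version 0.2.0 also adds transition plugins (`fade` and a displacement transition). A minimal sketch of cross-fading between two videos, assuming the `transitions` export and the `to`/`progress` properties shown in the diff above (the element selectors are illustrative):
```
import {Kampos, transitions} from 'kampos';
const target = document.querySelector('canvas');
const fade = transitions.fade();
fade.to = document.querySelector('#video-to');   // media to transition into
const kampos = new Kampos({target, effects: [fade]});
kampos.setSource(document.querySelector('#video-from'));
kampos.play();
// drive the transition by animating progress between 0.0 and 1.0
fade.progress = 0.5;
```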
## Demo
Watch a [live demo](https://wix-incubator.github.io/kampos/demo/).
## Documentation
For API reference and examples read [the docs](https://wix-incubator.github.io/kampos/docs/).
## Getting started
Grab the source from here, or install via package manager.
### npm example:
```
npm install kampos
```
Import the default build:
```
import {Kampos, Ticker, effects, transitions} from 'kampos';
```
Or just what you need:
```
import {Kampos} from './node_modules/kampos/src/kampos';
import duotone from './node_modules/kampos/src/effects/duotone';
import displacement from './node_modules/kampos/src/effects/displacement';
```
## Building locally
```
npm install
npm run build
```
## Running tests
```
npm run test
```
## Contributing
Contributions are welcome! (:
## License
kampos is distributed under the MIT license.

@@ -37,3 +37,4 @@ export default {

constant = '',
main = ''
main = '',
source = ''
}) => `

@@ -49,3 +50,5 @@ precision mediump float;

void main() {
vec4 pixel = texture2D(u_source, v_texCoord);
vec2 sourceCoord = v_texCoord;
${source}
vec4 pixel = texture2D(u_source, sourceCoord);
vec3 color = pixel.rgb;

@@ -73,8 +76,12 @@ float alpha = pixel.a;

let WEBGL_CONTEXT_SUPPORTED = false;
/**
* Get a webgl context for the given canvas element.
*
* Will return `null` if it cannot get a context.
*
* @private
* @param {HTMLCanvasElement} canvas
* @return {WebGLRenderingContext}
* @return {WebGLRenderingContext|null}
*/

@@ -93,5 +100,11 @@ function getWebGLContext (canvas) {

if ( ! context ) {
if ( context ) {
WEBGL_CONTEXT_SUPPORTED = true;
}
else if ( ! WEBGL_CONTEXT_SUPPORTED ) {
context = canvas.getContext('experimental-webgl', config);
}
else {
return null;
}

@@ -167,3 +180,14 @@ return context;

gl.activeTexture(gl.TEXTURE0 + (i + 1));
gl.bindTexture(gl.TEXTURE_2D, i === -1 ? source.texture : textures[i].texture);
if ( i === -1 ) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
}
else {
const tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
if ( tex.update ) {
gl.texImage2D(gl.TEXTURE_2D, 0,gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
}

@@ -211,4 +235,4 @@ }

const data = _mergeEffectsData(effects);
const vertexSrc = _stringifyShaderSrc(data.vertexSrc, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragmentSrc, fragmentTemplate);
const vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate);

@@ -219,3 +243,3 @@ // compile the GLSL program

if ( error ) {
throw new Error(`${type} error:: ${error}`);
throw new Error(`${type} error:: ${error}\n${fragmentSrc}`);
}

@@ -244,3 +268,3 @@

const merge = shader => Object.keys(config[shader]).forEach(key => {
if ( key === 'constant' || key === 'main' ) {
if ( key === 'constant' || key === 'main' || key === 'source' ) {
result[shader][key] += config[shader][key] + '\n';

@@ -253,4 +277,4 @@ }

merge('vertexSrc');
merge('fragmentSrc');
merge('vertex');
merge('fragment');

@@ -273,8 +297,8 @@ attributes.forEach(attribute => {

Object.assign(result.vertexSrc.varying, varying);
Object.assign(result.fragmentSrc.varying, varying);
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
return result;
}, {
vertexSrc: {
vertex: {
uniform: {},

@@ -286,7 +310,8 @@ attribute: {},

},
fragmentSrc: {
fragment: {
uniform: {},
varying: {},
constant: '',
main: ''
main: '',
source: ''
},

@@ -324,3 +349,2 @@ /*

name: 'u_source',
size: 1,
type: 'i',

@@ -448,3 +472,3 @@ data: [0]

return {texture, width, height};
return {texture, width, height, format};
}

@@ -482,3 +506,3 @@

location,
size: uniform.size,
size: uniform.size || uniform.data.length,
type: uniform.type,

@@ -485,0 +509,0 @@ data: uniform.data

@@ -0,4 +1,20 @@

/**
* @function alphaMask
* @returns {alphaMaskEffect}
* @example alphaMask()
*/
export default function () {
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
*/
return {
vertexSrc: {
vertex: {
attribute: {

@@ -10,3 +26,3 @@ a_alphaMaskTexCoord: 'vec2'

},
fragmentSrc: {
fragment: {
uniform: {

@@ -22,7 +38,13 @@ u_alphaMaskEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get mask () {
return this.textures[0].image;
},
set mask (img) {
this.textures[0].image = img;
},
varying: {

@@ -34,3 +56,2 @@ v_alphaMaskTexCoord: 'vec2'

name: 'u_alphaMaskEnabled',
size: 1,
type: 'i',

@@ -41,3 +62,2 @@ data: [1]

name: 'u_mask',
size: 1,
type: 'i',

@@ -44,0 +64,0 @@ data: [1]

@@ -0,5 +1,22 @@

/**
* @function brightnessContrast
* @returns {brightnessContrastEffect}
* @example brightnessContrast()
*/
export default function () {
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -26,4 +43,4 @@ u_brEnabled: 'bool',

},
set brightness (b) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, b));
set brightness (value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
},

@@ -33,16 +50,16 @@ get contrast () {

},
set contrast (c) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, c));
set contrast (value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
},
get brightnessDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set brightnessDisabled (b) {
return this.uniforms[0].data[0] = +!b;
set brightnessDisabled (toggle) {
this.uniforms[0].data[0] = +!toggle;
},
get contrastDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set contrastDisabled (b) {
return this.uniforms[1].data[0] = +!b;
set contrastDisabled (toggle) {
this.uniforms[1].data[0] = +!toggle;
},

@@ -52,3 +69,2 @@ uniforms: [

name: 'u_brEnabled',
size: 1,
type: 'i',

@@ -59,29 +75,12 @@ data: [1]

name: 'u_ctEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is completely black.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_brightness',
size: 1,
type: 'f',
data: [1.0]
},
/**
* 0.0 is completely gray.
* 1.0 is no change.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_contrast',
size: 1,
type: 'f',

@@ -88,0 +87,0 @@ data: [1.0]

@@ -0,5 +1,20 @@

/**
* @function duotone
* @returns {duotoneEffect}
* @example duotone()
*/
export default function () {
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertexSrc: {},
fragmentSrc: {
vertex: {},
fragment: {
uniform: {

@@ -37,6 +52,6 @@ u_duotoneEnabled: 'bool',

get disabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},

@@ -46,22 +61,12 @@ uniforms: [

name: 'u_duotoneEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* Light tone
*/
{
name: 'u_light',
size: 4,
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
},
/**
* Dark tone
*
*/
{
name: 'u_dark',
size: 4,
type: 'f',

@@ -68,0 +73,0 @@ data: [0.7411764706, 0.0431372549, 0.568627451, 1]

@@ -0,4 +1,20 @@

/**
* @function hueSaturation
* @returns {hueSaturationEffect}
* @example hueSaturation()
*/
export default function () {
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertexSrc: {
vertex: {
uniform: {

@@ -35,3 +51,3 @@ u_hue: 'float',

},
fragmentSrc: {
fragment: {
uniform: {

@@ -80,12 +96,12 @@ u_hueEnabled: 'bool',

get hueDisabled () {
return !this.uniforms[0].data;
return !this.uniforms[0].data[0];
},
set hueDisabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled () {
return !this.uniforms[1].data;
return !this.uniforms[1].data[0];
},
set saturationDisabled (b) {
return this.uniforms[1].data[0] = +!b;
this.uniforms[1].data[0] = +!b;
},

@@ -95,3 +111,2 @@ uniforms: [

name: 'u_hueEnabled',
size: 1,
type: 'i',

@@ -102,31 +117,12 @@ data: [1]

name: 'u_satEnabled',
size: 1,
type: 'i',
data: [1]
},
/**
* 0.0 is no change.
* -180.0 is -180deg hue rotation.
* 180.0 is +180deg hue rotation.
*
* @min -180.0
* @max 180.0
* @default 0.0
*/
{
name: 'u_hue',
size: 1,
type: 'f',
data: [0.0]
},
/**
* 1.0 is no change.
* 0.0 is grayscale.
*
* @min 0.0
* @default 1.0
*/
{
name: 'u_saturation',
size: 1,
type: 'f',

@@ -133,0 +129,0 @@ data: [1.0]

@@ -5,2 +5,5 @@ import alphaMask from './effects/alpha-mask';

import duotone from './effects/duotone';
import displacement from './effects/displacement';
import fade from './transitions/fade';
import displacementTransition from './transitions/displacement';
import {Kampos, Ticker} from './kampos';

@@ -13,6 +16,11 @@

hueSaturation,
duotone
duotone,
displacement
},
transitions: {
fade,
displacement: displacementTransition
},
Kampos,
Ticker
};

@@ -9,2 +9,8 @@ import core from './core';

* @param {kamposConfig} config
* @example
* import {Ticker, Kampos, effects} from 'kampos';
* const ticker = new Ticker();
* const target = document.querySelector('#canvas');
* const hueSat = effects.hueSaturation();
* const kampos = new Kampos({ticker, target, effects: [hueSat]});
*/

@@ -16,10 +22,34 @@ class Kampos {

constructor (config) {
this.init(config);
if ( ! config || ! config.target ) {
throw new Error('A target canvas was not provided');
}
if ( Kampos.preventContextCreation )
throw new Error('Context creation is prevented');
this._contextCreationError = function () {
Kampos.preventContextCreation = true;
if ( config && config.onContextCreationError ) {
config.onContextCreationError.call(this, config);
}
};
config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
const success = this.init(config);
if ( ! success )
throw new Error('Could not create context');
this._restoreContext = (e) => {
e && e.preventDefault();
this.config.target.removeEventListener('webglcontextrestored', this._restoreContext, true);
this.init();
const success = this.init();
if ( ! success )
return false;
if ( this._source ) {

@@ -31,5 +61,7 @@ this.setSource(this._source);

if (config && config.onContextRestored) {
if ( config && config.onContextRestored ) {
config.onContextRestored.call(this, config);
}
return true;
};

@@ -40,3 +72,3 @@

if (this.gl && this.gl.isContextLost()) {
if ( this.gl && this.gl.isContextLost() ) {

@@ -49,3 +81,3 @@ this.lostContext = true;

if (config && config.onContextLost) {
if ( config && config.onContextLost ) {
config.onContextLost.call(this, config);

@@ -66,2 +98,3 @@ }

* @param {kamposConfig} [config] defaults to `this.config`
* @return {boolean} success whether initialization of the context and program was successful
*/

@@ -72,2 +105,5 @@ init (config) {

if ( Kampos.preventContextCreation )
return false;
this.lostContext = false;

@@ -77,6 +113,16 @@

if (gl.isContextLost()) {
this.restoreContext();
if ( ! gl )
return false;
if ( gl.isContextLost() ) {
const success = this.restoreContext();
if ( ! success )
return false;
// get new context from the fresh clone
gl = core.getWebGLContext(this.config.target);
if ( ! gl )
return false;
}

@@ -96,2 +142,4 @@

}
return true;
}

@@ -103,2 +151,5 @@

* @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap|kamposSource} source
* @example
* const media = document.querySelector('#video');
* kampos.setSource(media);
*/

@@ -109,3 +160,5 @@ setSource (source) {

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -139,3 +192,5 @@

if ( this.lostContext ) {
this.restoreContext();
const success = this.restoreContext();
if ( ! success ) return;
}

@@ -148,2 +203,4 @@

* Starts the animation loop.
*
* If using a {@see Ticker} this instance will be added to that {@see Ticker}.
*/

@@ -174,2 +231,4 @@ play () {

* Stops the animation loop.
*
* If using a {@see Ticker} this instance will be removed from that {@see Ticker}.
*/

@@ -211,2 +270,3 @@ stop () {

this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);

@@ -226,4 +286,9 @@ this.config = null;

* This will replace the canvas DOM element with a fresh clone.
*
* @return {boolean} success whether forcing a context restore was successful
*/
restoreContext () {
if ( Kampos.preventContextCreation )
return false;
const canvas = this.config.target;

@@ -241,7 +306,11 @@ const clone = this.config.target.cloneNode(true);

canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
clone.addEventListener('webglcontextlost', this._loseContext, true);
clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (this.lostContext) {
this._restoreContext();
if ( this.lostContext ) {
return this._restoreContext();
}
return true;
}

@@ -251,3 +320,4 @@

this.data && this.data.textures.forEach((texture, i) => {
this.data.textures[i].texture = core.createTexture(this.gl, {
const data = this.data.textures[i];
data.texture = core.createTexture(this.gl, {
width: this.dimensions.width,

@@ -258,2 +328,5 @@ height: this.dimensions.height,

}).texture;
data.format = texture.format;
data.update = texture.update;
});

@@ -268,2 +341,5 @@ }

* @property {Ticker} [ticker]
* @property {function} [onContextLost]
* @property {function} [onContextRestored]
* @property {function} [onContextCreationError]
*/

@@ -280,4 +356,4 @@

* @typedef {Object} effectConfig
* @property {shaderConfig} vertexSrc
* @property {shaderConfig} fragmentSrc
* @property {shaderConfig} vertex
* @property {shaderConfig} fragment
* @property {Attribute[]} attributes

@@ -292,5 +368,6 @@ * @property {Uniform[]} uniforms

* @property {string} [main]
* @property {string} [source]
* @property {string} [constant]
* @property {Object} [uniform]
* @property {Object} [attribute]
* @property {Object} [uniform] mapping name of variable to type
* @property {Object} [attribute] mapping name of variable to type
*/

@@ -301,2 +378,4 @@

* @property {string} format
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} [image]
* @property {boolean} [update] defaults to `false`
*/

@@ -315,3 +394,3 @@

* @property {string} name
* @property {number} size
* @property {number} [size] defaults to `data.length`
* @property {string} type

@@ -318,0 +397,0 @@ * @property {Array} data

@@ -50,2 +50,3 @@ /**

this.pool.push(instance);
instance.playing = true;
}

@@ -64,2 +65,3 @@ }

this.pool.splice(index, 1);
instance.playing = false;
}

@@ -66,0 +68,0 @@ }

@@ -323,3 +323,45 @@ const {Kampos, Ticker} = require('./src/kampos');

});
})
});
describe('Kampos#_contextCreationError', function () {
let canvas, video;
beforeEach(function () {
canvas = {
getContext() {
return null;
},
addEventListener() {}
};
video = document.createElement('video');
});
it('should bail out when getContext() fails', function () {
assert.throws(() => new Kampos({target: canvas, effects: [brightnessContrast]}));
});
it('should bail out when preventContextCreation is true', function () {
canvas = document.createElement('canvas');
Kampos.preventContextCreation = true;
assert.throws(() => new Kampos({target: canvas, effects: [brightnessContrast]}));
});
it('should bail out after context creation error event was fired', function () {
canvas = document.createElement('canvas');
const instance = new Kampos({target: canvas, effects: [brightnessContrast]});
instance._contextCreationError();
assert.throws(() => new Kampos({target: canvas, effects: [brightnessContrast]}));
});
afterEach(function () {
Kampos.preventContextCreation = false;
canvas = null;
video = null;
});
});
});

