kampos - npm Package Compare versions

Comparing version 0.2.0 to 0.2.1

.babelrc


demo/disp.js

@@ -16,2 +16,6 @@ import {Ticker} from '../src/kampos';

const video7 = document.querySelector('#video7');
const video8 = document.querySelector('#video8');
const target4 = document.querySelector('#target4');
const ticker = new Ticker();

@@ -46,5 +50,14 @@

Promise.all([trans1.ready, trans2.ready, trans3.ready])
const trans4 = new Transition({
vid1: video7,
vid2: video8,
target: target4,
ticker,
disp: 'disp-liquid.jpg',
dispScale: 0.35
});
Promise.all([trans1.ready, trans2.ready, trans3.ready, trans4.ready])
.then(() => {
ticker.start();
});
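For orientation, here is a minimal standalone sketch of wiring up this fourth transition without the demo's Transition helper. The relative import paths mirror the demo's own imports; the element IDs are taken from the markup referenced above, and the assumption that the videos are already loaded (so videoWidth/videoHeight are available) is illustrative only:

import {Kampos, Ticker} from '../src/kampos';
import transitionDisplacement from '../src/transitions/displacement';

const video7 = document.querySelector('#video7');
const video8 = document.querySelector('#video8');
const ticker = new Ticker();
const transition = transitionDisplacement();

// load the displacement map, then point the effect at the map and the target video
const map = new Image();
map.onload = () => {
    transition.map = map;
    transition.to = video8;
    transition.sourceScale = {x: 0.35};
    transition.toScale = {x: -0.35};

    const kampos = new Kampos({target: document.querySelector('#target4'), effects: [transition], ticker});
    kampos.setSource({media: video7, width: video7.videoWidth, height: video7.videoHeight});
    ticker.start();
};
map.src = 'disp-liquid.jpg';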


demo/index.js

@@ -926,10 +926,14 @@ (function () {

/**
* @function alphaMask
* @returns {alphaMaskEffect}
* @example alphaMask()
* @function displacementTransition
* @returns {displacementTransitionEffect}
* @example displacementTransition()
*/
function alphaMask () {
function transitionDisplacement () {
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @typedef {Object} displacementTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map displacement map to use
* @property {number} progress number between 0.0 and 1.0
* @property {{x: number, y: number}} sourceScale
* @property {{x: number, y: number}} toScale
* @property {boolean} disabled

@@ -939,5 +943,7 @@ *

* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
* img.src = 'disp.jpg';
* effect.map = img;
* effect.to = document.querySelector('#video-to');
* effect.sourceScale = {x: 0.4};
* effect.toScale = {x: 0.8};
*/

@@ -947,15 +953,42 @@ return {

attribute: {
a_alphaMaskTexCoord: 'vec2'
a_transitionToTexCoord: 'vec2',
a_transitionDispMapTexCoord: 'vec2'
},
main: `
v_alphaMaskTexCoord = a_alphaMaskTexCoord;`
v_transitionToTexCoord = a_transitionToTexCoord;
v_transitionDispMapTexCoord = a_transitionDispMapTexCoord;`
},
fragment: {
uniform: {
u_alphaMaskEnabled: 'bool',
u_mask: 'sampler2D'
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D',
u_transitionDispMap: 'sampler2D',
u_transitionProgress: 'float',
u_sourceDispScale: 'vec2',
u_toDispScale: 'vec2'
},
source: `
vec3 transDispMap = vec3(1.0);
vec2 transDispVec = vec2(0.0);
if (u_transitionEnabled) {
// read the displacement texture once and create the displacement map
transDispMap = texture2D(u_transitionDispMap, v_transitionDispMapTexCoord).rgb - 0.5;
// prepare the source coordinates for sampling
transDispVec = vec2(u_sourceDispScale.x * transDispMap.r, u_sourceDispScale.y * transDispMap.g);
sourceCoord = clamp(v_texCoord + transDispVec * u_transitionProgress, 0.0, 1.0);
}`,
main: `
if (u_alphaMaskEnabled) {
alpha *= texture2D(u_mask, v_alphaMaskTexCoord).a;
if (u_transitionEnabled) {
// prepare the target coordinates for sampling
transDispVec = vec2(u_toDispScale.x * transDispMap.r, u_toDispScale.y * transDispMap.g);
vec2 targetCoord = clamp(v_transitionToTexCoord + transDispVec * (1.0 - u_transitionProgress), 0.0, 1.0);
// sample the target
vec4 targetPixel = texture2D(u_transitionTo, targetCoord);
// mix the results of source and target
color = mix(color, targetPixel.rgb, u_transitionProgress);
alpha = mix(alpha, targetPixel.a, u_transitionProgress);
}`

@@ -969,109 +1002,47 @@ },

},
get mask () {
return this.textures[0].image;
get progress () {
return this.uniforms[3].data[0];
},
set mask (img) {
this.textures[0].image = img;
set progress (p) {
this.uniforms[3].data[0] = p;
},
varying: {
v_alphaMaskTexCoord: 'vec2'
get sourceScale () {
const [x, y] = this.uniforms[4].data;
return {x, y};
},
uniforms: [
{
name: 'u_alphaMaskEnabled',
type: 'i',
data: [1]
},
{
name: 'u_mask',
type: 'i',
data: [1]
}
],
attributes: [
{
name: 'a_alphaMaskTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'ALPHA'
}
]
};
}
/**
* @function brightnessContrast
* @returns {brightnessContrastEffect}
* @example brightnessContrast()
*/
function brightnessContrast () {
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertex: {},
fragment: {
uniform: {
u_brEnabled: 'bool',
u_ctEnabled: 'bool',
u_contrast: 'float',
u_brightness: 'float'
},
constant: 'const vec3 half3 = vec3(0.5);',
main: `
if (u_brEnabled) {
color *= u_brightness;
}
if (u_ctEnabled) {
color = (color - half3) * u_contrast + half3;
}
color = clamp(color, 0.0, 1.0);`
set sourceScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[4].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[4].data[1] = y;
},
get brightness () {
return this.uniforms[2].data[0];
get toScale () {
const [x, y] = this.uniforms[5].data;
return {x, y};
},
set brightness (value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
set toScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[5].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[5].data[1] = y;
},
get contrast () {
return this.uniforms[3].data[0];
get to () {
return this.textures[0].image;
},
set contrast (value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
set to (media) {
this.textures[0].image = media;
},
get brightnessDisabled () {
return !this.uniforms[0].data[0];
get map () {
return this.textures[1].image;
},
set brightnessDisabled (toggle) {
this.uniforms[0].data[0] = +!toggle;
set map (img) {
this.textures[1].image = img;
},
get contrastDisabled () {
return !this.uniforms[1].data[0];
varying: {
v_transitionToTexCoord: 'vec2',
v_transitionDispMapTexCoord: 'vec2'
},
set contrastDisabled (toggle) {
this.uniforms[1].data[0] = +!toggle;
},
uniforms: [
{
name: 'u_brEnabled',
name: 'u_transitionEnabled',
type: 'i',

@@ -1081,3 +1052,3 @@ data: [1]

{
name: 'u_ctEnabled',
name: 'u_transitionTo',
type: 'i',

@@ -1087,216 +1058,51 @@ data: [1]

{
name: 'u_brightness',
type: 'f',
data: [1.0]
name: 'u_transitionDispMap',
type: 'i',
data: [2]
},
{
name: 'u_contrast',
name: 'u_transitionProgress',
type: 'f',
data: [1.0]
}
]
};
}
/**
* @function hueSaturation
* @returns {hueSaturationEffect}
* @example hueSaturation()
*/
function hueSaturation () {
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertex: {
uniform: {
u_hue: 'float',
u_saturation: 'float'
data: [0]
},
// for implementation see: https://www.w3.org/TR/SVG11/filters.html#feColorMatrixElement
constant: `
const mat3 lummat = mat3(
lumcoeff,
lumcoeff,
lumcoeff
);
const mat3 cosmat = mat3(
vec3(0.787, -0.715, -0.072),
vec3(-0.213, 0.285, -0.072),
vec3(-0.213, -0.715, 0.928)
);
const mat3 sinmat = mat3(
vec3(-0.213, -0.715, 0.928),
vec3(0.143, 0.140, -0.283),
vec3(-0.787, 0.715, 0.072)
);
const mat3 satmat = mat3(
vec3(0.787, -0.715, -0.072),
vec3(-0.213, 0.285, -0.072),
vec3(-0.213, -0.715, 0.928)
);`,
main: `
float angle = (u_hue / 180.0) * 3.14159265358979323846264;
v_hueRotation = lummat + cos(angle) * cosmat + sin(angle) * sinmat;
v_saturation = lummat + satmat * u_saturation;`
},
fragment: {
uniform: {
u_hueEnabled: 'bool',
u_satEnabled: 'bool',
u_hue: 'float',
u_saturation: 'float'
},
main: `
if (u_hueEnabled) {
color = vec3(
dot(color, v_hueRotation[0]),
dot(color, v_hueRotation[1]),
dot(color, v_hueRotation[2])
);
}
if (u_satEnabled) {
color = vec3(
dot(color, v_saturation[0]),
dot(color, v_saturation[1]),
dot(color, v_saturation[2])
);
}
color = clamp(color, 0.0, 1.0);`
},
varying: {
v_hueRotation: 'mat3',
v_saturation: 'mat3'
},
get hue () {
return this.uniforms[2].data[0];
},
set hue (h) {
this.uniforms[2].data[0] = parseFloat(h);
},
get saturation () {
return this.uniforms[3].data[0];
},
set saturation (s) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, s));
},
get hueDisabled () {
return !this.uniforms[0].data[0];
},
set hueDisabled (b) {
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled () {
return !this.uniforms[1].data[0];
},
set saturationDisabled (b) {
this.uniforms[1].data[0] = +!b;
},
uniforms: [
{
name: 'u_hueEnabled',
type: 'i',
data: [1]
},
{
name: 'u_satEnabled',
type: 'i',
data: [1]
},
{
name: 'u_hue',
name: 'u_sourceDispScale',
type: 'f',
data: [0.0]
data: [0.0, 0.0]
},
{
name: 'u_saturation',
name: 'u_toDispScale',
type: 'f',
data: [1.0]
data: [0.0, 0.0]
}
]
};
}
/**
* @function duotone
* @returns {duotoneEffect}
* @example duotone()
*/
function duotone () {
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertex: {},
fragment: {
uniform: {
u_duotoneEnabled: 'bool',
u_light: 'vec4',
u_dark: 'vec4'
},
main: `
if (u_duotoneEnabled) {
vec3 gray = vec3(dot(lumcoeff, color));
color = mix(u_dark.rgb, u_light.rgb, gray);
}`
},
get light () {
return this.uniforms[1].data.slice(0);
},
set light (l) {
l.forEach((c, i) => {
if ( ! Number.isNaN(c) ) {
this.uniforms[1].data[i] = c;
}
});
},
get dark () {
return this.uniforms[2].data.slice(0);
},
set dark (d) {
d.forEach((c, i) => {
if ( ! Number.isNaN(c) ) {
this.uniforms[2].data[i] = c;
}
});
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
uniforms: [
],
attributes: [
{
name: 'u_duotoneEnabled',
type: 'i',
data: [1]
name: 'a_transitionToTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
},
{
name: 'u_light',
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
name: 'a_transitionDispMapTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGBA',
update: true
},
{
name: 'u_dark',
type: 'f',
data: [0.7411764706, 0.0431372549, 0.568627451, 1]
format: 'RGB'
}

@@ -1307,287 +1113,205 @@ ]

const video = document.querySelector('#video');
const videoUrl = document.querySelector('#video-url');
const maskUrl = document.querySelector('#alpha-mask-url');
let target = document.querySelector('#target');
// let maskURL = 'https://static.wixstatic.com/shapes/3943e2a044854dfbae0fbe56ec72c7d9.svg';
let maskURL = 'https://static.wixstatic.com/shapes/2fc6253d53dc4925aab74c224256d7f8.svg';
const observer = new IntersectionObserver(entries => {
entries.forEach(entry => {
const direction = entry.isIntersecting ? 'forward' : 'backward';
Transition.targets.get(entry.target)[direction]();
});
}, {
root: null,
rootMargin: '0%',
threshold: 0.8
});
let playing = false;
let timeupdate = false;
class Transition {
constructor ({vid1, vid2, target, disp, ticker, dispScale}) {
this.vid1 = vid1;
this.vid2 = vid2;
this.target = target;
this.playing = 0;
this.timeupdate = 0;
this.disp = disp;
this.dispScale = dispScale;
this.transition = transitionDisplacement();
function initVideo () {
video.src = videoUrl.value;
this.direction = 1;
this.startTime = 0;
video.addEventListener('playing', isPlaying, true);
video.addEventListener('timeupdate', isTimeupdate, true);
video.addEventListener('canplay', canPlay, true);
}
this.ready = new Promise(resolve => {
this.setReady = resolve;
});
function canPlay () {
video.play();
}
this.kampos = new Kampos({target, effects: [this.transition], ticker});
function isPlaying () {
playing = true;
video.removeEventListener('playing', isPlaying, true);
check();
}
function isTimeupdate () {
timeupdate = true;
video.removeEventListener('timeupdate', isTimeupdate, true);
check();
}
this.initVideo();
function check () {
if (playing && timeupdate) {
playing = false;
timeupdate = false;
// target.addEventListener('mouseenter', forward);
// target.addEventListener('mouseleave', backward);
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
observer.observe(target);
target.style.width = `${width}px`;
target.style.height = `${height}px`;
if ( toggleAlphaMask.checked ) {
createMaskImage(width, height)
.then(function () {
instance.setSource({media: video, type: 'video', width, height});
ticker.start();
});
}
else {
instance.setSource({media: video, type: 'video', width, height});
ticker.start();
}
video.removeEventListener('canplay', canPlay, true);
Transition.targets.set(target, this);
}
}
function hex2vec4 (hex) {
const s = hex.substring(1);
return [s[0] + s[1], s[2] + s[3], s[4] + s[5], 'ff'].map(h => parseInt(h, 16) / 255);
}
initVideo () {
function drawInlineSVG (ctx, rawSVG, callback) {
const svg = new Blob([rawSVG], {type:"image/svg+xml"}),
url = URL.createObjectURL(svg),
img = new Image;
function canPlay (e) {
e.target.play();
}
img.onload = function () {
ctx.drawImage(this, 0, 0);
URL.revokeObjectURL(url);
callback(this);
};
const isPlaying = e => {
this.playing += 1;
e.target.removeEventListener('playing', isPlaying, true);
check();
};
const isTimeupdate = (e) => {
this.timeupdate += 1;
e.target.removeEventListener('timeupdate', isTimeupdate, true);
check();
};
img.src = url;
}
const dispReady = new Promise(resolve => {
const img = new Image();
function fetchSVG () {
return window.fetch(maskURL).then(function (response) {
return response.text();
});
}
img.onload = function () {
resolve(this);
};
function handleRangeChange (e) {
const target = e.target;
const effect = target.id;
let data;
img.src = this.disp;
});
switch ( effect ) {
case 'brightness':
case 'contrast':
bc[effect] = target.value;
data = [bc[effect]];
break;
case 'hue':
case 'saturation':
hs[effect] = target.value;
data = [hs[effect]];
break;
case 'duotone-light':
dt.light = hex2vec4(target.value);
e.target.nextElementSibling.textContent = target.value;
break;
case 'duotone-dark':
dt.dark = hex2vec4(target.value);
e.target.nextElementSibling.textContent = target.value;
break;
}
const check = () => {
if (this.playing === 2 && this.timeupdate === 2) {
const width = this.vid1.videoWidth;
const height = this.vid1.videoHeight;
if ( data ) {
data[0] = parseFloat(target.value);
e.target.nextElementSibling.textContent = data[0];
}
}
this.target.style.width = `${width}px`;
this.target.style.height = `${height}px`;
const inputs = ['brightness', 'contrast', 'hue', 'saturation', 'duotone-light', 'duotone-dark'];
const hs = hueSaturation();
const bc = brightnessContrast();
const dt = duotone();
// const tv = transparentVideo();
const am = alphaMask();
dispReady.then(img => {
this.transition.map = img;
this.transition.to = this.vid2;
// const toggleTransparent = document.querySelector('#toggle-transparent');
const toggleDuotone = document.querySelector('#toggle-duotone');
const toggleAlphaMask = document.querySelector('#toggle-alphamask');
this.transition.sourceScale = {x: this.dispScale};
this.transition.toScale = {x: -this.dispScale};
// const duotoneChecked = toggleDuotone.checked;
// const transparentChecked = toggleTransparent.checked;
const toggleAlphaMaskChecked = toggleAlphaMask.checked;
this.kampos.setSource({media: this.vid1, width, height});
this.setReady();
});
const effects = [];
this.vid1.removeEventListener('canplay', canPlay, true);
this.vid2.removeEventListener('canplay', canPlay, true);
}
};
// if (transparentChecked) {
// effects.push(tv);
// }
effects.push(bc);
[this.vid1, this.vid2].forEach(vid => {
vid.addEventListener('playing', isPlaying, true);
vid.addEventListener('timeupdate', isTimeupdate, true);
vid.addEventListener('canplay', canPlay, true);
});
}
// if (duotoneChecked) {
effects.push(dt);
// }
tick (p) {
this.transition.progress = p;
}
effects.push(hs);
play () {
const now = Date.now() - this.startTime;
let p = Math.abs(Math.sin(now / .5e3));
p = this.direction ? p : 1 - p;
if (toggleAlphaMaskChecked) {
effects.push(am);
}
this.tick(p);
function createMaskImage (width, height) {
if ( maskURL.endsWith('.svg') ) {
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
if (this.direction) {
if (p * 1e2 < 99) {
window.requestAnimationFrame(() => this.play());
}
else {
window.requestAnimationFrame(() => this.tick(1));
}
}
else {
if (p * 1e2 > 1) {
window.requestAnimationFrame(() => this.play());
}
else {
window.requestAnimationFrame(() => this.tick(0));
}
}
}
return new Promise(function (resolve) {
fetchSVG().then(function (text) {
const div = document.createElement('div');
div.innerHTML = text;
const svg = div.firstElementChild;
document.body.appendChild(svg);
const bbox = svg.getBBox();
document.body.removeChild(svg);
svg.setAttribute('viewBox', `${bbox.x} ${bbox.y} ${bbox.width} ${bbox.height}`);
svg.setAttribute('width', width);
svg.setAttribute('height', height);
canvas.width = width;
canvas.height = height;
drawInlineSVG(ctx, svg.outerHTML, () => {
am.textures[0].image = canvas;
resolve();
});
});
});
forward () {
this.direction = 1;
this.startTime = Date.now();
this.play();
}
else {
return new Promise(function (resolve) {
const img = new Image();
img.crossOrigin = 'anonymous';
img.onload = function () {
am.textures[0].image = this;
resolve();
};
img.src = maskURL;
});
backward () {
this.direction = 0;
this.startTime = Date.now();
this.play();
}
}
inputs.map(function (name) {
return document.getElementById(name);
})
.map(function (input) {
input.addEventListener('input', handleRangeChange);
});
Transition.targets = new Map();
function toggleHandler () {
// instance.destroy();
const video = document.querySelector('#video');
const video2 = document.querySelector('#video2');
const target = document.querySelector('#target');
// Works around an issue with working with the old context
const newCanvas = document.createElement('canvas');
target.parentElement.replaceChild(newCanvas, target);
target = newCanvas;
const video3 = document.querySelector('#video3');
const video4 = document.querySelector('#video4');
const target2 = document.querySelector('#target2');
const video5 = document.querySelector('#video5');
const video6 = document.querySelector('#video6');
const target3 = document.querySelector('#target3');
effects.length = 0;
const video7 = document.querySelector('#video7');
const video8 = document.querySelector('#video8');
const target4 = document.querySelector('#target4');
// if ( toggleTransparent.checked ) {
// effects.push(tv);
// }
const ticker = new Ticker();
effects.push(bc);
// if ( toggleDuotone.checked ) {
effects.push(dt);
// }
effects.push(hs);
if ( toggleAlphaMask.checked ) {
effects.push(am);
}
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
newCanvas.style.width = `${width}px`;
newCanvas.style.height = `${height}px`;
instance.init({target, effects, ticker});
instance.setSource({media: video, type: 'video', width, height});
}
toggleDuotone.addEventListener('input', e => {
dt.disabled = !e.target.checked;
const trans1 = new Transition({
vid1: video,
vid2: video2,
target: target,
ticker,
disp: 'disp-tri.jpg',
dispScale: 0.3
});
// toggleTransparent.addEventListener('input', toggleHandler);
toggleAlphaMask.addEventListener('input', toggleHandler);
const ticker = new Ticker();
let instance = new Kampos({target, effects, ticker});
initVideo();
videoUrl.addEventListener('input', initVideo);
maskUrl.addEventListener('input', e => {
maskURL = e.target.value;
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
createMaskImage(width, height)
.then(() => instance._createTextures());
const trans2 = new Transition({
vid1: video3,
vid2: video4,
target: target2,
ticker,
disp: 'disp-snow.jpg',
dispScale: 1.0
});
const toggleBackgroundColor = document.querySelector('#toggle-background-color');
const backgroundColor = document.querySelector('#background-color');
toggleBackgroundColor.addEventListener('input', e => {
document.body.style.backgroundImage = e.target.checked ? 'none' : '';
const trans3 = new Transition({
vid1: video5,
vid2: video6,
target: target3,
ticker,
disp: 'disp-cloud.png',
dispScale: 0.2
});
backgroundColor.addEventListener('input', e => {
document.body.style.backgroundColor = backgroundColor.value;
e.target.nextElementSibling.innerText = e.target.value;
const trans4 = new Transition({
vid1: video7,
vid2: video8,
target: target4,
ticker,
disp: 'disp-liquid.jpg',
dispScale: 0.35
});
document.querySelector('#duotone-switch').addEventListener('click', e => {
const light = document.querySelector('#duotone-light');
const dark = document.querySelector('#duotone-dark');
Promise.all([trans1.ready, trans2.ready, trans3.ready, trans4.ready])
.then(() => {
ticker.start();
});
const lightValue = light.value;
const darkValue = dark.value;
light.value = darkValue;
dark.value = lightValue;
handleRangeChange({target: light});
handleRangeChange({target: dark});
});
}());

@@ -5,3 +5,3 @@ import progress from 'rollup-plugin-progress';

const config = {
input: 'demo.js',
input: 'disp.js',
output: {

@@ -8,0 +8,0 @@ file: 'index.js',

import {Kampos} from '../src/kampos';
import transitionDisplacement from '../src/transitions/displacement';
export default class Transition {
const observer = new IntersectionObserver(entries => {
entries.forEach(entry => {
const direction = entry.isIntersecting ? 'forward' : 'backward';
Transition.targets.get(entry.target)[direction]();
});
}, {
root: null,
rootMargin: '0%',
threshold: 0.8
});
class Transition {
constructor ({vid1, vid2, target, disp, ticker, dispScale}) {

@@ -26,17 +37,8 @@ this.vid1 = vid1;

const enter = () => {
this.direction = 1;
this.startTime = Date.now();
this.play();
};
// target.addEventListener('mouseenter', forward);
// target.addEventListener('mouseleave', backward);
const leave = () => {
this.direction = 0;
this.startTime = Date.now();
this.play();
};
observer.observe(target);
target.addEventListener('mouseenter', enter);
target.addEventListener('mouseleave', leave);
Transition.targets.set(target, this);
}

@@ -131,2 +133,18 @@

}
forward () {
this.direction = 1;
this.startTime = Date.now();
this.play();
}
backward () {
this.direction = 0;
this.startTime = Date.now();
this.play();
}
}
Transition.targets = new Map();
export default Transition;
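A hedged sketch of using this class directly follows; the module's file name is not shown in the diff, so the './transition' path below is an assumption, as are the element IDs:

import {Ticker} from '../src/kampos';
import Transition from './transition'; // assumed file name for the module above

const ticker = new Ticker();
const transition = new Transition({
    vid1: document.querySelector('#video1'),
    vid2: document.querySelector('#video2'),
    target: document.querySelector('#target1'),
    ticker,
    disp: 'disp-tri.jpg',
    dispScale: 0.3
});

// start rendering once both videos and the displacement map are ready
transition.ready.then(() => ticker.start());

// the IntersectionObserver above drives these as the canvas scrolls in and out of view,
// but they can also be triggered manually:
transition.forward();   // animate progress from 0 towards 1
// transition.backward(); // animate progress from 1 towards 0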

@@ -13,76 +13,66 @@ (function (global, factory) {

function alphaMask () {
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
*/
return {
vertex: {
attribute: {
a_alphaMaskTexCoord: 'vec2'
},
main: `
v_alphaMaskTexCoord = a_alphaMaskTexCoord;`
},
fragment: {
uniform: {
u_alphaMaskEnabled: 'bool',
u_mask: 'sampler2D'
},
main: `
if (u_alphaMaskEnabled) {
alpha *= texture2D(u_mask, v_alphaMaskTexCoord).a;
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
get mask () {
return this.textures[0].image;
},
set mask (img) {
this.textures[0].image = img;
},
varying: {
v_alphaMaskTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_alphaMaskEnabled',
type: 'i',
data: [1]
},
{
name: 'u_mask',
type: 'i',
data: [1]
}
],
attributes: [
{
name: 'a_alphaMaskTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'ALPHA'
}
]
};
/**
* @typedef {Object} alphaMaskEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} mask
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'picture.png';
* effect.mask = img;
* effect.disabled = true;
*/
return {
vertex: {
attribute: {
a_alphaMaskTexCoord: 'vec2'
},
main: "\n v_alphaMaskTexCoord = a_alphaMaskTexCoord;"
},
fragment: {
uniform: {
u_alphaMaskEnabled: 'bool',
u_mask: 'sampler2D'
},
main: "\n if (u_alphaMaskEnabled) {\n alpha *= texture2D(u_mask, v_alphaMaskTexCoord).a;\n }"
},
get disabled() {
return !this.uniforms[0].data[0];
},
set disabled(b) {
this.uniforms[0].data[0] = +!b;
},
get mask() {
return this.textures[0].image;
},
set mask(img) {
this.textures[0].image = img;
},
varying: {
v_alphaMaskTexCoord: 'vec2'
},
uniforms: [{
name: 'u_alphaMaskEnabled',
type: 'i',
data: [1]
}, {
name: 'u_mask',
type: 'i',
data: [1]
}],
attributes: [{
name: 'a_alphaMaskTexCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}],
textures: [{
format: 'ALPHA'
}]
};
}

@@ -96,82 +86,77 @@

function brightnessContrast () {
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertex: {},
fragment: {
uniform: {
u_brEnabled: 'bool',
u_ctEnabled: 'bool',
u_contrast: 'float',
u_brightness: 'float'
},
constant: 'const vec3 half3 = vec3(0.5);',
main: `
if (u_brEnabled) {
color *= u_brightness;
}
/**
* @typedef {Object} brightnessContrastEffect
* @property {number} brightness
* @property {number} contrast
* @property {boolean} brightnessDisabled
* @property {boolean} contrastDisabled
*
* @example
* effect.brightness = 1.5;
* effect.contrast = 0.9;
* effect.contrastDisabled = true;
*/
return {
vertex: {},
fragment: {
uniform: {
u_brEnabled: 'bool',
u_ctEnabled: 'bool',
u_contrast: 'float',
u_brightness: 'float'
},
constant: 'const vec3 half3 = vec3(0.5);',
main: "\n if (u_brEnabled) {\n color *= u_brightness;\n }\n\n if (u_ctEnabled) {\n color = (color - half3) * u_contrast + half3;\n }\n\n color = clamp(color, 0.0, 1.0);"
},
if (u_ctEnabled) {
color = (color - half3) * u_contrast + half3;
}
get brightness() {
return this.uniforms[2].data[0];
},
color = clamp(color, 0.0, 1.0);`
},
get brightness () {
return this.uniforms[2].data[0];
},
set brightness (value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
},
get contrast () {
return this.uniforms[3].data[0];
},
set contrast (value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
},
get brightnessDisabled () {
return !this.uniforms[0].data[0];
},
set brightnessDisabled (toggle) {
this.uniforms[0].data[0] = +!toggle;
},
get contrastDisabled () {
return !this.uniforms[1].data[0];
},
set contrastDisabled (toggle) {
this.uniforms[1].data[0] = +!toggle;
},
uniforms: [
{
name: 'u_brEnabled',
type: 'i',
data: [1]
},
{
name: 'u_ctEnabled',
type: 'i',
data: [1]
},
{
name: 'u_brightness',
type: 'f',
data: [1.0]
},
{
name: 'u_contrast',
type: 'f',
data: [1.0]
}
]
};
set brightness(value) {
this.uniforms[2].data[0] = parseFloat(Math.max(0, value));
},
get contrast() {
return this.uniforms[3].data[0];
},
set contrast(value) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, value));
},
get brightnessDisabled() {
return !this.uniforms[0].data[0];
},
set brightnessDisabled(toggle) {
this.uniforms[0].data[0] = +!toggle;
},
get contrastDisabled() {
return !this.uniforms[1].data[0];
},
set contrastDisabled(toggle) {
this.uniforms[1].data[0] = +!toggle;
},
uniforms: [{
name: 'u_brEnabled',
type: 'i',
data: [1]
}, {
name: 'u_ctEnabled',
type: 'i',
data: [1]
}, {
name: 'u_brightness',
type: 'f',
data: [1.0]
}, {
name: 'u_contrast',
type: 'f',
data: [1.0]
}]
};
}

@@ -185,124 +170,87 @@

function hueSaturation () {
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertex: {
uniform: {
u_hue: 'float',
u_saturation: 'float'
},
// for implementation see: https://www.w3.org/TR/SVG11/filters.html#feColorMatrixElement
constant: `
const mat3 lummat = mat3(
lumcoeff,
lumcoeff,
lumcoeff
);
const mat3 cosmat = mat3(
vec3(0.787, -0.715, -0.072),
vec3(-0.213, 0.285, -0.072),
vec3(-0.213, -0.715, 0.928)
);
const mat3 sinmat = mat3(
vec3(-0.213, -0.715, 0.928),
vec3(0.143, 0.140, -0.283),
vec3(-0.787, 0.715, 0.072)
);
const mat3 satmat = mat3(
vec3(0.787, -0.715, -0.072),
vec3(-0.213, 0.285, -0.072),
vec3(-0.213, -0.715, 0.928)
);`,
main: `
float angle = (u_hue / 180.0) * 3.14159265358979323846264;
v_hueRotation = lummat + cos(angle) * cosmat + sin(angle) * sinmat;
v_saturation = lummat + satmat * u_saturation;`
},
fragment: {
uniform: {
u_hueEnabled: 'bool',
u_satEnabled: 'bool',
u_hue: 'float',
u_saturation: 'float'
},
main: `
if (u_hueEnabled) {
color = vec3(
dot(color, v_hueRotation[0]),
dot(color, v_hueRotation[1]),
dot(color, v_hueRotation[2])
);
}
/**
* @typedef {Object} hueSaturationEffect
* @property {number} hue
* @property {number} saturation
* @property {boolean} hueDisabled
* @property {boolean} saturationDisabled
*
* @example
* effect.hue = 45;
* effect.saturation = 0.8;
*/
return {
vertex: {
uniform: {
u_hue: 'float',
u_saturation: 'float'
},
// for implementation see: https://www.w3.org/TR/SVG11/filters.html#feColorMatrixElement
constant: "\nconst mat3 lummat = mat3(\n lumcoeff,\n lumcoeff,\n lumcoeff\n);\nconst mat3 cosmat = mat3(\n vec3(0.787, -0.715, -0.072),\n vec3(-0.213, 0.285, -0.072),\n vec3(-0.213, -0.715, 0.928)\n);\nconst mat3 sinmat = mat3(\n vec3(-0.213, -0.715, 0.928),\n vec3(0.143, 0.140, -0.283),\n vec3(-0.787, 0.715, 0.072)\n);\nconst mat3 satmat = mat3(\n vec3(0.787, -0.715, -0.072),\n vec3(-0.213, 0.285, -0.072),\n vec3(-0.213, -0.715, 0.928)\n);",
main: "\n float angle = (u_hue / 180.0) * 3.14159265358979323846264;\n v_hueRotation = lummat + cos(angle) * cosmat + sin(angle) * sinmat;\n v_saturation = lummat + satmat * u_saturation;"
},
fragment: {
uniform: {
u_hueEnabled: 'bool',
u_satEnabled: 'bool',
u_hue: 'float',
u_saturation: 'float'
},
main: "\n if (u_hueEnabled) {\n color = vec3(\n dot(color, v_hueRotation[0]),\n dot(color, v_hueRotation[1]),\n dot(color, v_hueRotation[2])\n );\n }\n\n if (u_satEnabled) {\n color = vec3(\n dot(color, v_saturation[0]),\n dot(color, v_saturation[1]),\n dot(color, v_saturation[2])\n );\n }\n \n color = clamp(color, 0.0, 1.0);"
},
varying: {
v_hueRotation: 'mat3',
v_saturation: 'mat3'
},
if (u_satEnabled) {
color = vec3(
dot(color, v_saturation[0]),
dot(color, v_saturation[1]),
dot(color, v_saturation[2])
);
}
color = clamp(color, 0.0, 1.0);`
},
varying: {
v_hueRotation: 'mat3',
v_saturation: 'mat3'
},
get hue() {
return this.uniforms[2].data[0];
},
get hue () {
return this.uniforms[2].data[0];
},
set hue (h) {
this.uniforms[2].data[0] = parseFloat(h);
},
get saturation () {
return this.uniforms[3].data[0];
},
set saturation (s) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, s));
},
get hueDisabled () {
return !this.uniforms[0].data[0];
},
set hueDisabled (b) {
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled () {
return !this.uniforms[1].data[0];
},
set saturationDisabled (b) {
this.uniforms[1].data[0] = +!b;
},
uniforms: [
{
name: 'u_hueEnabled',
type: 'i',
data: [1]
},
{
name: 'u_satEnabled',
type: 'i',
data: [1]
},
{
name: 'u_hue',
type: 'f',
data: [0.0]
},
{
name: 'u_saturation',
type: 'f',
data: [1.0]
}
]
};
set hue(h) {
this.uniforms[2].data[0] = parseFloat(h);
},
get saturation() {
return this.uniforms[3].data[0];
},
set saturation(s) {
this.uniforms[3].data[0] = parseFloat(Math.max(0, s));
},
get hueDisabled() {
return !this.uniforms[0].data[0];
},
set hueDisabled(b) {
this.uniforms[0].data[0] = +!b;
},
get saturationDisabled() {
return !this.uniforms[1].data[0];
},
set saturationDisabled(b) {
this.uniforms[1].data[0] = +!b;
},
uniforms: [{
name: 'u_hueEnabled',
type: 'i',
data: [1]
}, {
name: 'u_satEnabled',
type: 'i',
data: [1]
}, {
name: 'u_hue',
type: 'f',
data: [0.0]
}, {
name: 'u_saturation',
type: 'f',
data: [1.0]
}]
};
}

@@ -316,72 +264,189 @@

function duotone () {
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertex: {},
fragment: {
uniform: {
u_duotoneEnabled: 'bool',
u_light: 'vec4',
u_dark: 'vec4'
},
main: `
if (u_duotoneEnabled) {
vec3 gray = vec3(dot(lumcoeff, color));
color = mix(u_dark.rgb, u_light.rgb, gray);
}`
},
get light () {
return this.uniforms[1].data.slice(0);
},
set light (l) {
l.forEach((c, i) => {
if ( ! Number.isNaN(c) ) {
this.uniforms[1].data[i] = c;
}
});
},
get dark () {
return this.uniforms[2].data.slice(0);
},
set dark (d) {
d.forEach((c, i) => {
if ( ! Number.isNaN(c) ) {
this.uniforms[2].data[i] = c;
}
});
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
uniforms: [
{
name: 'u_duotoneEnabled',
type: 'i',
data: [1]
},
{
name: 'u_light',
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
},
{
name: 'u_dark',
type: 'f',
data: [0.7411764706, 0.0431372549, 0.568627451, 1]
}
]
};
/**
* @typedef {Object} duotoneEffect
* @property {number[]} light Array of 4 numbers normalized (0.0 - 1.0)
* @property {number[]} dark Array of 4 numbers normalized (0.0 - 1.0)
* @property {boolean} disabled
*
* @example
* effect.light = [1.0, 1.0, 0.8];
* effect.dark = [0.2, 0.6, 0.33];
*/
return {
vertex: {},
fragment: {
uniform: {
u_duotoneEnabled: 'bool',
u_light: 'vec4',
u_dark: 'vec4'
},
main: "\n if (u_duotoneEnabled) {\n vec3 gray = vec3(dot(lumcoeff, color));\n color = mix(u_dark.rgb, u_light.rgb, gray);\n }"
},
get light() {
return this.uniforms[1].data.slice(0);
},
set light(l) {
var _this = this;
l.forEach(function (c, i) {
if (!Number.isNaN(c)) {
_this.uniforms[1].data[i] = c;
}
});
},
get dark() {
return this.uniforms[2].data.slice(0);
},
set dark(d) {
var _this2 = this;
d.forEach(function (c, i) {
if (!Number.isNaN(c)) {
_this2.uniforms[2].data[i] = c;
}
});
},
get disabled() {
return !this.uniforms[0].data[0];
},
set disabled(b) {
this.uniforms[0].data[0] = +!b;
},
uniforms: [{
name: 'u_duotoneEnabled',
type: 'i',
data: [1]
}, {
name: 'u_light',
type: 'f',
data: [0.9882352941, 0.7333333333, 0.05098039216, 1]
}, {
name: 'u_dark',
type: 'f',
data: [0.7411764706, 0.0431372549, 0.568627451, 1]
}]
};
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function _objectSpread(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
var ownKeys = Object.keys(source);
if (typeof Object.getOwnPropertySymbols === 'function') {
ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function (sym) {
return Object.getOwnPropertyDescriptor(source, sym).enumerable;
}));
}
ownKeys.forEach(function (key) {
_defineProperty(target, key, source[key]);
});
}
return target;
}
function _slicedToArray(arr, i) {
return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _nonIterableRest();
}
function _toConsumableArray(arr) {
return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread();
}
function _arrayWithoutHoles(arr) {
if (Array.isArray(arr)) {
for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
return arr2;
}
}
function _arrayWithHoles(arr) {
if (Array.isArray(arr)) return arr;
}
function _iterableToArray(iter) {
if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter);
}
function _iterableToArrayLimit(arr, i) {
var _arr = [];
var _n = true;
var _d = false;
var _e = undefined;
try {
for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) {
_arr.push(_s.value);
if (i && _arr.length === i) break;
}
} catch (err) {
_d = true;
_e = err;
} finally {
try {
if (!_n && _i["return"] != null) _i["return"]();
} finally {
if (_d) throw _e;
}
}
return _arr;
}
function _nonIterableSpread() {
throw new TypeError("Invalid attempt to spread non-iterable instance");
}
function _nonIterableRest() {
throw new TypeError("Invalid attempt to destructure non-iterable instance");
}
/**

@@ -393,95 +458,90 @@ * @function displacement

function displacement () {
/**
* @typedef {Object} displacementEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map
* @property {{x: number, y: number}} scale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.scale = {x: 0.4};
*/
return {
vertex: {
attribute: {
a_displacementMapTexCoord: 'vec2'
},
main: `
v_displacementMapTexCoord = a_displacementMapTexCoord;`
},
fragment: {
uniform: {
u_displacementEnabled: 'bool',
u_dispMap: 'sampler2D',
u_dispScale: 'vec2'
},
source: `
if (u_displacementEnabled) {
vec3 dispMap = texture2D(u_dispMap, v_displacementMapTexCoord).rgb - 0.5;
vec2 dispVec = vec2(v_texCoord.x + u_dispScale.x * dispMap.r, v_texCoord.y + u_dispScale.y * dispMap.g);
sourceCoord = clamp(dispVec, 0.0, 1.0);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
return this.uniforms[0].data[0] = +!b;
},
get scale () {
const [x, y] = this.uniforms[2].data;
return {x, y};
},
set scale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[2].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[2].data[1] = y;
},
get map () {
return this.textures[0].image;
},
set map (img) {
this.textures[0].image = img;
},
varying: {
v_displacementMapTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_displacementEnabled',
type: 'i',
data: [1]
},
{
name: 'u_dispMap',
type: 'i',
data: [1]
},
{
name: 'u_dispScale',
type: 'f',
data: [0.0, 0.0]
}
],
attributes: [
{
name: 'a_displacementMapTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGB'
}
]
};
/**
* @typedef {Object} displacementEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map
* @property {{x: number, y: number}} scale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.scale = {x: 0.4};
*/
return {
vertex: {
attribute: {
a_displacementMapTexCoord: 'vec2'
},
main: "\n v_displacementMapTexCoord = a_displacementMapTexCoord;"
},
fragment: {
uniform: {
u_displacementEnabled: 'bool',
u_dispMap: 'sampler2D',
u_dispScale: 'vec2'
},
source: "\n if (u_displacementEnabled) {\n vec3 dispMap = texture2D(u_dispMap, v_displacementMapTexCoord).rgb - 0.5;\n vec2 dispVec = vec2(v_texCoord.x + u_dispScale.x * dispMap.r, v_texCoord.y + u_dispScale.y * dispMap.g);\n sourceCoord = clamp(dispVec, 0.0, 1.0);\n }"
},
get disabled() {
return !this.uniforms[0].data[0];
},
set disabled(b) {
return this.uniforms[0].data[0] = +!b;
},
get scale() {
var _this$uniforms$2$data = _slicedToArray(this.uniforms[2].data, 2),
x = _this$uniforms$2$data[0],
y = _this$uniforms$2$data[1];
return {
x: x,
y: y
};
},
set scale(_ref) {
var x = _ref.x,
y = _ref.y;
if (typeof x !== 'undefined') this.uniforms[2].data[0] = x;
if (typeof y !== 'undefined') this.uniforms[2].data[1] = y;
},
get map() {
return this.textures[0].image;
},
set map(img) {
this.textures[0].image = img;
},
varying: {
v_displacementMapTexCoord: 'vec2'
},
uniforms: [{
name: 'u_displacementEnabled',
type: 'i',
data: [1]
}, {
name: 'u_dispMap',
type: 'i',
data: [1]
}, {
name: 'u_dispScale',
type: 'f',
data: [0.0, 0.0]
}],
attributes: [{
name: 'a_displacementMapTexCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}],
textures: [{
format: 'RGB'
}]
};
}

@@ -495,83 +555,70 @@

function fade () {
/**
* @typedef {Object} fadeTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {number} progress number between 0.0 and 1.0
* @property {boolean} disabled
*
* @example
* effect.to = document.querySelector('#video-to');
* effect.progress = 0.5;
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2'
},
main: `
v_transitionToTexCoord = a_transitionToTexCoord;`
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D'
},
main: `
if (u_transitionEnabled) {
vec4 targetPixel = texture2D(u_transitionTo, v_transitionToTexCoord);
color = mix(color, targetPixel.rgb, u_transitionProgress);
alpha = mix(alpha, targetPixel.a, u_transitionProgress);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
get to () {
return this.textures[0].image;
},
set to (media) {
this.textures[0].image = media;
},
varying: {
v_transitionToTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
},
{
name: 'u_transitionTo',
type: 'i',
data: [1]
},
{
name: 'u_transitionProgress',
type: 'f',
data: [0]
}
],
attributes: [
{
name: 'a_transitionToTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGBA',
update: true
}
]
};
/**
* @typedef {Object} fadeTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {number} progress number between 0.0 and 1.0
* @property {boolean} disabled
*
* @example
* effect.to = document.querySelector('#video-to');
* effect.progress = 0.5;
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2'
},
main: "\n v_transitionToTexCoord = a_transitionToTexCoord;"
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D'
},
main: "\n if (u_transitionEnabled) {\n vec4 targetPixel = texture2D(u_transitionTo, v_transitionToTexCoord);\n color = mix(color, targetPixel.rgb, u_transitionProgress);\n alpha = mix(alpha, targetPixel.a, u_transitionProgress);\n }"
},
get disabled() {
return !this.uniforms[0].data[0];
},
set disabled(b) {
this.uniforms[0].data[0] = +!b;
},
get to() {
return this.textures[0].image;
},
set to(media) {
this.textures[0].image = media;
},
varying: {
v_transitionToTexCoord: 'vec2'
},
uniforms: [{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
}, {
name: 'u_transitionTo',
type: 'i',
data: [1]
}, {
name: 'u_transitionProgress',
type: 'f',
data: [0]
}],
attributes: [{
name: 'a_transitionToTexCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}],
textures: [{
format: 'RGBA',
update: true
}]
};
}

@@ -585,235 +632,193 @@

function displacementTransition () {
/**
* @typedef {Object} displacementTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map displacement map to use
* @property {number} progress number between 0.0 and 1.0
* @property {{x: number, y: number}} sourceScale
* @property {{x: number, y: number}} toScale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.to = document.querySelector('#video-to');
* effect.sourceScale = {x: 0.4};
* effect.toScale = {x: 0.8};
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2',
a_transitionDispMapTexCoord: 'vec2'
},
main: `
v_transitionToTexCoord = a_transitionToTexCoord;
v_transitionDispMapTexCoord = a_transitionDispMapTexCoord;`
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D',
u_transitionDispMap: 'sampler2D',
u_transitionProgress: 'float',
u_sourceDispScale: 'vec2',
u_toDispScale: 'vec2'
},
source: `
vec3 transDispMap = vec3(1.0);
vec2 transDispVec = vec2(0.0);
/**
* @typedef {Object} displacementTransitionEffect
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
* @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map displacement map to use
* @property {number} progress number between 0.0 and 1.0
* @property {{x: number, y: number}} sourceScale
* @property {{x: number, y: number}} toScale
* @property {boolean} disabled
*
* @example
* const img = new Image();
* img.src = 'disp.jpg';
* effect.map = img;
* effect.to = document.querySelector('#video-to');
* effect.sourceScale = {x: 0.4};
* effect.toScale = {x: 0.8};
*/
return {
vertex: {
attribute: {
a_transitionToTexCoord: 'vec2',
a_transitionDispMapTexCoord: 'vec2'
},
main: "\n v_transitionToTexCoord = a_transitionToTexCoord;\n v_transitionDispMapTexCoord = a_transitionDispMapTexCoord;"
},
fragment: {
uniform: {
u_transitionEnabled: 'bool',
u_transitionTo: 'sampler2D',
u_transitionDispMap: 'sampler2D',
u_transitionProgress: 'float',
u_sourceDispScale: 'vec2',
u_toDispScale: 'vec2'
},
source: "\n vec3 transDispMap = vec3(1.0);\n vec2 transDispVec = vec2(0.0);\n\n if (u_transitionEnabled) {\n // read the displacement texture once and create the displacement map\n transDispMap = texture2D(u_transitionDispMap, v_transitionDispMapTexCoord).rgb - 0.5;\n\n // prepare the source coordinates for sampling\n transDispVec = vec2(u_sourceDispScale.x * transDispMap.r, u_sourceDispScale.y * transDispMap.g);\n sourceCoord = clamp(v_texCoord + transDispVec * u_transitionProgress, 0.0, 1.0);\n }",
main: "\n if (u_transitionEnabled) {\n // prepare the target coordinates for sampling\n transDispVec = vec2(u_toDispScale.x * transDispMap.r, u_toDispScale.y * transDispMap.g);\n vec2 targetCoord = clamp(v_transitionToTexCoord + transDispVec * (1.0 - u_transitionProgress), 0.0, 1.0);\n\n // sample the target\n vec4 targetPixel = texture2D(u_transitionTo, targetCoord);\n\n // mix the results of source and target\n color = mix(color, targetPixel.rgb, u_transitionProgress);\n alpha = mix(alpha, targetPixel.a, u_transitionProgress);\n }"
},
if (u_transitionEnabled) {
// read the displacement texture once and create the displacement map
transDispMap = texture2D(u_transitionDispMap, v_transitionDispMapTexCoord).rgb - 0.5;
get disabled() {
return !this.uniforms[0].data[0];
},
// prepare the source coordinates for sampling
transDispVec = vec2(u_sourceDispScale.x * transDispMap.r, u_sourceDispScale.y * transDispMap.g);
sourceCoord = clamp(v_texCoord + transDispVec * u_transitionProgress, 0.0, 1.0);
}`,
main: `
if (u_transitionEnabled) {
// prepare the target coordinates for sampling
transDispVec = vec2(u_toDispScale.x * transDispMap.r, u_toDispScale.y * transDispMap.g);
vec2 targetCoord = clamp(v_transitionToTexCoord + transDispVec * (1.0 - u_transitionProgress), 0.0, 1.0);
set disabled(b) {
this.uniforms[0].data[0] = +!b;
},
// sample the target
vec4 targetPixel = texture2D(u_transitionTo, targetCoord);
get progress() {
return this.uniforms[3].data[0];
},
// mix the results of source and target
color = mix(color, targetPixel.rgb, u_transitionProgress);
alpha = mix(alpha, targetPixel.a, u_transitionProgress);
}`
},
get disabled () {
return !this.uniforms[0].data[0];
},
set disabled (b) {
this.uniforms[0].data[0] = +!b;
},
get progress () {
return this.uniforms[3].data[0];
},
set progress (p) {
this.uniforms[3].data[0] = p;
},
get sourceScale () {
const [x, y] = this.uniforms[4].data;
return {x, y};
},
set sourceScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[4].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[4].data[1] = y;
},
get toScale () {
const [x, y] = this.uniforms[5].data;
return {x, y};
},
set toScale ({x, y}) {
if ( typeof x !== 'undefined' )
this.uniforms[5].data[0] = x;
if ( typeof y !== 'undefined' )
this.uniforms[5].data[1] = y;
},
get to () {
return this.textures[0].image;
},
set to (media) {
this.textures[0].image = media;
},
get map () {
return this.textures[1].image;
},
set map (img) {
this.textures[1].image = img;
},
varying: {
v_transitionToTexCoord: 'vec2',
v_transitionDispMapTexCoord: 'vec2'
},
uniforms: [
{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
},
{
name: 'u_transitionTo',
type: 'i',
data: [1]
},
{
name: 'u_transitionDispMap',
type: 'i',
data: [2]
},
{
name: 'u_transitionProgress',
type: 'f',
data: [0]
},
{
name: 'u_sourceDispScale',
type: 'f',
data: [0.0, 0.0]
},
{
name: 'u_toDispScale',
type: 'f',
data: [0.0, 0.0]
}
],
attributes: [
{
name: 'a_transitionToTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
},
{
name: 'a_transitionDispMapTexCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
textures: [
{
format: 'RGBA',
update: true
},
{
format: 'RGB'
}
]
};
set progress(p) {
this.uniforms[3].data[0] = p;
},
get sourceScale() {
var _this$uniforms$4$data = _slicedToArray(this.uniforms[4].data, 2),
x = _this$uniforms$4$data[0],
y = _this$uniforms$4$data[1];
return {
x: x,
y: y
};
},
set sourceScale(_ref) {
var x = _ref.x,
y = _ref.y;
if (typeof x !== 'undefined') this.uniforms[4].data[0] = x;
if (typeof y !== 'undefined') this.uniforms[4].data[1] = y;
},
get toScale() {
var _this$uniforms$5$data = _slicedToArray(this.uniforms[5].data, 2),
x = _this$uniforms$5$data[0],
y = _this$uniforms$5$data[1];
return {
x: x,
y: y
};
},
set toScale(_ref2) {
var x = _ref2.x,
y = _ref2.y;
if (typeof x !== 'undefined') this.uniforms[5].data[0] = x;
if (typeof y !== 'undefined') this.uniforms[5].data[1] = y;
},
get to() {
return this.textures[0].image;
},
set to(media) {
this.textures[0].image = media;
},
get map() {
return this.textures[1].image;
},
set map(img) {
this.textures[1].image = img;
},
varying: {
v_transitionToTexCoord: 'vec2',
v_transitionDispMapTexCoord: 'vec2'
},
uniforms: [{
name: 'u_transitionEnabled',
type: 'i',
data: [1]
}, {
name: 'u_transitionTo',
type: 'i',
data: [1]
}, {
name: 'u_transitionDispMap',
type: 'i',
data: [2]
}, {
name: 'u_transitionProgress',
type: 'f',
data: [0]
}, {
name: 'u_sourceDispScale',
type: 'f',
data: [0.0, 0.0]
}, {
name: 'u_toDispScale',
type: 'f',
data: [0.0, 0.0]
}],
attributes: [{
name: 'a_transitionToTexCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}, {
name: 'a_transitionDispMapTexCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}],
textures: [{
format: 'RGBA',
update: true
}, {
format: 'RGB'
}]
};
}
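As a minimal sketch of driving this effect without the demo's helper class, the progress uniform can be advanced per frame with requestAnimationFrame; the one-second duration and linear easing here are illustrative assumptions, not part of the library:

// advance a displacementTransition effect's progress from 0 to 1 over `duration` ms
function runTransition (effect, duration = 1000) {
    const start = performance.now();

    function frame (now) {
        // the shader mixes source and target pixels by this value
        effect.progress = Math.min((now - start) / duration, 1);
        if (effect.progress < 1) {
            window.requestAnimationFrame(frame);
        }
    }

    window.requestAnimationFrame(frame);
}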
var core = {
init,
draw,
destroy,
resize,
getWebGLContext,
createTexture
init: init,
draw: draw,
destroy: destroy,
resize: resize,
getWebGLContext: getWebGLContext,
createTexture: createTexture
};
const vertexTemplate = ({
uniform = '',
attribute = '',
varying = '',
constant = '',
main = ''
}) => `
precision mediump float;
${uniform}
${attribute}
attribute vec2 a_texCoord;
attribute vec2 a_position;
${varying}
varying vec2 v_texCoord;
var vertexTemplate = function vertexTemplate(_ref) {
var _ref$uniform = _ref.uniform,
uniform = _ref$uniform === void 0 ? '' : _ref$uniform,
_ref$attribute = _ref.attribute,
attribute = _ref$attribute === void 0 ? '' : _ref$attribute,
_ref$varying = _ref.varying,
varying = _ref$varying === void 0 ? '' : _ref$varying,
_ref$constant = _ref.constant,
constant = _ref$constant === void 0 ? '' : _ref$constant,
_ref$main = _ref.main,
main = _ref$main === void 0 ? '' : _ref$main;
return "\nprecision mediump float;\n".concat(uniform, "\n").concat(attribute, "\nattribute vec2 a_texCoord;\nattribute vec2 a_position;\n").concat(varying, "\nvarying vec2 v_texCoord;\n\nconst vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);\n").concat(constant, "\nvoid main() {\n v_texCoord = a_texCoord;\n ").concat(main, "\n gl_Position = vec4(a_position.xy, 0.0, 1.0);\n}");
};
const vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);
${constant}
void main() {
v_texCoord = a_texCoord;
${main}
gl_Position = vec4(a_position.xy, 0.0, 1.0);
}`;
const fragmentTemplate = ({
uniform = '',
varying = '',
constant = '',
main = '',
source = ''
}) => `
precision mediump float;
${varying}
varying vec2 v_texCoord;
${uniform}
uniform sampler2D u_source;
const vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);
${constant}
void main() {
vec2 sourceCoord = v_texCoord;
${source}
vec4 pixel = texture2D(u_source, sourceCoord);
vec3 color = pixel.rgb;
float alpha = pixel.a;
${main}
gl_FragColor = vec4(color, 1.0) * alpha;
}`;
var fragmentTemplate = function fragmentTemplate(_ref2) {
var _ref2$uniform = _ref2.uniform,
uniform = _ref2$uniform === void 0 ? '' : _ref2$uniform,
_ref2$varying = _ref2.varying,
varying = _ref2$varying === void 0 ? '' : _ref2$varying,
_ref2$constant = _ref2.constant,
constant = _ref2$constant === void 0 ? '' : _ref2$constant,
_ref2$main = _ref2.main,
main = _ref2$main === void 0 ? '' : _ref2$main,
_ref2$source = _ref2.source,
source = _ref2$source === void 0 ? '' : _ref2$source;
return "\nprecision mediump float;\n".concat(varying, "\nvarying vec2 v_texCoord;\n").concat(uniform, "\nuniform sampler2D u_source;\n\nconst vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);\n").concat(constant, "\nvoid main() {\n vec2 sourceCoord = v_texCoord;\n ").concat(source, "\n vec4 pixel = texture2D(u_source, sourceCoord);\n vec3 color = pixel.rgb;\n float alpha = pixel.a;\n ").concat(main, "\n gl_FragColor = vec4(color, 1.0) * alpha;\n}");
};
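For illustration only, and not a claim about how the library composes programs internally: passing an effect's GLSL snippets to the fragmentTemplate above yields a complete fragment shader string built around the u_source lookup. The uniform and varying declaration strings here are written out by hand to match the displacement() descriptor defined earlier in this bundle:

// illustrative composition using the displacement() descriptor from this bundle
var effect = displacement();
var fragmentSource = fragmentTemplate({
    uniform: 'uniform bool u_displacementEnabled;\nuniform sampler2D u_dispMap;\nuniform vec2 u_dispScale;',
    varying: 'varying vec2 v_displacementMapTexCoord;',
    source: effect.fragment.source // runs before the texture2D(u_source, sourceCoord) lookup
});
// fragmentSource can then be compiled as a WebGL fragment shader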
/**

@@ -828,11 +833,15 @@ * Initialize a compiled WebGLProgram for the given canvas and effects.

*/
function init (gl, effects, dimensions) {
const programData = _initProgram(gl, effects);
return {gl, data: programData, dimensions: dimensions || {}};
function init(gl, effects, dimensions) {
var programData = _initProgram(gl, effects);
return {
gl: gl,
data: programData,
dimensions: dimensions || {}
};
}
let WEBGL_CONTEXT_SUPPORTED = false;
var WEBGL_CONTEXT_SUPPORTED = false;
/**

@@ -847,27 +856,27 @@ * Get a webgl context for the given canvas element.

*/
function getWebGLContext (canvas) {
let context;
const config = {
preserveDrawingBuffer: false, // should improve performance - https://stackoverflow.com/questions/27746091/preservedrawingbuffer-false-is-it-worth-the-effort
antialias: false, // should improve performance
depth: false, // turn off for explicitness - and in some cases perf boost
stencil: false // turn off for explicitness - and in some cases perf boost
};
function getWebGLContext(canvas) {
var context;
var config = {
preserveDrawingBuffer: false,
// should improve performance - https://stackoverflow.com/questions/27746091/preservedrawingbuffer-false-is-it-worth-the-effort
antialias: false,
// should improve performance
depth: false,
// turn off for explicitness - and in some cases perf boost
stencil: false // turn off for explicitness - and in some cases perf boost
context = canvas.getContext('webgl', config);
};
context = canvas.getContext('webgl', config);
if ( context ) {
WEBGL_CONTEXT_SUPPORTED = true;
}
else if ( ! WEBGL_CONTEXT_SUPPORTED ) {
context = canvas.getContext('experimental-webgl', config);
}
else {
return null;
}
if (context) {
WEBGL_CONTEXT_SUPPORTED = true;
} else if (!WEBGL_CONTEXT_SUPPORTED) {
context = canvas.getContext('experimental-webgl', config);
} else {
return null;
}
return context;
return context;
}
/**

@@ -881,30 +890,32 @@ * Resize the target canvas.

*/
function resize (gl, dimensions) {
const canvas = gl.canvas;
const realToCSSPixels = 1; //window.devicePixelRatio;
const {width, height} = dimensions || {};
let displayWidth, displayHeight;
if ( width && height ) {
displayWidth = width;
displayHeight = height;
}
else {
// Lookup the size the browser is displaying the canvas.
displayWidth = Math.floor(canvas.clientWidth * realToCSSPixels);
displayHeight = Math.floor(canvas.clientHeight * realToCSSPixels);
}
// Check if the canvas is not the same size.
if ( canvas.width !== displayWidth ||
canvas.height !== displayHeight ) {
function resize(gl, dimensions) {
var canvas = gl.canvas;
var realToCSSPixels = 1; //window.devicePixelRatio;
// Make the canvas the same size
canvas.width = displayWidth;
canvas.height = displayHeight;
}
var _ref3 = dimensions || {},
width = _ref3.width,
height = _ref3.height;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
var displayWidth, displayHeight;
if (width && height) {
displayWidth = width;
displayHeight = height;
} else {
// Lookup the size the browser is displaying the canvas.
displayWidth = Math.floor(canvas.clientWidth * realToCSSPixels);
displayHeight = Math.floor(canvas.clientHeight * realToCSSPixels);
} // Check if the canvas is not the same size.
if (canvas.width !== displayWidth || canvas.height !== displayHeight) {
// Make the canvas the same size
canvas.width = displayWidth;
canvas.height = displayHeight;
}
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
}
/**

@@ -919,42 +930,42 @@ * Draw a given scene

*/
function draw (gl, media, data, dimensions) {
const {program, source, attributes, uniforms, textures} = data;
// bind the source texture
gl.bindTexture(gl.TEXTURE_2D, source.texture);
// read source data into texture
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, media);
function draw(gl, media, data, dimensions) {
var program = data.program,
source = data.source,
attributes = data.attributes,
uniforms = data.uniforms,
textures = data.textures; // bind the source texture
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
gl.bindTexture(gl.TEXTURE_2D, source.texture); // read source data into texture
// set attribute buffers with data
_enableVertexAttributes(gl, attributes);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, media); // Tell it to use our program (pair of shaders)
// set uniforms with data
_setUniforms(gl, uniforms);
gl.useProgram(program); // set attribute buffers with data
if ( textures ) {
for ( let i = -1; i < textures.length; i++ ) {
gl.activeTexture(gl.TEXTURE0 + (i + 1));
_enableVertexAttributes(gl, attributes); // set uniforms with data
if ( i === -1 ) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
}
else {
const tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
if ( tex.update ) {
gl.texImage2D(gl.TEXTURE_2D, 0,gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
_setUniforms(gl, uniforms);
if (textures) {
for (var i = -1; i < textures.length; i++) {
gl.activeTexture(gl.TEXTURE0 + (i + 1));
if (i === -1) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
} else {
var tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
if (tex.update) {
gl.texImage2D(gl.TEXTURE_2D, 0, gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
}
} // Draw the rectangles
// Draw the rectangles
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
}
/**

@@ -967,229 +978,238 @@ * Free all resources attached to a specific webgl context.

*/
function destroy (gl, data) {
const {program, vertexShader, fragmentShader, source, attributes} = data;
// delete buffers
(attributes || []).forEach(attr => gl.deleteBuffer(attr.buffer));
// delete texture
gl.deleteTexture(source.texture);
function destroy(gl, data) {
var program = data.program,
vertexShader = data.vertexShader,
fragmentShader = data.fragmentShader,
source = data.source,
attributes = data.attributes; // delete buffers
// delete program
gl.deleteProgram(program);
(attributes || []).forEach(function (attr) {
return gl.deleteBuffer(attr.buffer);
}); // delete texture
// delete shaders
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
gl.deleteTexture(source.texture); // delete program
gl.deleteProgram(program); // delete shaders
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
}
function _initProgram (gl, effects) {
const source = {
texture: createTexture(gl).texture,
buffer: null
};
function _initProgram(gl, effects) {
var source = {
texture: createTexture(gl).texture,
buffer: null
}; // flip Y axis for source texture
// flip Y axis for source texture
gl.bindTexture(gl.TEXTURE_2D, source.texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.bindTexture(gl.TEXTURE_2D, source.texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
const data = _mergeEffectsData(effects);
const vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate);
var data = _mergeEffectsData(effects);
// compile the GLSL program
const {program, vertexShader, fragmentShader, error, type} = _getWebGLProgram(gl, vertexSrc, fragmentSrc);
var vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
if ( error ) {
throw new Error(`${type} error:: ${error}\n${fragmentSrc}`);
}
var fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate); // compile the GLSL program
// setup the vertex data
const attributes = _initVertexAttributes(gl, program, data.attributes);
// setup uniforms
const uniforms = _initUniforms(gl, program, data.uniforms);
var _getWebGLProgram2 = _getWebGLProgram(gl, vertexSrc, fragmentSrc),
program = _getWebGLProgram2.program,
vertexShader = _getWebGLProgram2.vertexShader,
fragmentShader = _getWebGLProgram2.fragmentShader,
error = _getWebGLProgram2.error,
type = _getWebGLProgram2.type;
return {
program,
vertexShader,
fragmentShader,
source,
attributes,
uniforms,
textures: data.textures
};
if (error) {
throw new Error("".concat(type, " error:: ").concat(error, "\n").concat(fragmentSrc));
} // setup the vertex data
var attributes = _initVertexAttributes(gl, program, data.attributes); // setup uniforms
var uniforms = _initUniforms(gl, program, data.uniforms);
return {
program: program,
vertexShader: vertexShader,
fragmentShader: fragmentShader,
source: source,
attributes: attributes,
uniforms: uniforms,
textures: data.textures
};
}
function _mergeEffectsData (effects) {
return effects.reduce((result, config) => {
const {attributes = [], uniforms = [], textures = [], varying = {}} = config;
const merge = shader => Object.keys(config[shader]).forEach(key => {
if ( key === 'constant' || key === 'main' || key === 'source' ) {
result[shader][key] += config[shader][key] + '\n';
}
else {
result[shader][key] = {...result[shader][key], ...config[shader][key]};
}
});
function _mergeEffectsData(effects) {
return effects.reduce(function (result, config) {
var _result$uniforms, _result$textures;
merge('vertex');
merge('fragment');
var _config$attributes = config.attributes,
attributes = _config$attributes === void 0 ? [] : _config$attributes,
_config$uniforms = config.uniforms,
uniforms = _config$uniforms === void 0 ? [] : _config$uniforms,
_config$textures = config.textures,
textures = _config$textures === void 0 ? [] : _config$textures,
_config$varying = config.varying,
varying = _config$varying === void 0 ? {} : _config$varying;
attributes.forEach(attribute => {
const found = result.attributes.some((attr, n) => {
if ( attr.name === attribute.name ) {
Object.assign(attr, attribute);
return true;
}
});
var merge = function merge(shader) {
return Object.keys(config[shader]).forEach(function (key) {
if (key === 'constant' || key === 'main' || key === 'source') {
result[shader][key] += config[shader][key] + '\n';
} else {
result[shader][key] = _objectSpread({}, result[shader][key], config[shader][key]);
}
});
};
if ( ! found ) {
result.attributes.push(attribute);
}
});
merge('vertex');
merge('fragment');
attributes.forEach(function (attribute) {
var found = result.attributes.some(function (attr, n) {
if (attr.name === attribute.name) {
Object.assign(attr, attribute);
return true;
}
});
result.uniforms.push(...uniforms);
result.textures.push(...textures);
if (!found) {
result.attributes.push(attribute);
}
});
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
(_result$uniforms = result.uniforms).push.apply(_result$uniforms, _toConsumableArray(uniforms));
return result;
(_result$textures = result.textures).push.apply(_result$textures, _toConsumableArray(textures));
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
return result;
}, {
vertex: {
uniform: {},
attribute: {},
varying: {},
constant: '',
main: ''
},
fragment: {
uniform: {},
varying: {},
constant: '',
main: '',
source: ''
},
/*
* Default attributes
*/
attributes: [{
name: 'a_position',
data: new Float32Array([-1.0, -1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}, {
vertex: {
uniform: {},
attribute: {},
varying: {},
constant: '',
main: ''
},
fragment: {
uniform: {},
varying: {},
constant: '',
main: '',
source: ''
},
/*
* Default attributes
*/
attributes: [
{
name: 'a_position',
data: new Float32Array([
-1.0, -1.0,
-1.0, 1.0,
1.0, -1.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
},
{
name: 'a_texCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
/*
* Default uniforms
*/
uniforms: [
{
name: 'u_source',
type: 'i',
data: [0]
}
],
/*
* Default textures
*/
textures: []
});
}
name: 'a_texCoord',
data: new Float32Array([0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0]),
size: 2,
type: 'FLOAT'
}],
function _stringifyShaderSrc (data, template) {
const templateData = Object.entries(data)
.reduce((result, [key, value]) => {
if ( ['uniform', 'attribute', 'varying'].includes(key) ) {
result[key] = Object.entries(value)
.reduce((str, [name, type]) =>
str + `${key} ${type} ${name};\n`,
''
);
}
else {
result[key] = value;
}
/*
* Default uniforms
*/
uniforms: [{
name: 'u_source',
type: 'i',
data: [0]
}],
return result;
}, {});
return template(templateData);
/*
* Default textures
*/
textures: []
});
}
function _getWebGLProgram (gl, vertexSrc, fragmentSrc) {
const vertexShader = _createShader(gl, gl.VERTEX_SHADER, vertexSrc);
const fragmentShader = _createShader(gl, gl.FRAGMENT_SHADER, fragmentSrc);
function _stringifyShaderSrc(data, template) {
var templateData = Object.entries(data).reduce(function (result, _ref4) {
var _ref5 = _slicedToArray(_ref4, 2),
key = _ref5[0],
value = _ref5[1];
if ( vertexShader.error ) {
return vertexShader;
}
if (['uniform', 'attribute', 'varying'].includes(key)) {
result[key] = Object.entries(value).reduce(function (str, _ref6) {
var _ref7 = _slicedToArray(_ref6, 2),
name = _ref7[0],
type = _ref7[1];
if ( fragmentShader.error ) {
return fragmentShader;
return str + "".concat(key, " ").concat(type, " ").concat(name, ";\n");
}, '');
} else {
result[key] = value;
}
return _createProgram(gl, vertexShader, fragmentShader);
return result;
}, {});
return template(templateData);
}
function _createProgram (gl, vertexShader, fragmentShader) {
const program = gl.createProgram();
function _getWebGLProgram(gl, vertexSrc, fragmentSrc) {
var vertexShader = _createShader(gl, gl.VERTEX_SHADER, vertexSrc);
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
var fragmentShader = _createShader(gl, gl.FRAGMENT_SHADER, fragmentSrc);
const success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (vertexShader.error) {
return vertexShader;
}
if ( success ) {
return {program, vertexShader, fragmentShader};
}
if (fragmentShader.error) {
return fragmentShader;
}
const exception = {
error: gl.getProgramInfoLog(program),
type: 'program'
};
gl.deleteProgram(program);
return exception;
return _createProgram(gl, vertexShader, fragmentShader);
}
function _createShader (gl, type, source) {
const shader = gl.createShader(type);
function _createProgram(gl, vertexShader, fragmentShader) {
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
var success = gl.getProgramParameter(program, gl.LINK_STATUS);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (success) {
return {
program: program,
vertexShader: vertexShader,
fragmentShader: fragmentShader
};
}
const success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
var exception = {
error: gl.getProgramInfoLog(program),
type: 'program'
};
gl.deleteProgram(program);
return exception;
}
if ( success ) {
return shader;
}
function _createShader(gl, type, source) {
var shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
var success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
const exception = {
error: gl.getShaderInfoLog(shader),
type: type === gl.VERTEX_SHADER ? 'VERTEX' : 'FRAGMENT'
};
if (success) {
return shader;
}
gl.deleteShader(shader);
return exception;
var exception = {
error: gl.getShaderInfoLog(shader),
type: type === gl.VERTEX_SHADER ? 'VERTEX' : 'FRAGMENT'
};
gl.deleteShader(shader);
return exception;
}
/**

@@ -1206,80 +1226,99 @@ * Create a WebGLTexture object.

*/
function createTexture (gl, {width=1, height=1, data=null, format='RGBA'}={}) {
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// Set the parameters so we can render any size image
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
function createTexture(gl) {
var _ref8 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref8$width = _ref8.width,
width = _ref8$width === void 0 ? 1 : _ref8$width,
_ref8$height = _ref8.height,
height = _ref8$height === void 0 ? 1 : _ref8$height,
_ref8$data = _ref8.data,
data = _ref8$data === void 0 ? null : _ref8$data,
_ref8$format = _ref8.format,
format = _ref8$format === void 0 ? 'RGBA' : _ref8$format;
if ( data ) {
// Upload the image into the texture
gl.texImage2D(gl.TEXTURE_2D, 0,gl[format], gl[format], gl.UNSIGNED_BYTE, data);
}
else {
// Create empty texture
gl.texImage2D(gl.TEXTURE_2D, 0, gl[format], width, height, 0, gl[format], gl.UNSIGNED_BYTE, null);
}
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture); // Set the parameters so we can render any size image
return {texture, width, height, format};
}
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
function _createBuffer (gl, program, name, data) {
const location = gl.getAttribLocation(program, name);
const buffer = gl.createBuffer();
if (data) {
// Upload the image into the texture
gl.texImage2D(gl.TEXTURE_2D, 0, gl[format], gl[format], gl.UNSIGNED_BYTE, data);
} else {
// Create empty texture
gl.texImage2D(gl.TEXTURE_2D, 0, gl[format], width, height, 0, gl[format], gl.UNSIGNED_BYTE, null);
}
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
return {
texture: texture,
width: width,
height: height,
format: format
};
}
return {location, buffer};
function _createBuffer(gl, program, name, data) {
var location = gl.getAttribLocation(program, name);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
return {
location: location,
buffer: buffer
};
}
function _initVertexAttributes (gl, program, data) {
return (data || []).map(attr => {
const {location, buffer} = _createBuffer(gl, program, attr.name, attr.data);
function _initVertexAttributes(gl, program, data) {
return (data || []).map(function (attr) {
var _createBuffer2 = _createBuffer(gl, program, attr.name, attr.data),
location = _createBuffer2.location,
buffer = _createBuffer2.buffer;
return {
name: attr.name,
location,
buffer,
type: attr.type,
size: attr.size
};
});
return {
name: attr.name,
location: location,
buffer: buffer,
type: attr.type,
size: attr.size
};
});
}
function _initUniforms (gl, program, uniforms) {
return (uniforms || []).map(uniform => {
const location = gl.getUniformLocation(program, uniform.name);
return {
location,
size: uniform.size || uniform.data.length,
type: uniform.type,
data: uniform.data
};
});
function _initUniforms(gl, program, uniforms) {
return (uniforms || []).map(function (uniform) {
var location = gl.getUniformLocation(program, uniform.name);
return {
location: location,
size: uniform.size || uniform.data.length,
type: uniform.type,
data: uniform.data
};
});
}
function _setUniforms (gl, uniformData) {
(uniformData || []).forEach(uniform => {
const {size, type, location, data} = uniform;
gl[`uniform${size}${type}v`](location, data);
});
function _setUniforms(gl, uniformData) {
(uniformData || []).forEach(function (uniform) {
var size = uniform.size,
type = uniform.type,
location = uniform.location,
data = uniform.data;
gl["uniform".concat(size).concat(type, "v")](location, data);
});
}
function _enableVertexAttributes (gl, attributes) {
(attributes || []).forEach(attrib => {
const {location, buffer, size, type} = attrib;
gl.enableVertexAttribArray(location);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(location, size, gl[type], false, 0, 0);
});
function _enableVertexAttributes(gl, attributes) {
(attributes || []).forEach(function (attrib) {
var location = attrib.location,
buffer = attrib.buffer,
size = attrib.size,
type = attrib.type;
gl.enableVertexAttribArray(location);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(location, size, gl[type], false, 0, 0);
});
}
/**

@@ -1309,70 +1348,2 @@ * @private

/**
* Initialize a ticker instance for batching animation of multiple Kampos instances.
*
* @class Ticker
*/
class Ticker {
constructor () {
this.pool = [];
}
/**
* Starts the animation loop.
*/
start () {
if ( ! this.animationFrameId ) {
const loop = () => {
this.animationFrameId = window.requestAnimationFrame(loop);
this.draw();
};
this.animationFrameId = window.requestAnimationFrame(loop);
}
}
/**
* Stops the animation loop.
*/
stop () {
window.cancelAnimationFrame(this.animationFrameId);
this.animationFrameId = null;
}
/**
* Invoke draw() on all instances in the pool.
*/
draw () {
this.pool.forEach(instance => instance.draw());
}
/**
* Add an instance to the pool.
*
* @param {Kampos} instance
*/
add (instance) {
const index = this.pool.indexOf(instance);
if ( ! ~ index ) {
this.pool.push(instance);
instance.playing = true;
}
}
/**
     * Remove an instance from the pool.
*
* @param {Kampos} instance
*/
remove (instance) {
const index = this.pool.indexOf(instance);
if ( ~ index ) {
this.pool.splice(index, 1);
instance.playing = false;
}
}
}
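// A minimal usage sketch of the Ticker: one shared instance batches several Kampos
// instances into a single requestAnimationFrame loop. The canvas ids, effect lists
// (effectsA, effectsB) and video elements below are assumptions for illustration.
const sharedTicker = new Ticker();
const sceneA = new Kampos({target: document.querySelector('#canvas-a'), effects: effectsA, ticker: sharedTicker});
const sceneB = new Kampos({target: document.querySelector('#canvas-b'), effects: effectsB, ticker: sharedTicker});
sceneA.setSource(videoA); // each instance still gets its own media source
sceneB.setSource(videoB);
sharedTicker.start();     // one shared loop calls draw() on every pooled instance; stop() halts them all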
/**
* Initialize a webgl target with effects.

@@ -1389,124 +1360,119 @@ *

*/
class Kampos {
/**
* @constructor
*/
constructor (config) {
if ( ! config || ! config.target ) {
throw new Error('A target canvas was not provided');
}
if ( Kampos.preventContextCreation )
throw new Error('Context creation is prevented');
var Kampos =
/*#__PURE__*/
function () {
/**
* @constructor
*/
function Kampos(config) {
var _this = this;
this._contextCreationError = function () {
Kampos.preventContextCreation = true;
_classCallCheck(this, Kampos);
if ( config && config.onContextCreationError ) {
config.onContextCreationError.call(this, config);
}
};
if (!config || !config.target) {
throw new Error('A target canvas was not provided');
}
config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (Kampos.preventContextCreation) throw new Error('Context creation is prevented');
const success = this.init(config);
this._contextCreationError = function () {
Kampos.preventContextCreation = true;
if ( ! success )
throw new Error('Could not create context');
if (config && config.onContextCreationError) {
config.onContextCreationError.call(this, config);
}
};
this._restoreContext = (e) => {
e && e.preventDefault();
config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
var success = this.init(config);
if (!success) throw new Error('Could not create context');
this.config.target.removeEventListener('webglcontextrestored', this._restoreContext, true);
this._restoreContext = function (e) {
e && e.preventDefault();
const success = this.init();
_this.config.target.removeEventListener('webglcontextrestored', _this._restoreContext, true);
if ( ! success )
return false;
var success = _this.init();
if ( this._source ) {
this.setSource(this._source);
}
if (!success) return false;
delete this._source;
if (_this._source) {
_this.setSource(_this._source);
}
if ( config && config.onContextRestored ) {
config.onContextRestored.call(this, config);
}
delete _this._source;
return true;
};
if (config && config.onContextRestored) {
config.onContextRestored.call(_this, config);
}
this._loseContext = (e) => {
e.preventDefault();
return true;
};
if ( this.gl && this.gl.isContextLost() ) {
this._loseContext = function (e) {
e.preventDefault();
this.lostContext = true;
if (_this.gl && _this.gl.isContextLost()) {
_this.lostContext = true;
this.config.target.addEventListener('webglcontextrestored', this._restoreContext, true);
_this.config.target.addEventListener('webglcontextrestored', _this._restoreContext, true);
this.destroy(true);
_this.destroy(true);
if ( config && config.onContextLost ) {
config.onContextLost.call(this, config);
}
}
};
if (config && config.onContextLost) {
config.onContextLost.call(_this, config);
}
}
};
this.config.target.addEventListener('webglcontextlost', this._loseContext, true);
}
this.config.target.addEventListener('webglcontextlost', this._loseContext, true);
}
/**
     * Initializes a Kampos instance.
* This is called inside the constructor,
* but can be called again after effects have changed
     * or after {@link Kampos#destroy()}.
*
* @param {kamposConfig} [config] defaults to `this.config`
* @return {boolean} success whether initializing of the context and program were successful
*/
/**
   * Initializes a Kampos instance.
* This is called inside the constructor,
* but can be called again after effects have changed
   * or after {@link Kampos#destroy()}.
*
* @param {kamposConfig} [config] defaults to `this.config`
* @return {boolean} success whether initializing of the context and program were successful
*/
init (config) {
config = config || this.config;
let {target, effects, ticker} = config;
if ( Kampos.preventContextCreation )
return false;
_createClass(Kampos, [{
key: "init",
value: function init(config) {
config = config || this.config;
var _config = config,
target = _config.target,
effects = _config.effects,
ticker = _config.ticker;
if (Kampos.preventContextCreation) return false;
this.lostContext = false;
var gl = core.getWebGLContext(target);
if (!gl) return false;
this.lostContext = false;
if (gl.isContextLost()) {
var success = this.restoreContext();
if (!success) return false; // get new context from the fresh clone
let gl = core.getWebGLContext(target);
gl = core.getWebGLContext(this.config.target);
if (!gl) return false;
}
if ( ! gl )
return false;
var _core$init = core.init(gl, effects, this.dimensions),
data = _core$init.data;
if ( gl.isContextLost() ) {
const success = this.restoreContext();
this.gl = gl;
this.data = data; // cache for restoring context
if ( ! success )
return false;
this.config = config;
// get new context from the fresh clone
gl = core.getWebGLContext(this.config.target);
if (ticker) {
this.ticker = ticker;
ticker.add(this);
}
if ( ! gl )
return false;
}
const {data} = core.init(gl, effects, this.dimensions);
this.gl = gl;
this.data = data;
// cache for restoring context
this.config = config;
if ( ticker ) {
this.ticker = ticker;
ticker.add(this);
}
return true;
return true;
}
/**

@@ -1520,45 +1486,51 @@ * Set the source config.

*/
setSource (source) {
if ( ! source ) return;
if ( this.lostContext ) {
const success = this.restoreContext();
}, {
key: "setSource",
value: function setSource(source) {
if (!source) return;
if ( ! success ) return;
}
if (this.lostContext) {
var success = this.restoreContext();
if (!success) return;
}
let media, width, height;
var media, width, height;
if ( Object.prototype.toString.call(source) === '[object Object]' ) {
({media, width, height} = source);
}
else {
media = source;
}
if (Object.prototype.toString.call(source) === '[object Object]') {
media = source.media;
width = source.width;
height = source.height;
} else {
media = source;
}
if ( width && height ) {
this.dimensions = { width, height };
}
if (width && height) {
this.dimensions = {
width: width,
height: height
};
} // resize the target canvas if needed
// resize the target canvas if needed
core.resize(this.gl, this.dimensions);
this._createTextures();
core.resize(this.gl, this.dimensions);
this.media = media;
this._createTextures();
this.media = media;
}
/**
* Draw current scene.
*/
draw () {
if ( this.lostContext ) {
const success = this.restoreContext();
if ( ! success ) return;
}
}, {
key: "draw",
value: function draw() {
if (this.lostContext) {
var success = this.restoreContext();
if (!success) return;
}
core.draw(this.gl, this.media, this.data, this.dimensions);
core.draw(this.gl, this.media, this.data, this.dimensions);
}
/**

@@ -1569,24 +1541,27 @@ * Starts the animation loop.

*/
play () {
if ( this.ticker ) {
if ( this.animationFrameId ) {
this.stop();
}
if ( ! this.playing ) {
this.playing = true;
this.ticker.add(this);
}
}, {
key: "play",
value: function play() {
var _this2 = this;
if (this.ticker) {
if (this.animationFrameId) {
this.stop();
}
else if ( ! this.animationFrameId ) {
const loop = () => {
this.animationFrameId = window.requestAnimationFrame(loop);
this.draw();
};
this.animationFrameId = window.requestAnimationFrame(loop);
if (!this.playing) {
this.playing = true;
this.ticker.add(this);
}
} else if (!this.animationFrameId) {
var loop = function loop() {
_this2.animationFrameId = window.requestAnimationFrame(loop);
_this2.draw();
};
this.animationFrameId = window.requestAnimationFrame(loop);
}
}
/**

@@ -1597,14 +1572,16 @@ * Stops the animation loop.

*/
stop () {
if ( this.animationFrameId ) {
window.cancelAnimationFrame(this.animationFrameId);
this.animationFrameId = null;
}
if ( this.playing ) {
this.playing = false;
this.ticker.remove(this);
}
}, {
key: "stop",
value: function stop() {
if (this.animationFrameId) {
window.cancelAnimationFrame(this.animationFrameId);
this.animationFrameId = null;
}
if (this.playing) {
this.playing = false;
this.ticker.remove(this);
}
}
/**

@@ -1615,32 +1592,30 @@ * Stops animation loop and frees all resources.

*/
destroy (keepState) {
this.stop();
if ( this.gl && this.data ) {
core.destroy(this.gl, this.data);
}
}, {
key: "destroy",
value: function destroy(keepState) {
this.stop();
if ( keepState ) {
const dims = this.dimensions || {};
if (this.gl && this.data) {
core.destroy(this.gl, this.data);
}
this._source = this._source || {
media: this.media,
width: dims.width,
height: dims.height
};
}
else {
this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (keepState) {
var dims = this.dimensions || {};
this._source = this._source || {
media: this.media,
width: dims.width,
height: dims.height
};
} else {
this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
this.config = null;
this.dimensions = null;
}
this.config = null;
this.dimensions = null;
}
this.gl = null;
this.data = null;
this.media = null;
this.gl = null;
this.data = null;
this.media = null;
}
/**

@@ -1652,59 +1627,155 @@ * Restore a lost WebGL context for the given target.

*/
restoreContext () {
if ( Kampos.preventContextCreation )
return false;
const canvas = this.config.target;
const clone = this.config.target.cloneNode(true);
const parent = canvas.parentNode;
}, {
key: "restoreContext",
value: function restoreContext() {
if (Kampos.preventContextCreation) return false;
var canvas = this.config.target;
var clone = this.config.target.cloneNode(true);
var parent = canvas.parentNode;
if ( parent ) {
parent.replaceChild(clone, canvas);
}
if (parent) {
parent.replaceChild(clone, canvas);
}
this.config.target = clone;
this.config.target = clone;
canvas.removeEventListener('webglcontextlost', this._loseContext, true);
canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
clone.addEventListener('webglcontextlost', this._loseContext, true);
clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
canvas.removeEventListener('webglcontextlost', this._loseContext, true);
canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
clone.addEventListener('webglcontextlost', this._loseContext, true);
clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);
if (this.lostContext) {
return this._restoreContext();
}
if ( this.lostContext ) {
return this._restoreContext();
}
return true;
}
}, {
key: "_createTextures",
value: function _createTextures() {
var _this3 = this;
return true;
this.data && this.data.textures.forEach(function (texture, i) {
var data = _this3.data.textures[i];
data.texture = core.createTexture(_this3.gl, {
width: _this3.dimensions.width,
height: _this3.dimensions.height,
format: texture.format,
data: texture.image
}).texture;
data.format = texture.format;
data.update = texture.update;
});
}
}]);
_createTextures () {
this.data && this.data.textures.forEach((texture, i) => {
const data = this.data.textures[i];
data.texture = core.createTexture(this.gl, {
width: this.dimensions.width,
height: this.dimensions.height,
format: texture.format,
data: texture.image
}).texture;
return Kampos;
}();
data.format = texture.format;
data.update = texture.update;
});
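// A minimal sketch of the context-loss hooks accepted by the constructor (the
// canvas id and the empty effect list are assumptions for illustration):
// onContextCreationError, onContextLost and onContextRestored are invoked with
// the instance as `this` and the config object as their argument.
var resilient = new Kampos({
  target: document.querySelector('#canvas'),
  effects: [],
  onContextLost: function () { console.warn('WebGL context lost'); },
  onContextRestored: function () { this.play(); },
  onContextCreationError: function () { console.error('could not create a WebGL context'); }
});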
/**
* Initialize a ticker instance for batching animation of multiple Kampos instances.
*
* @class Ticker
*/
var Ticker =
/*#__PURE__*/
function () {
function Ticker() {
_classCallCheck(this, Ticker);
this.pool = [];
}
/**
* Starts the animation loop.
*/
_createClass(Ticker, [{
key: "start",
value: function start() {
var _this = this;
if (!this.animationFrameId) {
var loop = function loop() {
_this.animationFrameId = window.requestAnimationFrame(loop);
_this.draw();
};
this.animationFrameId = window.requestAnimationFrame(loop);
}
}
}
/**
* Stops the animation loop.
*/
}, {
key: "stop",
value: function stop() {
window.cancelAnimationFrame(this.animationFrameId);
this.animationFrameId = null;
}
/**
* Invoke draw() on all instances in the pool.
*/
}, {
key: "draw",
value: function draw() {
this.pool.forEach(function (instance) {
return instance.draw();
});
}
/**
* Add an instance to the pool.
*
* @param {Kampos} instance
*/
}, {
key: "add",
value: function add(instance) {
var index = this.pool.indexOf(instance);
if (!~index) {
this.pool.push(instance);
instance.playing = true;
}
}
/**
     * Remove an instance from the pool.
*
* @param {Kampos} instance
*/
}, {
key: "remove",
value: function remove(instance) {
var index = this.pool.indexOf(instance);
if (~index) {
this.pool.splice(index, 1);
instance.playing = false;
}
}
}]);
return Ticker;
}();
var index = {
effects: {
alphaMask,
brightnessContrast,
hueSaturation,
duotone,
displacement
},
transitions: {
fade,
displacement: displacementTransition
},
Kampos,
Ticker
effects: {
alphaMask: alphaMask,
brightnessContrast: brightnessContrast,
hueSaturation: hueSaturation,
duotone: duotone,
displacement: displacement
},
transitions: {
fade: fade,
displacement: displacementTransition
},
Kampos: Kampos,
Ticker: Ticker
};

@@ -1711,0 +1782,0 @@

{
"name": "kampos",
"version": "0.2.0",
"version": "0.2.1",
"description": "Tiny and fast effects compositor on WebGL",

@@ -50,2 +50,3 @@ "registry": "https://registry.npmjs.org/",

"ava": "^2.1.0",
"codemirror": "^5.48.2",
"documentation": "^11.0.1",

@@ -52,0 +53,0 @@ "electron": "^5.0.4",

# kampos
### Tiny and fast effects compositor on WebGL
kampos lets you filter effects and beautiful transitions to your site's media,
be that images, video, etc.
kampos lets you add filter effects and beautiful transitions to video and images (or any other media).
Just like [SVG filter effects](https://developer.mozilla.org/en-US/docs/Web/SVG/Tutorial/Filter_effects),
only using WebGL, and hence works everywhere!
## Demo
Watch a [live demo](https://wix-incubator.github.io/kampos/demo/).
## Documentation
For API reference and examples read [the docs](https://wix-incubator.github.io/kampos/docs/).
## Features

@@ -17,3 +22,2 @@ * Filter effects for images and videos that you can mix and compose.

## Usage
Here's a simple example for using kampos:
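For illustration, a minimal sketch assuming a `#target` canvas and a `#video` element on the page (the ids, dimensions and the chosen effects are placeholders):

```js
import kampos from 'kampos';

// the default export bundles the effects, the transitions, Kampos and Ticker
const {Kampos, effects} = kampos;

const target = document.querySelector('#target'); // canvas to render into
const media = document.querySelector('#video');   // media source to filter

// effects can be mixed and composed by passing more than one
const instance = new Kampos({target, effects: [effects.hueSaturation(), effects.brightnessContrast()]});

// set the source (dimensions are optional) and start the animation loop
instance.setSource({media, width: 854, height: 480});
instance.play();
```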

@@ -35,8 +39,2 @@ ```

## Demo
Watch a [live demo](https://wix-incubator.github.io/kampos/demo/).
## Documentation
For API reference and examples read [the docs](https://wix-incubator.github.io/kampos/docs/).
## Getting started

@@ -43,0 +41,0 @@ Grab the source from here, or install via package manager.
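For example, installing from npm: `npm install kampos`.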

import progress from 'rollup-plugin-progress';
import filesize from 'rollup-plugin-filesize';
import babel from 'rollup-plugin-babel';

@@ -16,2 +17,3 @@ const config = {

}),
babel(),
filesize()

@@ -18,0 +20,0 @@ ]

@@ -8,3 +8,4 @@ import alphaMask from './effects/alpha-mask';

import displacementTransition from './transitions/displacement';
import {Kampos, Ticker} from './kampos';
import Kampos from './kampos';
import Ticker from './ticker';

@@ -11,0 +12,0 @@ export default {

import core from './core';
import Ticker from './ticker';

@@ -16,3 +15,3 @@ /**

*/
class Kampos {
export default class Kampos {
/**

@@ -376,6 +375,1 @@ * @constructor

*/
export {
Kampos,
Ticker
}

@@ -6,3 +6,3 @@ /**

*/
class Ticker {
export default class Ticker {
constructor () {

@@ -69,3 +69,1 @@ this.pool = [];

}
export default Ticker;

@@ -1,2 +0,3 @@

const {Kampos, Ticker} = require('./src/kampos');
const Kampos = require('./src/kampos');
const Ticker = require('./src/ticker');
const brightnessContrast = require('./src/brightness-contrast')();

@@ -3,0 +4,0 @@ const assert = require('assert');

@@ -7,2 +7,3 @@ import progress from 'rollup-plugin-progress';

'../src/kampos.js',
'../src/ticker.js',
'../src/core.js',

@@ -9,0 +10,0 @@ '../src/effects/brightness-contrast.js'

@@ -1,2 +0,3 @@

const {Kampos, Ticker} = require('./src/kampos');
const Kampos = require('./src/kampos');
const Ticker = require('./src/ticker');
const assert = require('assert');

@@ -3,0 +4,0 @@

