Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

kampos

Package Overview
Dependencies
Maintainers
1
Versions
42
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

kampos - npm Package Compare versions

Comparing version 0.2.1 to 0.2.2

demo/hue-fade.js

294

demo/demo.js

@@ -1,291 +0,3 @@

import {Kampos, Ticker} from '../src/kampos';
// import transparentVideo from '../src/effects/transparent-video';
import alphaMask from '../src/effects/alpha-mask';
import brightnessContrast from '../src/effects/brightness-contrast';
import hueSaturation from '../src/effects/hue-saturation';
import duotone from '../src/effects/duotone';
// demo page elements
const video = document.querySelector('#video');
const videoUrl = document.querySelector('#video-url');
const maskUrl = document.querySelector('#alpha-mask-url');
// `target` is reassigned whenever the canvas is rebuilt (see toggleHandler)
let target = document.querySelector('#target');
// let maskURL = 'https://static.wixstatic.com/shapes/3943e2a044854dfbae0fbe56ec72c7d9.svg';
let maskURL = 'https://static.wixstatic.com/shapes/2fc6253d53dc4925aab74c224256d7f8.svg';
// readiness flags set by the one-shot video event handlers below
let playing = false;
let timeupdate = false;
// Load the URL from the #video-url input into the video element and
// register one-shot listeners that tell us when it is really playing.
function initVideo () {
video.src = videoUrl.value;
video.addEventListener('playing', isPlaying, true);
video.addEventListener('timeupdate', isTimeupdate, true);
video.addEventListener('canplay', canPlay, true);
}
// Start playback as soon as the browser reports enough buffered data.
function canPlay () {
video.play();
}
// One-shot 'playing' handler: set the flag and re-check readiness.
function isPlaying () {
playing = true;
video.removeEventListener('playing', isPlaying, true);
check();
}
// One-shot 'timeupdate' handler: set the flag and re-check readiness.
function isTimeupdate () {
timeupdate = true;
video.removeEventListener('timeupdate', isTimeupdate, true);
check();
}
// Only after the video is both playing AND producing time updates is it
// safe to read its dimensions and hand it to kampos as a source.
function check () {
if (playing && timeupdate) {
playing = false;
timeupdate = false;
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
target.style.width = `${width}px`;
target.style.height = `${height}px`;
// when alpha-masking is on, rasterize the mask first, then start rendering
if ( toggleAlphaMask.checked ) {
createMaskImage(width, height)
.then(function () {
instance.setSource({media: video, type: 'video', width, height});
ticker.start();
});
}
else {
instance.setSource({media: video, type: 'video', width, height});
ticker.start();
}
video.removeEventListener('canplay', canPlay, true);
}
}
/**
 * Convert a CSS hex color string into a vec4-style array of RGBA floats in [0, 1].
 *
 * Accepts '#rrggbb' (alpha defaults to 1.0) and, as a backward-compatible
 * generalization, '#rrggbbaa' with an explicit alpha channel.
 *
 * @param {string} hex color string starting with '#'
 * @return {number[]} [r, g, b, a], each channel normalized to [0, 1]
 */
function hex2vec4 (hex) {
    const s = hex.substring(1);
    // split into 2-char channel pairs; default alpha to 'ff' (opaque) when absent
    const channels = [
        s.slice(0, 2),
        s.slice(2, 4),
        s.slice(4, 6),
        s.length >= 8 ? s.slice(6, 8) : 'ff'
    ];
    return channels.map(h => parseInt(h, 16) / 255);
}
/**
 * Rasterize an SVG markup string onto a 2D canvas context.
 *
 * Wraps the markup in a Blob, loads it through an Image via an object URL,
 * draws it at (0, 0) and then revokes the URL.
 *
 * @param {CanvasRenderingContext2D} ctx context to draw into
 * @param {string} rawSVG complete SVG markup
 * @param {function(HTMLImageElement)} callback invoked with the loaded image after drawing
 */
function drawInlineSVG (ctx, rawSVG, callback) {
    const svg = new Blob([rawSVG], {type:"image/svg+xml"});
    const url = URL.createObjectURL(svg);
    const img = new Image;
    img.onload = function () {
        ctx.drawImage(this, 0, 0);
        URL.revokeObjectURL(url);
        callback(this);
    };
    // fix: also revoke on decode failure, otherwise the object URL leaks
    img.onerror = function () {
        URL.revokeObjectURL(url);
    };
    img.src = url;
}
/**
 * Download the current mask SVG (global `maskURL`) and resolve with its markup text.
 *
 * @return {Promise<string>} the SVG document as a string
 */
function fetchSVG () {
    return window.fetch(maskURL).then(response => response.text());
}
/**
 * Shared `input` handler for the demo's effect controls.
 *
 * Routes the changed control (identified by its element id) to the matching
 * effect instance (bc / hs / dt globals) and mirrors the new value into the
 * control's sibling label.
 *
 * @param {{target: HTMLInputElement}} e input event (or a synthetic {target} object)
 */
function handleRangeChange (e) {
    const target = e.target;
    const effect = target.id;
    let data;

    if (effect === 'brightness' || effect === 'contrast') {
        // numeric brightness/contrast sliders
        bc[effect] = target.value;
        data = [bc[effect]];
    }
    else if (effect === 'hue' || effect === 'saturation') {
        // numeric hue/saturation sliders
        hs[effect] = target.value;
        data = [hs[effect]];
    }
    else if (effect === 'duotone-light') {
        // color input -> vec4 for the duotone light color
        dt.light = hex2vec4(target.value);
        e.target.nextElementSibling.textContent = target.value;
    }
    else if (effect === 'duotone-dark') {
        // color input -> vec4 for the duotone dark color
        dt.dark = hex2vec4(target.value);
        e.target.nextElementSibling.textContent = target.value;
    }

    // numeric controls: normalize to a float and update the label
    if (data) {
        data[0] = parseFloat(target.value);
        e.target.nextElementSibling.textContent = data[0];
    }
}
// ids of the range/color inputs wired to handleRangeChange below
const inputs = ['brightness', 'contrast', 'hue', 'saturation', 'duotone-light', 'duotone-dark'];
// effect instances shared by the input handlers
const hs = hueSaturation();
const bc = brightnessContrast();
const dt = duotone();
// const tv = transparentVideo();
const am = alphaMask();
// const toggleTransparent = document.querySelector('#toggle-transparent');
const toggleDuotone = document.querySelector('#toggle-duotone');
const toggleAlphaMask = document.querySelector('#toggle-alphamask');
// const duotoneChecked = toggleDuotone.checked;
// const transparentChecked = toggleTransparent.checked;
const toggleAlphaMaskChecked = toggleAlphaMask.checked;
// build the initial effects list; push order determines render order
const effects = [];
// if (transparentChecked) {
// effects.push(tv);
// }
effects.push(bc);
// if (duotoneChecked) {
effects.push(dt);
// }
effects.push(hs);
if (toggleAlphaMaskChecked) {
effects.push(am);
}
// Produce the alpha-mask image for the current `maskURL` at the given size
// and install it as the alpha-mask effect's texture image.
// SVG masks are re-rasterized onto a canvas so they scale crisply; any other
// URL is loaded as a plain image.
function createMaskImage (width, height) {
if ( maskURL.endsWith('.svg') ) {
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d');
return new Promise(function (resolve) {
fetchSVG().then(function (text) {
const div = document.createElement('div');
div.innerHTML = text;
const svg = div.firstElementChild;
// the SVG must be attached to the DOM for getBBox() to return real geometry
document.body.appendChild(svg);
const bbox = svg.getBBox();
document.body.removeChild(svg);
svg.setAttribute('viewBox', `${bbox.x} ${bbox.y} ${bbox.width} ${bbox.height}`);
svg.setAttribute('width', width);
svg.setAttribute('height', height);
canvas.width = width;
canvas.height = height;
drawInlineSVG(ctx, svg.outerHTML, () => {
am.textures[0].image = canvas;
resolve();
});
});
});
}
else {
return new Promise(function (resolve) {
const img = new Image();
// mask may be served cross-origin; keep the canvas untainted
img.crossOrigin = 'anonymous';
img.onload = function () {
am.textures[0].image = this;
resolve();
};
img.src = maskURL;
});
}
}
// Wire every effect control to the shared change handler.
// fix: the final step is pure side effect, so use forEach instead of map
// (the original built and discarded an array of undefineds).
inputs.map(function (name) {
    return document.getElementById(name);
})
.forEach(function (input) {
    input.addEventListener('input', handleRangeChange);
});
// Rebuild the canvas, the effects list and the kampos pipeline whenever an
// effect toggle changes state.
function toggleHandler () {
// instance.destroy();
// Works around an issue with working with the old context
const newCanvas = document.createElement('canvas');
target.parentElement.replaceChild(newCanvas, target);
target = newCanvas;
// rebuild the effects list from the current toggle states
effects.length = 0;
// if ( toggleTransparent.checked ) {
// effects.push(tv);
// }
effects.push(bc);
// if ( toggleDuotone.checked ) {
effects.push(dt);
// }
effects.push(hs);
if ( toggleAlphaMask.checked ) {
effects.push(am);
}
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
newCanvas.style.width = `${width}px`;
newCanvas.style.height = `${height}px`;
// re-init on the fresh canvas and re-attach the current video source
instance.init({target, effects, ticker});
instance.setSource({media: video, type: 'video', width, height});
}
// Toggling duotone doesn't need a pipeline rebuild - just flip its flag.
toggleDuotone.addEventListener('input', e => {
dt.disabled = !e.target.checked;
});
// toggleTransparent.addEventListener('input', toggleHandler);
toggleAlphaMask.addEventListener('input', toggleHandler);
// one shared ticker drives the instance's render loop
const ticker = new Ticker();
let instance = new Kampos({target, effects, ticker});
initVideo();
videoUrl.addEventListener('input', initVideo);
// swapping the mask URL re-rasterizes the mask and refreshes the GPU textures
maskUrl.addEventListener('input', e => {
maskURL = e.target.value;
const width = video.videoWidth;
// const height = video.videoHeight / (toggleTransparent.checked ? 2 : 1);
const height = video.videoHeight;
createMaskImage(width, height)
.then(() => instance._createTextures())
});
const toggleBackgroundColor = document.querySelector('#toggle-background-color');
const backgroundColor = document.querySelector('#background-color');
toggleBackgroundColor.addEventListener('input', e => {
document.body.style.backgroundImage = e.target.checked ? 'none' : '';
});
backgroundColor.addEventListener('input', e => {
document.body.style.backgroundColor = backgroundColor.value;
e.target.nextElementSibling.innerText = e.target.value;
});
// Swap the duotone light/dark colors and push both values through the
// regular change handler so the effect and the labels stay in sync.
document.querySelector('#duotone-switch').addEventListener('click', e => {
const light = document.querySelector('#duotone-light');
const dark = document.querySelector('#duotone-dark');
const lightValue = light.value;
const darkValue = dark.value;
light.value = darkValue;
dark.value = lightValue;
handleRangeChange({target: light});
handleRangeChange({target: dark});
});
// NOTE(review): import declarations are hoisted and evaluate before this
// module's statements; they sit at the bottom for readability only.
import './utils';
import './disp';
import './hue-fade';

@@ -1,36 +0,116 @@

import {Ticker} from '../src/kampos';
import Transition from './transition';
const {Kampos, transitions} = window.kampos;
const transitionDisplacement = transitions.displacement;
const video = document.querySelector('#video');
const video2 = document.querySelector('#video2');
const target = document.querySelector('#target');
/*
* Wrapper class for transition logic.
* This is a simple vanilla implementation
*/
class Transition {
constructor ({vid1, vid2, target, disp, dispScale}) {
/*
* prepare here everything we need
*/
this.vid1 = vid1;
this.vid2 = vid2;
this.target = target;
this.dispScale = dispScale;
this.transition = transitionDisplacement();
const video3 = document.querySelector('#video3');
const video4 = document.querySelector('#video4');
const target2 = document.querySelector('#target2');
this.direction = 1;
this.startTime = 0;
const video5 = document.querySelector('#video5');
const video6 = document.querySelector('#video6');
const target3 = document.querySelector('#target3');
// init kampos
this.kampos = new Kampos({target, effects: [this.transition]});
const video7 = document.querySelector('#video7');
const video8 = document.querySelector('#video8');
const target4 = document.querySelector('#target4');
// load the displacement map image
const dispReady = loadImage(disp);
const ticker = new Ticker();
// make sure videos are loaded and playing
prepareVideos([this.vid1, this.vid2])
.then(() => {
const width = this.vid1.videoWidth;
const height = this.vid1.videoHeight;
const trans1 = new Transition({
vid1: video,
dispReady.then(img => {
/*
* set transition values
*/
this.transition.map = img;
this.transition.to = this.vid2;
this.transition.sourceScale = {x: this.dispScale};
this.transition.toScale = {x: -this.dispScale};
// set media source
this.kampos.setSource({media: this.vid1, width, height});
// start kampos
this.kampos.play();
});
});
}
/*
* start animation playback forward
*/
forward () {
this.direction = 1;
this.startTime = Date.now();
this.loop();
}
/*
* start animation playback backwards
*/
backward () {
this.direction = 0;
this.startTime = Date.now();
this.loop();
}
/*
* This will probably be a callback you'll provide to your animation library
*/
tick (p) {
this.transition.progress = p;
}
/*
* This will usually be implemented by an animation library you may already have in your project
*/
loop () {
const now = Date.now() - this.startTime;
// dividing by 500 is just enough to slow down the effect
let p = Math.abs(Math.sin(now / 500));
p = this.direction ? p : 1 - p;
this.tick(p);
let nextTick = () => this.loop();
// we choose a cutoff value where the progress value
// is almost 0/1, depending on direction
// and then stop the animation by just rendering
// 1 extra tick with the final value (0 or 1 respectively).
if (this.direction) {
if (p * 100 >= 99) {
nextTick = () => this.tick(1);
}
}
else if (p * 100 <= 1) {
nextTick = () => this.tick(0);
}
window.requestAnimationFrame(nextTick);
}
}
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
const target1 = document.querySelector('#target1');
const trans = new Transition({
vid1: video1,
vid2: video2,
target: target,
ticker,
disp: 'disp-tri.jpg',
dispScale: 0.3
});
const trans2 = new Transition({
vid1: video3,
vid2: video4,
target: target2,
ticker,
target: target1,
disp: 'disp-snow.jpg',

@@ -40,23 +120,6 @@ dispScale: 1.0

const trans3 = new Transition({
vid1: video5,
vid2: video6,
target: target3,
ticker,
disp: 'disp-cloud.png',
dispScale: 0.2
});
const trans4 = new Transition({
vid1: video7,
vid2: video8,
target: target4,
ticker,
disp: 'disp-liquid.jpg',
dispScale: 0.35
});
Promise.all([trans1.ready, trans2.ready, trans3.ready, trans4.ready])
.then(() => {
ticker.start();
});
/*
* register event handlers for interaction
*/
target1.addEventListener('mouseenter', () => trans.forward());
target1.addEventListener('mouseleave', () => trans.backward());
(function () {
'use strict';
'use strict';
var core = {
init,
draw,
destroy,
resize,
getWebGLContext,
createTexture
};
/*
* Most simple image loader
* You'll probably have something like this already
*/
function loadImage$1 (src) {
return new Promise(resolve => {
const img = new Image();
const vertexTemplate = ({
uniform = '',
attribute = '',
varying = '',
constant = '',
main = ''
}) => `
precision mediump float;
${uniform}
${attribute}
attribute vec2 a_texCoord;
attribute vec2 a_position;
${varying}
varying vec2 v_texCoord;
img.onload = function () {
resolve(this);
};
const vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);
${constant}
void main() {
v_texCoord = a_texCoord;
${main}
gl_Position = vec4(a_position.xy, 0.0, 1.0);
}`;
img.src = src;
});
}
const fragmentTemplate = ({
uniform = '',
varying = '',
constant = '',
main = '',
source = ''
}) => `
precision mediump float;
${varying}
varying vec2 v_texCoord;
${uniform}
uniform sampler2D u_source;
window.loadImage = loadImage$1;
const vec3 lumcoeff = vec3(0.2125, 0.7154, 0.0721);
${constant}
void main() {
vec2 sourceCoord = v_texCoord;
${source}
vec4 pixel = texture2D(u_source, sourceCoord);
vec3 color = pixel.rgb;
float alpha = pixel.a;
${main}
gl_FragColor = vec4(color, 1.0) * alpha;
}`;
/*
* Minimal, cross-browser logic for playing videos and making sure
* they are ready to work with
*/
function prepareVideos$1 (videos) {
return new Promise(resolve => {
let playing = 0;
let timeupdate = 0;
/**
* Initialize a compiled WebGLProgram for the given canvas and effects.
*
* @private
* @param {WebGLRenderingContext} gl
* @param effects
* @param dimensions
* @return {{gl: WebGLRenderingContext, data: kamposSceneData, [dimensions]: {width: number, height: number}}}
*/
function init (gl, effects, dimensions) {
function canPlay (e) {
e.target.play();
}
const programData = _initProgram(gl, effects);
const isPlaying = e => {
playing += 1;
e.target.removeEventListener('playing', isPlaying, true);
check();
};
const isTimeupdate = (e) => {
timeupdate += 1;
e.target.removeEventListener('timeupdate', isTimeupdate, true);
check();
};
return {gl, data: programData, dimensions: dimensions || {}};
}
const check = () => {
if (playing === videos.length && timeupdate === videos.length) {
videos.forEach(vid => {
vid.removeEventListener('canplay', canPlay, true);
});
let WEBGL_CONTEXT_SUPPORTED = false;
resolve();
}
};
/**
* Get a webgl context for the given canvas element.
*
* Will return `null` if can not get a context.
*
* @private
* @param {HTMLCanvasElement} canvas
* @return {WebGLRenderingContext|null}
*/
function getWebGLContext (canvas) {
let context;
videos.forEach(vid => {
vid.addEventListener('playing', isPlaying, true);
vid.addEventListener('timeupdate', isTimeupdate, true);
vid.addEventListener('canplay', canPlay, true);
});
});
}
const config = {
preserveDrawingBuffer: false, // should improve performance - https://stackoverflow.com/questions/27746091/preservedrawingbuffer-false-is-it-worth-the-effort
antialias: false, // should improve performance
depth: false, // turn off for explicitness - and in some cases perf boost
stencil: false // turn off for explicitness - and in some cases perf boost
};
window.prepareVideos = prepareVideos$1;
context = canvas.getContext('webgl', config);
const {Kampos, transitions} = window.kampos;
const transitionDisplacement = transitions.displacement;
if ( context ) {
WEBGL_CONTEXT_SUPPORTED = true;
}
else if ( ! WEBGL_CONTEXT_SUPPORTED ) {
context = canvas.getContext('experimental-webgl', config);
}
else {
return null;
}
/*
* Wrapper class for transition logic.
* This is a simple vanilla implementation
*/
class Transition {
constructor ({vid1, vid2, target, disp, dispScale}) {
/*
* prepare here everything we need
*/
this.vid1 = vid1;
this.vid2 = vid2;
this.target = target;
this.dispScale = dispScale;
this.transition = transitionDisplacement();
return context;
}
this.direction = 1;
this.startTime = 0;
/**
* Resize the target canvas.
*
* @private
* @param {WebGLRenderingContext} gl
* @param {{width: number, height: number}} [dimensions]
* @return {boolean}
*/
function resize (gl, dimensions) {
const canvas = gl.canvas;
const realToCSSPixels = 1; //window.devicePixelRatio;
const {width, height} = dimensions || {};
let displayWidth, displayHeight;
// init kampos
this.kampos = new Kampos({target, effects: [this.transition]});
if ( width && height ) {
displayWidth = width;
displayHeight = height;
}
else {
// Lookup the size the browser is displaying the canvas.
displayWidth = Math.floor(canvas.clientWidth * realToCSSPixels);
displayHeight = Math.floor(canvas.clientHeight * realToCSSPixels);
}
// load the displacement map image
const dispReady = loadImage(disp);
// Check if the canvas is not the same size.
if ( canvas.width !== displayWidth ||
canvas.height !== displayHeight ) {
// make sure videos are loaded and playing
prepareVideos([this.vid1, this.vid2])
.then(() => {
const width = this.vid1.videoWidth;
const height = this.vid1.videoHeight;
// Make the canvas the same size
canvas.width = displayWidth;
canvas.height = displayHeight;
}
dispReady.then(img => {
/*
* set transition values
*/
this.transition.map = img;
this.transition.to = this.vid2;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
}
this.transition.sourceScale = {x: this.dispScale};
this.transition.toScale = {x: -this.dispScale};
/**
* Draw a given scene
*
* @private
* @param {WebGLRenderingContext} gl
* @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} media
* @param {kamposSceneData} data
* @param {{width: number, height: number}} dimensions
*/
function draw (gl, media, data, dimensions) {
const {program, source, attributes, uniforms, textures} = data;
// set media source
this.kampos.setSource({media: this.vid1, width, height});
// bind the source texture
gl.bindTexture(gl.TEXTURE_2D, source.texture);
// start kampos
this.kampos.play();
});
});
}
// read source data into texture
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, media);
/*
* start animation playback forward
*/
forward () {
this.direction = 1;
this.startTime = Date.now();
this.loop();
}
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
/*
* start animation playback backwards
*/
backward () {
this.direction = 0;
this.startTime = Date.now();
this.loop();
}
// set attribute buffers with data
_enableVertexAttributes(gl, attributes);
/*
* This will probably be a callback you'll provide to your animation library
*/
tick (p) {
this.transition.progress = p;
}
// set uniforms with data
_setUniforms(gl, uniforms);
/*
* This will usually be implemented by an animation library you may already have in your project
*/
loop () {
const now = Date.now() - this.startTime;
// dividing by 500 is just enough to slow down the effect
let p = Math.abs(Math.sin(now / 500));
p = this.direction ? p : 1 - p;
if ( textures ) {
for ( let i = -1; i < textures.length; i++ ) {
gl.activeTexture(gl.TEXTURE0 + (i + 1));
this.tick(p);
if ( i === -1 ) {
gl.bindTexture(gl.TEXTURE_2D, source.texture);
}
else {
const tex = textures[i];
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
let nextTick = () => this.loop();
if ( tex.update ) {
gl.texImage2D(gl.TEXTURE_2D, 0,gl[tex.format], gl[tex.format], gl.UNSIGNED_BYTE, tex.image);
}
}
}
}
// we choose a cutoff value where the progress value
// is almost 0/1, depending on direction
// and then stop the animation by just rendering
// 1 extra tick with the final value (0 or 1 respectively).
if (this.direction) {
if (p * 100 >= 99) {
nextTick = () => this.tick(1);
}
}
else if (p * 100 <= 1) {
nextTick = () => this.tick(0);
}
// Draw the rectangles
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
}
window.requestAnimationFrame(nextTick);
}
}
/**
* Free all resources attached to a specific webgl context.
*
* @private
* @param {WebGLRenderingContext} gl
* @param {kamposSceneData} data
*/
function destroy (gl, data) {
const {program, vertexShader, fragmentShader, source, attributes} = data;
const video1 = document.querySelector('#video1');
const video2 = document.querySelector('#video2');
const target1 = document.querySelector('#target1');
// delete buffers
(attributes || []).forEach(attr => gl.deleteBuffer(attr.buffer));
const trans = new Transition({
vid1: video1,
vid2: video2,
target: target1,
disp: 'disp-snow.jpg',
dispScale: 1.0
});
// delete texture
gl.deleteTexture(source.texture);
/*
* register event handlers for interaction
*/
target1.addEventListener('mouseenter', () => trans.forward());
target1.addEventListener('mouseleave', () => trans.backward());
// delete program
gl.deleteProgram(program);
const {Kampos: Kampos$1, effects, transitions: transitions$1} = window.kampos;
// delete shaders
gl.deleteShader(vertexShader);
gl.deleteShader(fragmentShader);
}
const media1 = document.querySelector('#video3');
const media2 = document.querySelector('#video4');
const target = document.querySelector('#target2');
function _initProgram (gl, effects) {
const source = {
texture: createTexture(gl).texture,
buffer: null
};
// create the effects/transitions we need
const hueSat = effects.hueSaturation();
const fade = transitions$1.fade();
// flip Y axis for source texture
gl.bindTexture(gl.TEXTURE_2D, source.texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
// init kampos
const instance = new Kampos$1({target, effects:[fade, hueSat]});
const data = _mergeEffectsData(effects);
const vertexSrc = _stringifyShaderSrc(data.vertex, vertexTemplate);
const fragmentSrc = _stringifyShaderSrc(data.fragment, fragmentTemplate);
// make sure videos are loaded and playing
prepareVideos([media1, media2])
.then(() => {
const width = media1.videoWidth;
const height = media1.videoHeight;
// compile the GLSL program
const {program, vertexShader, fragmentShader, error, type} = _getWebGLProgram(gl, vertexSrc, fragmentSrc);
// set media source
instance.setSource({media: media1, width, height});
if ( error ) {
throw new Error(`${type} error:: ${error}\n${fragmentSrc}`);
}
// set media to transition into
fade.to = media2;
// setup the vertex data
const attributes = _initVertexAttributes(gl, program, data.attributes);
// start kampos
instance.play();
});
// setup uniforms
const uniforms = _initUniforms(gl, program, data.uniforms);
let x, y, rect;
let drawing = false;
return {
program,
vertexShader,
fragmentShader,
source,
attributes,
uniforms,
textures: data.textures
};
}
// this is invoked once in every animation frame, while there's a mouse move over the canvas
function tick () {
fade.progress = Math.max(0, Math.min(1, (y - rect.y) / rect.height));
hueSat.hue = Math.max(0, Math.min(1, (x - rect.x) / rect.width)) * 360 - 180;
drawing = false;
}
function _mergeEffectsData (effects) {
return effects.reduce((result, config) => {
const {attributes = [], uniforms = [], textures = [], varying = {}} = config;
const merge = shader => Object.keys(config[shader]).forEach(key => {
if ( key === 'constant' || key === 'main' || key === 'source' ) {
result[shader][key] += config[shader][key] + '\n';
}
else {
result[shader][key] = {...result[shader][key], ...config[shader][key]};
}
});
// handler for detecting mouse move
const moveHandler = e => {
const {clientX, clientY} = e;
merge('vertex');
merge('fragment');
// cache mouse location
x = clientX;
y = clientY;
attributes.forEach(attribute => {
const found = result.attributes.some((attr, n) => {
if ( attr.name === attribute.name ) {
Object.assign(attr, attribute);
return true;
}
});
// only once! a frame
if (!drawing) {
drawing = true;
// read here
rect = target.getBoundingClientRect();
// write on next frame
requestAnimationFrame(tick);
}
};
if ( ! found ) {
result.attributes.push(attribute);
}
});
/*
* register event handlers for interaction
*/
target.addEventListener('mouseenter', () => {
target.addEventListener('mousemove', moveHandler);
});
result.uniforms.push(...uniforms);
result.textures.push(...textures);
target.addEventListener('mouseleave', () => {
target.removeEventListener('mousemove', moveHandler);
});
Object.assign(result.vertex.varying, varying);
Object.assign(result.fragment.varying, varying);
return result;
}, {
vertex: {
uniform: {},
attribute: {},
varying: {},
constant: '',
main: ''
},
fragment: {
uniform: {},
varying: {},
constant: '',
main: '',
source: ''
},
/*
* Default attributes
*/
attributes: [
{
name: 'a_position',
data: new Float32Array([
-1.0, -1.0,
-1.0, 1.0,
1.0, -1.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
},
{
name: 'a_texCoord',
data: new Float32Array([
0.0, 0.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0]),
size: 2,
type: 'FLOAT'
}
],
/*
* Default uniforms
*/
uniforms: [
{
name: 'u_source',
type: 'i',
data: [0]
}
],
/*
* Default textures
*/
textures: []
});
}
/**
 * Build a complete shader source by expanding declaration maps into GLSL
 * declaration lines and feeding the result into a template function.
 *
 * `uniform` / `attribute` / `varying` entries are objects mapping variable
 * name -> GLSL type; each is flattened into `<qualifier> <type> <name>;`
 * lines. Every other entry (constant / main / source strings) passes
 * through unchanged.
 *
 * @param {Object} data shader-half description
 * @param {function(Object): string} template template producing the final source
 * @return {string} complete shader source
 */
function _stringifyShaderSrc (data, template) {
    const DECLARATION_KEYS = ['uniform', 'attribute', 'varying'];
    const templateData = {};

    for ( const [key, value] of Object.entries(data) ) {
        if ( DECLARATION_KEYS.includes(key) ) {
            let declarations = '';
            for ( const [name, type] of Object.entries(value) ) {
                declarations += `${key} ${type} ${name};\n`;
            }
            templateData[key] = declarations;
        }
        else {
            templateData[key] = value;
        }
    }

    return template(templateData);
}
/**
 * Compile both shader halves and link them into a WebGL program.
 *
 * Both shaders are always compiled (matching driver behavior for error
 * reporting); the first compilation failure is returned as-is, otherwise
 * the linked-program result from _createProgram.
 *
 * @param {WebGLRenderingContext} gl
 * @param {string} vertexSrc vertex shader GLSL source
 * @param {string} fragmentSrc fragment shader GLSL source
 * @return {Object} {program, vertexShader, fragmentShader} or an {error, type} exception object
 */
function _getWebGLProgram (gl, vertexSrc, fragmentSrc) {
    const vertexShader = _createShader(gl, gl.VERTEX_SHADER, vertexSrc);
    const fragmentShader = _createShader(gl, gl.FRAGMENT_SHADER, fragmentSrc);
    const failed = [vertexShader, fragmentShader].find(shader => shader.error);

    return failed || _createProgram(gl, vertexShader, fragmentShader);
}
/**
 * Link a compiled vertex and fragment shader into a WebGL program.
 *
 * On link failure the half-built program is deleted and an exception
 * object {error, type: 'program'} is returned instead of being thrown.
 *
 * @param {WebGLRenderingContext} gl
 * @param {WebGLShader} vertexShader
 * @param {WebGLShader} fragmentShader
 * @return {Object} {program, vertexShader, fragmentShader} on success, {error, type} on failure
 */
function _createProgram (gl, vertexShader, fragmentShader) {
    const program = gl.createProgram();

    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);

    if ( gl.getProgramParameter(program, gl.LINK_STATUS) ) {
        return {program, vertexShader, fragmentShader};
    }

    // capture the log before freeing the program object
    const exception = {
        error: gl.getProgramInfoLog(program),
        type: 'program'
    };

    gl.deleteProgram(program);

    return exception;
}
/**
 * Compile a single shader of the given type from GLSL source.
 *
 * On compile failure the shader object is deleted and an exception object
 * {error, type: 'VERTEX' | 'FRAGMENT'} is returned instead of being thrown.
 *
 * @param {WebGLRenderingContext} gl
 * @param {GLenum} type gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} source GLSL source code
 * @return {WebGLShader|Object} the compiled shader, or an {error, type} exception object
 */
function _createShader (gl, type, source) {
    const shader = gl.createShader(type);

    gl.shaderSource(shader, source);
    gl.compileShader(shader);

    if ( gl.getShaderParameter(shader, gl.COMPILE_STATUS) ) {
        return shader;
    }

    // capture the log before freeing the shader object
    const exception = {
        error: gl.getShaderInfoLog(shader),
        type: type === gl.VERTEX_SHADER ? 'VERTEX' : 'FRAGMENT'
    };

    gl.deleteShader(shader);

    return exception;
}
/**
 * Create a WebGLTexture object.
 *
 * When `data` is supplied it is uploaded immediately; otherwise an empty
 * width x height texture is allocated.
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {number} [width=1]
 * @param {number} [height=1]
 * @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} [data=null]
 * @param {string} [format='RGBA'] name of a gl pixel-format constant
 * @return {{texture: WebGLTexture, width: number, height: number, format: string}}
 */
function createTexture (gl, {width = 1, height = 1, data = null, format = 'RGBA'} = {}) {
    const texture = gl.createTexture();

    gl.bindTexture(gl.TEXTURE_2D, texture);

    // clamp + nearest so media of any size (incl. non-power-of-two) renders
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);

    if ( data ) {
        // upload the provided image/buffer into the texture
        gl.texImage2D(gl.TEXTURE_2D, 0, gl[format], gl[format], gl.UNSIGNED_BYTE, data);
    }
    else {
        // allocate an empty texture of the requested size
        gl.texImage2D(gl.TEXTURE_2D, 0, gl[format], width, height, 0, gl[format], gl.UNSIGNED_BYTE, null);
    }

    return {texture, width, height, format};
}
/**
 * Create an ARRAY_BUFFER for a named vertex attribute, fill it with static
 * data and resolve the attribute's location within the program.
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {WebGLProgram} program
 * @param {string} name attribute name as declared in the vertex shader
 * @param {Float32Array} data vertex data to upload
 * @return {{location: GLint, buffer: WebGLBuffer}}
 */
function _createBuffer (gl, program, name, data) {
    const buffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
    gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);

    return {
        location: gl.getAttribLocation(program, name),
        buffer
    };
}
/**
 * Create and describe a GPU buffer for every vertex-attribute definition.
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {WebGLProgram} program
 * @param {Object[]} data attribute definitions ({name, data, type, size})
 * @return {Object[]} per-attribute records with resolved location and buffer
 */
function _initVertexAttributes (gl, program, data) {
    return (data || []).map(attr => {
        const {name, type, size} = attr;
        const {location, buffer} = _createBuffer(gl, program, name, attr.data);

        return {name, location, buffer, type, size};
    });
}
/**
 * Resolve uniform locations and normalize uniform descriptors.
 *
 * `size` falls back to the data array's length when not given explicitly.
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {WebGLProgram} program
 * @param {Object[]} uniforms uniform definitions ({name, type, data, [size]})
 * @return {Object[]} records ready for _setUniforms
 */
function _initUniforms (gl, program, uniforms) {
    return (uniforms || []).map(({name, size, type, data}) => ({
        location: gl.getUniformLocation(program, name),
        size: size || data.length,
        type,
        data
    }));
}
/**
 * Upload all uniform values via the matching gl.uniform{size}{type}v call
 * (e.g. size 1 / type 'f' -> gl.uniform1fv).
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {Object[]} uniformData records produced by _initUniforms
 */
function _setUniforms (gl, uniformData) {
    for ( const {size, type, location, data} of uniformData || [] ) {
        gl[`uniform${size}${type}v`](location, data);
    }
}
/**
 * Enable each vertex attribute and point it at its buffer.
 *
 * @private
 * @param {WebGLRenderingContext} gl
 * @param {Object[]} attributes records produced by _initVertexAttributes
 */
function _enableVertexAttributes (gl, attributes) {
    for ( const {location, buffer, size, type} of attributes || [] ) {
        gl.enableVertexAttribArray(location);
        gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
        // tightly packed data: normalize=false, stride=0, offset=0
        gl.vertexAttribPointer(location, size, gl[type], false, 0, 0);
    }
}
/**
* @private
* @typedef {Object} kamposSceneData
* @property {WebGLProgram} program
* @property {WebGLShader} vertexShader
* @property {WebGLShader} fragmentShader
* @property {kamposTarget} source
* @property {kamposAttribute[]} attributes
*
* @typedef {Object} kamposTarget
* @property {WebGLTexture} texture
* @property {WebGLFramebuffer|null} buffer
* @property {number} [width]
* @property {number} [height]
*
* @typedef {Object} kamposAttribute
* @property {string} name
* @property {GLint} location
* @property {WebGLBuffer} buffer
* @property {string} type
@property {number} size
*/
/**
* Initialize a ticker instance for batching animation of multiple Kampos instances.
*
* @class Ticker
*/
class Ticker {
    constructor () {
        // all Kampos instances animated by this ticker
        this.pool = [];
    }

    /**
     * Starts the animation loop (no-op if already running).
     */
    start () {
        if ( this.animationFrameId ) return;

        const loop = () => {
            this.animationFrameId = window.requestAnimationFrame(loop);
            this.draw();
        };

        this.animationFrameId = window.requestAnimationFrame(loop);
    }

    /**
     * Stops the animation loop.
     */
    stop () {
        window.cancelAnimationFrame(this.animationFrameId);
        this.animationFrameId = null;
    }

    /**
     * Invoke draw() on all instances in the pool.
     */
    draw () {
        this.pool.forEach(instance => instance.draw());
    }

    /**
     * Add an instance to the pool (ignored if already present)
     * and mark it as playing.
     *
     * @param {Kampos} instance
     */
    add (instance) {
        if ( this.pool.indexOf(instance) === -1 ) {
            this.pool.push(instance);
            instance.playing = true;
        }
    }

    /**
     * Remove an instance from the pool (ignored if absent)
     * and mark it as not playing.
     *
     * @param {Kampos} instance
     */
    remove (instance) {
        const index = this.pool.indexOf(instance);

        if ( index !== -1 ) {
            this.pool.splice(index, 1);
            instance.playing = false;
        }
    }
}
/**
* Initialize a webgl target with effects.
*
* @class Kampos
* @param {kamposConfig} config
* @example
* import {Ticker, Kampos, effects} from 'kampos';
* const ticker = new Ticker();
* const target = document.querySelector('#canvas');
* const hueSat = effects.hueSaturation();
* const kampos = new Kampos({ticker, target, effects: [hueSat]});
*/
class Kampos {
    /**
     * Creates a Kampos instance bound to the given target canvas.
     *
     * Registers handlers for WebGL context-creation errors, context loss and
     * context restoration, then compiles the effects into a WebGL program.
     *
     * @constructor
     * @param {kamposConfig} config - must include a `target` canvas; may include
     *        `effects`, `ticker` and `onContextLost`/`onContextRestored`/
     *        `onContextCreationError` callbacks.
     * @throws {Error} when no target canvas is provided, when context creation
     *         is globally prevented, or when a WebGL context could not be created.
     */
    constructor (config) {
        if ( ! config || ! config.target ) {
            throw new Error('A target canvas was not provided');
        }

        // Once a creation error has been observed anywhere, refuse further attempts globally.
        if ( Kampos.preventContextCreation )
            throw new Error('Context creation is prevented');

        this._contextCreationError = function () {
            Kampos.preventContextCreation = true;

            if ( config && config.onContextCreationError ) {
                config.onContextCreationError.call(this, config);
            }
        };

        config.target.addEventListener('webglcontextcreationerror', this._contextCreationError, false);

        const success = this.init(config);

        if ( ! success )
            throw new Error('Could not create context');

        // Re-initializes the instance after the browser restores a lost context.
        this._restoreContext = (e) => {
            e && e.preventDefault();
            this.config.target.removeEventListener('webglcontextrestored', this._restoreContext, true);

            const success = this.init();

            if ( ! success )
                return false;

            // re-apply the media source cached by destroy(true), if any
            if ( this._source ) {
                this.setSource(this._source);
            }

            delete this._source;

            if ( config && config.onContextRestored ) {
                config.onContextRestored.call(this, config);
            }

            return true;
        };

        // Tears down GL resources when the context is lost, keeping state for a later restore.
        this._loseContext = (e) => {
            e.preventDefault();

            if ( this.gl && this.gl.isContextLost() ) {
                this.lostContext = true;

                this.config.target.addEventListener('webglcontextrestored', this._restoreContext, true);

                this.destroy(true);

                if ( config && config.onContextLost ) {
                    config.onContextLost.call(this, config);
                }
            }
        };

        this.config.target.addEventListener('webglcontextlost', this._loseContext, true);
    }

    /**
     * Initializes a Kampos instance.
     * This is called inside the constructor,
     * but can be called again after effects have changed
     * or after {@link Kampos#destroy}.
     *
     * @param {kamposConfig} [config] defaults to `this.config`
     * @return {boolean} success whether initializing of the context and program were successful
     */
    init (config) {
        config = config || this.config;
        let {target, effects, ticker} = config;

        if ( Kampos.preventContextCreation )
            return false;

        this.lostContext = false;

        let gl = core.getWebGLContext(target);

        if ( ! gl )
            return false;

        if ( gl.isContextLost() ) {
            // force a restore; this replaces the canvas with a fresh clone
            const success = this.restoreContext();

            if ( ! success )
                return false;

            // get new context from the fresh clone
            gl = core.getWebGLContext(this.config.target);

            if ( ! gl )
                return false;
        }

        const {data} = core.init(gl, effects, this.dimensions);

        this.gl = gl;
        this.data = data;

        // cache for restoring context
        this.config = config;

        if ( ticker ) {
            this.ticker = ticker;
            ticker.add(this);
        }

        return true;
    }

    /**
     * Set the source config.
     *
     * @param {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap|kamposSource} source
     * @example
     * const media = document.querySelector('#video');
     * kampos.setSource(media);
     */
    setSource (source) {
        if ( ! source ) return;

        // a lost context must be restored before touching GL resources
        if ( this.lostContext ) {
            const success = this.restoreContext();

            if ( ! success ) return;
        }

        let media, width, height;

        // source may be a plain {media, width, height} descriptor or a raw media element
        if ( Object.prototype.toString.call(source) === '[object Object]' ) {
            ({media, width, height} = source);
        }
        else {
            media = source;
        }

        if ( width && height ) {
            this.dimensions = { width, height };
        }

        // resize the target canvas if needed
        core.resize(this.gl, this.dimensions);

        this._createTextures();

        this.media = media;
    }

    /**
     * Draw current scene.
     */
    draw () {
        if ( this.lostContext ) {
            const success = this.restoreContext();

            if ( ! success ) return;
        }

        core.draw(this.gl, this.media, this.data, this.dimensions);
    }

    /**
     * Starts the animation loop.
     *
     * If using a {@see Ticker} this instance will be added to that {@see Ticker}.
     */
    play () {
        if ( this.ticker ) {
            // switch from a standalone rAF loop to the shared ticker, if needed
            if ( this.animationFrameId ) {
                this.stop();
            }

            if ( ! this.playing ) {
                this.playing = true;
                this.ticker.add(this);
            }
        }
        else if ( ! this.animationFrameId ) {
            // no ticker: run a self-scheduling requestAnimationFrame loop
            const loop = () => {
                this.animationFrameId = window.requestAnimationFrame(loop);
                this.draw();
            };

            this.animationFrameId = window.requestAnimationFrame(loop);
        }
    }

    /**
     * Stops the animation loop.
     *
     * If using a {@see Ticker} this instance will be removed from that {@see Ticker}.
     */
    stop () {
        if ( this.animationFrameId ) {
            window.cancelAnimationFrame(this.animationFrameId);
            this.animationFrameId = null;
        }

        if ( this.playing ) {
            this.playing = false;
            this.ticker.remove(this);
        }
    }

    /**
     * Stops animation loop and frees all resources.
     *
     * @param {boolean} keepState for internal use; when true the current media
     *        source and dimensions are cached so a context restore can re-apply them.
     */
    destroy (keepState) {
        this.stop();

        if ( this.gl && this.data ) {
            core.destroy(this.gl, this.data);
        }

        if ( keepState ) {
            const dims = this.dimensions || {};

            this._source = this._source || {
                media: this.media,
                width: dims.width,
                height: dims.height
            };
        }
        else {
            // full teardown: detach context listeners and drop cached config
            this.config.target.removeEventListener('webglcontextlost', this._loseContext, true);
            this.config.target.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);

            this.config = null;
            this.dimensions = null;
        }

        this.gl = null;
        this.data = null;
        this.media = null;
    }

    /**
     * Restore a lost WebGL context for the given target.
     * This will replace canvas DOM element with a fresh clone.
     *
     * @return {boolean} success whether forcing a context restore was successful
     */
    restoreContext () {
        if ( Kampos.preventContextCreation )
            return false;

        const canvas = this.config.target;
        // cloning the canvas forces the browser to create a fresh context on it
        const clone = this.config.target.cloneNode(true);
        const parent = canvas.parentNode;

        if ( parent ) {
            parent.replaceChild(clone, canvas);
        }

        this.config.target = clone;

        // move the context event listeners from the old canvas to the clone
        canvas.removeEventListener('webglcontextlost', this._loseContext, true);
        canvas.removeEventListener('webglcontextrestored', this._restoreContext, true);
        canvas.removeEventListener('webglcontextcreationerror', this._contextCreationError, false);
        clone.addEventListener('webglcontextlost', this._loseContext, true);
        clone.addEventListener('webglcontextcreationerror', this._contextCreationError, false);

        if ( this.lostContext ) {
            return this._restoreContext();
        }

        return true;
    }

    // (Re)creates one GL texture per effect texture, sized to the current dimensions.
    _createTextures () {
        this.data && this.data.textures.forEach((texture, i) => {
            const data = this.data.textures[i];

            data.texture = core.createTexture(this.gl, {
                width: this.dimensions.width,
                height: this.dimensions.height,
                format: texture.format,
                data: texture.image
            }).texture;

            data.format = texture.format;
            data.update = texture.update;
        });
    }
}
/**
 * @function displacementTransition
 * @returns {displacementTransitionEffect}
 * @example displacementTransition()
 */
function transitionDisplacement () {
    /**
     * @typedef {Object} displacementTransitionEffect
     * @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} to media source to transition into
     * @property {ArrayBufferView|ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement|ImageBitmap} map displacement map to use
     * @property {number} progress number between 0.0 and 1.0
     * @property {{x: number, y: number}} sourceScale
     * @property {{x: number, y: number}} toScale
     * @property {boolean} disabled
     *
     * @example
     * const img = new Image();
     * img.src = 'disp.jpg';
     * effect.map = img;
     * effect.to = document.querySelector('#video-to');
     * effect.sourceScale = {x: 0.4};
     * effect.toScale = {x: 0.8};
     */

    // Full-quad texture coordinates, shared by the "to" media and the displacement map.
    // A factory is used so each attribute gets its own buffer.
    const quadTexCoords = () => new Float32Array([
        0.0, 0.0,
        0.0, 1.0,
        1.0, 0.0,
        1.0, 1.0
    ]);

    // Read a vec2 uniform's backing array as an {x, y} pair.
    const readScale = (data) => {
        const [x, y] = data;
        return {x, y};
    };

    // Write an {x, y} pair into a vec2 uniform's backing array;
    // axes left undefined by the caller are not touched.
    const writeScale = (data, {x, y}) => {
        if (x !== undefined)
            data[0] = x;
        if (y !== undefined)
            data[1] = y;
    };

    return {
        vertex: {
            attribute: {
                a_transitionToTexCoord: 'vec2',
                a_transitionDispMapTexCoord: 'vec2'
            },
            main: `
    v_transitionToTexCoord = a_transitionToTexCoord;
    v_transitionDispMapTexCoord = a_transitionDispMapTexCoord;`
        },
        fragment: {
            uniform: {
                u_transitionEnabled: 'bool',
                u_transitionTo: 'sampler2D',
                u_transitionDispMap: 'sampler2D',
                u_transitionProgress: 'float',
                u_sourceDispScale: 'vec2',
                u_toDispScale: 'vec2'
            },
            source: `
    vec3 transDispMap = vec3(1.0);
    vec2 transDispVec = vec2(0.0);

    if (u_transitionEnabled) {
        // read the displacement texture once and create the displacement map
        transDispMap = texture2D(u_transitionDispMap, v_transitionDispMapTexCoord).rgb - 0.5;

        // prepare the source coordinates for sampling
        transDispVec = vec2(u_sourceDispScale.x * transDispMap.r, u_sourceDispScale.y * transDispMap.g);
        sourceCoord = clamp(v_texCoord + transDispVec * u_transitionProgress, 0.0, 1.0);
    }`,
            main: `
    if (u_transitionEnabled) {
        // prepare the target coordinates for sampling
        transDispVec = vec2(u_toDispScale.x * transDispMap.r, u_toDispScale.y * transDispMap.g);
        vec2 targetCoord = clamp(v_transitionToTexCoord + transDispVec * (1.0 - u_transitionProgress), 0.0, 1.0);

        // sample the target
        vec4 targetPixel = texture2D(u_transitionTo, targetCoord);

        // mix the results of source and target
        color = mix(color, targetPixel.rgb, u_transitionProgress);
        alpha = mix(alpha, targetPixel.a, u_transitionProgress);
    }`
        },
        varying: {
            v_transitionToTexCoord: 'vec2',
            v_transitionDispMapTexCoord: 'vec2'
        },
        // NOTE: order matters — the accessors below address these uniforms by index.
        uniforms: [
            {
                name: 'u_transitionEnabled',
                type: 'i',
                data: [1]
            },
            {
                name: 'u_transitionTo',
                type: 'i',
                data: [1]
            },
            {
                name: 'u_transitionDispMap',
                type: 'i',
                data: [2]
            },
            {
                name: 'u_transitionProgress',
                type: 'f',
                data: [0]
            },
            {
                name: 'u_sourceDispScale',
                type: 'f',
                data: [0.0, 0.0]
            },
            {
                name: 'u_toDispScale',
                type: 'f',
                data: [0.0, 0.0]
            }
        ],
        attributes: [
            {
                name: 'a_transitionToTexCoord',
                data: quadTexCoords(),
                size: 2,
                type: 'FLOAT'
            },
            {
                name: 'a_transitionDispMapTexCoord',
                data: quadTexCoords(),
                size: 2,
                type: 'FLOAT'
            }
        ],
        textures: [
            {
                format: 'RGBA',
                update: true
            },
            {
                format: 'RGB'
            }
        ],
        // toggles u_transitionEnabled (1 = enabled, 0 = disabled)
        get disabled () {
            return !this.uniforms[0].data[0];
        },
        set disabled (b) {
            this.uniforms[0].data[0] = b ? 0 : 1;
        },
        // transition progress in [0.0, 1.0], written into u_transitionProgress
        get progress () {
            const [p] = this.uniforms[3].data;
            return p;
        },
        set progress (p) {
            this.uniforms[3].data[0] = p;
        },
        get sourceScale () {
            return readScale(this.uniforms[4].data);
        },
        set sourceScale (scale) {
            writeScale(this.uniforms[4].data, scale);
        },
        get toScale () {
            return readScale(this.uniforms[5].data);
        },
        set toScale (scale) {
            writeScale(this.uniforms[5].data, scale);
        },
        // media source to transition into (texture unit 1)
        get to () {
            return this.textures[0].image;
        },
        set to (media) {
            this.textures[0].image = media;
        },
        // displacement map image (texture unit 2)
        get map () {
            return this.textures[1].image;
        },
        set map (img) {
            this.textures[1].image = img;
        }
    };
}
/*
 * Drives each registered Transition from scroll position:
 * once at least 80% of a target is visible the transition plays forward,
 * and when visibility drops below the threshold it plays backward.
 */
const observer = new IntersectionObserver((entries) => {
    for (const entry of entries) {
        const transition = Transition.targets.get(entry.target);

        if (entry.isIntersecting) {
            transition.forward();
        }
        else {
            transition.backward();
        }
    }
}, {
    root: null,
    rootMargin: '0%',
    threshold: 0.8
});
class Transition {
    /**
     * Plays a displacement-map transition between two videos on a target canvas.
     * Instances register themselves with the shared IntersectionObserver, which
     * calls {@see Transition#forward} / {@see Transition#backward} on visibility changes.
     *
     * @param {Object} options
     * @param {HTMLVideoElement} options.vid1 - source video
     * @param {HTMLVideoElement} options.vid2 - video to transition into
     * @param {HTMLCanvasElement} options.target - canvas to render into
     * @param {string} options.disp - URL of the displacement-map image
     * @param {Ticker} options.ticker - shared ticker driving all instances
     * @param {number} options.dispScale - displacement intensity along the x axis
     */
    constructor ({vid1, vid2, target, disp, ticker, dispScale}) {
        this.vid1 = vid1;
        this.vid2 = vid2;
        this.target = target;
        // readiness counters: incremented once per video (see initVideo)
        this.playing = 0;
        this.timeupdate = 0;
        this.disp = disp;
        this.dispScale = dispScale;
        this.transition = transitionDisplacement();
        // 1 = animating progress toward 1 (forward), 0 = toward 0 (backward)
        this.direction = 1;
        this.startTime = 0;
        // resolves once both videos and the displacement image are ready
        this.ready = new Promise(resolve => {
            this.setReady = resolve;
        });
        this.kampos = new Kampos({target, effects: [this.transition], ticker});
        this.initVideo();
        // target.addEventListener('mouseenter', forward);
        // target.addEventListener('mouseleave', backward);
        observer.observe(target);
        Transition.targets.set(target, this);
    }

    /**
     * Wires up media readiness: starts both videos on 'canplay', counts their
     * 'playing' and 'timeupdate' events, loads the displacement image in
     * parallel, and — once everything is ready — configures the effect, sets
     * the compositor source and resolves {@see Transition#ready}.
     */
    initVideo () {
        function canPlay (e) {
            e.target.play();
        }

        const isPlaying = e => {
            this.playing += 1;
            e.target.removeEventListener('playing', isPlaying, true);
            check();
        };
        const isTimeupdate = (e) => {
            this.timeupdate += 1;
            e.target.removeEventListener('timeupdate', isTimeupdate, true);
            check();
        };

        // load the displacement map in parallel with the videos
        const dispReady = new Promise(resolve => {
            const img = new Image();

            img.onload = function () {
                resolve(this);
            };

            img.src = this.disp;
        });

        const check = () => {
            // proceed only once BOTH videos have fired 'playing' and 'timeupdate'
            if (this.playing === 2 && this.timeupdate === 2) {
                const width = this.vid1.videoWidth;
                const height = this.vid1.videoHeight;

                this.target.style.width = `${width}px`;
                this.target.style.height = `${height}px`;

                dispReady.then(img => {
                    this.transition.map = img;
                    this.transition.to = this.vid2;
                    // opposite signs displace source and target in opposite directions
                    this.transition.sourceScale = {x: this.dispScale};
                    this.transition.toScale = {x: -this.dispScale};

                    this.kampos.setSource({media: this.vid1, width, height});
                    this.setReady();
                });

                this.vid1.removeEventListener('canplay', canPlay, true);
                this.vid2.removeEventListener('canplay', canPlay, true);
            }
        };

        [this.vid1, this.vid2].forEach(vid => {
            vid.addEventListener('playing', isPlaying, true);
            vid.addEventListener('timeupdate', isTimeupdate, true);
            vid.addEventListener('canplay', canPlay, true);
        });
    }

    // per-frame update: write progress into the effect's uniform
    tick (p) {
        this.transition.progress = p;
    }

    /**
     * Animation loop: progress follows |sin(elapsed / 500ms)| (inverted when
     * playing backward), re-scheduling itself via requestAnimationFrame until
     * progress reaches ~0.99 (forward) or ~0.01 (backward), then snaps to the
     * end value.
     */
    play () {
        const now = Date.now() - this.startTime;
        let p = Math.abs(Math.sin(now / .5e3));
        p = this.direction ? p : 1 - p;

        this.tick(p);

        if (this.direction) {
            if (p * 1e2 < 99) {
                window.requestAnimationFrame(() => this.play());
            }
            else {
                window.requestAnimationFrame(() => this.tick(1));
            }
        }
        else {
            if (p * 1e2 > 1) {
                window.requestAnimationFrame(() => this.play());
            }
            else {
                window.requestAnimationFrame(() => this.tick(0));
            }
        }
    }

    // restart the loop running toward progress = 1
    forward () {
        this.direction = 1;
        this.startTime = Date.now();
        this.play();
    }

    // restart the loop running toward progress = 0
    backward () {
        this.direction = 0;
        this.startTime = Date.now();
        this.play();
    }
}
// Registry mapping each target canvas to its Transition instance,
// used by the IntersectionObserver callback above.
Transition.targets = new Map();

// Grab the four video pairs and their target canvases from the DOM.
const video = document.querySelector('#video');
const video2 = document.querySelector('#video2');
const target = document.querySelector('#target');

const video3 = document.querySelector('#video3');
const video4 = document.querySelector('#video4');
const target2 = document.querySelector('#target2');

const video5 = document.querySelector('#video5');
const video6 = document.querySelector('#video6');
const target3 = document.querySelector('#target3');

const video7 = document.querySelector('#video7');
const video8 = document.querySelector('#video8');
const target4 = document.querySelector('#target4');

// A single shared ticker drives all four compositors.
const ticker = new Ticker();

// Each transition pairs two videos with a displacement map and its intensity.
const trans1 = new Transition({
    vid1: video,
    vid2: video2,
    target: target,
    ticker,
    disp: 'disp-tri.jpg',
    dispScale: 0.3
});
const trans2 = new Transition({
    vid1: video3,
    vid2: video4,
    target: target2,
    ticker,
    disp: 'disp-snow.jpg',
    dispScale: 1.0
});
const trans3 = new Transition({
    vid1: video5,
    vid2: video6,
    target: target3,
    ticker,
    disp: 'disp-cloud.png',
    dispScale: 0.2
});
const trans4 = new Transition({
    vid1: video7,
    vid2: video8,
    target: target4,
    ticker,
    disp: 'disp-liquid.jpg',
    dispScale: 0.35
});

// Start rendering only once all four transitions have their media ready.
Promise.all([trans1.ready, trans2.ready, trans3.ready, trans4.ready])
    .then(() => {
        ticker.start();
    });
}());

@@ -5,3 +5,3 @@ import progress from 'rollup-plugin-progress';

const config = {
input: 'disp.js',
input: 'demo.js',
output: {

@@ -8,0 +8,0 @@ file: 'index.js',

@@ -487,3 +487,3 @@ (function (global, factory) {

set disabled(b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},

@@ -571,2 +571,3 @@

u_transitionEnabled: 'bool',
u_transitionProgress: 'float',
u_transitionTo: 'sampler2D'

@@ -585,2 +586,10 @@ },

get progress() {
return this.uniforms[2].data[0];
},
set progress(p) {
this.uniforms[2].data[0] = p;
},
get to() {

@@ -587,0 +596,0 @@ return this.textures[0].image;

{
"name": "kampos",
"version": "0.2.1",
"version": "0.2.2",
"description": "Tiny and fast effects compositor on WebGL",

@@ -50,3 +50,2 @@ "registry": "https://registry.npmjs.org/",

"ava": "^2.1.0",
"codemirror": "^5.48.2",
"documentation": "^11.0.1",

@@ -53,0 +52,0 @@ "electron": "^5.0.4",

@@ -44,3 +44,3 @@ /**

set disabled (b) {
return this.uniforms[0].data[0] = +!b;
this.uniforms[0].data[0] = +!b;
},

@@ -47,0 +47,0 @@ get scale () {

@@ -28,2 +28,3 @@ /**

u_transitionEnabled: 'bool',
u_transitionProgress: 'float',
u_transitionTo: 'sampler2D'

@@ -44,2 +45,8 @@ },

},
get progress () {
return this.uniforms[2].data[0];
},
set progress (p) {
this.uniforms[2].data[0] = p;
},
get to () {

@@ -46,0 +53,0 @@ return this.textures[0].image;

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc