@pixi/filter-kawase-blur - npm package version comparison

Comparing version 2.7.0 to 3.0.0

lib/filter-kawase-blur.esm.js
/*!
* @pixi/filter-kawase-blur - v2.7.0
* Compiled Sun, 13 Jan 2019 22:51:52 UTC
* Compiled Fri, 10 May 2019 22:56:42 UTC
*

@@ -8,3 +8,218 @@ * @pixi/filter-kawase-blur is licensed under the MIT License.

*/
import{Point as e,Filter as t}from"pixi.js";var r="attribute vec2 aVertexPosition;\nattribute vec2 aTextureCoord;\n\nuniform mat3 projectionMatrix;\n\nvarying vec2 vTextureCoord;\n\nvoid main(void)\n{\n gl_Position = vec4((projectionMatrix * vec3(aVertexPosition, 1.0)).xy, 0.0, 1.0);\n vTextureCoord = aTextureCoord;\n}",i="\nvarying vec2 vTextureCoord;\nuniform sampler2D uSampler;\n\nuniform vec2 uOffset;\n\nvoid main(void)\n{\n vec4 color = vec4(0.0);\n\n // Sample top left pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y + uOffset.y));\n\n // Sample top right pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y + uOffset.y));\n\n // Sample bottom right pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y - uOffset.y));\n\n // Sample bottom left pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y - uOffset.y));\n\n // Average\n color *= 0.25;\n\n gl_FragColor = color;\n}",o="\nvarying vec2 vTextureCoord;\nuniform sampler2D uSampler;\n\nuniform vec2 uOffset;\nuniform vec4 filterClamp;\n\nvoid main(void)\n{\n vec4 color = vec4(0.0);\n\n // Sample top left pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y + uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample top right pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y + uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample bottom right pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y - uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample bottom left pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y - uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Average\n color *= 0.25;\n\n gl_FragColor = color;\n}\n",n=function(t){function n(n,l,u){void 0===n&&(n=4),void 0===l&&(l=3),void 0===u&&(u=!1),t.call(this,r,u?o:i),this.uniforms.uOffset=new Float32Array(2),this._pixelSize=new e,this.pixelSize=1,this._clamp=u,this._kernels=null,Array.isArray(n)?this.kernels=n:(this._blur=n,this.quality=l)}t&&(n.__proto__=t),n.prototype=Object.create(t&&t.prototype),n.prototype.constructor=n;var l={kernels:{configurable:!0},clamp:{configurable:!0},pixelSize:{configurable:!0},quality:{configurable:!0},blur:{configurable:!0}};return n.prototype.apply=function(e,t,r,i){var o,n=this.pixelSize.x/t.size.width,l=this.pixelSize.y/t.size.height;if(1===this._quality||0===this._blur)o=this._kernels[0]+.5,this.uniforms.uOffset[0]=o*n,this.uniforms.uOffset[1]=o*l,e.applyFilter(this,t,r,i);else{for(var u,s=e.getRenderTarget(!0),a=t,f=s,p=this._quality-1,x=0;x<p;x++)o=this._kernels[x]+.5,this.uniforms.uOffset[0]=o*n,this.uniforms.uOffset[1]=o*l,e.applyFilter(this,a,f,!0),u=a,a=f,f=u;o=this._kernels[p]+.5,this.uniforms.uOffset[0]=o*n,this.uniforms.uOffset[1]=o*l,e.applyFilter(this,a,r,i),e.returnRenderTarget(s)}},n.prototype._generateKernels=function(){var e=this._blur,t=this._quality,r=[e];if(e>0)for(var i=e,o=e/t,n=1;n<t;n++)i-=o,r.push(i);this._kernels=r},l.kernels.get=function(){return this._kernels},l.kernels.set=function(e){Array.isArray(e)&&e.length>0?(this._kernels=e,this._quality=e.length,this._blur=Math.max.apply(Math,e)):(this._kernels=[0],this._quality=1)},l.clamp.get=function(){return this._clamp},l.pixelSize.set=function(t){"number"==typeof 
t?(this._pixelSize.x=t,this._pixelSize.y=t):Array.isArray(t)?(this._pixelSize.x=t[0],this._pixelSize.y=t[1]):t instanceof e?(this._pixelSize.x=t.x,this._pixelSize.y=t.y):(this._pixelSize.x=1,this._pixelSize.y=1)},l.pixelSize.get=function(){return this._pixelSize},l.quality.get=function(){return this._quality},l.quality.set=function(e){this._quality=Math.max(1,Math.round(e)),this._generateKernels()},l.blur.get=function(){return this._blur},l.blur.set=function(e){this._blur=e,this._generateKernels()},Object.defineProperties(n.prototype,l),n}(t);export{n as KawaseBlurFilter};
import { Filter } from '@pixi/core';
import { Point } from '@pixi/math';
var vertex = "attribute vec2 aVertexPosition;\nattribute vec2 aTextureCoord;\n\nuniform mat3 projectionMatrix;\n\nvarying vec2 vTextureCoord;\n\nvoid main(void)\n{\n gl_Position = vec4((projectionMatrix * vec3(aVertexPosition, 1.0)).xy, 0.0, 1.0);\n vTextureCoord = aTextureCoord;\n}";
var fragment = "\nvarying vec2 vTextureCoord;\nuniform sampler2D uSampler;\n\nuniform vec2 uOffset;\n\nvoid main(void)\n{\n vec4 color = vec4(0.0);\n\n // Sample top left pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y + uOffset.y));\n\n // Sample top right pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y + uOffset.y));\n\n // Sample bottom right pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y - uOffset.y));\n\n // Sample bottom left pixel\n color += texture2D(uSampler, vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y - uOffset.y));\n\n // Average\n color *= 0.25;\n\n gl_FragColor = color;\n}";
var fragmentClamp = "\nvarying vec2 vTextureCoord;\nuniform sampler2D uSampler;\n\nuniform vec2 uOffset;\nuniform vec4 filterClamp;\n\nvoid main(void)\n{\n vec4 color = vec4(0.0);\n\n // Sample top left pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y + uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample top right pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y + uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample bottom right pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x + uOffset.x, vTextureCoord.y - uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Sample bottom left pixel\n color += texture2D(uSampler, clamp(vec2(vTextureCoord.x - uOffset.x, vTextureCoord.y - uOffset.y), filterClamp.xy, filterClamp.zw));\n\n // Average\n color *= 0.25;\n\n gl_FragColor = color;\n}\n";
/**
 * A much faster blur than Gaussian blur, but more complicated to use.<br>
 * ![original](../tools/screenshots/dist/original.png)![filter](../tools/screenshots/dist/kawase-blur.png)
 *
 * @see https://software.intel.com/en-us/blogs/2014/07/15/an-investigation-of-fast-real-time-gpu-based-image-blur-algorithms
 * @class
 * @extends PIXI.Filter
 * @memberof PIXI.filters
 * @see {@link https://www.npmjs.com/package/@pixi/filter-kawase-blur|@pixi/filter-kawase-blur}
 * @see {@link https://www.npmjs.com/package/pixi-filters|pixi-filters}
 * @param {number|number[]} [blur=4] - The blur of the filter. Should be greater than `0`.
 *        If the value is an Array, it is used directly as the kernels.
 * @param {number} [quality=3] - The quality of the filter. Should be an integer greater than `1`.
 * @param {boolean} [clamp=false] - Clamp edges, useful for removing dark edges
 *        from fullscreen filters or bleeding to the edge of the filterArea.
 */
var KawaseBlurFilter = /*@__PURE__*/(function (Filter) {
    function KawaseBlurFilter(blur, quality, clamp) {
        if ( blur === void 0 ) blur = 4;
        if ( quality === void 0 ) quality = 3;
        if ( clamp === void 0 ) clamp = false;

        Filter.call(this, vertex, clamp ? fragmentClamp : fragment);
        this.uniforms.uOffset = new Float32Array(2);
        this._pixelSize = new Point();
        this.pixelSize = 1;
        this._clamp = clamp;
        this._kernels = null;

        // if `blur` is an array, use it directly as the kernels
        if (Array.isArray(blur)) {
            this.kernels = blur;
        }
        else {
            this._blur = blur;
            this.quality = quality;
        }
    }

    if ( Filter ) KawaseBlurFilter.__proto__ = Filter;
    KawaseBlurFilter.prototype = Object.create( Filter && Filter.prototype );
    KawaseBlurFilter.prototype.constructor = KawaseBlurFilter;

    var prototypeAccessors = { kernels: { configurable: true }, clamp: { configurable: true }, pixelSize: { configurable: true }, quality: { configurable: true }, blur: { configurable: true } };
    /**
     * Overrides apply
     * @private
     */
    KawaseBlurFilter.prototype.apply = function apply (filterManager, input, output, clear) {
        var uvX = this.pixelSize.x / input._frame.width;
        var uvY = this.pixelSize.y / input._frame.height;
        var offset;

        if (this._quality === 1 || this._blur === 0) {
            // single pass: render straight from input to output
            offset = this._kernels[0] + 0.5;
            this.uniforms.uOffset[0] = offset * uvX;
            this.uniforms.uOffset[1] = offset * uvY;
            filterManager.applyFilter(this, input, output, clear);
        }
        else {
            // multiple passes: ping-pong between the input and a temporary
            // filter texture, one pass per kernel, then render the final
            // pass into the real output
            var renderTarget = filterManager.getFilterTexture();

            var source = input;
            var target = renderTarget;
            var tmp;

            var last = this._quality - 1;

            for (var i = 0; i < last; i++) {
                offset = this._kernels[i] + 0.5;
                this.uniforms.uOffset[0] = offset * uvX;
                this.uniforms.uOffset[1] = offset * uvY;
                filterManager.applyFilter(this, source, target, true);

                tmp = source;
                source = target;
                target = tmp;
            }
            offset = this._kernels[last] + 0.5;
            this.uniforms.uOffset[0] = offset * uvX;
            this.uniforms.uOffset[1] = offset * uvY;
            filterManager.applyFilter(this, source, output, clear);
            filterManager.returnFilterTexture(renderTarget);
        }
    };
    /**
     * Auto generate kernels by blur & quality
     * @private
     */
    KawaseBlurFilter.prototype._generateKernels = function _generateKernels () {
        var blur = this._blur;
        var quality = this._quality;
        var kernels = [ blur ];

        if (blur > 0) {
            var k = blur;
            var step = blur / quality;
            for (var i = 1; i < quality; i++) {
                k -= step;
                kernels.push(k);
            }
        }
        this._kernels = kernels;
    };
    /**
     * The kernel sizes of the blur filter, for advanced usage.
     *
     * @member {number[]}
     * @default [0]
     */
    prototypeAccessors.kernels.get = function () {
        return this._kernels;
    };
    prototypeAccessors.kernels.set = function (value) {
        if (Array.isArray(value) && value.length > 0) {
            this._kernels = value;
            this._quality = value.length;
            this._blur = Math.max.apply(Math, value);
        }
        else {
            // if the value is invalid, fall back to the default
            this._kernels = [0];
            this._quality = 1;
        }
    };
    /**
     * Whether the filter is clamped.
     *
     * @readonly
     * @member {boolean}
     * @default false
     */
    prototypeAccessors.clamp.get = function () {
        return this._clamp;
    };
    /**
     * Sets the pixel size of the filter. A larger size produces a blurrier result.
     * For advanced usage.
     *
     * @member {PIXI.Point|number[]}
     * @default [1, 1]
     */
    prototypeAccessors.pixelSize.set = function (value) {
        if (typeof value === 'number') {
            this._pixelSize.x = value;
            this._pixelSize.y = value;
        }
        else if (Array.isArray(value)) {
            this._pixelSize.x = value[0];
            this._pixelSize.y = value[1];
        }
        else if (value instanceof Point) {
            this._pixelSize.x = value.x;
            this._pixelSize.y = value.y;
        }
        else {
            // if the value is invalid, fall back to the default
            this._pixelSize.x = 1;
            this._pixelSize.y = 1;
        }
    };
    prototypeAccessors.pixelSize.get = function () {
        return this._pixelSize;
    };
    /**
     * The quality of the filter, integer greater than `1`.
     *
     * @member {number}
     * @default 3
     */
    prototypeAccessors.quality.get = function () {
        return this._quality;
    };
    prototypeAccessors.quality.set = function (value) {
        this._quality = Math.max(1, Math.round(value));
        this._generateKernels();
    };

    /**
     * The amount of blur, value greater than `0`.
     *
     * @member {number}
     * @default 4
     */
    prototypeAccessors.blur.get = function () {
        return this._blur;
    };
    prototypeAccessors.blur.set = function (value) {
        this._blur = value;
        this._generateKernels();
    };

    Object.defineProperties( KawaseBlurFilter.prototype, prototypeAccessors );

    return KawaseBlurFilter;
}(Filter));
export { KawaseBlurFilter };
//# sourceMappingURL=filter-kawase-blur.esm.js.map
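For orientation, a minimal usage sketch of the class above, assuming the PixiJS v5 pixi.js bundle; the sprite and texture path are illustrative, not part of this package:

import { Sprite } from 'pixi.js';
import { KawaseBlurFilter } from '@pixi/filter-kawase-blur';

// Defaults made explicit: blur = 4, quality = 3, clamp = false.
const blurFilter = new KawaseBlurFilter(4, 3, false);

// Passing an array uses it directly as the kernels: quality becomes the
// array length and blur its maximum, per the kernels setter above.
const customFilter = new KawaseBlurFilter([10, 6, 2]);

// pixelSize accepts a number, an [x, y] array, or a PIXI.Point.
blurFilter.pixelSize = [2, 2];

// Attach to any display object, e.g. a sprite (illustrative texture path).
const sprite = Sprite.from('example.png');
sprite.filters = [blurFilter];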


package.json
 {
   "name": "@pixi/filter-kawase-blur",
-  "version": "2.7.0",
-  "main": "lib/filter-kawase-blur.js",
-  "description": "PixiJS v4 filter to apply an alternative, fast blur effect to Gaussian",
+  "version": "3.0.0",
+  "main": "lib/filter-kawase-blur.cjs.js",
+  "bundle": "dist/filter-kawase-blur.js",
+  "description": "PixiJS filter to apply an alternative, fast blur effect to Gaussian",
   "author": "finscn <finscn@gmail.com>",

@@ -21,11 +22,13 @@ "module": "lib/filter-kawase-blur.esm.js",

     "lib",
     "dist",
+    "types.d.ts"
   ],
-  "peerDependencies": {
-    "pixi.js": ">=4.5.0"
+  "dependencies": {
+    "@pixi/core": "^5.0.0-X",
+    "@pixi/math": "^5.0.0-X"
   },
   "devDependencies": {
-    "@tools/fragments": "^2.0.0"
+    "@tools/fragments": "^3.0.0"
   },
-  "gitHead": "a1329fadd2910c48842c92f254be77d971a823ca"
+  "gitHead": "0902eca3fe476dd7dacc9330f504d8e2ade6c9bd"
 }
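The dependency change above is the consumer-facing core of the 2.x to 3.0.0 migration: 2.7.0 expected the host application to provide pixi.js (>=4.5.0) as a peer dependency, whereas 3.0.0 depends directly on the scoped PixiJS v5 packages @pixi/core and @pixi/math. A rough sketch of wiring the filter into a v5 app; the Application setup is illustrative and not part of this package:

import { Application } from 'pixi.js';
import { KawaseBlurFilter } from '@pixi/filter-kawase-blur';

const app = new Application({ width: 800, height: 600 });
document.body.appendChild(app.view);

// clamp = true helps avoid dark edges when blurring the whole stage.
app.stage.filters = [new KawaseBlurFilter(6, 4, true)];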
README.md
 # KawaseBlurFilter
-PixiJS v4 filter to apply an alternative, fast blur effect to Gaussian.
+PixiJS filter to apply an alternative, fast blur effect to Gaussian.

@@ -5,0 +5,0 @@ ## Installation

types.d.ts
-/// <reference types="pixi.js" />
-declare namespace PIXI.filters {
-    class KawaseBlurFilter extends PIXI.Filter<{}> {
+declare module "@pixi/filter-kawase-blur" {
+    export class KawaseBlurFilter extends PIXI.Filter {
         constructor(blur?:number|number[], quality?:number, clamp?:boolean);

@@ -12,5 +12,1 @@ kernels:number[];

 }
-declare module "@pixi/filter-kawase-blur" {
-    export = PIXI.filters;
-}
