New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

pam-diff

Package Overview
Dependencies
Maintainers
1
Versions
53
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

pam-diff - npm Package Compare versions

Comparing version 0.0.1 to 0.1.0

247

index.js

@@ -12,8 +12,6 @@ // jshint esversion: 6, globalstrict: true, strict: true

Transform.call(this, {objectMode: true});
this._rgb2gray = this._parseOptions('rgb2gray', options);
this._sensitivity = this._validateNumber(this._parseOptions('sensitivity', options), 30, 1, 255);
this._threshold = this._validateNumber(this._parseOptions('threshold', options), 300, 1, 20000);
this._newPix = null;
this._oldPix = null;
this._comparePixels = null;//pixel difference engine, to be determined
this.setGrayscale(this._parseOptions('grayscale', options));
this.setDifference(this._parseOptions('difference', options));
this.setPercent(this._parseOptions('percent', options));
this._parseChunk = this._parseFirstChunk;
}

@@ -23,2 +21,32 @@

//select which rgb-to-gray conversion is used when comparing rgb/rgb_alpha pixels
PamDiff.prototype.setGrayscale = function (value) {
    const converters = {
        red: this._redToGray,
        green: this._greenToGray,
        blue: this._blueToGray,
        desaturation: this._desaturationToGray,
        luminosity: this._luminosityToGray
    };
    //any unrecognized value falls back to the average method, matching the original default
    if (Object.prototype.hasOwnProperty.call(converters, value)) {
        this._grayscale = converters[value];
    } else {
        this._grayscale = this._averageToGray;
    }
};
/**
 * Set the per-pixel grayscale difference threshold.
 * The value is passed through this._validateNumber(value, 5, 1, 255),
 * i.e. default 5, clamped to the range 1-255.
 */
PamDiff.prototype.setDifference = function (value) {
    const validated = this._validateNumber(value, 5, 1, 255);
    this._difference = validated;
};
/**
 * Set the percent-of-changed-pixels threshold that triggers a 'diff' event.
 * The value is passed through this._validateNumber(value, 5, 1, 100),
 * i.e. default 5, clamped to the range 1-100.
 */
PamDiff.prototype.setPercent = function (value) {
    const validated = this._validateNumber(value, 5, 1, 100);
    this._percent = validated;
};
PamDiff.prototype._parseOptions = function (option, options) {

@@ -43,14 +71,4 @@ if (options && options.hasOwnProperty(option)) {

//map the rgb2gray option string to its rgb-to-gray conversion function
PamDiff.prototype._getGrayEngine = function (type) {
    switch (type) {
        case 'lightness' :
            return PamDiff.prototype._lightness;
        case 'luminosity' :
            return PamDiff.prototype._luminosity;
        default :
            //anything else falls back to the average conversion
            return PamDiff.prototype._average;
    }
};
//convert rgb to gray
PamDiff.prototype._average = function (r, g, b) {
PamDiff.prototype._averageToGray = function (r, g, b) {
return (r + g + b) / 3;

@@ -60,3 +78,3 @@ };

//convert rgb to gray
PamDiff.prototype._lightness = function (r, g, b) {
PamDiff.prototype._desaturationToGray = function (r, g, b) {
return (Math.max(r, g, b) + Math.min(r, g, b)) / 2;

@@ -66,79 +84,122 @@ };

//convert rgb to gray
PamDiff.prototype._luminosity = function (r, g, b) {
return 0.299 * r + 0.587 * g + 0.114 * b;
//convert rgb to gray using approximate luminosity weights (0.3 red, 0.59 green, 0.11 blue)
PamDiff.prototype._luminosityToGray = function (r, g, b) {
    const weighted = 0.3 * r + 0.59 * g + 0.11 * b;
    return weighted;
};
PamDiff.prototype._transform = function (chunk, encoding, callback) {
//console.log('transform', chunk.pixels.length);
//single-channel rgb-to-gray conversions: each returns one channel unchanged as the gray value

//convert rgb to gray by using the red channel only
PamDiff.prototype._redToGray = function (r, g, b) {
    return r;
};

//convert rgb to gray by using the green channel only
PamDiff.prototype._greenToGray = function (r, g, b) {
    return g;
};

//convert rgb to gray by using the blue channel only
PamDiff.prototype._blueToGray = function (r, g, b) {
    return b;
};
//compare blackandwhite chunks (1 byte per pixel, value 0 or 1) against the previous chunk
PamDiff.prototype._blackAndWhitePixelDiff = function (chunk) {
    this._newPix = chunk.pixels;
    const width = this._width;
    const changed = [];
    for (let i = 0; i < this._length; i++) {
        //any inequality counts as a change; derive x/y from the flat index
        if (this._oldPix[i] !== this._newPix[i]) {
            changed.push({x: i % width, y: Math.floor(i / width)});
        }
    }
    //one byte per pixel, so this._length is also the pixel count
    const percent = Math.ceil(changed.length / this._length * 100);
    if (percent >= this._percent) {
        this.emit('diff', {diffPix: changed, percent: percent});
    }
    //current chunk becomes the comparison baseline for the next one
    this._oldPix = this._newPix;
};
//NOTE(review): this span is a diff-view artifact from the version-compare page. It interleaves
//the 0.1.0 compare-engine setup (the _comparePixels switch below) with the 0.0.1 body of
//_grayScalePixelDiff (the loop after the default throw). The braces do not balance and this is
//not valid JavaScript as shown — recover the real function bodies from the package tarballs
//before editing.
PamDiff.prototype._grayScalePixelDiff = function (chunk) {
this._newPix = chunk.pixels;
if (this._oldPix !== null) {
this._comparePixels(this._oldPix, this._newPix);
} else {
//first chunk: size and select the pixel-difference engine based on the pam header
let width = parseInt(chunk.width);
let length = this._newPix.length;
let sensitivity = this._sensitivity;//amount of difference between pixels
let threshold = this._threshold;//number of pixels that are different
//let height = parseInt(chunk.height);
//let maxval = parseInt(chunk.maxval);
//this._diffPixels = new Uint8ClampedArray(width * height * 4);//size it once based on rgba output, reuse for each diff
switch (chunk.tupltype) {
case 'blackandwhite' ://each pixel will only be 1 byte with value 0 or 1, depth is 1
this._comparePixels = (oldPix, newPix) => {
let coords = [];
for (let i = 0, x = 0, y = 0, p = 0; i < length; i++, x++, p+=4) {
if (x === width) {
x = 0;
y++;
}
if (oldPix[i] !== newPix[i]) {
coords.push({p: p, x: x, y: y});
}
}
if (coords.length > threshold) {
this.emit('diff', {coords: coords});
}
};
break;
case 'grayscale' ://each pixel will only be 1 byte with value 0 to 255, depth is 1
this._comparePixels = (oldPix, newPix) => {
let coords = [];
for (let i = 0, x = 0, y = 0, p = 0; i < length; i++, x++, p+=4) {
if (x === width) {
x = 0;
y++;
}
if (Math.abs(oldPix[i] - newPix[i]) > sensitivity) {
coords.push({p: p, x: x, y: y});
}
}
if (coords.length > threshold) {
this.emit('diff', {coords: coords, percent: Math.round(coords.length/length * 100)});
}
};
break;
case 'rgb' ://each pixel will be 3 bytes with value 0 to 255, depth is 3
case 'rgb_alpha' ://each pixel will be 4 bytes with value 0 to 255, depth is 4
let depth = parseInt(chunk.depth);
let toGray = this._getGrayEngine(this._rgb2gray);
this._comparePixels = (oldPix, newPix) => {
let coords = [];
for (let i = 0, x = 0, y = 0, p = 0; i < length; i+=depth, x++, p+=4) {
if (x === width) {
x = 0;
y++;
}
if (Math.abs(toGray(oldPix[i], oldPix[i + 1], oldPix[i + 2]) - toGray(newPix[i], newPix[i + 1], newPix[i + 2])) > sensitivity) {
coords.push({p: p, x: x, y: y});
}
}
if (coords.length > threshold) {
this.emit('diff', {coords: coords});
}
};
break;
default :
throw Error(`Unsupported tupltype: ${chunk.tupltype}. Supported tupltypes include grayscale(gray), blackandwhite(monob), rgb(rgb24), and rgb_alpha(rgba).`);
//NOTE(review): everything below this line is the removed 0.0.1 grayscale comparison body,
//spliced here by the diff renderer — it does not belong inside the switch above.
let diffPix = [];
for (let i = 0, x = 0, y = 0; i < this._length; i++, x++) {
if (x === this._width) {
x = 0;
y++;
}
if (Math.abs(this._oldPix[i] - this._newPix[i]) >= this._difference) {
diffPix.push({x: x, y: y});
}
}
let percent = Math.ceil(diffPix.length / this._length * 100);
if (percent >= this._percent) {
this.emit('diff', {diffPix: diffPix, percent: percent});
}
this._oldPix = this._newPix;
};
//compare rgb chunks (3 bytes per pixel) against the previous chunk and emit 'diff'
//when the percent of changed pixels reaches this._percent
PamDiff.prototype._rgbPixelDiff = function (chunk) {
    this._newPix = chunk.pixels;
    let diffPix = [];
    //this._length counts BYTES; with a depth of 3 the pixel count is this._length / 3
    const pixelCount = this._length / 3;
    for (let i = 0, x = 0, y = 0; i < this._length; i += 3, x++) {
        if (x === this._width) {
            x = 0;
            y++;
        }
        //compare the grayscale values of each pixel pair against the difference threshold
        if (Math.abs(this._grayscale(this._oldPix[i], this._oldPix[i + 1], this._oldPix[i + 2]) - this._grayscale(this._newPix[i], this._newPix[i + 1], this._newPix[i + 2])) >= this._difference) {
            diffPix.push({x: x, y: y});
        }
    }
    //bug fix: divide by the pixel count, not the byte length, so percent reflects the
    //fraction of pixels that changed (the old divisor underreported by a factor of 3)
    let percent = Math.ceil(diffPix.length / pixelCount * 100);
    if (percent >= this._percent) {
        this.emit('diff', {diffPix: diffPix, percent: percent});
    }
    //current chunk becomes the comparison baseline for the next one
    this._oldPix = this._newPix;
};
//compare rgb_alpha chunks (4 bytes per pixel, alpha ignored) against the previous chunk
//and emit 'diff' when the percent of changed pixels reaches this._percent
PamDiff.prototype._rgbAlphaPixelDiff = function (chunk) {
    this._newPix = chunk.pixels;
    let diffPix = [];
    //this._length counts BYTES; with a depth of 4 the pixel count is this._length / 4
    const pixelCount = this._length / 4;
    for (let i = 0, x = 0, y = 0; i < this._length; i += 4, x++) {
        if (x === this._width) {
            x = 0;
            y++;
        }
        //compare the grayscale values of each pixel pair against the difference threshold
        if (Math.abs(this._grayscale(this._oldPix[i], this._oldPix[i + 1], this._oldPix[i + 2]) - this._grayscale(this._newPix[i], this._newPix[i + 1], this._newPix[i + 2])) >= this._difference) {
            diffPix.push({x: x, y: y});
        }
    }
    //bug fix: divide by the pixel count, not the byte length, so percent reflects the
    //fraction of pixels that changed (the old divisor underreported by a factor of 4)
    let percent = Math.ceil(diffPix.length / pixelCount * 100);
    if (percent >= this._percent) {
        this.emit('diff', {diffPix: diffPix, percent: percent});
    }
    //current chunk becomes the comparison baseline for the next one
    this._oldPix = this._newPix;
};
//handle the first pam chunk: record dimensions, store it as the baseline, and
//dispatch all later chunks to the pixel-diff engine matching the tupltype
PamDiff.prototype._parseFirstChunk = function (chunk) {
    this._width = parseInt(chunk.width);
    this._oldPix = chunk.pixels;
    this._length = this._oldPix.length;
    const engines = {
        blackandwhite: this._blackAndWhitePixelDiff,
        grayscale: this._grayScalePixelDiff,
        rgb: this._rgbPixelDiff,
        rgb_alpha: this._rgbAlphaPixelDiff
    };
    if (!Object.prototype.hasOwnProperty.call(engines, chunk.tupltype)) {
        throw Error(`Unsupported tupltype: ${chunk.tupltype}. Supported tupltypes include grayscale(gray), blackandwhite(monob), rgb(rgb24), and rgb_alpha(rgba).`);
    }
    this._parseChunk = engines[chunk.tupltype];
};
PamDiff.prototype._transform = function (chunk, encoding, callback) {
this._parseChunk(chunk);
callback();

@@ -148,5 +209,7 @@ };

PamDiff.prototype._flush = function (callback) {
this._newPix = null;
this._oldPix = null;
this._comparePixels = null;
delete this._oldPix;
delete this._newPix;
delete this._width;
delete this._length;
this._parseChunk = this._parseFirstChunk;
callback();

@@ -153,0 +216,0 @@ };

{
"name": "pam-diff",
"version": "0.0.1",
"version": "0.1.0",
"description": "Measure differences between pixel arrays extracted from pam images",

@@ -26,6 +26,3 @@ "main": "index.js",

"homepage": "https://github.com/kevinGodell/pam-diff#readme",
"dependencies": {
"stream": "0.0.2",
"util": "^0.10.3"
}
"dependencies": {}
}
# pam-diff
Measure differences between pixel arrays extracted from pam images
Measure differences between pixel arrays extracted from pam images. Works well with node module [pipe2pam](https://www.npmjs.com/package/pipe2pam) to extract pam images from an ffmpeg pipe. Supported tupltypes are rgb (rgb24), rgb_alpha (rgba), grayscale (gray), and blackandwhite (monob).
example coming soon...
### installation:
```
npm install pam-diff --save
```
**To run the example below, also install pipe2pam:**
```
npm install pipe2pam --save
```
### usage:
The following [example](https://github.com/kevinGodell/pam-diff/tree/master/examples/example.js) uses ffmpeg's testsrc to simulate a video input and generate 1000 downscaled grayscale pam images at a rate of 1 per second. The pam images are piped from ffmpeg's stdout into pipe2pam to parse them into pam objects. The pam objects are then piped into pam-diff to measure pixel differences. For each compared pixel that has a **difference** that exceeds the setting, it will be added to an array of x y coordinates. If the **percent** of changed pixels exceeds the setting, a **diff** event will be emitted which contains an array of pixel coordinates that have changed.
```
const P2P = require('pipe2pam');
const PamDiff = require('pam-diff');
const spawn = require('child_process').spawn;
const params = [
'-loglevel',
'quiet',
/* use hardware acceleration */
//'-hwaccel',
//'auto', //vda, videotoolbox, none, auto
/* use an artificial video input */
'-re',
'-f',
'lavfi',
'-i',
'testsrc=size=1920x1080:rate=15',
/* use an rtsp ip cam video input */
/*'-rtsp_transport',
'tcp', //udp, http, tcp
'-i',
'rtsp://192.168.1.22:554/user=admin_password=pass_channel=1_stream=0.sdp',*/
'-an',
'-c:v',
'pam',
'-f',
'image2pipe',
'-pix_fmt',
'gray',//rgb24, rgba, monob, gray
'-vf',
'fps=1,scale=iw*1/6:ih*1/6',
'-frames',
'1000',
'pipe:1'
];
const ffmpeg = spawn('ffmpeg', params);
console.log(ffmpeg.spawnargs.join(' '));
ffmpeg.on('error', function(error) {
console.log(error);
});
ffmpeg.on('exit', function(code, signal) {
console.log('exit', code, signal);
});
const p2p = new P2P();
let counter = 0;
p2p.on('pam', function(data) {
//you do not have to do anything here if you are just piping this data to pam-diff
console.log('received pam', ++counter);
});
const pamDiff = new PamDiff({grayscale: 'average', difference: 4, percent: 5});
pamDiff.on('diff', function(data) {
//further analyze the pixels for regions or trigger motion detection from this event
console.log(`${data.diffPix.length} pixels different, ${data.percent}%`);
});
ffmpeg.stdout.pipe(p2p).pipe(pamDiff);
```
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc