@privateid/cryptonets-web-sdk - npm Package Compare versions

Comparing version 2.0.11-alpha to 2.0.12-alpha

wasm/face_mask/noSimd/privid_fhe.js


dist/faceModule.d.ts

@@ -106,3 +106,5 @@ import { CameraFaceMode, FaceStatuses, Base64, DocType } from './types';

portrait?: Base64;
-}) => void, element?: string, deviceId?: string): Promise<unknown>;
+}) => void, config?: {
+pin: number | string;
+}, element?: string, deviceId?: string): Promise<unknown>;
export declare function predictOneFA(callback: (input: {

@@ -114,3 +116,5 @@ status: FaceStatuses;

portrait?: Base64;
-}) => void, element?: string, deviceId?: string): Promise<unknown>;
+}) => void, config?: {
+pin: number | string;
+}, element?: string, deviceId?: string): Promise<unknown>;
/**

@@ -117,0 +121,0 @@ * This function performs continuious predict/enroll, the camera should already be open. the functions performs the identification return the result then restart again.
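Taken together, the faceModule.d.ts changes above insert an optional config argument, carrying a pin, between the callback and the existing element/deviceId parameters of enrollOneFA and predictOneFA. A minimal usage sketch follows; it assumes both functions are re-exported from the package entry point and only relies on the status field declared in the callback typings:

// Hedged usage sketch; the import path and callback handling are assumptions.
import { enrollOneFA, predictOneFA } from '@privateid/cryptonets-web-sdk';

// The new optional config object now sits between the callback and element.
await enrollOneFA(
  (input) => console.log('enroll status:', input.status),
  { pin: 123456 },       // config?: { pin: number | string }
  'videoElementId',      // element?: string
  undefined,             // deviceId?: string
);

// Callers that previously passed element as the second argument would now
// pass a config object (or undefined) in that position.
await predictOneFA((input) => console.log('predict status:', input.status), { pin: '123456' });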

@@ -731,3 +731,3 @@ /* eslint-disable default-param-last */

}
-export function enrollOneFA(callback, element, deviceId) {
+export function enrollOneFA(callback, config, element, deviceId) {
return __awaiter(this, void 0, void 0, function* () {

@@ -776,3 +776,4 @@ printLogs('--------------------ENROLL_ONE_FA-----------------------', '', debugType);

privid_wasm_result = createResultFunc(callback, portrait);
-const { result, href } = yield FHE_enrollOnefa(originalImages, isSimd, debugType, proxy(privid_wasm_result));
+const configJSON = JSON.stringify(config);
+const { result, href } = yield FHE_enrollOnefa(originalImages, isSimd, debugType, proxy(privid_wasm_result), configJSON);
if (isDebugWithImages) {

@@ -786,3 +787,3 @@ yield createImages(href, ImageType.augmented, true);

// eslint-disable-next-line consistent-return
-return enrollOneFA(callback, element, deviceId);
+return enrollOneFA(callback, config, element, deviceId);
}

@@ -795,3 +796,3 @@ // if (element) stream.getTracks().forEach((track) => track.stop());

}
-export function predictOneFA(callback, element, deviceId) {
+export function predictOneFA(callback, config, element, deviceId) {
return __awaiter(this, void 0, void 0, function* () {

@@ -827,3 +828,4 @@ printLogs('---------------------PREDICT_ONE_FA----------------------', '', debugType);

privid_wasm_result = createResultFunc(callback, portrait);
-const { result, href } = yield FHE_predictOnefa(originalImages, isSimd, debugType, proxy(privid_wasm_result));
+const configJSON = JSON.stringify(config);
+const { result, href } = yield FHE_predictOnefa(originalImages, isSimd, debugType, proxy(privid_wasm_result), configJSON);
if (isDebugWithImages) {

@@ -837,3 +839,3 @@ yield createImages(href, ImageType.augmented, true);

// eslint-disable-next-line consistent-return
-return predictOneFA(callback, element, deviceId);
+return predictOneFA(callback, config, element, deviceId);
}

@@ -840,0 +842,0 @@ // if (element && !action) stream.getTracks().forEach((track) => track.stop());
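In the compiled module (presumably dist/faceModule.js), the new config argument is serialized with JSON.stringify and passed through to FHE_enrollOnefa / FHE_predictOnefa as an extra trailing argument, and it is also threaded through each function's recursive re-invocation. The illustration below (not part of the package) shows the configJSON values that reach the FHE_* helpers for a few representative inputs:

// Values produced by the JSON.stringify(config) call added above.
JSON.stringify({ pin: 123456 });  // '{"pin":123456}'
JSON.stringify({ pin: '1234' });  // '{"pin":"1234"}'
JSON.stringify(undefined);        // undefined, i.e. when the caller omits config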

@@ -72,7 +72,7 @@ export declare type Base64 = ArrayBuffer | string;

}>;
-FHE_enrollOnefa: (originalImages: Array<ImageData>, simd: boolean, debug_type: string, cb: any) => Promise<{
+FHE_enrollOnefa: (originalImages: Array<ImageData>, simd: boolean, debug_type: string, cb: any, config?: string) => Promise<{
result: number;
href: ImageData[];
}>;
-FHE_predictOnefa: (originalImages: Array<ImageData>, simd: boolean, debug_type: string, cb: any) => Promise<{
+FHE_predictOnefa: (originalImages: Array<ImageData>, simd: boolean, debug_type: string, cb: any, config?: string) => Promise<{
result: number;

@@ -79,0 +79,0 @@ href: ImageData[];

@@ -8,6 +8,6 @@ import { Base64, ImageType, LOGTYPE } from './types';

href: ImageData[];
-}>, FHE_enrollOnefa: (originalImages: ImageData[], simd: boolean, debug_type: string, cb: any) => Promise<{
+}>, FHE_enrollOnefa: (originalImages: ImageData[], simd: boolean, debug_type: string, cb: any, config?: string) => Promise<{
result: number;
href: ImageData[];
-}>, FHE_predictOnefa: (originalImages: ImageData[], simd: boolean, debug_type: string, cb: any) => Promise<{
+}>, FHE_predictOnefa: (originalImages: ImageData[], simd: boolean, debug_type: string, cb: any, config?: string) => Promise<{
result: number;

@@ -14,0 +14,0 @@ href: ImageData[];

@@ -5,3 +5,3 @@ {

"description": "CryptoNets WebAssembly SDK",
"version": "2.0.11-alpha",
"version": "2.0.12-alpha",
"keywords": [

@@ -8,0 +8,0 @@ "Face recognition",

{
"version": "9301.1"
"version": "10011.1"
}
{
"version": "9301.0"
"version": "10011.0"
}

@@ -35,3 +35,3 @@ /* eslint-disable no-eval */

} else if (['face', 'face_mask'].includes(module)) {
-const moduleName = module === 'face' ? 'tflite_new' : 'tflite_new_mask';
+const moduleName = 'privid_fhe';
const modulePath = simd ? 'simd' : 'noSimd';
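The loader change above collapses the face and face_mask branches onto a single privid_fhe module while keeping the simd/noSimd split. A hedged sketch of the resolved file name follows; the actual path join is not shown in this diff, but the wasm/face_mask/noSimd/privid_fhe.js entry listed in this compare suggests the layout:

// Assumed resolution; the real loader's path construction may differ.
function resolveWasmGlue(module: 'face' | 'face_mask', simd: boolean): string {
  const moduleName = 'privid_fhe';              // was 'tflite_new' / 'tflite_new_mask'
  const modulePath = simd ? 'simd' : 'noSimd';
  return `wasm/${module}/${modulePath}/${moduleName}.js`;
}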

@@ -485,3 +485,3 @@

-const FHE_enrollOnefa = (originalImages, simd, debug_type = 0, cb) =>
+const FHE_enrollOnefa = (originalImages, simd, debug_type = 0, cb, config = {}) =>
new Promise(async (resolve) => {

@@ -502,8 +502,8 @@ privid_wasm_result = cb;

-// const encoder = new TextEncoder();
-// const config_bytes = encoder.encode(`${config}\0`);
+const encoder = new TextEncoder();
+const config_bytes = encoder.encode(`${config}\0`);
-// const configInputSize = config.length;
-// const configInputPtr = wasmPrivModule._malloc(configInputSize);
-// wasmPrivModule.HEAP8.set(config_bytes, configInputPtr / config_bytes.BYTES_PER_ELEMENT);
+const configInputSize = config.length;
+const configInputPtr = wasmPrivModule._malloc(configInputSize);
+wasmPrivModule.HEAP8.set(config_bytes, configInputPtr / config_bytes.BYTES_PER_ELEMENT);

@@ -546,4 +546,4 @@ const imageInputSize = imageInput.length * imageInput.BYTES_PER_ELEMENT;

sessionSecPtr /* session pointer */,
-null /* user configuration */,
-0 /* user configuration length */,
+configInputPtr,
+configInputSize,
imageInputPtr /* input images */,

@@ -596,3 +596,3 @@ numImages /* number of input images */,

-const FHE_predictOnefa = (originalImages, simd, debug_type = 0, cb) =>
+const FHE_predictOnefa = (originalImages, simd, debug_type = 0, cb, config = {}) =>
new Promise(async (resolve) => {

@@ -613,2 +613,9 @@ privid_wasm_result = cb;

+const encoder = new TextEncoder();
+const config_bytes = encoder.encode(`${config}\0`);
+const configInputSize = config.length;
+const configInputPtr = wasmPrivModule._malloc(configInputSize);
+wasmPrivModule.HEAP8.set(config_bytes, configInputPtr / config_bytes.BYTES_PER_ELEMENT);
const imageInputSize = imageInput.length * imageInput.BYTES_PER_ELEMENT;

@@ -647,4 +654,4 @@ const imageInputPtr = wasmPrivModule._malloc(imageInputSize);

sessionSecPtr /* session pointer */,
-null /* user configuration */,
-0 /* user configuration length */,
+configInputPtr,
+configInputSize,
imageInputPtr /* input images */,

@@ -729,3 +736,3 @@ numImages /* number of input images */,

console.log('[FAR_DEBUG] : is_valid result = ', result);
-if ( result === 0 ) {
+if ( result >= 0 ) {
console.log('[FAR_DEBUG] : Operation executed successfully. Result shall be returned in the JS callback synchronously or asynchronously');

@@ -768,3 +775,3 @@ } else {

const data_BYTES_PER_ELEMENT = 2;
-const voiceSizeMax = fs * channels * recordDuration * data_BYTES_PER_ELEMENT;
+const voiceSizeMax = fs * channels * recordDuration * data_BYTEPER_ELEMENT;
const voiceSize = data.byteLength;

@@ -771,0 +778,0 @@
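Both FHE_enrollOnefa and FHE_predictOnefa now marshal the config string into WASM memory with the usual Emscripten pattern: encode a NUL-terminated UTF-8 buffer, _malloc a region, copy it into HEAP8, and pass the pointer and size in the slots that previously received null and 0 for the user configuration. Below is a generic, self-contained sketch of that pattern, not the package's exact code; wasmPrivModule and its exports are assumed, and this version sizes the allocation from the encoded byte length (the diff uses config.length, which does not account for the trailing NUL or multi-byte characters):

// Sketch of the heap-marshaling pattern; names and module shape are assumptions.
declare const wasmPrivModule: {
  _malloc(size: number): number;
  _free(ptr: number): void;
  HEAP8: Int8Array;
};

function writeConfigString(config: string): { ptr: number; size: number } {
  // UTF-8 encode with an explicit NUL terminator for the native side.
  const bytes = new TextEncoder().encode(`${config}\0`);
  // Allocate exactly as many bytes as were encoded.
  const ptr = wasmPrivModule._malloc(bytes.length);
  // Copy the buffer into the module heap at the allocated offset.
  wasmPrivModule.HEAP8.set(bytes, ptr);
  return { ptr, size: bytes.length };
}

// The caller passes ptr/size where null /* user configuration */ and
// 0 /* user configuration length */ used to go, and calls _free(ptr)
// once the native call has returned.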

Sorry, the diff of this file is not supported yet
