Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@noir-lang/backend_barretenberg

Package Overview
Dependencies
Maintainers
1
Versions
263
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@noir-lang/backend_barretenberg - npm Package Compare versions

Comparing version 0.32.0 to 0.33.0-e349f30.nightly

17

lib/cjs/backend.d.ts

@@ -40,1 +40,18 @@ import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types';

}
export declare class UltraHonkBackend implements Backend, VerifierBackend {
protected options: BackendOptions;
protected api: Barretenberg;
protected acirUncompressedBytecode: Uint8Array;
constructor(acirCircuit: CompiledCircuit, options?: BackendOptions);
/** @ignore */
instantiate(): Promise<void>;
generateProof(decompressedWitness: Uint8Array): Promise<ProofData>;
verifyProof(proofData: ProofData): Promise<boolean>;
getVerificationKey(): Promise<Uint8Array>;
generateRecursiveProofArtifacts(_proofData: ProofData, _numOfPublicInputs: number): Promise<{
proofAsFields: string[];
vkAsFields: string[];
vkHash: string;
}>;
destroy(): Promise<void>;
}

110

lib/cjs/backend.js

@@ -26,3 +26,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.BarretenbergBackend = void 0;
exports.UltraHonkBackend = exports.BarretenbergBackend = void 0;
const fflate_1 = require("fflate");

@@ -84,3 +84,3 @@ const serialize_js_1 = require("./serialize.js");

const proof = proofWithPublicInputs.slice(splitIndex);
const publicInputs = (0, public_inputs_js_1.deflattenPublicInputs)(publicInputsConcatenated);
const publicInputs = (0, public_inputs_js_1.deflattenFields)(publicInputsConcatenated);
return { proof, publicInputs };

@@ -139,1 +139,107 @@ }

exports.BarretenbergBackend = BarretenbergBackend;
// Buffers are prepended with their size. The size takes 4 bytes.
const serializedBufferSize = 4;
const fieldByteSize = 32;
const publicInputOffset = 3;
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;
class UltraHonkBackend {
options;
// These type assertions are used so that we don't
// have to initialize `api` in the constructor.
// These are initialized asynchronously in the `init` function,
// constructors cannot be asynchronous which is why we do this.
api;
acirUncompressedBytecode;
constructor(acirCircuit, options = { threads: 1 }) {
this.options = options;
const acirBytecodeBase64 = acirCircuit.bytecode;
this.acirUncompressedBytecode = (0, serialize_js_1.acirToUint8Array)(acirBytecodeBase64);
}
/** @ignore */
async instantiate() {
if (!this.api) {
if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) {
this.options.threads = navigator.hardwareConcurrency;
}
else {
try {
const os = await Promise.resolve().then(() => __importStar(require('os')));
this.options.threads = os.cpus().length;
}
catch (e) {
console.log('Could not detect environment. Falling back to one thread.', e);
}
}
const { Barretenberg, RawBuffer, Crs } = await Promise.resolve().then(() => __importStar(require('@aztec/bb.js')));
const api = await Barretenberg.new(this.options);
const honkRecursion = true;
const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode, honkRecursion);
const crs = await Crs.new(subgroupSize + 1);
await api.commonInitSlabAllocator(subgroupSize);
await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data()));
// We don't init a proving key here in the Honk API
// await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode);
this.api = api;
}
}
async generateProof(decompressedWitness) {
await this.instantiate();
const proofWithPublicInputs = await this.api.acirProveUltraHonk(this.acirUncompressedBytecode, (0, fflate_1.decompressSync)(decompressedWitness));
const proofAsStrings = (0, public_inputs_js_1.deflattenFields)(proofWithPublicInputs.slice(4));
const numPublicInputs = Number(proofAsStrings[1]);
// Account for the serialized buffer size at start
const publicInputsOffset = publicInputsOffsetBytes + serializedBufferSize;
// Get the part before and after the public inputs
const proofStart = proofWithPublicInputs.slice(0, publicInputsOffset);
const publicInputsSplitIndex = numPublicInputs * fieldByteSize;
const proofEnd = proofWithPublicInputs.slice(publicInputsOffset + publicInputsSplitIndex);
// Construct the proof without the public inputs
const proof = new Uint8Array([...proofStart, ...proofEnd]);
// Fetch the number of public inputs out of the proof string
const publicInputsConcatenated = proofWithPublicInputs.slice(publicInputsOffset, publicInputsOffset + publicInputsSplitIndex);
const publicInputs = (0, public_inputs_js_1.deflattenFields)(publicInputsConcatenated);
return { proof, publicInputs };
}
async verifyProof(proofData) {
const { RawBuffer } = await Promise.resolve().then(() => __importStar(require('@aztec/bb.js')));
const proof = (0, verifier_js_1.reconstructProofWithPublicInputsHonk)(proofData);
await this.instantiate();
const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf));
}
async getVerificationKey() {
await this.instantiate();
return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
}
// TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself
async generateRecursiveProofArtifacts(_proofData, _numOfPublicInputs) {
await this.instantiate();
// TODO(https://github.com/noir-lang/noir/issues/5661): This needs to be updated to handle recursive aggregation.
// There is still a proofAsFields method but we could consider getting rid of it as the proof itself
// is a list of field elements.
// UltraHonk also does not have public inputs directly prepended to the proof and they are still instead
// inserted at an offset.
// const proof = reconstructProofWithPublicInputs(proofData);
// const proofAsFields = (await this.api.acirProofAsFieldsUltraHonk(proof)).slice(numOfPublicInputs);
// TODO: perhaps we should put this in the init function. Need to benchmark
// TODO how long it takes.
const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf);
return {
// TODO(https://github.com/noir-lang/noir/issues/5661)
proofAsFields: [],
vkAsFields: vk.map((vk) => vk.toString()),
// We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts
// The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit
// they expect
vkHash: '',
};
}
async destroy() {
if (!this.api) {
return;
}
await this.api.destroy();
}
}
exports.UltraHonkBackend = UltraHonkBackend;

4

lib/cjs/index.d.ts

@@ -1,4 +0,4 @@

export { BarretenbergBackend } from './backend.js';
export { BarretenbergVerifier } from './verifier.js';
export { BarretenbergBackend, UltraHonkBackend } from './backend.js';
export { BarretenbergVerifier, UltraHonkVerifier } from './verifier.js';
export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types';
export { BackendOptions } from './types.js';
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BarretenbergVerifier = exports.BarretenbergBackend = void 0;
exports.UltraHonkVerifier = exports.BarretenbergVerifier = exports.UltraHonkBackend = exports.BarretenbergBackend = void 0;
var backend_js_1 = require("./backend.js");
Object.defineProperty(exports, "BarretenbergBackend", { enumerable: true, get: function () { return backend_js_1.BarretenbergBackend; } });
Object.defineProperty(exports, "UltraHonkBackend", { enumerable: true, get: function () { return backend_js_1.UltraHonkBackend; } });
var verifier_js_1 = require("./verifier.js");
Object.defineProperty(exports, "BarretenbergVerifier", { enumerable: true, get: function () { return verifier_js_1.BarretenbergVerifier; } });
Object.defineProperty(exports, "UltraHonkVerifier", { enumerable: true, get: function () { return verifier_js_1.UltraHonkVerifier; } });
import { WitnessMap } from '@noir-lang/types';
export declare function flattenPublicInputsAsArray(publicInputs: string[]): Uint8Array;
export declare function deflattenPublicInputs(flattenedPublicInputs: Uint8Array): string[];
export declare function flattenFieldsAsArray(fields: string[]): Uint8Array;
export declare function deflattenFields(flattenedFields: Uint8Array): string[];
export declare function witnessMapToPublicInputs(publicInputs: WitnessMap): string[];
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.witnessMapToPublicInputs = exports.deflattenPublicInputs = exports.flattenPublicInputsAsArray = void 0;
function flattenPublicInputsAsArray(publicInputs) {
const flattenedPublicInputs = publicInputs.map(hexToUint8Array);
exports.witnessMapToPublicInputs = exports.deflattenFields = exports.flattenFieldsAsArray = void 0;
function flattenFieldsAsArray(fields) {
const flattenedPublicInputs = fields.map(hexToUint8Array);
return flattenUint8Arrays(flattenedPublicInputs);
}
exports.flattenPublicInputsAsArray = flattenPublicInputsAsArray;
function deflattenPublicInputs(flattenedPublicInputs) {
exports.flattenFieldsAsArray = flattenFieldsAsArray;
function deflattenFields(flattenedFields) {
const publicInputSize = 32;
const chunkedFlattenedPublicInputs = [];
for (let i = 0; i < flattenedPublicInputs.length; i += publicInputSize) {
const publicInput = flattenedPublicInputs.slice(i, i + publicInputSize);
for (let i = 0; i < flattenedFields.length; i += publicInputSize) {
const publicInput = flattenedFields.slice(i, i + publicInputSize);
chunkedFlattenedPublicInputs.push(publicInput);

@@ -18,3 +18,3 @@ }

}
exports.deflattenPublicInputs = deflattenPublicInputs;
exports.deflattenFields = deflattenFields;
function witnessMapToPublicInputs(publicInputs) {

@@ -21,0 +21,0 @@ const publicInputIndices = [...publicInputs.keys()].sort((a, b) => a - b);

@@ -15,1 +15,12 @@ import { ProofData } from '@noir-lang/types';

export declare function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array;
export declare class UltraHonkVerifier {
private options;
private api;
constructor(options?: BackendOptions);
/** @ignore */
instantiate(): Promise<void>;
/** @description Verifies a proof */
verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise<boolean>;
destroy(): Promise<void>;
}
export declare function reconstructProofWithPublicInputsHonk(proofData: ProofData): Uint8Array;

@@ -26,3 +26,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.reconstructProofWithPublicInputs = exports.BarretenbergVerifier = void 0;
exports.reconstructProofWithPublicInputsHonk = exports.UltraHonkVerifier = exports.reconstructProofWithPublicInputs = exports.BarretenbergVerifier = void 0;
const public_inputs_js_1 = require("./public_inputs.js");

@@ -86,3 +86,3 @@ class BarretenbergVerifier {

// Flatten publicInputs
const publicInputsConcatenated = (0, public_inputs_js_1.flattenPublicInputsAsArray)(proofData.publicInputs);
const publicInputsConcatenated = (0, public_inputs_js_1.flattenFieldsAsArray)(proofData.publicInputs);
// Concatenate publicInputs and proof

@@ -93,1 +93,64 @@ const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]);

exports.reconstructProofWithPublicInputs = reconstructProofWithPublicInputs;
class UltraHonkVerifier {
options;
// These type assertions are used so that we don't
// have to initialize `api` in the constructor.
// These are initialized asynchronously in the `init` function,
// constructors cannot be asynchronous which is why we do this.
api;
constructor(options = { threads: 1 }) {
this.options = options;
}
/** @ignore */
async instantiate() {
if (!this.api) {
if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) {
this.options.threads = navigator.hardwareConcurrency;
}
else {
try {
const os = await Promise.resolve().then(() => __importStar(require('os')));
this.options.threads = os.cpus().length;
}
catch (e) {
console.log('Could not detect environment. Falling back to one thread.', e);
}
}
const { Barretenberg, RawBuffer, Crs } = await Promise.resolve().then(() => __importStar(require('@aztec/bb.js')));
// This is the number of CRS points necessary to verify a Barretenberg proof.
const NUM_CRS_POINTS_FOR_VERIFICATION = 0;
const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]);
await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION);
await api.srsInitSrs(new RawBuffer([] /* crs.getG1Data() */), NUM_CRS_POINTS_FOR_VERIFICATION, new RawBuffer(crs.getG2Data()));
this.api = api;
}
}
/** @description Verifies a proof */
async verifyProof(proofData, verificationKey) {
const { RawBuffer } = await Promise.resolve().then(() => __importStar(require('@aztec/bb.js')));
await this.instantiate();
const proof = reconstructProofWithPublicInputsHonk(proofData);
return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(verificationKey));
}
async destroy() {
if (!this.api) {
return;
}
await this.api.destroy();
}
}
exports.UltraHonkVerifier = UltraHonkVerifier;
const serializedBufferSize = 4;
const fieldByteSize = 32;
const publicInputOffset = 3;
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;
function reconstructProofWithPublicInputsHonk(proofData) {
// Flatten publicInputs
const publicInputsConcatenated = (0, public_inputs_js_1.flattenFieldsAsArray)(proofData.publicInputs);
const proofStart = proofData.proof.slice(0, publicInputsOffsetBytes + serializedBufferSize);
const proofEnd = proofData.proof.slice(publicInputsOffsetBytes + serializedBufferSize);
// Concatenate publicInputs and proof
const proofWithPublicInputs = Uint8Array.from([...proofStart, ...publicInputsConcatenated, ...proofEnd]);
return proofWithPublicInputs;
}
exports.reconstructProofWithPublicInputsHonk = reconstructProofWithPublicInputsHonk;

@@ -40,1 +40,18 @@ import { Backend, CompiledCircuit, ProofData, VerifierBackend } from '@noir-lang/types';

}
export declare class UltraHonkBackend implements Backend, VerifierBackend {
protected options: BackendOptions;
protected api: Barretenberg;
protected acirUncompressedBytecode: Uint8Array;
constructor(acirCircuit: CompiledCircuit, options?: BackendOptions);
/** @ignore */
instantiate(): Promise<void>;
generateProof(decompressedWitness: Uint8Array): Promise<ProofData>;
verifyProof(proofData: ProofData): Promise<boolean>;
getVerificationKey(): Promise<Uint8Array>;
generateRecursiveProofArtifacts(_proofData: ProofData, _numOfPublicInputs: number): Promise<{
proofAsFields: string[];
vkAsFields: string[];
vkHash: string;
}>;
destroy(): Promise<void>;
}
import { decompressSync as gunzip } from 'fflate';
import { acirToUint8Array } from './serialize.js';
import { deflattenPublicInputs } from './public_inputs.js';
import { reconstructProofWithPublicInputs } from './verifier.js';
import { deflattenFields } from './public_inputs.js';
import { reconstructProofWithPublicInputs, reconstructProofWithPublicInputsHonk } from './verifier.js';
// This is the number of bytes in a UltraPlonk proof

@@ -57,3 +57,3 @@ // minus the public inputs.

const proof = proofWithPublicInputs.slice(splitIndex);
const publicInputs = deflattenPublicInputs(publicInputsConcatenated);
const publicInputs = deflattenFields(publicInputsConcatenated);
return { proof, publicInputs };

@@ -111,1 +111,106 @@ }

}
// Buffers are prepended with their size. The size takes 4 bytes.
const serializedBufferSize = 4;
const fieldByteSize = 32;
const publicInputOffset = 3;
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;
export class UltraHonkBackend {
options;
// These type assertions are used so that we don't
// have to initialize `api` in the constructor.
// These are initialized asynchronously in the `init` function,
// constructors cannot be asynchronous which is why we do this.
api;
acirUncompressedBytecode;
constructor(acirCircuit, options = { threads: 1 }) {
this.options = options;
const acirBytecodeBase64 = acirCircuit.bytecode;
this.acirUncompressedBytecode = acirToUint8Array(acirBytecodeBase64);
}
/** @ignore */
async instantiate() {
if (!this.api) {
if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) {
this.options.threads = navigator.hardwareConcurrency;
}
else {
try {
const os = await import('os');
this.options.threads = os.cpus().length;
}
catch (e) {
console.log('Could not detect environment. Falling back to one thread.', e);
}
}
const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js');
const api = await Barretenberg.new(this.options);
const honkRecursion = true;
const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode, honkRecursion);
const crs = await Crs.new(subgroupSize + 1);
await api.commonInitSlabAllocator(subgroupSize);
await api.srsInitSrs(new RawBuffer(crs.getG1Data()), crs.numPoints, new RawBuffer(crs.getG2Data()));
// We don't init a proving key here in the Honk API
// await api.acirInitProvingKey(this.acirComposer, this.acirUncompressedBytecode);
this.api = api;
}
}
async generateProof(decompressedWitness) {
await this.instantiate();
const proofWithPublicInputs = await this.api.acirProveUltraHonk(this.acirUncompressedBytecode, gunzip(decompressedWitness));
const proofAsStrings = deflattenFields(proofWithPublicInputs.slice(4));
const numPublicInputs = Number(proofAsStrings[1]);
// Account for the serialized buffer size at start
const publicInputsOffset = publicInputsOffsetBytes + serializedBufferSize;
// Get the part before and after the public inputs
const proofStart = proofWithPublicInputs.slice(0, publicInputsOffset);
const publicInputsSplitIndex = numPublicInputs * fieldByteSize;
const proofEnd = proofWithPublicInputs.slice(publicInputsOffset + publicInputsSplitIndex);
// Construct the proof without the public inputs
const proof = new Uint8Array([...proofStart, ...proofEnd]);
// Fetch the number of public inputs out of the proof string
const publicInputsConcatenated = proofWithPublicInputs.slice(publicInputsOffset, publicInputsOffset + publicInputsSplitIndex);
const publicInputs = deflattenFields(publicInputsConcatenated);
return { proof, publicInputs };
}
async verifyProof(proofData) {
const { RawBuffer } = await import('@aztec/bb.js');
const proof = reconstructProofWithPublicInputsHonk(proofData);
await this.instantiate();
const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(vkBuf));
}
async getVerificationKey() {
await this.instantiate();
return await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
}
// TODO(https://github.com/noir-lang/noir/issues/5661): Update this to handle Honk recursive aggregation in the browser once it is ready in the backend itself
async generateRecursiveProofArtifacts(_proofData, _numOfPublicInputs) {
await this.instantiate();
// TODO(https://github.com/noir-lang/noir/issues/5661): This needs to be updated to handle recursive aggregation.
// There is still a proofAsFields method but we could consider getting rid of it as the proof itself
// is a list of field elements.
// UltraHonk also does not have public inputs directly prepended to the proof and they are still instead
// inserted at an offset.
// const proof = reconstructProofWithPublicInputs(proofData);
// const proofAsFields = (await this.api.acirProofAsFieldsUltraHonk(proof)).slice(numOfPublicInputs);
// TODO: perhaps we should put this in the init function. Need to benchmark
// TODO how long it takes.
const vkBuf = await this.api.acirWriteVkUltraHonk(this.acirUncompressedBytecode);
const vk = await this.api.acirVkAsFieldsUltraHonk(vkBuf);
return {
// TODO(https://github.com/noir-lang/noir/issues/5661)
proofAsFields: [],
vkAsFields: vk.map((vk) => vk.toString()),
// We use an empty string for the vk hash here as it is unneeded as part of the recursive artifacts
// The user can be expected to hash the vk inside their circuit to check whether the vk is the circuit
// they expect
vkHash: '',
};
}
async destroy() {
if (!this.api) {
return;
}
await this.api.destroy();
}
}

@@ -1,4 +0,4 @@

export { BarretenbergBackend } from './backend.js';
export { BarretenbergVerifier } from './verifier.js';
export { BarretenbergBackend, UltraHonkBackend } from './backend.js';
export { BarretenbergVerifier, UltraHonkVerifier } from './verifier.js';
export { Backend, CompiledCircuit, ProofData } from '@noir-lang/types';
export { BackendOptions } from './types.js';

@@ -1,2 +0,2 @@

export { BarretenbergBackend } from './backend.js';
export { BarretenbergVerifier } from './verifier.js';
export { BarretenbergBackend, UltraHonkBackend } from './backend.js';
export { BarretenbergVerifier, UltraHonkVerifier } from './verifier.js';
import { WitnessMap } from '@noir-lang/types';
export declare function flattenPublicInputsAsArray(publicInputs: string[]): Uint8Array;
export declare function deflattenPublicInputs(flattenedPublicInputs: Uint8Array): string[];
export declare function flattenFieldsAsArray(fields: string[]): Uint8Array;
export declare function deflattenFields(flattenedFields: Uint8Array): string[];
export declare function witnessMapToPublicInputs(publicInputs: WitnessMap): string[];

@@ -1,10 +0,10 @@

export function flattenPublicInputsAsArray(publicInputs) {
const flattenedPublicInputs = publicInputs.map(hexToUint8Array);
export function flattenFieldsAsArray(fields) {
const flattenedPublicInputs = fields.map(hexToUint8Array);
return flattenUint8Arrays(flattenedPublicInputs);
}
export function deflattenPublicInputs(flattenedPublicInputs) {
export function deflattenFields(flattenedFields) {
const publicInputSize = 32;
const chunkedFlattenedPublicInputs = [];
for (let i = 0; i < flattenedPublicInputs.length; i += publicInputSize) {
const publicInput = flattenedPublicInputs.slice(i, i + publicInputSize);
for (let i = 0; i < flattenedFields.length; i += publicInputSize) {
const publicInput = flattenedFields.slice(i, i + publicInputSize);
chunkedFlattenedPublicInputs.push(publicInput);

@@ -11,0 +11,0 @@ }

@@ -15,1 +15,12 @@ import { ProofData } from '@noir-lang/types';

export declare function reconstructProofWithPublicInputs(proofData: ProofData): Uint8Array;
export declare class UltraHonkVerifier {
private options;
private api;
constructor(options?: BackendOptions);
/** @ignore */
instantiate(): Promise<void>;
/** @description Verifies a proof */
verifyProof(proofData: ProofData, verificationKey: Uint8Array): Promise<boolean>;
destroy(): Promise<void>;
}
export declare function reconstructProofWithPublicInputsHonk(proofData: ProofData): Uint8Array;

@@ -1,2 +0,2 @@

import { flattenPublicInputsAsArray } from './public_inputs.js';
import { flattenFieldsAsArray } from './public_inputs.js';
export class BarretenbergVerifier {

@@ -58,3 +58,3 @@ options;

// Flatten publicInputs
const publicInputsConcatenated = flattenPublicInputsAsArray(proofData.publicInputs);
const publicInputsConcatenated = flattenFieldsAsArray(proofData.publicInputs);
// Concatenate publicInputs and proof

@@ -64,1 +64,62 @@ const proofWithPublicInputs = Uint8Array.from([...publicInputsConcatenated, ...proofData.proof]);

}
export class UltraHonkVerifier {
options;
// These type assertions are used so that we don't
// have to initialize `api` in the constructor.
// These are initialized asynchronously in the `init` function,
// constructors cannot be asynchronous which is why we do this.
api;
constructor(options = { threads: 1 }) {
this.options = options;
}
/** @ignore */
async instantiate() {
if (!this.api) {
if (typeof navigator !== 'undefined' && navigator.hardwareConcurrency) {
this.options.threads = navigator.hardwareConcurrency;
}
else {
try {
const os = await import('os');
this.options.threads = os.cpus().length;
}
catch (e) {
console.log('Could not detect environment. Falling back to one thread.', e);
}
}
const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js');
// This is the number of CRS points necessary to verify a Barretenberg proof.
const NUM_CRS_POINTS_FOR_VERIFICATION = 0;
const [api, crs] = await Promise.all([Barretenberg.new(this.options), Crs.new(NUM_CRS_POINTS_FOR_VERIFICATION)]);
await api.commonInitSlabAllocator(NUM_CRS_POINTS_FOR_VERIFICATION);
await api.srsInitSrs(new RawBuffer([] /* crs.getG1Data() */), NUM_CRS_POINTS_FOR_VERIFICATION, new RawBuffer(crs.getG2Data()));
this.api = api;
}
}
/** @description Verifies a proof */
async verifyProof(proofData, verificationKey) {
const { RawBuffer } = await import('@aztec/bb.js');
await this.instantiate();
const proof = reconstructProofWithPublicInputsHonk(proofData);
return await this.api.acirVerifyUltraHonk(proof, new RawBuffer(verificationKey));
}
async destroy() {
if (!this.api) {
return;
}
await this.api.destroy();
}
}
const serializedBufferSize = 4;
const fieldByteSize = 32;
const publicInputOffset = 3;
const publicInputsOffsetBytes = publicInputOffset * fieldByteSize;
export function reconstructProofWithPublicInputsHonk(proofData) {
// Flatten publicInputs
const publicInputsConcatenated = flattenFieldsAsArray(proofData.publicInputs);
const proofStart = proofData.proof.slice(0, publicInputsOffsetBytes + serializedBufferSize);
const proofEnd = proofData.proof.slice(publicInputsOffsetBytes + serializedBufferSize);
// Concatenate publicInputs and proof
const proofWithPublicInputs = Uint8Array.from([...proofStart, ...publicInputsConcatenated, ...proofEnd]);
return proofWithPublicInputs;
}

@@ -6,3 +6,3 @@ {

],
"version": "0.32.0",
"version": "0.33.0-e349f30.nightly",
"packageManager": "yarn@3.5.1",

@@ -45,4 +45,4 @@ "license": "(MIT OR Apache-2.0)",

"dependencies": {
"@aztec/bb.js": "0.46.1",
"@noir-lang/types": "0.32.0",
"@aztec/bb.js": "0.51.1",
"@noir-lang/types": "0.33.0-e349f30.nightly",
"fflate": "^0.8.0"

@@ -49,0 +49,0 @@ },

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc