@guildofweavers/genstark
Advanced tools
Comparing version 0.5.1 to 0.5.2
@@ -10,3 +10,3 @@ declare module '@guildofweavers/genstark' { | ||
// -------------------------------------------------------------------------------------------- | ||
export { FiniteField, PrimeField, Polynom } from '@guildofweavers/galois'; | ||
export { FiniteField, createPrimeField, Vector, Matrix } from '@guildofweavers/galois'; | ||
export { MerkleTree, BatchMerkleProof, HashAlgorithm, getHashDigestSize } from '@guildofweavers/merkle'; | ||
@@ -31,2 +31,11 @@ | ||
export interface OptimizationOptions { | ||
/** Initial number of bytes to allocate for WASM optimization */ | ||
initialMemory: number; | ||
/** Maximum number of bytes to allocate for WASM optimization */ | ||
maximumMemory: number; | ||
} | ||
export class Stark { | ||
@@ -37,6 +46,7 @@ | ||
* @param source AirScript source for the STARK | ||
* @param options Security options for the STARK instance | ||
* @param security Security options for the STARK instance | ||
* @param optimization WebAssembly optimization options; null turns optimization off | ||
* @param logger Optional logger; defaults to console logging; set to null to disable | ||
*/ | ||
constructor(source: string, options?: Partial<SecurityOptions>, logger?: Logger); | ||
constructor(source: string, security?: Partial<SecurityOptions>, optimization?: Partial<OptimizationOptions> | null, logger?: Logger); | ||
@@ -43,0 +53,0 @@ /** |
@@ -13,3 +13,3 @@ "use strict"; | ||
var galois_1 = require("@guildofweavers/galois"); | ||
exports.PrimeField = galois_1.PrimeField; | ||
exports.createPrimeField = galois_1.createPrimeField; | ||
//# sourceMappingURL=index.js.map |
@@ -15,2 +15,3 @@ "use strict"; | ||
let x = field.exp(context.rootOfUnity, BigInt(c.step * extensionFactor)); | ||
let zPoly = this.field.newVectorFrom([this.field.neg(x), this.field.one]); | ||
let data = rData.get(c.register); | ||
@@ -20,6 +21,6 @@ if (data) { | ||
data.ys.push(c.value); | ||
data.zPoly = this.field.mulPolys(data.zPoly, [-x, 1n]); | ||
data.zPoly = this.field.mulPolys(data.zPoly, zPoly); | ||
} | ||
else { | ||
data = { xs: [x], ys: [c.value], zPoly: [-x, 1n] }; | ||
data = { xs: [x], ys: [c.value], zPoly: zPoly }; | ||
rData.set(c.register, data); | ||
@@ -30,3 +31,5 @@ } | ||
for (let [register, data] of rData) { | ||
let iPoly = this.field.interpolate(data.xs, data.ys); | ||
let xs = this.field.newVectorFrom(data.xs); | ||
let ys = this.field.newVectorFrom(data.ys); | ||
let iPoly = this.field.interpolate(xs, ys); | ||
this.polys.set(register, { iPoly, zPoly: data.zPoly }); | ||
@@ -55,13 +58,19 @@ } | ||
evaluateAll(pEvaluations, domain) { | ||
const bEvaluations = new Array(); | ||
const pVectors = this.field.matrixRowsToVectors(pEvaluations); | ||
const pValues = new Array(); | ||
const iPolys = new Array(); | ||
const zPolys = new Array(); | ||
for (let [register, c] of this.polys) { | ||
let iEvaluations = this.field.evalPolyAtRoots(c.iPoly, domain); | ||
let zEvaluations = this.field.evalPolyAtRoots(c.zPoly, domain); | ||
let zEvaluationsInverse = this.field.invMany(zEvaluations); | ||
// B(x) = (P(x) - I(x)) / Z(x) | ||
let b = this.field.subVectorElements(pEvaluations[register], iEvaluations); | ||
b = this.field.mulVectorElements(b, zEvaluationsInverse); | ||
bEvaluations.push(b); | ||
pValues.push(pVectors[register]); | ||
iPolys.push(c.iPoly); | ||
zPolys.push(c.zPoly); | ||
} | ||
return bEvaluations; | ||
const iPolyMatrix = this.field.vectorsToMatrix(iPolys); | ||
const zPolyMatrix = this.field.vectorsToMatrix(zPolys); | ||
const iValues = this.field.evalPolysAtRoots(iPolyMatrix, domain); | ||
const zValues = this.field.evalPolysAtRoots(zPolyMatrix, domain); | ||
// B(x) = (P(x) - I(x)) / Z(x) | ||
const piValues = this.field.subMatrixElementsFromVectors(pValues, iValues); | ||
const bValues = this.field.divMatrixElements(piValues, zValues); | ||
return bValues; | ||
} | ||
@@ -68,0 +77,0 @@ } |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var TracePolynomial_1 = require("./TracePolynomial"); | ||
exports.TracePolynomial = TracePolynomial_1.TracePolynomial; | ||
var ZeroPolynomial_1 = require("./ZeroPolynomial"); | ||
@@ -13,2 +11,4 @@ exports.ZeroPolynomial = ZeroPolynomial_1.ZeroPolynomial; | ||
exports.LowDegreeProver = LowDegreeProver_1.LowDegreeProver; | ||
var QueryIndexGenerator_1 = require("./QueryIndexGenerator"); | ||
exports.QueryIndexGenerator = QueryIndexGenerator_1.QueryIndexGenerator; | ||
//# sourceMappingURL=index.js.map |
@@ -49,2 +49,5 @@ "use strict"; | ||
let allEvaluations, psbPowers; | ||
const pVectors = this.field.matrixRowsToVectors(pEvaluations); | ||
const bVectors = this.field.matrixRowsToVectors(bEvaluations); | ||
const dVectors = this.field.matrixRowsToVectors(dEvaluations); | ||
// raise degree of D evaluations to match combination degree | ||
@@ -65,7 +68,7 @@ const dEvaluations2 = []; | ||
for (let i of indexes) { | ||
dEvaluations2.push(this.field.mulVectorElements(dEvaluations[i], powers)); | ||
dEvaluations2.push(this.field.mulVectorElements(dVectors[i], powers)); | ||
} | ||
} | ||
// raise degree of P, S, B evaluations to match combination degree | ||
const psbEvaluations = [...pEvaluations, ...sEvaluations, ...bEvaluations]; | ||
// raise degree of P, S, B evaluations to match combination degree | ||
const psbEvaluations = [...pVectors, ...sEvaluations, ...bVectors]; | ||
const psbEvaluations2 = []; | ||
@@ -85,6 +88,6 @@ if (this.psbIncrementalDegree > 0n) { | ||
// put all evaluations together | ||
allEvaluations = [...psbEvaluations, ...psbEvaluations2, ...dEvaluations, ...dEvaluations2]; | ||
allEvaluations = [...psbEvaluations, ...psbEvaluations2, ...dVectors, ...dEvaluations2]; | ||
// compute a linear combination of all evaluations | ||
this.coefficients = this.field.prng(this.seed, allEvaluations.length); | ||
return this.field.combineMany(allEvaluations, this.coefficients); | ||
return this.field.combineManyVectors(allEvaluations, this.coefficients); | ||
} | ||
@@ -106,9 +109,10 @@ computeOne(x, pValues, sValues, bValues, dValues) { | ||
const psbValues = [...pValues, ...sValues, ...bValues]; | ||
let psbVector = this.field.newVectorFrom(psbValues); | ||
let psbValues2 = []; | ||
if (this.psbIncrementalDegree > 0n) { | ||
let power = this.field.exp(x, this.psbIncrementalDegree); | ||
psbValues2 = this.field.mulVectorElements(psbValues, power); | ||
psbValues2 = this.field.mulVectorElements(psbVector, power).toValues(); | ||
} | ||
// put all evaluations together | ||
allValues = [...psbValues, ...psbValues2, ...dValues, ...dValues2]; | ||
allValues = this.field.newVectorFrom([...psbValues, ...psbValues2, ...dValues, ...dValues2]); | ||
if (!this.coefficients) { | ||
@@ -115,0 +119,0 @@ this.coefficients = this.field.prng(this.seed, allValues.length); |
@@ -11,7 +11,6 @@ "use strict"; | ||
// -------------------------------------------------------------------------------------------- | ||
constructor(queryCount, hasAlgorithm, context) { | ||
constructor(field, indexGenerator, hasAlgorithm) { | ||
this.field = field; | ||
this.hashAlgorithm = hasAlgorithm; | ||
this.queryCount = queryCount; | ||
this.field = context.field; | ||
this.skipMultiplesOf = context.extensionFactor; | ||
this.indexGenerator = indexGenerator; | ||
} | ||
@@ -40,3 +39,3 @@ // PUBLIC METHODS | ||
let columnLength = Math.floor(rouDegree / 4); | ||
let positions = utils_1.getPseudorandomIndexes(columnRoot, this.queryCount, columnLength, this.skipMultiplesOf); | ||
let positions = this.indexGenerator.getFriIndexes(columnRoot, columnLength); | ||
// verify Merkle proof for the column | ||
@@ -80,6 +79,7 @@ if (!merkle_1.MerkleTree.verifyBatch(columnRoot, positions, columnProof, this.hashAlgorithm)) { | ||
// one point from the column that are on that y coordinate are on the same deg < 4 polynomial | ||
const polys = this.field.interpolateQuarticBatch(xs, ys); | ||
const polys = this.field.interpolateQuarticBatch(this.field.newMatrixFrom(xs), this.field.newMatrixFrom(ys)); | ||
const columnValues = utils_1.buffersToBigInts(columnProof.values); | ||
for (let i = 0; i < polys.length; i++) { | ||
if (this.field.evalPolyAt(polys[i], specialX) !== columnValues[i]) { | ||
const polyVectors = this.field.matrixRowsToVectors(polys); | ||
for (let i = 0; i < polys.rowCount; i++) { | ||
if (this.field.evalPolyAt(polyVectors[i], specialX) !== columnValues[i]) { | ||
throw new StarkError_1.StarkError(`Degree 4 polynomial didn't evaluate to column value at depth ${depth}`); | ||
@@ -104,3 +104,3 @@ } | ||
const remainder = utils_1.buffersToBigInts(proof.remainder); | ||
this.verifyRemainder(remainder, maxDegreePlus1, rootOfUnity); | ||
this.verifyRemainder(this.field.newVectorFrom(remainder), maxDegreePlus1, rootOfUnity); | ||
return true; | ||
@@ -113,3 +113,3 @@ } | ||
if (values.length <= 256) { | ||
const rootOfUnity = this.field.exp(domain[1], BigInt(4 ** depth)); | ||
const rootOfUnity = this.field.exp(domain.getValue(1), BigInt(4 ** depth)); | ||
this.verifyRemainder(values, maxDegreePlus1, rootOfUnity); | ||
@@ -121,17 +121,4 @@ result.remainder = lTree.values; | ||
const domainStep = (4 ** depth); | ||
const columnLength = Math.floor(values.length / 4); | ||
let xs = new Array(columnLength); | ||
let ys = new Array(columnLength); | ||
for (let i = 0; i < columnLength; i++) { | ||
xs[i] = new Array(4); | ||
xs[i][0] = domain[i * domainStep]; | ||
xs[i][1] = domain[(i + columnLength) * domainStep]; | ||
xs[i][2] = domain[(i + columnLength * 2) * domainStep]; | ||
xs[i][3] = domain[(i + columnLength * 3) * domainStep]; | ||
ys[i] = new Array(4); | ||
ys[i][0] = values[i]; | ||
ys[i][1] = values[i + columnLength]; | ||
ys[i][2] = values[i + columnLength * 2]; | ||
ys[i][3] = values[i + columnLength * 3]; | ||
} | ||
const xs = this.field.vectorToMatrix(domain, 4, domainStep); | ||
const ys = this.field.vectorToMatrix(values, 4); | ||
// build polynomials from values in each row | ||
@@ -141,11 +128,9 @@ const xPolys = this.field.interpolateQuarticBatch(xs, ys); | ||
const specialX = this.field.prng(lTree.root); | ||
const column = new Array(xPolys.length); | ||
for (let i = 0; i < column.length; i++) { | ||
column[i] = this.field.evalPolyAt(xPolys[i], specialX); | ||
} | ||
const column = this.field.evalQuarticBatch(xPolys, specialX); | ||
// put the resulting column into a merkle tree | ||
const hashDigestSize = merkle_1.getHashDigestSize(this.hashAlgorithm); | ||
const cTree = merkle_1.MerkleTree.create(utils_1.bigIntsToBuffers(column, hashDigestSize), this.hashAlgorithm); | ||
const cTree = merkle_1.MerkleTree.create(utils_1.vectorToBuffers(column, hashDigestSize), this.hashAlgorithm); | ||
// compute spot check positions in the column and corresponding positions in the original values | ||
const positions = utils_1.getPseudorandomIndexes(cTree.root, this.queryCount, column.length, this.skipMultiplesOf); | ||
const columnLength = column.length; | ||
const positions = this.indexGenerator.getFriIndexes(cTree.root, columnLength); | ||
const polyPositions = new Array(positions.length * 4); | ||
@@ -171,3 +156,3 @@ for (let i = 0; i < positions.length; i++) { | ||
for (let i = 0; i < remainder.length; i++) { | ||
if (!this.skipMultiplesOf || i % this.skipMultiplesOf) { | ||
if (!this.indexGenerator.extensionFactor || i % this.indexGenerator.extensionFactor) { | ||
positions.push(i); | ||
@@ -177,3 +162,3 @@ } | ||
// pick a subset of points from the remainder and interpolate them into a polynomial | ||
const domain = this.field.getPowerCycle(rootOfUnity); | ||
const domain = this.field.getPowerSeries(rootOfUnity, remainder.length); | ||
const xs = new Array(maxDegreePlus1); | ||
@@ -183,10 +168,12 @@ const ys = new Array(maxDegreePlus1); | ||
let p = positions[i]; | ||
xs[i] = domain[p]; | ||
ys[i] = remainder[p]; | ||
xs[i] = domain.getValue(p); | ||
ys[i] = remainder.getValue(p); | ||
} | ||
const poly = this.field.interpolate(xs, ys); | ||
const xVector = this.field.newVectorFrom(xs); | ||
const yVector = this.field.newVectorFrom(ys); | ||
const poly = this.field.interpolate(xVector, yVector); | ||
// check that polynomial evaluates correctly for all other points in the remainder | ||
for (let i = maxDegreePlus1; i < positions.length; i++) { | ||
let p = positions[i]; | ||
if (this.field.evalPolyAt(poly, domain[p]) !== remainder[p]) { | ||
if (this.field.evalPolyAt(poly, domain.getValue(p)) !== remainder.getValue(p)) { | ||
throw new StarkError_1.StarkError(`Remainder is not a valid degree ${maxDegreePlus1 - 1} polynomial`); | ||
@@ -193,0 +180,0 @@ } |
@@ -20,3 +20,3 @@ "use strict"; | ||
const xToTheSteps = this.field.exp(x, this.traceLength); | ||
const numValue = this.field.sub(xToTheSteps, 1n); | ||
const numValue = this.field.sub(xToTheSteps, this.field.one); | ||
const denValue = this.field.sub(x, this.xAtLastStep); | ||
@@ -29,11 +29,5 @@ const z = this.field.div(numValue, denValue); | ||
const traceLength = Number.parseInt(this.traceLength.toString(10), 10); | ||
const numEvaluations = new Array(domainSize); | ||
const denEvaluations = new Array(domainSize); | ||
for (let step = 0; step < domainSize; step++) { | ||
// calculate position of x^steps, and then just look it up | ||
let numIndex = (step * traceLength) % domainSize; | ||
numEvaluations[step] = this.field.sub(domain[numIndex], 1n); | ||
let x = domain[step]; | ||
denEvaluations[step] = this.field.sub(x, this.xAtLastStep); | ||
} | ||
const xToTheSteps = this.field.pluckVector(domain, traceLength, domainSize); | ||
const numEvaluations = this.field.subVectorElements(xToTheSteps, this.field.one); | ||
const denEvaluations = this.field.subVectorElements(domain, this.xAtLastStep); | ||
return { numerators: numEvaluations, denominators: denEvaluations }; | ||
@@ -40,0 +34,0 @@ } |
@@ -17,15 +17,12 @@ "use strict"; | ||
// -------------------------------------------------------------------------------------------- | ||
mergeValues([pValues, sValues], position) { | ||
mergeValues(pValues, sValues, position) { | ||
const valueSize = this.fieldElementSize; | ||
const valueCount = this.getValueCount(); | ||
const buffer = Buffer.allocUnsafe(valueCount * valueSize); | ||
const padLength = valueSize * 2; | ||
let offset = 0; | ||
for (let register = 0; register < this.stateWidth; register++) { | ||
let hex = pValues[register][position].toString(16).padStart(padLength, '0'); | ||
offset += buffer.write(hex, offset, valueSize, 'hex'); | ||
offset += pValues.copyValue(register, position, buffer, offset); | ||
} | ||
for (let register = 0; register < this.secretInputCount; register++) { | ||
let hex = sValues[register][position].toString(16).padStart(padLength, '0'); | ||
offset += buffer.write(hex, offset, valueSize, 'hex'); | ||
offset += sValues[register].copyValue(position, buffer, offset); | ||
} | ||
@@ -39,7 +36,7 @@ return buffer; | ||
for (let i = 0; i < this.stateWidth; i++, offset += elementSize) { | ||
pValues[i] = BigInt('0x' + buffer.toString('hex', offset, offset + elementSize)); | ||
pValues[i] = utils.readBigInt(buffer, offset, this.fieldElementSize); | ||
} | ||
const sValues = new Array(this.secretInputCount); | ||
for (let i = 0; i < this.secretInputCount; i++, offset += elementSize) { | ||
sValues[i] = BigInt('0x' + buffer.toString('hex', offset, offset + elementSize)); | ||
sValues[i] = utils.readBigInt(buffer, offset, this.fieldElementSize); | ||
} | ||
@@ -46,0 +43,0 @@ return [pValues, sValues]; |
@@ -15,3 +15,3 @@ "use strict"; | ||
const MAX_FRI_QUERY_COUNT = 64; | ||
const HASH_ALGORITHMS = ['sha256', 'blake2s256']; | ||
const HASH_ALGORITHMS = ['sha256', 'blake2s256', 'wasmBlake2s256']; | ||
const DEFAULT_HASH_ALGORITHM = 'sha256'; | ||
@@ -23,3 +23,3 @@ // CLASS DEFINITION | ||
// -------------------------------------------------------------------------------------------- | ||
constructor(source, options, logger) { | ||
constructor(source, security, optimization, logger) { | ||
if (typeof source !== 'string') | ||
@@ -29,7 +29,7 @@ throw new TypeError('Source script must be a string'); | ||
throw new TypeError('Source script cannot be an empty string'); | ||
const vOptions = validateSecurityOptions(options); | ||
this.air = air_script_1.parseScript(source, undefined, vOptions.extensionFactor); | ||
this.exeQueryCount = vOptions.exeQueryCount; | ||
const vOptions = validateSecurityOptions(security); | ||
this.air = air_script_1.parseScript(source, undefined, { extensionFactor: vOptions.extensionFactor, wasmOptions: optimization }); | ||
this.indexGenerator = new components_1.QueryIndexGenerator(this.air.extensionFactor, vOptions); | ||
this.hashAlgorithm = vOptions.hashAlgorithm; | ||
this.ldProver = new components_1.LowDegreeProver(vOptions.friQueryCount, this.hashAlgorithm, this.air); | ||
this.ldProver = new components_1.LowDegreeProver(this.air.field, this.indexGenerator, this.hashAlgorithm); | ||
this.serializer = new Serializer_1.Serializer(this.air); | ||
@@ -42,3 +42,2 @@ this.logger = logger || new utils_1.Logger(); | ||
const label = this.logger.start('Starting STARK computation'); | ||
const extensionFactor = this.air.extensionFactor; | ||
// 0 ----- validate parameters | ||
@@ -66,4 +65,4 @@ if (!Array.isArray(assertions)) | ||
// 3 ----- compute P(x) polynomials and low-degree extend them | ||
const pPoly = new components_1.TracePolynomial(context); | ||
const pEvaluations = pPoly.evaluate(executionTrace); | ||
const pPolys = this.air.field.interpolateRoots(context.executionDomain, executionTrace); | ||
const pEvaluations = this.air.field.evalPolysAtRoots(pPolys, context.evaluationDomain); | ||
this.logger.log(label, 'Converted execution trace into polynomials and low-degree extended them'); | ||
@@ -84,8 +83,7 @@ // 4 ----- compute constraint polynomials Q(x) = C(P(x)) | ||
// 6 ----- compute D(x) = Q(x) / Z(x) | ||
// first, invert numerators of Z(x) | ||
const zNumInverses = this.air.field.invMany(zEvaluations.numerators); | ||
this.logger.log(label, 'Inverted Z(x) numerators'); | ||
// first, compute inverse of Z(x) | ||
const zInverses = this.air.field.divVectorElements(zEvaluations.denominators, zEvaluations.numerators); | ||
this.logger.log(label, 'Computed Z(x) inverses'); | ||
// then, multiply all values together to compute D(x) | ||
const zDenominators = zEvaluations.denominators; | ||
const dEvaluations = this.air.field.mulMany(qEvaluations, zDenominators, zNumInverses); | ||
const dEvaluations = this.air.field.mulMatrixRows(qEvaluations, zInverses); | ||
this.logger.log(label, 'Computed D(x) polynomials'); | ||
@@ -96,9 +94,7 @@ // 7 ----- compute boundary constraints B(x) | ||
this.logger.log(label, 'Computed B(x) polynomials'); | ||
// 8 ----- build merkle tree for evaluations of P(x), D(x), and B(x) | ||
// 8 ----- build merkle tree for evaluations of P(x) and S(x) | ||
const hash = merkle_1.getHashFunction(this.hashAlgorithm); | ||
const mergedEvaluations = new Array(evaluationDomainSize); | ||
const hashedEvaluations = new Array(evaluationDomainSize); | ||
for (let i = 0; i < evaluationDomainSize; i++) { | ||
let v = this.serializer.mergeValues([pEvaluations, context.sEvaluations], i); | ||
mergedEvaluations[i] = v; | ||
let v = this.serializer.mergeValues(pEvaluations, context.sEvaluations, i); | ||
hashedEvaluations[i] = hash(v); | ||
@@ -110,11 +106,11 @@ } | ||
// 9 ----- spot check evaluation tree at pseudo-random positions | ||
const queryCount = Math.min(this.exeQueryCount, evaluationDomainSize - evaluationDomainSize / extensionFactor); | ||
const positions = utils_1.getPseudorandomIndexes(eTree.root, queryCount, evaluationDomainSize, extensionFactor); | ||
const positions = this.indexGenerator.getExeIndexes(eTree.root, evaluationDomainSize); | ||
const augmentedPositions = this.getAugmentedPositions(positions, evaluationDomainSize); | ||
const eValues = new Array(augmentedPositions.length); | ||
for (let i = 0; i < augmentedPositions.length; i++) { | ||
eValues[i] = mergedEvaluations[augmentedPositions[i]]; | ||
let p = augmentedPositions[i]; | ||
eValues[i] = this.serializer.mergeValues(pEvaluations, context.sEvaluations, p); | ||
} | ||
const eProof = eTree.proveBatch(augmentedPositions); | ||
this.logger.log(label, `Computed ${queryCount} evaluation spot checks`); | ||
this.logger.log(label, `Computed ${positions.length} evaluation spot checks`); | ||
// 10 ---- compute random linear combination of evaluations | ||
@@ -126,4 +122,5 @@ const lCombination = new components_1.LinearCombination(eTree.root, this.air.constraints, context); | ||
const hashDigestSize = merkle_1.getHashDigestSize(this.hashAlgorithm); | ||
const lEvaluations2 = utils_1.bigIntsToBuffers(lEvaluations, hashDigestSize); | ||
const lEvaluations2 = utils_1.vectorToBuffers(lEvaluations, hashDigestSize); | ||
const lTree = merkle_1.MerkleTree.create(lEvaluations2, this.hashAlgorithm); | ||
this.logger.log(label, 'Built liner combination merkle tree'); | ||
const lcProof = lTree.proveBatch(positions); | ||
@@ -172,4 +169,3 @@ let ldProof; | ||
// 2 ----- compute positions for evaluation spot-checks | ||
const queryCount = Math.min(this.exeQueryCount, evaluationDomainSize - evaluationDomainSize / extensionFactor); | ||
const positions = utils_1.getPseudorandomIndexes(eRoot, queryCount, evaluationDomainSize, extensionFactor); | ||
const positions = this.indexGenerator.getExeIndexes(eRoot, evaluationDomainSize); | ||
const augmentedPositions = this.getAugmentedPositions(positions, evaluationDomainSize); | ||
@@ -229,3 +225,3 @@ this.logger.log(label, `Computed positions for evaluation spot checks`); | ||
let qValues = this.air.evaluateConstraintsAt(x, pValues, nValues, sValues, context); | ||
let dValues = this.air.field.divVectorElements(qValues, zValue); | ||
let dValues = this.air.field.divVectorElements(this.air.field.newVectorFrom(qValues), zValue).toValues(); | ||
let bValues = bPoly.evaluateAt(pValues, x); | ||
@@ -240,3 +236,3 @@ // compute linear combination of all evaluations | ||
const lcProof = { | ||
values: utils_1.bigIntsToBuffers(lcValues, hashDigestSize), | ||
values: utils_1.vectorToBuffers(this.air.field.newVectorFrom(lcValues), hashDigestSize), | ||
nodes: proof.lcProof.nodes, | ||
@@ -306,4 +302,4 @@ depth: proof.lcProof.depth | ||
function validateAssertions(trace, assertions) { | ||
const registers = trace.length; | ||
const steps = trace[0].length; | ||
const registers = trace.rowCount; | ||
const steps = trace.colCount; | ||
for (let a of assertions) { | ||
@@ -319,3 +315,3 @@ // make sure register references are correct | ||
// make sure assertions don't contradict execution trace | ||
if (trace[a.register][a.step] !== a.value) { | ||
if (trace.getValue(a.register, a.step) !== a.value) { | ||
throw new StarkError_1.StarkError(`Assertion at step ${a.step}, register ${a.register} conflicts with execution trace`); | ||
@@ -322,0 +318,0 @@ } |
@@ -5,13 +5,12 @@ "use strict"; | ||
// ================================================================================================ | ||
const crypto = require("crypto"); | ||
const inliners = require("./inliners"); | ||
// RE-EXPORTS | ||
// ================================================================================================ | ||
var serializaton_1 = require("./serializaton"); | ||
exports.writeMerkleProof = serializaton_1.writeMerkleProof; | ||
exports.readMerkleProof = serializaton_1.readMerkleProof; | ||
exports.writeMatrix = serializaton_1.writeMatrix; | ||
exports.readMatrix = serializaton_1.readMatrix; | ||
exports.writeArray = serializaton_1.writeArray; | ||
exports.readArray = serializaton_1.readArray; | ||
var serialization_1 = require("./serialization"); | ||
exports.writeMerkleProof = serialization_1.writeMerkleProof; | ||
exports.readMerkleProof = serialization_1.readMerkleProof; | ||
exports.writeMatrix = serialization_1.writeMatrix; | ||
exports.readMatrix = serialization_1.readMatrix; | ||
exports.writeArray = serialization_1.writeArray; | ||
exports.readArray = serialization_1.readArray; | ||
var sizeof_1 = require("./sizeof"); | ||
@@ -33,49 +32,20 @@ exports.sizeOf = sizeof_1.sizeOf; | ||
exports.isPowerOf2 = isPowerOf2; | ||
function getPseudorandomIndexes(seed, count, max, excludeMultiplesOf = 0) { | ||
const maxCount = excludeMultiplesOf ? max - max / excludeMultiplesOf : max; | ||
if (maxCount < count) | ||
throw Error(`Cannot select ${count} unique pseudorandom indexes from ${max} values`); | ||
const maxIterations = BigInt(count * 1000); | ||
const modulus = BigInt(max); | ||
const skip = BigInt(excludeMultiplesOf); | ||
const indexes = new Set(); | ||
const state = sha256(seed); | ||
for (let i = 0n; i < maxIterations; i++) { | ||
let index = sha256(state + i) % modulus; | ||
if (skip && index % skip === 0n) | ||
continue; // if the index should be excluded, skip it | ||
if (indexes.has(index)) | ||
continue; // if the index is already in the list, skip it | ||
indexes.add(index); | ||
if (indexes.size >= count) | ||
break; // if we have enough indexes, break the loop | ||
function vectorToBuffers(values, size) { | ||
const result = new Array(values.length); | ||
if (values.elementSize > size) { | ||
throw Error('Cannot convert vector to buffer: vector elements are too large'); | ||
} | ||
// if we couldn't generate enough indexes within max iterations, throw an error | ||
if (indexes.size < count) | ||
throw new Error(`Could not generate ${count} pseudorandom indexes`); | ||
const result = []; | ||
for (let index of indexes) { | ||
result.push(Number.parseInt(index.toString(16), 16)); | ||
} | ||
return result; | ||
} | ||
exports.getPseudorandomIndexes = getPseudorandomIndexes; | ||
function bigIntsToBuffers(values, size) { | ||
const result = new Array(values.length); | ||
const maxValue = 2n ** BigInt(size * 8); | ||
const hexSize = size * 2; | ||
for (let i = 0; i < values.length; i++) { | ||
let v = values[i]; | ||
if (v >= maxValue) { | ||
throw Error('Cannot convert bigint to buffer: value is too large'); | ||
} | ||
result[i] = Buffer.from(v.toString(16).padStart(hexSize, '0'), 'hex'); | ||
let buffer = Buffer.alloc(size); | ||
values.copyValue(i, buffer, 0); | ||
result[i] = buffer; | ||
} | ||
return result; | ||
} | ||
exports.bigIntsToBuffers = bigIntsToBuffers; | ||
exports.vectorToBuffers = vectorToBuffers; | ||
function buffersToBigInts(values) { | ||
const result = new Array(values.length); | ||
for (let i = 0; i < values.length; i++) { | ||
result[i] = BigInt('0x' + values[i].toString('hex')); | ||
let buffer = values[i]; | ||
result[i] = readBigInt(buffer, 0, buffer.byteLength); | ||
} | ||
@@ -85,10 +55,12 @@ return result; | ||
exports.buffersToBigInts = buffersToBigInts; | ||
function sha256(value) { | ||
const buffer = (typeof value === 'bigint') | ||
? Buffer.from(value.toString(16), 'hex') | ||
: value; | ||
const hash = crypto.createHash('sha256').update(buffer); | ||
return BigInt('0x' + hash.digest().toString('hex')); | ||
function readBigInt(buffer, offset, elementSize) { | ||
const blocks = elementSize >> 3; | ||
let value = 0n; | ||
for (let i = 0n; i < blocks; i++) { | ||
value = (buffer.readBigUInt64LE(offset) << (64n * i)) | value; | ||
offset += 8; | ||
} | ||
return value; | ||
} | ||
exports.sha256 = sha256; | ||
exports.readBigInt = readBigInt; | ||
//# sourceMappingURL=index.js.map |
{ | ||
"name": "@guildofweavers/genstark", | ||
"version": "0.5.1", | ||
"version": "0.5.2", | ||
"description": "zk-STARK generation library", | ||
@@ -22,14 +22,14 @@ "main": "index.js", | ||
"engines": { | ||
"node": "12.6.x" | ||
"node": ">=12.7.x" | ||
}, | ||
"devDependencies": { | ||
"@types/node": "12.6.x", | ||
"del": "4.1.x", | ||
"@types/node": "12.7.x", | ||
"del": "5.0.x", | ||
"gulp": "4.0.x" | ||
}, | ||
"dependencies": { | ||
"@guildofweavers/air-script": "0.2.x", | ||
"@guildofweavers/galois": "0.2.x", | ||
"@guildofweavers/merkle": "0.1.x" | ||
"@guildofweavers/air-script": "0.3.x", | ||
"@guildofweavers/galois": "0.4.x", | ||
"@guildofweavers/merkle": "0.2.x" | ||
} | ||
} |
@@ -53,34 +53,35 @@ # genSTARK | ||
When you run the examples, you should get a nice log documenting each step. Here is an example output of running MiMC STARK for 2<sup>13</sup> steps: | ||
When you run the examples, you should get a nice log documenting each step. Here is an example output of running 128-bit MiMC STARK for 2<sup>13</sup> steps: | ||
``` | ||
Starting STARK computation | ||
Set up evaluation context in 86 ms | ||
Generated execution trace in 33 ms | ||
Converted execution trace into polynomials and low-degree extended them in 826 ms | ||
Computed Q(x) polynomials in 337 ms | ||
Computed Z(x) polynomial in 24 ms | ||
Inverted Z(x) numerators in 148 ms | ||
Computed D(x) polynomials in 85 ms | ||
Computed B(x) polynomials in 478 ms | ||
Serialized evaluations of P(x), B(x), and D(x) polynomials in 451 ms | ||
Built evaluation merkle tree in 233 ms | ||
Computed 80 evaluation spot checks in 2 ms | ||
Computed random linear combination of evaluations in 468 ms | ||
Computed low-degree proof in 1358 ms | ||
STARK computed in 4530 ms | ||
Set up evaluation context in 6 ms | ||
Generated execution trace in 41 ms | ||
Converted execution trace into polynomials and low-degree extended them in 47 ms | ||
Computed Q(x) polynomials in 254 ms | ||
Computed Z(x) polynomial in 3 ms | ||
Computed Z(x) inverses in 13 ms | ||
Computed D(x) polynomials in 4 ms | ||
Computed B(x) polynomials in 46 ms | ||
Serialized evaluations of P(x) and S(x) polynomials in 152 ms | ||
Built evaluation merkle tree in 75 ms | ||
Computed 80 evaluation spot checks in 3 ms | ||
Computed random linear combination of evaluations in 34 ms | ||
Built liner combination merkle tree in 116 ms | ||
Computed low-degree proof in 149 ms | ||
STARK computed in 949 ms | ||
-------------------- | ||
Proof serialized in 3 ms; size: 214.19 KB | ||
Proof serialized in 8 ms; size: 211.99 KB | ||
-------------------- | ||
Proof parsed in 8 ms | ||
Proof parsed in 5 ms | ||
-------------------- | ||
Starting STARK verification | ||
Set up evaluation context in 22 ms | ||
Computed positions for evaluation spot checks in 1 ms | ||
Decoded evaluation spot checks in 2 ms | ||
Verified evaluation merkle proof in 6 ms | ||
Verified liner combination proof in 3 ms | ||
Verified low-degree proof in 70 ms | ||
Verified transition and boundary constraints in 29 ms | ||
STARK verified in 137 ms | ||
------------------- | ||
Set up evaluation context in 2 ms | ||
Computed positions for evaluation spot checks in 2 ms | ||
Decoded evaluation spot checks in 1 ms | ||
Verified evaluation merkle proof in 5 ms | ||
Verified low-degree proof in 46 ms | ||
Verified transition and boundary constraints in 16 ms | ||
Verified liner combination merkle proof in 2 ms | ||
STARK verified in 78 ms | ||
-------------------- | ||
``` | ||
@@ -96,3 +97,3 @@ | ||
```TypeScript | ||
const myStark = new Stark(source, options, logger); | ||
const myStark = new Stark(source, security, optimization, logger); | ||
``` | ||
@@ -105,3 +106,4 @@ | ||
| source | [AirScript](https://github.com/GuildOfWeavers/AirScript) source defining transition function, transition constraints, and other properties of the STARK. | | ||
| options? | An optional property specifying [security parameters](#Security-options) for the STARK. | | ||
| security? | An optional property specifying [security parameters](#Security-options) for the STARK. | | ||
| optimization? | An optional property specifying [WASM optimization parameters](#Optimization-options) for the STARK. Set this to `null` to turn WASM-optimization off. If you provide this parameter to STARKs over fields that don't support WASM-optimization, an error will be thrown. | | ||
| logger? | An optional logger. The default logger prints output to the console, but it can be replaced with anything that complies with the Logger interface. | | ||
@@ -117,4 +119,14 @@ | ||
| friQueryCount? | Number of queries of the columns of low degree proof to include into the proof. This property is optional; the default is 40; the max is 64. | | ||
| hashAlgorithm? | Hash algorithm to use when building Merkle trees for the proof. Currently, can be one of two values: `sha256` or `blake2s256`. This property is optional; the default is `sha256`. | | ||
| hashAlgorithm? | Hash algorithm to use when building Merkle trees for the proof. Currently, can be one of the following values: `sha256`, `blake2s256`, or `wasmBlake2s256`. This property is optional; the default is `sha256`. | | ||
**Note:** `wasmBlake2s256` is an experimental implementation of Blake2s algorithm in WebAssembly. On small inputs, it performs about 3x better than Node's native Blake2s implementation (and about 3.5x better than SHA256). | ||
### Optimization options | ||
Optimization options parameter should have the following form: | ||
| Property | Description | | ||
| ------------------ | ----------- | | ||
| initialMemory? | Initial number of bytes to allocate for WASM optimization. | | ||
| maximumMemory? | Maximum number of bytes to allocate for WASM optimization. | | ||
## Generating proofs | ||
@@ -203,14 +215,12 @@ Once you have a `Stark` object, you can start generating proofs using `Stark.prove()` method like so: | ||
| ------------------- | :--------: | :----: | :-------: | :------------: | :--------: | :--------: | | ||
| Fibonacci | 32 bits | 1 | 2 | 2<sup>6</sup> | 50 ms | 9 KB | | ||
| Fibonacci | 32 bits | 1 | 2 | 2<sup>13</sup> | 1 sec | 140 KB | | ||
| Fibonacci | 32 bits | 1 | 2 | 2<sup>17</sup> | 13 sec | 280 KB | | ||
| MiMC | 256 bits | 3 | 1 | 2<sup>6</sup> | 100 ms | 36 KB | | ||
| MiMC | 256 bits | 3 | 1 | 2<sup>13</sup> | 4.5 sec | 214 KB | | ||
| MiMC | 256 bits | 3 | 1 | 2<sup>17</sup> | 72 sec | 381 KB | | ||
| Merkle Proof (d=8) | 128 bits | 4 | 8 | 2<sup>8</sup> | 800 ms | 88 KB | | ||
| Merkle Proof (d=16) | 128 bits | 4 | 8 | 2<sup>9</sup> | 1.6 sec | 109 KB | | ||
| MiMC | 128 bits | 3 | 1 | 2<sup>13</sup> | 1 sec | 212 KB | | ||
| MiMC | 128 bits | 3 | 1 | 2<sup>17</sup> | 13.5 sec | 377 KB | | ||
| MiMC | 256 bits | 3 | 1 | 2<sup>13</sup> | 3.8 sec | 213 KB | | ||
| MiMC | 256 bits | 3 | 1 | 2<sup>17</sup> | 64 sec | 384 KB | | ||
| Merkle Proof (d=8) | 128 bits | 4 | 8 | 2<sup>8</sup> | 650 ms | 88 KB | | ||
| Merkle Proof (d=16) | 128 bits | 4 | 8 | 2<sup>9</sup> | 1.3 sec | 111 KB | | ||
Merkle proofs are based on a modified version of [Rescue](/examples/rescue) hash function, and in addition to 8 state registers require 1 public input register and 1 secret input register. | ||
The potential to improve proof time is at least 10x (by moving hashing and math functions out of JavaScript), and potentially much higher (by using SIMD and parallelism). | ||
At this point, 128-bit fields are able to take advantage of WASM-based optimization which makes arithmetic operations significantly faster (e.g. over 3x improvement over native JavaScript counterparts). Nevertheless, there is still opportunity to improve proof time by at least 10x by moving more operations out of JavaScript and leveraging SIMD and multithreading. | ||
@@ -217,0 +227,0 @@ # References |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
79402
240
1319
+ Added @assemblyscript/loader@0.8.1 (transitive)
+ Added @guildofweavers/air-script@0.3.3 (transitive)
+ Added @guildofweavers/galois@0.4.22 (transitive)
+ Added @guildofweavers/merkle@0.2.0 (transitive)
- Removed @guildofweavers/air-script@0.2.3 (transitive)
- Removed @guildofweavers/galois@0.2.0 (transitive)
- Removed @guildofweavers/merkle@0.1.2 (transitive)
Updated @guildofweavers/galois@0.4.x
Updated @guildofweavers/merkle@0.2.x