@dfinity/agent - npm Package Compare versions

Comparing version 0.19.2 to 0.19.3-react-native

lib/cjs/der.d.ts


lib/cjs/actor.d.ts

@@ -7,3 +7,2 @@ import { Agent, HttpDetailsResponse, QueryResponseRejected, SubmitResponse } from './agent';

import { RequestId } from './request_id';
import { CreateCertificateOptions } from './certificate';
import _SERVICE from './canisters/management_service';

@@ -65,6 +64,2 @@ export declare class ActorCallError extends AgentError {

queryTransform?(methodName: string, args: unknown[], callConfig: CallConfig): Partial<CallConfig> | void;
/**
* Polyfill for BLS Certificate verification in case wasm is not supported
*/
blsVerify?: CreateCertificateOptions['blsVerify'];
}

@@ -71,0 +66,0 @@ /**


lib/cjs/actor.js

@@ -123,2 +123,4 @@ "use strict";

constructor(config) {
if (!config.canisterId)
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
const canisterId = typeof config.canisterId === 'string'

@@ -135,3 +137,3 @@ ? principal_1.Principal.fromText(config.canisterId)

}
this[methodName] = _createActorMethod(this, methodName, func, config.blsVerify);
this[methodName] = _createActorMethod(this, methodName, func);
}

@@ -143,2 +145,5 @@ }

static createActor(interfaceFactory, configuration) {
if (!configuration.canisterId) {
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
}
return new (this.createActorClass(interfaceFactory))(configuration);

@@ -169,3 +174,3 @@ }

exports.ACTOR_METHOD_WITH_HTTP_DETAILS = 'http-details';
function _createActorMethod(actor, methodName, func, blsVerify) {
function _createActorMethod(actor, methodName, func) {
let caller;

@@ -213,3 +218,3 @@ if (func.annotations.includes('query') || func.annotations.includes('composite_query')) {

const pollStrategy = pollingStrategyFactory();
const responseBytes = await (0, polling_1.pollForResponse)(agent, ecid, requestId, pollStrategy, blsVerify);
const responseBytes = await (0, polling_1.pollForResponse)(agent, ecid, requestId, pollStrategy);
const shouldIncludeHttpDetails = func.annotations.includes(exports.ACTOR_METHOD_WITH_HTTP_DETAILS);

@@ -216,0 +221,0 @@ if (responseBytes !== undefined) {

@@ -42,2 +42,3 @@ import { Principal } from '@dfinity/principal';

httpDetails: HttpDetailsResponse;
requestId: RequestId;
};

@@ -47,2 +48,7 @@ export interface QueryResponseBase {

}
export declare type NodeSignature = {
timestamp: bigint;
signature: Uint8Array;
identity: Uint8Array;
};
export interface QueryResponseReplied extends QueryResponseBase {

@@ -53,2 +59,3 @@ status: QueryResponseStatus.Replied;

};
signatures?: NodeSignature[];
}

@@ -59,2 +66,4 @@ export interface QueryResponseRejected extends QueryResponseBase {

reject_message: string;
error_code: string;
signatures?: NodeSignature[];
}

@@ -61,0 +70,0 @@ /**

@@ -33,14 +33,11 @@ import { JsonObject } from '@dfinity/candid';

/**
* Prevents the agent from providing a unique {@link Nonce} with each call.
* Enabling may cause rate limiting of identical requests
* at the boundary nodes.
* Adds a unique {@link Nonce} with each query.
* Enabling will prevent queries from being answered with a cached response.
*
* To add your own nonce generation logic, you can use the following:
* @example
* import {makeNonceTransform, makeNonce} from '@dfinity/agent';
* const agent = new HttpAgent({ disableNonce: true });
* const agent = new HttpAgent({ useQueryNonces: true });
* agent.addTransform(makeNonceTransform(makeNonce);
* @default false
*/
disableNonce?: boolean;
useQueryNonces?: boolean;
/**

@@ -51,6 +48,11 @@ * Number of times to retry requests before throwing an error

retryTimes?: number;
/**
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet.
* @default true
*/
verifyQuerySignatures?: boolean;
}
export declare class HttpAgent implements Agent {
#private;
rootKey: ArrayBuffer;
private readonly _pipeline;
private _identity;

@@ -68,3 +70,3 @@ private readonly _fetch;

isLocal(): boolean;
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void;
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void;
getPrincipal(): Promise<Principal>;

@@ -89,3 +91,4 @@ call(canisterId: Principal | string, options: {

replaceIdentity(identity: Identity): void;
fetchSubnetKeys(canisterId: Principal | string): Promise<any>;
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>;
}
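
The declaration changes above replace disableNonce with useQueryNonces, add verifyQuerySignatures, give addTransform an explicit pipeline type, and expose fetchSubnetKeys. A minimal usage sketch of the new surface (the host value is only an illustration, not part of this diff):

import { HttpAgent, makeNonce, makeNonceTransform } from '@dfinity/agent';

// Assumed host for illustration; any IC boundary host behaves the same way.
const agent = new HttpAgent({
  host: 'https://ic0.app',
  verifyQuerySignatures: true, // default per the new option docs
  useQueryNonces: false,       // set true to opt out of cached query responses
});

// addTransform now takes the pipeline ('update' | 'query') as its first argument.
agent.addTransform('query', makeNonceTransform(makeNonce));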

@@ -28,2 +28,14 @@ "use strict";

};
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse;
Object.defineProperty(exports, "__esModule", { value: true });

@@ -40,2 +52,6 @@ exports.HttpAgent = exports.IdentityInvalidError = exports.RequestStatusResponseStatus = exports.makeNonce = void 0;

const errors_2 = require("./errors");
const canisterStatus_1 = require("../../canisterStatus");
const certificate_1 = require("../../certificate");
const ed25519_1 = require("@noble/curves/ed25519");
const public_key_1 = require("../../public_key");
__exportStar(require("./transforms"), exports);

@@ -122,6 +138,67 @@ var types_2 = require("./types");

this.rootKey = (0, buffer_1.fromHex)(IC_ROOT_KEY);
this._pipeline = [];
this._timeDiffMsecs = 0;
this._rootKeyFetched = false;
this._isAgent = true;
_HttpAgent_queryPipeline.set(this, []);
_HttpAgent_updatePipeline.set(this, []);
_HttpAgent_subnetKeys.set(this, new Map());
_HttpAgent_verifyQuerySignatures.set(this, true);
/**
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation
* @param queryResponse - The response from the query
* @param subnetStatus - The subnet status, including all node keys
* @returns ApiQueryResponse
*/
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => {
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) {
// This should not be called if the user has disabled verification
return queryResponse;
}
if (!subnetStatus) {
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const { status, signatures, requestId } = queryResponse;
const domainSeparator = new TextEncoder().encode('\x0Bic-response');
signatures === null || signatures === void 0 ? void 0 : signatures.forEach(sig => {
const { timestamp, identity } = sig;
const nodeId = principal_1.Principal.fromUint8Array(identity).toText();
let hash;
// Hash is constructed differently depending on the status
if (status === 'replied') {
const { reply } = queryResponse;
hash = (0, request_id_1.hashOfMap)({
status: status,
reply: reply,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else if (status === 'rejected') {
const { reject_code, reject_message, error_code } = queryResponse;
hash = (0, request_id_1.hashOfMap)({
status: status,
reject_code: reject_code,
reject_message: reject_message,
error_code: error_code,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else {
throw new Error(`Unknown status: ${status}`);
}
const separatorWithHash = (0, buffer_1.concat)(domainSeparator, new Uint8Array(hash));
// FIX: check for match without verifying N times
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId);
if (!pubKey) {
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const rawKey = public_key_1.Ed25519PublicKey.fromDer(pubKey).rawKey;
const valid = ed25519_1.ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey));
if (valid)
return queryResponse;
throw new certificate_1.CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`);
});
return queryResponse;
});
if (options.source) {

@@ -131,3 +208,2 @@ if (!(options.source instanceof HttpAgent)) {

}
this._pipeline = [...options.source._pipeline];
this._identity = options.source._identity;

@@ -162,3 +238,3 @@ this._fetch = options.source._fetch;

// Mainnet and local will have the api route available
const knownHosts = ['ic0.app', 'icp0.io', 'localhost', '127.0.0.1'];
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', '127.0.0.1'];
const hostname = location === null || location === void 0 ? void 0 : location.hostname;

@@ -178,2 +254,5 @@ let knownHost;

}
if (options.verifyQuerySignatures !== undefined) {
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f");
}
// Default is 3, only set from option if greater or equal to 0

@@ -198,4 +277,5 @@ this._retryTimes =

// Add a nonce transform to ensure calls are unique
if (!options.disableNonce) {
this.addTransform((0, transforms_1.makeNonceTransform)(types_1.makeNonce));
this.addTransform('update', (0, transforms_1.makeNonceTransform)(types_1.makeNonce));
if (options.useQueryNonces) {
this.addTransform('query', (0, transforms_1.makeNonceTransform)(types_1.makeNonce));
}

@@ -205,8 +285,15 @@ }

const hostname = this._host.hostname;
return hostname === '127.0.0.1' || hostname.endsWith('localhost');
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1');
}
addTransform(fn, priority = fn.priority || 0) {
// Keep the pipeline sorted at all time, by priority.
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority);
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority }));
addTransform(type, fn, priority = fn.priority || 0) {
if (type === 'update') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority }));
}
else if (type === 'query') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority }));
}
}

@@ -273,3 +360,15 @@ async getPrincipal() {

async _requestAndRetry(request, tries = 0) {
const response = await request();
let response;
try {
response = await request();
}
catch (error) {
if (this._retryTimes > tries) {
console.warn(`Caught exception while attempting to make request:\n` +
` ${error}\n` +
` Retrying request.`);
return await this._requestAndRetry(request, tries + 1);
}
throw error;
}
if (response.ok) {

@@ -294,37 +393,73 @@ return response;

async query(canisterId, fields, identity) {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = typeof canisterId === 'string' ? principal_1.Principal.fromText(canisterId) : canisterId;
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
const makeQuery = async () => {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = principal_1.Principal.from(canisterId);
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
};
const requestId = await (0, request_id_1.requestIdOf)(request);
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: (0, transforms_1.httpHeadersTransform)(response.headers),
}, requestId });
};
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
const queryPromise = new Promise((resolve, reject) => {
makeQuery()
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: (0, transforms_1.httpHeadersTransform)(response.headers),
} });
const subnetStatusPromise = new Promise((resolve, reject) => {
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
resolve(undefined);
}
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString());
if (subnetStatus) {
resolve(subnetStatus);
}
else {
this.fetchSubnetKeys(canisterId)
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
}
});
const [query, subnetStatus] = await Promise.all([queryPromise, subnetStatusPromise]);
// Skip verification if the user has disabled it
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
return query;
}
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus);
}

@@ -419,7 +554,27 @@ async createReadStateRequest(fields, identity) {

}
async fetchSubnetKeys(canisterId) {
const effectiveCanisterId = principal_1.Principal.from(canisterId);
const response = await (0, canisterStatus_1.request)({
canisterId: effectiveCanisterId,
paths: ['subnet'],
agent: this,
});
const subnetResponse = response.get('subnet');
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) {
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse);
}
return subnetResponse;
}
_transform(request) {
let p = Promise.resolve(request);
for (const fn of this._pipeline) {
p = p.then(r => fn(r).then(r2 => r2 || r));
if (request.endpoint === "call" /* Endpoint.Call */) {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
else {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
return p;

@@ -429,2 +584,3 @@ }

exports.HttpAgent = HttpAgent;
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap();
//# sourceMappingURL=index.js.map
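
The new subnet-key cache above is filled lazily the first time a query is verified; fetchSubnetKeys can also be called up front to warm it. A hedged sketch (the canister id is a placeholder):

import { HttpAgent } from '@dfinity/agent';

const agent = new HttpAgent({ host: 'https://ic0.app' });

// Issues a read_state request for the 'subnet' path and caches the returned node
// keys keyed by canister id, so later verified queries skip that round trip.
const subnetStatus = await agent.fetchSubnetKeys('ryjl3-tyaaa-aaaaa-aaaba-cai');
console.log(subnetStatus?.nodeKeys?.size);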

@@ -31,9 +31,14 @@ "use strict";

const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;
class Expiry {
constructor(deltaInMSec) {
// Use bigint because it can overflow the maximum number allowed in a double float.
this._value =
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) *
NANOSECONDS_PER_MILLISECONDS;
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
NANOSECONDS_PER_MILLISECONDS;
// round down to the nearest second
const ingress_as_seconds = raw_value / BigInt(1000000000);
// round down to nearest minute
const ingress_as_minutes = ingress_as_seconds / BigInt(60);
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000);
this._value = rounded_down_nanos;
}
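
The rewritten Expiry constructor above subtracts the permitted drift and then truncates the result to whole minutes. The same arithmetic, condensed into a sketch with renamed constants (values are illustrative):

const NANOS_PER_MS = BigInt(1_000_000);
const DRIFT_MS = 60 * 1000;
const deltaInMSec = 5 * 60 * 1000; // a typical 5-minute ingress window

const raw = BigInt(Math.floor(Date.now() + deltaInMSec - DRIFT_MS)) * NANOS_PER_MS;
// Truncating BigInt division drops the sub-minute remainder, so every expiry
// produced within the same minute comes out identical.
const roundedNanos = (raw / BigInt(60_000_000_000)) * BigInt(60_000_000_000);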

@@ -56,3 +61,2 @@ toCBOR() {

return async (request) => {
const nonce = nonceFn();
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests.

@@ -59,0 +63,0 @@ const headers = request.request.headers;

@@ -27,2 +27,4 @@ import { Principal } from '@dfinity/principal';

toDer(): DerEncodedPublicKey;
rawKey?: ArrayBuffer;
derKey?: DerEncodedPublicKey;
}

@@ -29,0 +31,0 @@ /**

/** @module CanisterStatus */
import { Principal } from '@dfinity/principal';
import { HttpAgent } from '../agent/http';
import { CreateCertificateOptions } from '../certificate';
import { DerEncodedPublicKey } from '..';
/**
* Represents the useful information about a subnet
* @param {string} subnetId the principal id of the canister's subnet
* @param {string[]} nodeKeys the keys of the individual nodes in the subnet
*/
export declare type SubnetStatus = {
subnetId: string;
nodeKeys: Map<string, DerEncodedPublicKey>;
metrics?: {
num_canisters: bigint;
canister_state_bytes: bigint;
consumed_cycles_total: {
current: bigint;
deleted: bigint;
};
update_transactions_total: bigint;
};
};
/**
* Types of an entry on the canisterStatus map.
* An entry of null indicates that the request failed, due to lack of permissions or the result being missing.
*/
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null;
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null;
/**

@@ -38,3 +56,2 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder}

paths?: Path[] | Set<Path>;
blsVerify?: CreateCertificateOptions['blsVerify'];
};

@@ -61,2 +78,3 @@ /**

}) => Promise<StatusMap>;
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus;
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[];
"use strict";
/** @module CanisterStatus */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {

@@ -27,4 +26,4 @@ if (k2 === undefined) k2 = k;

Object.defineProperty(exports, "__esModule", { value: true });
exports.encodePath = exports.request = void 0;
const candid_1 = require("@dfinity/candid");
exports.encodePath = exports.fetchNodeKeys = exports.request = void 0;
/** @module CanisterStatus */
const principal_1 = require("@dfinity/principal");

@@ -35,2 +34,3 @@ const errors_1 = require("../errors");

const Cbor = __importStar(require("../cbor"));
const leb_1 = require("../utils/leb");
/**

@@ -52,3 +52,4 @@ *

const request = async (options) => {
const { canisterId, agent, paths } = options;
const { agent, paths } = options;
const canisterId = principal_1.Principal.from(options.canisterId);
const uniquePaths = [...new Set(paths)];

@@ -72,3 +73,19 @@ // Map path options to their correct formats

});
const data = cert.lookup((0, exports.encodePath)(uniquePaths[index], canisterId));
const lookup = (cert, path) => {
if (path === 'subnet') {
const data = (0, exports.fetchNodeKeys)(response.certificate, canisterId, agent.rootKey);
return {
path: path,
data,
};
}
else {
return {
path: path,
data: (0, certificate_1.lookupResultToBuffer)(cert.lookup((0, exports.encodePath)(path, canisterId))),
};
}
};
// must pass in the rootKey if we have no delegation
const { path, data } = lookup(cert, uniquePaths[index]);
if (!data) {

@@ -87,3 +104,3 @@ // Typically, the cert lookup will throw

case 'time': {
status.set(path, decodeTime(data));
status.set(path, (0, leb_1.decodeTime)(data));
break;

@@ -99,2 +116,6 @@ }

}
case 'subnet': {
status.set(path, data);
break;
}
case 'candid': {

@@ -112,3 +133,3 @@ status.set(path, new TextDecoder().decode(data));

case 'leb128': {
status.set(path.key, decodeLeb128(data));
status.set(path.key, (0, leb_1.decodeLeb128)(data));
break;

@@ -156,2 +177,54 @@ }

exports.request = request;
const fetchNodeKeys = (certificate, canisterId, root_key) => {
if (!canisterId._isPrincipal) {
throw new Error('Invalid canisterId');
}
const cert = Cbor.decode(new Uint8Array(certificate));
const tree = cert.tree;
let delegation = cert.delegation;
let subnetId;
if (delegation && delegation.subnet_id) {
subnetId = principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id));
}
// On local replica, with System type subnet, there is no delegation
else if (!delegation && typeof root_key !== 'undefined') {
subnetId = principal_1.Principal.selfAuthenticating(new Uint8Array(root_key));
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
// otherwise use default NNS subnet id
else {
subnetId = principal_1.Principal.selfAuthenticating(principal_1.Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array());
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
const canisterInRange = (0, certificate_1.check_canister_ranges)({ canisterId, subnetId, tree });
if (!canisterInRange) {
throw new Error('Canister not in range');
}
const nodeTree = (0, certificate_1.lookup_path)(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree);
const nodeForks = (0, certificate_1.flatten_forks)(nodeTree);
nodeForks.length;
const nodeKeys = new Map();
nodeForks.forEach(fork => {
Object.getPrototypeOf(new Uint8Array(fork[1]));
const node_id = principal_1.Principal.from(new Uint8Array(fork[1])).toText();
const derEncodedPublicKey = (0, certificate_1.lookup_path)(['public_key'], fork[2]);
if (derEncodedPublicKey.byteLength !== 44) {
throw new Error('Invalid public key length');
}
else {
nodeKeys.set(node_id, derEncodedPublicKey);
}
});
return {
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(),
nodeKeys,
};
};
exports.fetchNodeKeys = fetchNodeKeys;
const encodePath = (path, canisterId) => {

@@ -196,5 +269,2 @@ const encoder = new TextEncoder();

};
const decodeLeb128 = (buf) => {
return (0, candid_1.lebDecode)(new candid_1.PipeArrayBuffer(buf));
};
const decodeCbor = (buf) => {

@@ -206,7 +276,2 @@ return Cbor.decode(buf);

};
// time is a LEB128-encoded Nat
const decodeTime = (buf) => {
const decoded = decodeLeb128(buf);
return new Date(Number(decoded / BigInt(1000000)));
};
// Controllers are CBOR-encoded buffers, starting with a Tag we don't need

@@ -213,0 +278,0 @@ const decodeControllers = (buf) => {
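
The canisterStatus changes above add a 'subnet' path whose result is the new SubnetStatus (subnet id plus DER-encoded node keys). A hedged sketch of reading it, assuming the CanisterStatus namespace export and a placeholder canister id:

import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://ic0.app' });
const status = await CanisterStatus.request({
  canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'),
  agent,
  paths: ['subnet'],
});

const subnet = status.get('subnet');
// Mirror the `'nodeKeys' in subnetResponse` check used by fetchSubnetKeys above.
if (subnet && typeof subnet === 'object' && 'nodeKeys' in subnet) {
  console.log(subnet.subnetId, subnet.nodeKeys.size);
}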

@@ -9,10 +9,17 @@ import { AgentError } from './errors';

}
declare const enum NodeId {
Empty = 0,
Fork = 1,
Labeled = 2,
Leaf = 3,
Pruned = 4
export interface Cert {
tree: HashTree;
signature: ArrayBuffer;
delegation?: Delegation;
}
export declare type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer];
declare const NodeId: {
Empty: number;
Fork: number;
Labeled: number;
Leaf: number;
Pruned: number;
};
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId];
export { NodeId };
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer];
/**

@@ -23,2 +30,6 @@ * Make a human readable string out of a hash tree.

export declare function hashTreeToString(tree: HashTree): string;
interface Delegation extends Record<string, any> {
subnet_id: ArrayBuffer;
certificate: ArrayBuffer;
}
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>;

@@ -44,2 +55,9 @@ export interface CreateCertificateOptions {

blsVerify?: VerifyFunc;
/**
* The maximum age of the certificate in minutes. Default is 5 minutes.
* @default 5
* This is used to verify the time the certificate was signed, particularly for validating Delegation certificates, which can live for longer than the default window of +/- 5 minutes. If the certificate is
* older than the specified age, it will fail verification.
*/
maxAgeInMinutes?: number;
}
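
CreateCertificateOptions gains maxAgeInMinutes above (default 5), bounding how old a certificate may be before verification fails. A hedged sketch of creating a certificate with a wider window (certificate bytes, root key, and canister id are placeholders):

import { Certificate } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

declare const certificateBytes: ArrayBuffer; // e.g. from a read_state response
declare const rootKey: ArrayBuffer;          // e.g. agent.rootKey

const cert = await Certificate.create({
  certificate: certificateBytes,
  rootKey,
  canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'),
  maxAgeInMinutes: 10, // verification throws if the certificate is older than this
});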

@@ -49,3 +67,3 @@ export declare class Certificate {

private _canisterId;
private _blsVerify;
private _maxAgeInMinutes;
private readonly cert;

@@ -55,8 +73,8 @@ /**

* CertificateVerificationError if the certificate cannot be verified.
* @constructs {@link AuthClient}
* @param {CreateCertificateOptions} options
* @see {@link CreateCertificateOptions}
* @constructs Certificate
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions}
* @param {ArrayBuffer} options.certificate The bytes of the certificate
* @param {ArrayBuffer} options.rootKey The root key to verify against
* @param {Principal} options.canisterId The effective or signing canister ID
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes.
* @throws {CertificateVerificationError}

@@ -67,2 +85,3 @@ */

lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined;
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined;
private verify;

@@ -72,2 +91,8 @@ private _checkDelegationAndGetKey;

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined;
/**
* @param t

@@ -80,3 +105,19 @@ */

*/
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined;
export {};
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export declare function flatten_forks(t: HashTree): HashTree[];
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
export declare function check_canister_ranges(params: {
canisterId: Principal;
subnetId: Principal;
tree: HashTree;
}): boolean;

@@ -26,3 +26,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.lookup_path = exports.reconstruct = exports.Certificate = exports.hashTreeToString = exports.CertificateVerificationError = void 0;
exports.check_canister_ranges = exports.flatten_forks = exports.lookup_path = exports.reconstruct = exports.lookupResultToBuffer = exports.Certificate = exports.hashTreeToString = exports.NodeId = exports.CertificateVerificationError = void 0;
const cbor = __importStar(require("./cbor"));

@@ -33,3 +33,4 @@ const errors_1 = require("./errors");

const principal_1 = require("@dfinity/principal");
const bls = __importStar(require("./utils/bls"));
const bls_verify_1 = require("@dfinity/bls-verify");
const leb_1 = require("./utils/leb");
/**

@@ -44,2 +45,10 @@ * A certificate may fail verification with respect to the provided public key

exports.CertificateVerificationError = CertificateVerificationError;
const NodeId = {
Empty: 0,
Fork: 1,
Labeled: 2,
Leaf: 3,
Pruned: 4,
};
exports.NodeId = NodeId;
/**

@@ -64,18 +73,40 @@ * Make a human readable string out of a hash tree.

switch (tree[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return '()';
case 1 /* NodeId.Fork */: {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
case NodeId.Fork: {
if (tree[1] instanceof Array && tree[2] instanceof ArrayBuffer) {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
}
else {
throw new Error('Invalid tree structure for fork');
}
}
case 2 /* NodeId.Labeled */: {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
case NodeId.Labeled: {
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof ArrayBuffer) {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
}
else {
throw new Error('Invalid tree structure for labeled');
}
}
case 3 /* NodeId.Leaf */: {
case NodeId.Leaf: {
if (!tree[1]) {
throw new Error('Invalid tree structure for leaf');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `leaf(...${tree[1].byteLength} bytes)`;
}
case 4 /* NodeId.Pruned */: {
case NodeId.Pruned: {
if (!tree[1]) {
throw new Error('Invalid tree structure for pruned');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `pruned(${(0, buffer_1.toHex)(new Uint8Array(tree[1]))}`;

@@ -103,6 +134,6 @@ }

class Certificate {
constructor(certificate, _rootKey, _canisterId, _blsVerify) {
constructor(certificate, _rootKey, _canisterId, _maxAgeInMinutes = 5) {
this._rootKey = _rootKey;
this._canisterId = _canisterId;
this._blsVerify = _blsVerify;
this._maxAgeInMinutes = _maxAgeInMinutes;
this.cert = cbor.decode(new Uint8Array(certificate));

@@ -113,16 +144,12 @@ }

* CertificateVerificationError if the certificate cannot be verified.
* @constructs {@link AuthClient}
* @param {CreateCertificateOptions} options
* @see {@link CreateCertificateOptions}
* @constructs Certificate
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions}
* @param {ArrayBuffer} options.certificate The bytes of the certificate
* @param {ArrayBuffer} options.rootKey The root key to verify against
* @param {Principal} options.canisterId The effective or signing canister ID
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes.
* @throws {CertificateVerificationError}
*/
static async create(options) {
let blsVerify = options.blsVerify;
if (!blsVerify) {
blsVerify = bls.blsVerify;
}
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify);
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, options.maxAgeInMinutes);
await cert.verify();

@@ -132,4 +159,8 @@ return cert;

lookup(path) {
return lookup_path(path, this.cert.tree);
// constrain the type of the result, so that empty HashTree is undefined
return lookupResultToBuffer(lookup_path(path, this.cert.tree));
}
lookup_label(label) {
return this.lookup([label]);
}
async verify() {

@@ -142,4 +173,27 @@ const rootHash = await reconstruct(this.cert.tree);

let sigVer = false;
const lookupTime = this.lookup(['time']);
if (!lookupTime) {
// Should never happen - time is always present in IC certificates
throw new CertificateVerificationError('Certificate does not contain a time');
}
const FIVE_MINUTES_IN_MSEC = 5 * 60 * 1000;
const MAX_AGE_IN_MSEC = this._maxAgeInMinutes * 60 * 1000;
const now = Date.now();
const earliestCertificateTime = now - MAX_AGE_IN_MSEC;
const fiveMinutesFromNow = now + FIVE_MINUTES_IN_MSEC;
const certTime = (0, leb_1.decodeTime)(lookupTime);
if (certTime.getTime() < earliestCertificateTime) {
throw new CertificateVerificationError(`Certificate is signed more than ${this._maxAgeInMinutes} minutes in the past. Certificate time: ` +
certTime.toISOString() +
' Current time: ' +
new Date(now).toISOString());
}
else if (certTime.getTime() > fiveMinutesFromNow) {
throw new CertificateVerificationError('Certificate is signed more than 5 minutes in the future. Certificate time: ' +
certTime.toISOString() +
' Current time: ' +
new Date(now).toISOString());
}
try {
sigVer = await this._blsVerify(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg));
sigVer = await (0, bls_verify_1.blsVerify)(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg));
}

@@ -161,13 +215,10 @@ catch (err) {

canisterId: this._canisterId,
// Do not check max age for delegation certificates
maxAgeInMinutes: Infinity,
});
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']);
if (!rangeLookup) {
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
principal_1.Principal.fromUint8Array(v[0]),
principal_1.Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId));
const canisterInRange = check_canister_ranges({
canisterId: this._canisterId,
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(d.subnet_id)),
tree: cert.cert.tree,
});
if (!canisterInRange) {

@@ -198,2 +249,17 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`);

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
function lookupResultToBuffer(result) {
if (result instanceof ArrayBuffer) {
return result;
}
else if (result instanceof Uint8Array) {
return result.buffer;
}
return undefined;
}
exports.lookupResultToBuffer = lookupResultToBuffer;
/**
* @param t

@@ -203,11 +269,11 @@ */

switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return (0, request_id_1.hash)(domain_sep('ic-hashtree-empty'));
case 4 /* NodeId.Pruned */:
case NodeId.Pruned:
return t[1];
case 3 /* NodeId.Leaf */:
case NodeId.Leaf:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-leaf'), t[1]));
case 2 /* NodeId.Labeled */:
case NodeId.Labeled:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2])));
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2])));

@@ -231,7 +297,20 @@ default:

switch (tree[0]) {
case 3 /* NodeId.Leaf */: {
return new Uint8Array(tree[1]).buffer;
case NodeId.Leaf: {
// should not be undefined
if (!tree[1])
throw new Error('Invalid tree structure for leaf');
if (tree[1] instanceof ArrayBuffer) {
return tree[1];
}
else if (tree[1] instanceof Uint8Array) {
return tree[1].buffer;
}
else
return tree[1];
}
case NodeId.Fork: {
return tree;
}
default: {
return undefined;
return tree;
}

@@ -247,7 +326,12 @@ }

exports.lookup_path = lookup_path;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
function flatten_forks(t) {
switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return [];
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return flatten_forks(t[1]).concat(flatten_forks(t[2]));

@@ -258,2 +342,3 @@ default:

}
exports.flatten_forks = flatten_forks;
function find_label(l, trees) {

@@ -264,3 +349,3 @@ if (trees.length === 0) {

for (const t of trees) {
if (t[0] === 2 /* NodeId.Labeled */) {
if (t[0] === NodeId.Labeled) {
const p = t[1];

@@ -273,2 +358,23 @@ if (isBufferEqual(l, p)) {

}
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
function check_canister_ranges(params) {
const { canisterId, subnetId, tree } = params;
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree);
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) {
throw new Error(`Could not find canister ranges for subnet ${subnetId}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
principal_1.Principal.fromUint8Array(v[0]),
principal_1.Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId));
return canisterInRange;
}
exports.check_canister_ranges = check_canister_ranges;
//# sourceMappingURL=certificate.js.map
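
The certificate module above now also exports NodeId, lookupResultToBuffer, flatten_forks, and check_canister_ranges, and lookup_path can return a subtree rather than only bytes. A hedged sketch of narrowing a lookup result (the tree value is a placeholder):

import { lookup_path, lookupResultToBuffer } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';

declare const tree: HashTree; // e.g. the decoded certificate's tree

// lookup_path may now hand back a HashTree (for forks) instead of bytes;
// lookupResultToBuffer constrains the result to ArrayBuffer | undefined.
const timeBytes = lookupResultToBuffer(lookup_path(['time'], tree));
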
import { ActorSubclass } from './actor';
export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';
export * from './utils/bls';
export * from './utils/buffer';

@@ -13,0 +14,0 @@ export * from './utils/random';

@@ -32,10 +32,11 @@ "use strict";

__exportStar(require("./agent"), exports);
__exportStar(require("./auth"), exports);
__exportStar(require("./certificate"), exports);
__exportStar(require("./agent/http/transforms"), exports);
__exportStar(require("./agent/http/types"), exports);
__exportStar(require("./auth"), exports);
__exportStar(require("./canisters/asset"), exports);
__exportStar(require("./certificate"), exports);
__exportStar(require("./der"), exports);
__exportStar(require("./fetch_candid"), exports);
__exportStar(require("./public_key"), exports);
__exportStar(require("./request_id"), exports);
__exportStar(require("./utils/bls"), exports);
__exportStar(require("./utils/buffer"), exports);

@@ -42,0 +43,0 @@ __exportStar(require("./utils/random"), exports);

import { Principal } from '@dfinity/principal';
import { Agent, RequestStatusResponseStatus } from '../agent';
import { CreateCertificateOptions } from '../certificate';
import { RequestId } from '../request_id';

@@ -18,2 +17,2 @@ export * as strategy from './strategy';

*/
export declare function pollForResponse(agent: Agent, canisterId: Principal, requestId: RequestId, strategy: PollStrategy, request?: any, blsVerify?: CreateCertificateOptions['blsVerify']): Promise<ArrayBuffer>;
export declare function pollForResponse(agent: Agent, canisterId: Principal, requestId: RequestId, strategy: PollStrategy, request?: any): Promise<ArrayBuffer>;

@@ -44,3 +44,3 @@ "use strict";

// eslint-disable-next-line
request, blsVerify) {
request) {
var _a;

@@ -56,3 +56,2 @@ const path = [new TextEncoder().encode('request_status'), requestId];

canisterId: canisterId,
blsVerify,
});

@@ -59,0 +58,0 @@ const maybeBuf = cert.lookup([...path, new TextEncoder().encode('status')]);

@@ -22,1 +22,9 @@ export declare type RequestId = ArrayBuffer & {

export declare function requestIdOf(request: Record<string, any>): RequestId;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @param domainSeparator - optional domain separator
* @returns ArrayBuffer
*/
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer;
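
hashOfMap, newly declared above, is the representation-independent hash that the agent now uses both for request ids and for checking node signatures on query responses. A hedged sketch (field values are illustrative):

import { hashOfMap, requestIdOf } from '@dfinity/agent';

// Any non-nested map of strings, bigints, buffers, etc. can be hashed directly.
const digest = hashOfMap({
  status: 'replied',
  timestamp: BigInt(Date.now()) * BigInt(1_000_000),
});

// requestIdOf is now implemented as hashOfMap over the request's own fields.
const requestId = requestIdOf({ request_type: 'query', method_name: 'greet' });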

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.requestIdOf = exports.hashValue = exports.hash = void 0;
exports.hashOfMap = exports.requestIdOf = exports.hashValue = exports.hash = void 0;
const candid_1 = require("@dfinity/candid");

@@ -54,2 +54,5 @@ const borc_1 = __importDefault(require("borc"));

}
else if (typeof value === 'object') {
return hashOfMap(value);
}
else if (typeof value === 'bigint') {

@@ -80,3 +83,14 @@ // Do this check much later than the other bigint check because this one is much less

function requestIdOf(request) {
const hashed = Object.entries(request)
return hashOfMap(request);
}
exports.requestIdOf = requestIdOf;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @param domainSeparator - optional domain separator
* @returns ArrayBuffer
*/
function hashOfMap(map) {
const hashed = Object.entries(map)
.filter(([, value]) => value !== undefined)

@@ -93,6 +107,6 @@ .map(([key, value]) => {

const concatenated = (0, buffer_1.concat)(...sorted.map(x => (0, buffer_1.concat)(...x)));
const requestId = hash(concatenated);
return requestId;
const result = hash(concatenated);
return result;
}
exports.requestIdOf = requestIdOf;
exports.hashOfMap = hashOfMap;
//# sourceMappingURL=request_id.js.map

@@ -16,4 +16,17 @@ /**

export declare function fromHex(hex: string): ArrayBuffer;
/**
*
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number;
/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -20,0 +33,0 @@ * @param {Uint8Array} arr Uint8Array to convert

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uint8ToBuf = exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0;
exports.uint8ToBuf = exports.bufEquals = exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0;
/**

@@ -45,2 +45,8 @@ * Concatenate multiple array buffers.

exports.fromHex = fromHex;
/**
*
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
function compare(b1, b2) {

@@ -61,2 +67,12 @@ if (b1.byteLength !== b2.byteLength) {

/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
function bufEquals(b1, b2) {
return compare(b1, b2) === 0;
}
exports.bufEquals = bufEquals;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -63,0 +79,0 @@ * @param {Uint8Array} arr Uint8Array to convert
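
The buffer utilities above gain compare and bufEquals alongside the existing toHex and fromHex helpers. A short hedged sketch:

import { bufEquals, compare } from '@dfinity/agent';

const a = new Uint8Array([1, 2, 3]).buffer;
const b = new Uint8Array([1, 2, 3]).buffer;

console.log(compare(a, b));   // 0 when the buffers match byte for byte
console.log(bufEquals(a, b)); // true, defined as compare(a, b) === 0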

@@ -7,3 +7,2 @@ import { Agent, HttpDetailsResponse, QueryResponseRejected, SubmitResponse } from './agent';

import { RequestId } from './request_id';
import { CreateCertificateOptions } from './certificate';
import _SERVICE from './canisters/management_service';

@@ -65,6 +64,2 @@ export declare class ActorCallError extends AgentError {

queryTransform?(methodName: string, args: unknown[], callConfig: CallConfig): Partial<CallConfig> | void;
/**
* Polyfill for BLS Certificate verification in case wasm is not supported
*/
blsVerify?: CreateCertificateOptions['blsVerify'];
}

@@ -71,0 +66,0 @@ /**

@@ -114,2 +114,4 @@ import { Buffer } from 'buffer/';

constructor(config) {
if (!config.canisterId)
throw new AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
const canisterId = typeof config.canisterId === 'string'

@@ -126,3 +128,3 @@ ? Principal.fromText(config.canisterId)

}
this[methodName] = _createActorMethod(this, methodName, func, config.blsVerify);
this[methodName] = _createActorMethod(this, methodName, func);
}

@@ -134,2 +136,5 @@ }

static createActor(interfaceFactory, configuration) {
if (!configuration.canisterId) {
throw new AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
}
return new (this.createActorClass(interfaceFactory))(configuration);

@@ -159,3 +164,3 @@ }

export const ACTOR_METHOD_WITH_HTTP_DETAILS = 'http-details';
function _createActorMethod(actor, methodName, func, blsVerify) {
function _createActorMethod(actor, methodName, func) {
let caller;

@@ -203,3 +208,3 @@ if (func.annotations.includes('query') || func.annotations.includes('composite_query')) {

const pollStrategy = pollingStrategyFactory();
const responseBytes = await pollForResponse(agent, ecid, requestId, pollStrategy, blsVerify);
const responseBytes = await pollForResponse(agent, ecid, requestId, pollStrategy);
const shouldIncludeHttpDetails = func.annotations.includes(ACTOR_METHOD_WITH_HTTP_DETAILS);

@@ -206,0 +211,0 @@ if (responseBytes !== undefined) {

@@ -42,2 +42,3 @@ import { Principal } from '@dfinity/principal';

httpDetails: HttpDetailsResponse;
requestId: RequestId;
};

@@ -47,2 +48,7 @@ export interface QueryResponseBase {

}
export declare type NodeSignature = {
timestamp: bigint;
signature: Uint8Array;
identity: Uint8Array;
};
export interface QueryResponseReplied extends QueryResponseBase {

@@ -53,2 +59,3 @@ status: QueryResponseStatus.Replied;

};
signatures?: NodeSignature[];
}

@@ -59,2 +66,4 @@ export interface QueryResponseRejected extends QueryResponseBase {

reject_message: string;
error_code: string;
signatures?: NodeSignature[];
}

@@ -61,0 +70,0 @@ /**

@@ -33,14 +33,11 @@ import { JsonObject } from '@dfinity/candid';

/**
* Prevents the agent from providing a unique {@link Nonce} with each call.
* Enabling may cause rate limiting of identical requests
* at the boundary nodes.
* Adds a unique {@link Nonce} with each query.
* Enabling will prevent queries from being answered with a cached response.
*
* To add your own nonce generation logic, you can use the following:
* @example
* import {makeNonceTransform, makeNonce} from '@dfinity/agent';
* const agent = new HttpAgent({ disableNonce: true });
* const agent = new HttpAgent({ useQueryNonces: true });
* agent.addTransform(makeNonceTransform(makeNonce);
* @default false
*/
disableNonce?: boolean;
useQueryNonces?: boolean;
/**

@@ -51,6 +48,11 @@ * Number of times to retry requests before throwing an error

retryTimes?: number;
/**
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet.
* @default true
*/
verifyQuerySignatures?: boolean;
}
export declare class HttpAgent implements Agent {
#private;
rootKey: ArrayBuffer;
private readonly _pipeline;
private _identity;

@@ -68,3 +70,3 @@ private readonly _fetch;

isLocal(): boolean;
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void;
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void;
getPrincipal(): Promise<Principal>;

@@ -89,3 +91,4 @@ call(canisterId: Principal | string, options: {

replaceIdentity(identity: Identity): void;
fetchSubnetKeys(canisterId: Principal | string): Promise<any>;
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>;
}

@@ -0,1 +1,13 @@

var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse;
import { Principal } from '@dfinity/principal';

@@ -5,7 +17,11 @@ import { AgentError } from '../../errors';

import * as cbor from '../../cbor';
import { requestIdOf } from '../../request_id';
import { fromHex } from '../../utils/buffer';
import { hashOfMap, requestIdOf } from '../../request_id';
import { concat, fromHex } from '../../utils/buffer';
import { Expiry, httpHeadersTransform, makeNonceTransform } from './transforms';
import { makeNonce, SubmitRequestType, } from './types';
import { AgentHTTPResponseError } from './errors';
import { request } from '../../canisterStatus';
import { CertificateVerificationError } from '../../certificate';
import { ed25519 } from '@noble/curves/ed25519';
import { Ed25519PublicKey } from '../../public_key';
export * from './transforms';

@@ -90,6 +106,67 @@ export { makeNonce } from './types';

this.rootKey = fromHex(IC_ROOT_KEY);
this._pipeline = [];
this._timeDiffMsecs = 0;
this._rootKeyFetched = false;
this._isAgent = true;
_HttpAgent_queryPipeline.set(this, []);
_HttpAgent_updatePipeline.set(this, []);
_HttpAgent_subnetKeys.set(this, new Map());
_HttpAgent_verifyQuerySignatures.set(this, true);
/**
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation
* @param queryResponse - The response from the query
* @param subnetStatus - The subnet status, including all node keys
* @returns ApiQueryResponse
*/
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => {
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) {
// This should not be called if the user has disabled verification
return queryResponse;
}
if (!subnetStatus) {
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const { status, signatures, requestId } = queryResponse;
const domainSeparator = new TextEncoder().encode('\x0Bic-response');
signatures === null || signatures === void 0 ? void 0 : signatures.forEach(sig => {
const { timestamp, identity } = sig;
const nodeId = Principal.fromUint8Array(identity).toText();
let hash;
// Hash is constructed differently depending on the status
if (status === 'replied') {
const { reply } = queryResponse;
hash = hashOfMap({
status: status,
reply: reply,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else if (status === 'rejected') {
const { reject_code, reject_message, error_code } = queryResponse;
hash = hashOfMap({
status: status,
reject_code: reject_code,
reject_message: reject_message,
error_code: error_code,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else {
throw new Error(`Unknown status: ${status}`);
}
const separatorWithHash = concat(domainSeparator, new Uint8Array(hash));
// FIX: check for match without verifying N times
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId);
if (!pubKey) {
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const rawKey = Ed25519PublicKey.fromDer(pubKey).rawKey;
const valid = ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey));
if (valid)
return queryResponse;
throw new CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`);
});
return queryResponse;
});
if (options.source) {

@@ -99,3 +176,2 @@ if (!(options.source instanceof HttpAgent)) {

}
this._pipeline = [...options.source._pipeline];
this._identity = options.source._identity;

@@ -130,3 +206,3 @@ this._fetch = options.source._fetch;

// Mainnet and local will have the api route available
const knownHosts = ['ic0.app', 'icp0.io', 'localhost', '127.0.0.1'];
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', '127.0.0.1'];
const hostname = location === null || location === void 0 ? void 0 : location.hostname;

@@ -146,2 +222,5 @@ let knownHost;

}
if (options.verifyQuerySignatures !== undefined) {
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f");
}
// Default is 3, only set from option if greater or equal to 0

@@ -166,4 +245,5 @@ this._retryTimes =

// Add a nonce transform to ensure calls are unique
if (!options.disableNonce) {
this.addTransform(makeNonceTransform(makeNonce));
this.addTransform('update', makeNonceTransform(makeNonce));
if (options.useQueryNonces) {
this.addTransform('query', makeNonceTransform(makeNonce));
}

@@ -173,8 +253,15 @@ }

const hostname = this._host.hostname;
return hostname === '127.0.0.1' || hostname.endsWith('localhost');
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1');
}
addTransform(fn, priority = fn.priority || 0) {
// Keep the pipeline sorted at all time, by priority.
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority);
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority }));
addTransform(type, fn, priority = fn.priority || 0) {
if (type === 'update') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority }));
}
else if (type === 'query') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority }));
}
}
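With the pipelines split by request type, custom transforms are registered per type as well. A small sketch that logs outgoing query requests; returning the request (or nothing) leaves it unchanged, matching the pipeline semantics above:

import { HttpAgent } from '@dfinity/agent';

const agent = new HttpAgent({ host: 'https://icp-api.io' });

// Runs only for query requests; update calls keep their own pipeline.
agent.addTransform('query', async request => {
  console.log('query request', request.endpoint, request.body);
  return request;
});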

@@ -241,3 +328,15 @@ async getPrincipal() {

async _requestAndRetry(request, tries = 0) {
const response = await request();
let response;
try {
response = await request();
}
catch (error) {
if (this._retryTimes > tries) {
console.warn(`Caught exception while attempting to make request:\n` +
` ${error}\n` +
` Retrying request.`);
return await this._requestAndRetry(request, tries + 1);
}
throw error;
}
if (response.ok) {

@@ -262,37 +361,73 @@ return response;

async query(canisterId, fields, identity) {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication.");
}
const canister = typeof canisterId === 'string' ? Principal.fromText(canisterId) : canisterId;
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
const makeQuery = async () => {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication.");
}
const canister = Principal.from(canisterId);
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
};
const requestId = await requestIdOf(request);
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: httpHeadersTransform(response.headers),
}, requestId });
};
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
const queryPromise = new Promise((resolve, reject) => {
makeQuery()
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: httpHeadersTransform(response.headers),
} });
const subnetStatusPromise = new Promise((resolve, reject) => {
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
resolve(undefined);
}
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString());
if (subnetStatus) {
resolve(subnetStatus);
}
else {
this.fetchSubnetKeys(canisterId)
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
}
});
const [query, subnetStatus] = await Promise.all([queryPromise, subnetStatusPromise]);
// Skip verification if the user has disabled it
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
return query;
}
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus);
}
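End to end, a raw query through the path above looks roughly like this sketch (canister id and method name are placeholders; arguments are candid-encoded with @dfinity/candid):

import { HttpAgent, QueryResponseStatus } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';

const agent = new HttpAgent({ host: 'https://icp-api.io' });

const response = await agent.query('ryjl3-tyaaa-aaaaa-aaaba-cai' /* placeholder */, {
  methodName: 'greet',
  arg: IDL.encode([IDL.Text], ['world']),
});

if (response.status === QueryResponseStatus.Replied) {
  console.log(IDL.decode([IDL.Text], response.reply.arg));
} else {
  console.error(`${response.reject_code}: ${response.reject_message}`);
}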

@@ -387,10 +522,31 @@ async createReadStateRequest(fields, identity) {

}
async fetchSubnetKeys(canisterId) {
const effectiveCanisterId = Principal.from(canisterId);
const response = await request({
canisterId: effectiveCanisterId,
paths: ['subnet'],
agent: this,
});
const subnetResponse = response.get('subnet');
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) {
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse);
}
return subnetResponse;
}
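The subnet key cache can also be warmed explicitly; a small sketch, assuming fetchSubnetKeys is part of the public typings (canister id is a placeholder):

import { HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://icp-api.io' });
const canisterId = Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'); // placeholder

const subnet = await agent.fetchSubnetKeys(canisterId);
if (subnet) {
  console.log(subnet.subnetId, subnet.nodeKeys.size, 'node keys (DER-encoded)');
}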
_transform(request) {
let p = Promise.resolve(request);
for (const fn of this._pipeline) {
p = p.then(r => fn(r).then(r2 => r2 || r));
if (request.endpoint === "call" /* Endpoint.Call */) {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
else {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
return p;
}
}
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap();
//# sourceMappingURL=index.js.map

@@ -5,9 +5,14 @@ import { lebEncode } from '@dfinity/candid';

const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;
export class Expiry {
constructor(deltaInMSec) {
// Use bigint because it can overflow the maximum number allowed in a double float.
this._value =
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) *
NANOSECONDS_PER_MILLISECONDS;
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
NANOSECONDS_PER_MILLISECONDS;
// round down to the nearest second
const ingress_as_seconds = raw_value / BigInt(1000000000);
// round down to nearest minute
const ingress_as_minutes = ingress_as_seconds / BigInt(60);
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000);
this._value = rounded_down_nanos;
}
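In other words, the ingress expiry is (now + delta − 60 s permitted drift) in nanoseconds, floored to a whole minute, so requests made within the same minute tend to share an ingress_expiry. The same arithmetic as a small sketch (the 5-minute delta matches the agent's default):

const deltaInMSec = 5 * 60 * 1000;   // the agent's default ingress expiry delta
const drift = 60 * 1000;             // REPLICA_PERMITTED_DRIFT_MILLISECONDS
const rawNanos = BigInt(Date.now() + deltaInMSec - drift) * BigInt(1_000_000);
const minuteInNanos = BigInt(60) * BigInt(1_000_000_000);
const expiry = (rawNanos / minuteInNanos) * minuteInNanos; // floored to a whole minute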

@@ -29,3 +34,2 @@ toCBOR() {

return async (request) => {
const nonce = nonceFn();
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests.

@@ -32,0 +36,0 @@ const headers = request.request.headers;

@@ -27,2 +27,4 @@ import { Principal } from '@dfinity/principal';

toDer(): DerEncodedPublicKey;
rawKey?: ArrayBuffer;
derKey?: DerEncodedPublicKey;
}

@@ -29,0 +31,0 @@ /**

/** @module CanisterStatus */
import { Principal } from '@dfinity/principal';
import { HttpAgent } from '../agent/http';
import { CreateCertificateOptions } from '../certificate';
import { DerEncodedPublicKey } from '..';
/**
* Represents the useful information about a subnet
* @param {string} subnetId the principal id of the canister's subnet
* @param {string[]} nodeKeys the keys of the individual nodes in the subnet
*/
export declare type SubnetStatus = {
subnetId: string;
nodeKeys: Map<string, DerEncodedPublicKey>;
metrics?: {
num_canisters: bigint;
canister_state_bytes: bigint;
consumed_cycles_total: {
current: bigint;
deleted: bigint;
};
update_transactions_total: bigint;
};
};
/**
* Types of an entry on the canisterStatus map.
* An entry of null indicates that the request failed, due to lack of permissions or the result being missing.
*/
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null;
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null;
/**

@@ -38,3 +56,2 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder}

paths?: Path[] | Set<Path>;
blsVerify?: CreateCertificateOptions['blsVerify'];
};

@@ -61,2 +78,3 @@ /**

}) => Promise<StatusMap>;
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus;
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[];
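A sketch of requesting the new 'subnet' path through CanisterStatus, assuming the CanisterStatus namespace is re-exported from the package root (canister id and host are placeholders):

import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://icp-api.io' });
const canisterId = Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'); // placeholder

const status = await CanisterStatus.request({ agent, canisterId, paths: ['subnet', 'candid'] });

const subnet = status.get('subnet') as CanisterStatus.SubnetStatus | undefined;
if (subnet) {
  console.log(subnet.subnetId, subnet.nodeKeys.size, 'node keys');
}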
/** @module CanisterStatus */
import { lebDecode, PipeArrayBuffer } from '@dfinity/candid';
import { Principal } from '@dfinity/principal';
import { AgentError } from '../errors';
import { Certificate } from '../certificate';
import { Certificate, flatten_forks, check_canister_ranges, lookupResultToBuffer, lookup_path, } from '../certificate';
import { toHex } from '../utils/buffer';
import * as Cbor from '../cbor';
import { decodeLeb128, decodeTime } from '../utils/leb';
/**

@@ -24,3 +24,4 @@ *

export const request = async (options) => {
const { canisterId, agent, paths } = options;
const { agent, paths } = options;
const canisterId = Principal.from(options.canisterId);
const uniquePaths = [...new Set(paths)];

@@ -44,3 +45,19 @@ // Map path options to their correct formats

});
const data = cert.lookup(encodePath(uniquePaths[index], canisterId));
const lookup = (cert, path) => {
if (path === 'subnet') {
const data = fetchNodeKeys(response.certificate, canisterId, agent.rootKey);
return {
path: path,
data,
};
}
else {
return {
path: path,
data: lookupResultToBuffer(cert.lookup(encodePath(path, canisterId))),
};
}
};
// must pass in the rootKey if we have no delegation
const { path, data } = lookup(cert, uniquePaths[index]);
if (!data) {

@@ -70,2 +87,6 @@ // Typically, the cert lookup will throw

}
case 'subnet': {
status.set(path, data);
break;
}
case 'candid': {

@@ -125,2 +146,53 @@ status.set(path, new TextDecoder().decode(data));

};
export const fetchNodeKeys = (certificate, canisterId, root_key) => {
if (!canisterId._isPrincipal) {
throw new Error('Invalid canisterId');
}
const cert = Cbor.decode(new Uint8Array(certificate));
const tree = cert.tree;
let delegation = cert.delegation;
let subnetId;
if (delegation && delegation.subnet_id) {
subnetId = Principal.fromUint8Array(new Uint8Array(delegation.subnet_id));
}
// On local replica, with System type subnet, there is no delegation
else if (!delegation && typeof root_key !== 'undefined') {
subnetId = Principal.selfAuthenticating(new Uint8Array(root_key));
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
// otherwise use default NNS subnet id
else {
subnetId = Principal.selfAuthenticating(Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array());
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
const canisterInRange = check_canister_ranges({ canisterId, subnetId, tree });
if (!canisterInRange) {
throw new Error('Canister not in range');
}
const nodeTree = lookup_path(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree);
const nodeForks = flatten_forks(nodeTree);
const nodeKeys = new Map();
nodeForks.forEach(fork => {
const node_id = Principal.from(new Uint8Array(fork[1])).toText();
const derEncodedPublicKey = lookup_path(['public_key'], fork[2]);
if (derEncodedPublicKey.byteLength !== 44) {
throw new Error('Invalid public key length');
}
else {
nodeKeys.set(node_id, derEncodedPublicKey);
}
});
return {
subnetId: Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(),
nodeKeys,
};
};
export const encodePath = (path, canisterId) => {

@@ -164,5 +236,2 @@ const encoder = new TextEncoder();

};
const decodeLeb128 = (buf) => {
return lebDecode(new PipeArrayBuffer(buf));
};
const decodeCbor = (buf) => {

@@ -174,7 +243,2 @@ return Cbor.decode(buf);

};
// time is a LEB128-encoded Nat
const decodeTime = (buf) => {
const decoded = decodeLeb128(buf);
return new Date(Number(decoded / BigInt(1000000)));
};
// Controllers are CBOR-encoded buffers, starting with a Tag we don't need

@@ -181,0 +245,0 @@ const decodeControllers = (buf) => {

@@ -9,10 +9,17 @@ import { AgentError } from './errors';

}
declare const enum NodeId {
Empty = 0,
Fork = 1,
Labeled = 2,
Leaf = 3,
Pruned = 4
export interface Cert {
tree: HashTree;
signature: ArrayBuffer;
delegation?: Delegation;
}
export declare type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer];
declare const NodeId: {
Empty: number;
Fork: number;
Labeled: number;
Leaf: number;
Pruned: number;
};
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId];
export { NodeId };
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer];
/**

@@ -23,2 +30,6 @@ * Make a human readable string out of a hash tree.

export declare function hashTreeToString(tree: HashTree): string;
interface Delegation extends Record<string, any> {
subnet_id: ArrayBuffer;
certificate: ArrayBuffer;
}
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>;
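For illustration, the tuple-based HashTree encoding and the debug printer can be exercised directly; a minimal sketch using a single leaf node:

import { NodeId, hashTreeToString, type HashTree } from '@dfinity/agent';

const leaf: HashTree = [NodeId.Leaf, new ArrayBuffer(5)];
console.log(hashTreeToString(leaf)); // "leaf(...5 bytes)"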

@@ -44,2 +55,9 @@ export interface CreateCertificateOptions {

blsVerify?: VerifyFunc;
/**
* The maximum age of the certificate in minutes. Default is 5 minutes.
* This is used to verify the time the certificate was signed, particularly for validating Delegation certificates, which can live for longer than the default window of +/- 5 minutes.
* If the certificate is older than the specified age, it will fail verification.
* @default 5
*/
maxAgeInMinutes?: number;
}
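A hedged sketch of using the new option when verifying a certificate obtained from a read_state call (the wider window is illustrative):

import { Certificate, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

async function verifyCert(certificate: ArrayBuffer, canisterId: Principal, agent: HttpAgent) {
  // Throws CertificateVerificationError if the BLS signature is invalid or the
  // certificate falls outside the freshness window checked in verify() below.
  return Certificate.create({
    certificate,
    rootKey: agent.rootKey, // call agent.fetchRootKey() first on a local replica
    canisterId,
    maxAgeInMinutes: 10,
  });
}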

@@ -49,3 +67,3 @@ export declare class Certificate {

private _canisterId;
private _blsVerify;
private _maxAgeInMinutes;
private readonly cert;

@@ -55,8 +73,8 @@ /**

* CertificateVerificationError if the certificate cannot be verified.
* @constructs {@link AuthClient}
* @param {CreateCertificateOptions} options
* @see {@link CreateCertificateOptions}
* @constructs Certificate
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions}
* @param {ArrayBuffer} options.certificate The bytes of the certificate
* @param {ArrayBuffer} options.rootKey The root key to verify against
* @param {Principal} options.canisterId The effective or signing canister ID
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes.
* @throws {CertificateVerificationError}

@@ -67,2 +85,3 @@ */

lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined;
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined;
private verify;

@@ -72,2 +91,8 @@ private _checkDelegationAndGetKey;

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined;
/**
* @param t

@@ -80,3 +105,19 @@ */

*/
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined;
export {};
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export declare function flatten_forks(t: HashTree): HashTree[];
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
export declare function check_canister_ranges(params: {
canisterId: Principal;
subnetId: Principal;
tree: HashTree;
}): boolean;

@@ -6,3 +6,4 @@ import * as cbor from './cbor';

import { Principal } from '@dfinity/principal';
import * as bls from './utils/bls';
import { blsVerify } from '@dfinity/bls-verify';
import { decodeTime } from './utils/leb';
/**

@@ -16,2 +17,10 @@ * A certificate may fail verification with respect to the provided public key

}
const NodeId = {
Empty: 0,
Fork: 1,
Labeled: 2,
Leaf: 3,
Pruned: 4,
};
export { NodeId };
/**

@@ -36,18 +45,40 @@ * Make a human readable string out of a hash tree.

switch (tree[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return '()';
case 1 /* NodeId.Fork */: {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
case NodeId.Fork: {
if (tree[1] instanceof Array && tree[2] instanceof Array) {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
}
else {
throw new Error('Invalid tree structure for fork');
}
}
case 2 /* NodeId.Labeled */: {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
case NodeId.Labeled: {
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof Array) {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
}
else {
throw new Error('Invalid tree structure for labeled');
}
}
case 3 /* NodeId.Leaf */: {
case NodeId.Leaf: {
if (!tree[1]) {
throw new Error('Invalid tree structure for leaf');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `leaf(...${tree[1].byteLength} bytes)`;
}
case 4 /* NodeId.Pruned */: {
case NodeId.Pruned: {
if (!tree[1]) {
throw new Error('Invalid tree structure for pruned');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `pruned(${toHex(new Uint8Array(tree[1]))}`;

@@ -74,6 +105,6 @@ }

export class Certificate {
constructor(certificate, _rootKey, _canisterId, _blsVerify) {
constructor(certificate, _rootKey, _canisterId, _maxAgeInMinutes = 5) {
this._rootKey = _rootKey;
this._canisterId = _canisterId;
this._blsVerify = _blsVerify;
this._maxAgeInMinutes = _maxAgeInMinutes;
this.cert = cbor.decode(new Uint8Array(certificate));

@@ -84,16 +115,12 @@ }

* CertificateVerificationError if the certificate cannot be verified.
* @constructs {@link AuthClient}
* @param {CreateCertificateOptions} options
* @see {@link CreateCertificateOptions}
* @constructs Certificate
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions}
* @param {ArrayBuffer} options.certificate The bytes of the certificate
* @param {ArrayBuffer} options.rootKey The root key to verify against
* @param {Principal} options.canisterId The effective or signing canister ID
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes.
* @throws {CertificateVerificationError}
*/
static async create(options) {
let blsVerify = options.blsVerify;
if (!blsVerify) {
blsVerify = bls.blsVerify;
}
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify);
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, options.maxAgeInMinutes);
await cert.verify();

@@ -103,4 +130,8 @@ return cert;

lookup(path) {
return lookup_path(path, this.cert.tree);
// constrain the type of the result, so that empty HashTree is undefined
return lookupResultToBuffer(lookup_path(path, this.cert.tree));
}
lookup_label(label) {
return this.lookup([label]);
}
async verify() {

@@ -113,4 +144,27 @@ const rootHash = await reconstruct(this.cert.tree);

let sigVer = false;
const lookupTime = this.lookup(['time']);
if (!lookupTime) {
// Should never happen - time is always present in IC certificates
throw new CertificateVerificationError('Certificate does not contain a time');
}
const FIVE_MINUTES_IN_MSEC = 5 * 60 * 1000;
const MAX_AGE_IN_MSEC = this._maxAgeInMinutes * 60 * 1000;
const now = Date.now();
const earliestCertificateTime = now - MAX_AGE_IN_MSEC;
const fiveMinutesFromNow = now + FIVE_MINUTES_IN_MSEC;
const certTime = decodeTime(lookupTime);
if (certTime.getTime() < earliestCertificateTime) {
throw new CertificateVerificationError(`Certificate is signed more than ${this._maxAgeInMinutes} minutes in the past. Certificate time: ` +
certTime.toISOString() +
' Current time: ' +
new Date(now).toISOString());
}
else if (certTime.getTime() > fiveMinutesFromNow) {
throw new CertificateVerificationError('Certificate is signed more than 5 minutes in the future. Certificate time: ' +
certTime.toISOString() +
' Current time: ' +
new Date(now).toISOString());
}
try {
sigVer = await this._blsVerify(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg));
sigVer = await blsVerify(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg));
}

@@ -132,13 +186,10 @@ catch (err) {

canisterId: this._canisterId,
// Do not check max age for delegation certificates
maxAgeInMinutes: Infinity,
});
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']);
if (!rangeLookup) {
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${toHex(d.subnet_id)}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
Principal.fromUint8Array(v[0]),
Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId));
const canisterInRange = check_canister_ranges({
canisterId: this._canisterId,
subnetId: Principal.fromUint8Array(new Uint8Array(d.subnet_id)),
tree: cert.cert.tree,
});
if (!canisterInRange) {

@@ -168,2 +219,16 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${toHex(d.subnet_id)}`);

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
export function lookupResultToBuffer(result) {
if (result instanceof ArrayBuffer) {
return result;
}
else if (result instanceof Uint8Array) {
return result.buffer;
}
return undefined;
}
/**
* @param t

@@ -173,11 +238,11 @@ */

switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return hash(domain_sep('ic-hashtree-empty'));
case 4 /* NodeId.Pruned */:
case NodeId.Pruned:
return t[1];
case 3 /* NodeId.Leaf */:
case NodeId.Leaf:
return hash(concat(domain_sep('ic-hashtree-leaf'), t[1]));
case 2 /* NodeId.Labeled */:
case NodeId.Labeled:
return hash(concat(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2])));
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return hash(concat(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2])));

@@ -200,7 +265,20 @@ default:

switch (tree[0]) {
case 3 /* NodeId.Leaf */: {
return new Uint8Array(tree[1]).buffer;
case NodeId.Leaf: {
// should not be undefined
if (!tree[1])
throw new Error('Invalid tree structure for leaf');
if (tree[1] instanceof ArrayBuffer) {
return tree[1];
}
else if (tree[1] instanceof Uint8Array) {
return tree[1].buffer;
}
else
return tree[1];
}
case NodeId.Fork: {
return tree;
}
default: {
return undefined;
return tree;
}

@@ -215,7 +293,12 @@ }

}
function flatten_forks(t) {
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export function flatten_forks(t) {
switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return [];
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return flatten_forks(t[1]).concat(flatten_forks(t[2]));

@@ -231,3 +314,3 @@ default:

for (const t of trees) {
if (t[0] === 2 /* NodeId.Labeled */) {
if (t[0] === NodeId.Labeled) {
const p = t[1];

@@ -240,2 +323,22 @@ if (isBufferEqual(l, p)) {

}
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
export function check_canister_ranges(params) {
const { canisterId, subnetId, tree } = params;
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree);
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) {
throw new Error(`Could not find canister ranges for subnet ${subnetId}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
Principal.fromUint8Array(v[0]),
Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId));
return canisterInRange;
}
//# sourceMappingURL=certificate.js.map
import { ActorSubclass } from './actor';
export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';
export * from './utils/bls';
export * from './utils/buffer';

@@ -13,0 +14,0 @@ export * from './utils/random';

export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';
export * from './utils/bls';
export * from './utils/buffer';

@@ -12,0 +13,0 @@ export * from './utils/random';

import { Principal } from '@dfinity/principal';
import { Agent, RequestStatusResponseStatus } from '../agent';
import { CreateCertificateOptions } from '../certificate';
import { RequestId } from '../request_id';

@@ -18,2 +17,2 @@ export * as strategy from './strategy';

*/
export declare function pollForResponse(agent: Agent, canisterId: Principal, requestId: RequestId, strategy: PollStrategy, request?: any, blsVerify?: CreateCertificateOptions['blsVerify']): Promise<ArrayBuffer>;
export declare function pollForResponse(agent: Agent, canisterId: Principal, requestId: RequestId, strategy: PollStrategy, request?: any): Promise<ArrayBuffer>;
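A sketch of the updated call-and-poll flow without the removed blsVerify parameter, assuming the polling namespace is re-exported from the package root (canister id and method name are placeholders):

import { HttpAgent, polling } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://icp-api.io' });
const canisterId = Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'); // placeholder

const { requestId } = await agent.call(canisterId, {
  methodName: 'greet',
  arg: IDL.encode([IDL.Text], ['world']),
});

// Poll request_status until the call is replied, rejected, or times out.
const reply = await polling.pollForResponse(agent, canisterId, requestId, polling.strategy.defaultStrategy());
console.log(IDL.decode([IDL.Text], reply));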

@@ -17,3 +17,3 @@ import { RequestStatusResponseStatus } from '../agent';

// eslint-disable-next-line
request, blsVerify) {
request) {
var _a;

@@ -29,3 +29,2 @@ const path = [new TextEncoder().encode('request_status'), requestId];

canisterId: canisterId,
blsVerify,
});

@@ -32,0 +31,0 @@ const maybeBuf = cert.lookup([...path, new TextEncoder().encode('status')]);

@@ -22,1 +22,9 @@ export declare type RequestId = ArrayBuffer & {

export declare function requestIdOf(request: Record<string, any>): RequestId;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @returns ArrayBuffer
*/
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer;

@@ -46,2 +46,5 @@ import { lebEncode } from '@dfinity/candid';

}
else if (typeof value === 'object') {
return hashOfMap(value);
}
else if (typeof value === 'bigint') {

@@ -71,3 +74,13 @@ // Do this check much later than the other bigint check because this one is much less

export function requestIdOf(request) {
const hashed = Object.entries(request)
return hashOfMap(request);
}
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @returns ArrayBuffer
*/
export function hashOfMap(map) {
const hashed = Object.entries(map)
.filter(([, value]) => value !== undefined)

@@ -84,5 +97,5 @@ .map(([key, value]) => {

const concatenated = concat(...sorted.map(x => concat(...x)));
const requestId = hash(concatenated);
return requestId;
const result = hash(concatenated);
return result;
}
//# sourceMappingURL=request_id.js.map
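Since requestIdOf is now a thin wrapper around hashOfMap, both can be called directly; a small sketch:

import { requestIdOf, hashOfMap, toHex } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const requestId = requestIdOf({
  request_type: 'query',
  canister_id: Principal.fromText('aaaaa-aa'), // management canister id, as a placeholder
  method_name: 'greet',
  arg: new Uint8Array([0x44, 0x49, 0x44, 0x4c]).buffer, // illustrative bytes
  sender: Principal.anonymous(),
});
console.log(toHex(requestId));

// hashOfMap also accepts nested records, per the object branch added above.
console.log(toHex(hashOfMap({ outer: { inner: 'value' } })));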

@@ -16,4 +16,17 @@ /**

export declare function fromHex(hex: string): ArrayBuffer;
/**
* Compares two ArrayBuffers for ordering, first by byte length and then byte by byte.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number;
/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -20,0 +33,0 @@ * @param {Uint8Array} arr Uint8Array to convert

@@ -39,2 +39,8 @@ /**

}
/**
* Compares two ArrayBuffers for ordering, first by byte length and then byte by byte.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export function compare(b1, b2) {

@@ -54,2 +60,11 @@ if (b1.byteLength !== b2.byteLength) {

/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export function bufEquals(b1, b2) {
return compare(b1, b2) === 0;
}
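A quick sketch of the new buffer helpers:

import { compare, bufEquals } from '@dfinity/agent';

const a = new Uint8Array([1, 2, 3]).buffer as ArrayBuffer;
const b = new Uint8Array([1, 2, 4]).buffer as ArrayBuffer;

console.log(compare(a, b) < 0); // true: a orders before b
console.log(bufEquals(a, a));   // true
console.log(bufEquals(a, b));   // false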
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -56,0 +71,0 @@ * @param {Uint8Array} arr Uint8Array to convert

{
"name": "@dfinity/agent",
"version": "0.19.2",
"version": "0.19.3-react-native",
"author": "DFINITY Stiftung <sdk@dfinity.org>",

@@ -46,3 +46,2 @@ "license": "Apache-2.0",

"make:docs/reference": "typedoc src/index.ts --out ../../docs/generated/agent --excludeInternal",
"release": "npm publish",
"test": "jest --verbose",

@@ -54,6 +53,9 @@ "test:coverage": "jest --verbose --collectCoverage",

"peerDependencies": {
"@dfinity/candid": "^0.19.2",
"@dfinity/principal": "^0.19.2"
"@dfinity/candid": "^0.19.3-react-native",
"@dfinity/principal": "^0.19.3-react-native",
"@dfinity/bls-verify": "0.19.3-react-native",
"@noble/curves": "file:noble-curves-1.2.0.tgz"
},
"dependencies": {
"@noble/curves": "^1.2.0",
"@noble/hashes": "^1.3.1",

@@ -65,2 +67,3 @@ "base64-arraybuffer": "^0.2.0",

"devDependencies": {
"@dfinity/utils": "^0.0.22",
"@peculiar/webcrypto": "^1.4.3",

@@ -67,0 +70,0 @@ "@trust/webcrypto": "^0.9.2",

@@ -89,3 +89,3 @@ # @dfinity/agent

const host = process.env.DFX_NETWORK === 'local' ? 'http://localhost:4943' : 'https://icp-api.io';
const host = process.env.DFX_NETWORK === 'local' ? 'http://127.0.0.1:4943' : 'https://icp-api.io';

@@ -103,3 +103,3 @@ const agent = new HttpAgent({ fetch, host });

const host = process.env.DFX_NETWORK === 'local' ? 'http://localhost:4943' : 'https://ic0.app';
const host = process.env.DFX_NETWORK === 'local' ? 'http://127.0.0.1:4943' : 'https://ic0.app';

@@ -106,0 +106,0 @@ /**
