@dfinity/agent - npm Package Compare versions

Comparing version 0.19.3 to 0.20.0-beta.0

lib/cjs/der.d.ts

lib/cjs/actor.js

@@ -124,3 +124,3 @@ "use strict";

if (!config.canisterId)
throw new errors_1.AgentError(`Canister ID is required, but recieved ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
const canisterId = typeof config.canisterId === 'string'

@@ -145,3 +145,3 @@ ? principal_1.Principal.fromText(config.canisterId)

if (!configuration.canisterId) {
throw new errors_1.AgentError(`Canister ID is required, but recieved ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
}

@@ -148,0 +148,0 @@ return new (this.createActorClass(interfaceFactory))(configuration);

@@ -42,2 +42,3 @@ import { Principal } from '@dfinity/principal';

httpDetails: HttpDetailsResponse;
requestId: RequestId;
};

@@ -47,2 +48,7 @@ export interface QueryResponseBase {

}
export declare type NodeSignature = {
timestamp: bigint;
signature: Uint8Array;
identity: Uint8Array;
};
export interface QueryResponseReplied extends QueryResponseBase {

@@ -53,2 +59,3 @@ status: QueryResponseStatus.Replied;

};
signatures?: NodeSignature[];
}

@@ -59,2 +66,4 @@ export interface QueryResponseRejected extends QueryResponseBase {

reject_message: string;
error_code: string;
signatures?: NodeSignature[];
}

@@ -61,0 +70,0 @@ /**

@@ -33,14 +33,11 @@ import { JsonObject } from '@dfinity/candid';

/**
* Prevents the agent from providing a unique {@link Nonce} with each call.
* Enabling may cause rate limiting of identical requests
* at the boundary nodes.
* Adds a unique {@link Nonce} with each query.
* Enabling will prevent queries from being answered with a cached response.
*
* To add your own nonce generation logic, you can use the following:
* @example
* import {makeNonceTransform, makeNonce} from '@dfinity/agent';
* const agent = new HttpAgent({ disableNonce: true });
* const agent = new HttpAgent({ useQueryNonces: true });
* agent.addTransform(makeNonceTransform(makeNonce);
* @default false
*/
disableNonce?: boolean;
useQueryNonces?: boolean;
/**

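The nonce option is renamed and inverted here: `disableNonce` gives way to `useQueryNonces`, which opts queries out of cached responses. A minimal sketch of the new option, assuming only the constructor shape shown in this diff (the host URL is illustrative):

```typescript
import { HttpAgent } from '@dfinity/agent';

// 0.20.0-beta.0: update calls always get a nonce; queries only get one
// when useQueryNonces is set, which bypasses cached query responses.
const agent = new HttpAgent({
  host: 'https://ic0.app', // illustrative host
  useQueryNonces: true,
});
```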
@@ -51,6 +48,11 @@ * Number of times to retry requests before throwing an error

retryTimes?: number;
/**
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet.
* @default true
*/
verifyQuerySignatures?: boolean;
}
export declare class HttpAgent implements Agent {
#private;
rootKey: ArrayBuffer;
private readonly _pipeline;
private _identity;
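The new `verifyQuerySignatures` flag defaults to `true` per the doc comment above; switching it off trades the node-key lookup overhead for unverified query responses. A small sketch, assuming only the option shown in this diff:

```typescript
import { HttpAgent } from '@dfinity/agent';

// Skips the extra subnet-status request and the per-response Ed25519 check.
const unverifiedAgent = new HttpAgent({
  host: 'https://ic0.app', // illustrative host
  verifyQuerySignatures: false,
});
```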

@@ -68,3 +70,3 @@ private readonly _fetch;

isLocal(): boolean;
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void;
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void;
getPrincipal(): Promise<Principal>;
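`addTransform` now takes the target pipeline ('update' or 'query') as its first argument. A sketch of registering a query-side nonce transform with the new signature, using the exports named in the doc comment earlier in this diff:

```typescript
import { HttpAgent, makeNonce, makeNonceTransform } from '@dfinity/agent';

const agent = new HttpAgent({ host: 'https://ic0.app' }); // illustrative host

// The first argument selects the pipeline; update transforms no longer
// apply to queries, and vice versa.
agent.addTransform('query', makeNonceTransform(makeNonce));
```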

@@ -89,3 +91,4 @@ call(canisterId: Principal | string, options: {

replaceIdentity(identity: Identity): void;
fetchSubnetKeys(canisterId: Principal | string): Promise<any>;
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>;
}
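`fetchSubnetKeys` is new on the agent surface and backs the node-key cache used for query verification. A sketch of priming that cache up front, assuming an async context (the canister ID is purely illustrative):

```typescript
import { HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://ic0.app' }); // illustrative host

// Fetches and caches the DER-encoded node keys for the canister's subnet,
// so the first query does not pay the extra read_state round trip.
const subnetStatus = await agent.fetchSubnetKeys(
  Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), // illustrative canister
);
console.log(subnetStatus);
```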

@@ -28,2 +28,14 @@ "use strict";

};
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse;
Object.defineProperty(exports, "__esModule", { value: true });

@@ -40,2 +52,6 @@ exports.HttpAgent = exports.IdentityInvalidError = exports.RequestStatusResponseStatus = exports.makeNonce = void 0;

const errors_2 = require("./errors");
const canisterStatus_1 = require("../../canisterStatus");
const certificate_1 = require("../../certificate");
const ed25519_1 = require("@noble/curves/ed25519");
const public_key_1 = require("../../public_key");
__exportStar(require("./transforms"), exports);

@@ -122,6 +138,67 @@ var types_2 = require("./types");

this.rootKey = (0, buffer_1.fromHex)(IC_ROOT_KEY);
this._pipeline = [];
this._timeDiffMsecs = 0;
this._rootKeyFetched = false;
this._isAgent = true;
_HttpAgent_queryPipeline.set(this, []);
_HttpAgent_updatePipeline.set(this, []);
_HttpAgent_subnetKeys.set(this, new Map());
_HttpAgent_verifyQuerySignatures.set(this, true);
/**
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation
* @param queryResponse - The response from the query
* @param subnetStatus - The subnet status, including all node keys
* @returns ApiQueryResponse
*/
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => {
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) {
// This should not be called if the user has disabled verification
return queryResponse;
}
if (!subnetStatus) {
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const { status, signatures, requestId } = queryResponse;
const domainSeparator = new TextEncoder().encode('\x0Bic-response');
signatures === null || signatures === void 0 ? void 0 : signatures.forEach(sig => {
const { timestamp, identity } = sig;
const nodeId = principal_1.Principal.fromUint8Array(identity).toText();
let hash;
// Hash is constructed differently depending on the status
if (status === 'replied') {
const { reply } = queryResponse;
hash = (0, request_id_1.hashOfMap)({
status: status,
reply: reply,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else if (status === 'rejected') {
const { reject_code, reject_message, error_code } = queryResponse;
hash = (0, request_id_1.hashOfMap)({
status: status,
reject_code: reject_code,
reject_message: reject_message,
error_code: error_code,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else {
throw new Error(`Unknown status: ${status}`);
}
const separatorWithHash = (0, buffer_1.concat)(domainSeparator, new Uint8Array(hash));
// FIX: check for match without verifying N times
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId);
if (!pubKey) {
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const rawKey = public_key_1.Ed25519PublicKey.fromDer(pubKey).rawKey;
const valid = ed25519_1.ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey));
if (valid)
return queryResponse;
throw new certificate_1.CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`);
});
return queryResponse;
});
if (options.source) {

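The `#verifyQueryResponse` helper above hashes the response fields with `hashOfMap`, prefixes the `\x0Bic-response` domain separator, and checks each node signature with Ed25519 against the node's DER-encoded key from the cached subnet status. A condensed, illustrative re-statement of that check for a 'replied' response (the helper name and simplified types are hypothetical):

```typescript
import { ed25519 } from '@noble/curves/ed25519';
import { Ed25519PublicKey, hashOfMap } from '@dfinity/agent';

// Hypothetical helper mirroring the per-signature check in the hunk above.
function verifyRepliedQuerySignature(
  reply: { arg: ArrayBuffer },
  requestId: ArrayBuffer,
  sig: { timestamp: bigint; signature: Uint8Array },
  derNodeKey: ArrayBuffer,
): boolean {
  // Representation-independent hash over the signed response fields.
  const responseHash = hashOfMap({
    status: 'replied',
    reply,
    timestamp: sig.timestamp,
    request_id: requestId,
  });
  // "\x0Bic-response" domain separator followed by the response hash.
  const separator = new TextEncoder().encode('\x0Bic-response');
  const message = new Uint8Array(separator.byteLength + responseHash.byteLength);
  message.set(separator, 0);
  message.set(new Uint8Array(responseHash), separator.byteLength);
  // The node's raw Ed25519 key is extracted from its DER encoding.
  const rawKey = Ed25519PublicKey.fromDer(derNodeKey).rawKey;
  return ed25519.verify(sig.signature, message, new Uint8Array(rawKey!));
}
```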
@@ -131,3 +208,2 @@ if (!(options.source instanceof HttpAgent)) {

}
this._pipeline = [...options.source._pipeline];
this._identity = options.source._identity;

@@ -162,3 +238,3 @@ this._fetch = options.source._fetch;

// Mainnet and local will have the api route available
const knownHosts = ['ic0.app', 'icp0.io', 'localhost', '127.0.0.1'];
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', '127.0.0.1'];
const hostname = location === null || location === void 0 ? void 0 : location.hostname;

@@ -178,2 +254,5 @@ let knownHost;

}
if (options.verifyQuerySignatures !== undefined) {
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f");
}
// Default is 3, only set from option if greater or equal to 0

@@ -198,4 +277,5 @@ this._retryTimes =

// Add a nonce transform to ensure calls are unique
if (!options.disableNonce) {
this.addTransform((0, transforms_1.makeNonceTransform)(types_1.makeNonce));
this.addTransform('update', (0, transforms_1.makeNonceTransform)(types_1.makeNonce));
if (options.useQueryNonces) {
this.addTransform('query', (0, transforms_1.makeNonceTransform)(types_1.makeNonce));
}

@@ -205,8 +285,15 @@ }

const hostname = this._host.hostname;
return hostname === '127.0.0.1' || hostname.endsWith('localhost');
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1');
}
addTransform(fn, priority = fn.priority || 0) {
// Keep the pipeline sorted at all time, by priority.
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority);
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority }));
addTransform(type, fn, priority = fn.priority || 0) {
if (type === 'update') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority }));
}
else if (type === 'query') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority }));
}
}

@@ -273,3 +360,15 @@ async getPrincipal() {

async _requestAndRetry(request, tries = 0) {
const response = await request();
let response;
try {
response = await request();
}
catch (error) {
if (this._retryTimes > tries) {
console.warn(`Caught exception while attempting to make request:\n` +
` ${error}\n` +
` Retrying request.`);
return await this._requestAndRetry(request, tries + 1);
}
throw error;
}
if (response.ok) {

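`_requestAndRetry` now retries thrown fetch errors (for example, transient network failures), not only non-ok HTTP responses, up to the agent's `retryTimes`. A sketch of raising the retry budget at construction; only the option name, which appears earlier in this diff, is assumed:

```typescript
import { HttpAgent } from '@dfinity/agent';

// The default retry budget is 3; thrown request errors now count against it too.
const agent = new HttpAgent({
  host: 'https://ic0.app', // illustrative host
  retryTimes: 5,
});
```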
@@ -294,37 +393,73 @@ return response;

async query(canisterId, fields, identity) {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = typeof canisterId === 'string' ? principal_1.Principal.fromText(canisterId) : canisterId;
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
const makeQuery = async () => {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = principal_1.Principal.from(canisterId);
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
};
const requestId = await (0, request_id_1.requestIdOf)(request);
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: (0, transforms_1.httpHeadersTransform)(response.headers),
}, requestId });
};
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
const queryPromise = new Promise((resolve, reject) => {
makeQuery()
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: (0, transforms_1.httpHeadersTransform)(response.headers),
} });
const subnetStatusPromise = new Promise((resolve, reject) => {
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
resolve(undefined);
}
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString());
if (subnetStatus) {
resolve(subnetStatus);
}
else {
this.fetchSubnetKeys(canisterId)
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
}
});
const [query, subnetStatus] = await Promise.all([queryPromise, subnetStatusPromise]);
// Skip verification if the user has disabled it
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
return query;
}
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus);
}

@@ -419,7 +554,27 @@ async createReadStateRequest(fields, identity) {

}
async fetchSubnetKeys(canisterId) {
const effectiveCanisterId = principal_1.Principal.from(canisterId);
const response = await (0, canisterStatus_1.request)({
canisterId: effectiveCanisterId,
paths: ['subnet'],
agent: this,
});
const subnetResponse = response.get('subnet');
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) {
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse);
}
return subnetResponse;
}
_transform(request) {
let p = Promise.resolve(request);
for (const fn of this._pipeline) {
p = p.then(r => fn(r).then(r2 => r2 || r));
if (request.endpoint === "call" /* Endpoint.Call */) {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
else {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
return p;

@@ -429,2 +584,3 @@ }

exports.HttpAgent = HttpAgent;
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap();
//# sourceMappingURL=index.js.map

@@ -31,9 +31,14 @@ "use strict";

const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;
class Expiry {
constructor(deltaInMSec) {
// Use bigint because it can overflow the maximum number allowed in a double float.
this._value =
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) *
NANOSECONDS_PER_MILLISECONDS;
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
NANOSECONDS_PER_MILLISECONDS;
// round down to the nearest second
const ingress_as_seconds = raw_value / BigInt(1000000000);
// round down to nearest minute
const ingress_as_minutes = ingress_as_seconds / BigInt(60);
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000);
this._value = rounded_down_nanos;
}
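The reworked `Expiry` subtracts the 60-second permitted drift and then rounds the result down to a whole minute before converting to nanoseconds. A standalone sketch of the same arithmetic, assuming only the constants visible in this hunk:

```typescript
const NANOSECONDS_PER_MILLISECONDS = BigInt(1_000_000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;

function ingressExpiryNanos(deltaInMSec: number, nowMs: number = Date.now()): bigint {
  const raw =
    BigInt(Math.floor(nowMs + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
    NANOSECONDS_PER_MILLISECONDS;
  // Integer division truncates: round down to whole seconds, then whole minutes.
  const seconds = raw / BigInt(1_000_000_000);
  const minutes = seconds / BigInt(60);
  return minutes * BigInt(60) * BigInt(1_000_000_000);
}

// With a 5-minute delta, the expiry lands on the minute boundary at or just
// below (now + 4 minutes).
console.log(ingressExpiryNanos(5 * 60 * 1000));
```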

@@ -56,3 +61,2 @@ toCBOR() {

return async (request) => {
const nonce = nonceFn();
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests.

@@ -59,0 +63,0 @@ const headers = request.request.headers;

@@ -27,2 +27,4 @@ import { Principal } from '@dfinity/principal';

toDer(): DerEncodedPublicKey;
rawKey?: ArrayBuffer;
derKey?: DerEncodedPublicKey;
}

@@ -29,0 +31,0 @@ /**

@@ -5,7 +5,26 @@ /** @module CanisterStatus */

import { CreateCertificateOptions } from '../certificate';
import { DerEncodedPublicKey } from '..';
/**
* Represents the useful information about a subnet
* @param {string} subnetId the principal id of the canister's subnet
* @param {string[]} nodeKeys the keys of the individual nodes in the subnet
*/
export declare type SubnetStatus = {
subnetId: string;
nodeKeys: Map<string, DerEncodedPublicKey>;
metrics?: {
num_canisters: bigint;
canister_state_bytes: bigint;
consumed_cycles_total: {
current: bigint;
deleted: bigint;
};
update_transactions_total: bigint;
};
};
/**
* Types of an entry on the canisterStatus map.
* An entry of null indicates that the request failed, due to lack of permissions or the result being missing.
*/
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null;
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null;
/**

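`SubnetStatus` joins the `Status` union, and the new 'subnet' path surfaces it through the canister-status request. A sketch of reading it, assuming the `CanisterStatus` namespace export from earlier releases is unchanged and an async context (the canister ID is illustrative):

```typescript
import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://ic0.app' }); // illustrative host

const status = await CanisterStatus.request({
  canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), // illustrative
  agent,
  paths: ['subnet'],
});

// The 'subnet' entry is a SubnetStatus: the subnet principal plus a map of
// node principal -> DER-encoded public key (and optional metrics).
const subnet = status.get('subnet');
console.log(subnet);
```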
@@ -61,2 +80,3 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder}

}) => Promise<StatusMap>;
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus;
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[];
"use strict";
/** @module CanisterStatus */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {

@@ -27,3 +26,4 @@ if (k2 === undefined) k2 = k;

Object.defineProperty(exports, "__esModule", { value: true });
exports.encodePath = exports.request = void 0;
exports.encodePath = exports.fetchNodeKeys = exports.request = void 0;
/** @module CanisterStatus */
const principal_1 = require("@dfinity/principal");

@@ -51,3 +51,4 @@ const errors_1 = require("../errors");

const request = async (options) => {
const { canisterId, agent, paths } = options;
const { agent, paths } = options;
const canisterId = principal_1.Principal.from(options.canisterId);
const uniquePaths = [...new Set(paths)];

@@ -71,3 +72,19 @@ // Map path options to their correct formats

});
const data = cert.lookup((0, exports.encodePath)(uniquePaths[index], canisterId));
const lookup = (cert, path) => {
if (path === 'subnet') {
const data = (0, exports.fetchNodeKeys)(response.certificate, canisterId, agent.rootKey);
return {
path: path,
data,
};
}
else {
return {
path: path,
data: (0, certificate_1.lookupResultToBuffer)(cert.lookup((0, exports.encodePath)(path, canisterId))),
};
}
};
// must pass in the rootKey if we have no delegation
const { path, data } = lookup(cert, uniquePaths[index]);
if (!data) {

@@ -97,2 +114,6 @@ // Typically, the cert lookup will throw

}
case 'subnet': {
status.set(path, data);
break;
}
case 'candid': {

@@ -153,2 +174,54 @@ status.set(path, new TextDecoder().decode(data));

exports.request = request;
const fetchNodeKeys = (certificate, canisterId, root_key) => {
if (!canisterId._isPrincipal) {
throw new Error('Invalid canisterId');
}
const cert = Cbor.decode(new Uint8Array(certificate));
const tree = cert.tree;
let delegation = cert.delegation;
let subnetId;
if (delegation && delegation.subnet_id) {
subnetId = principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id));
}
// On local replica, with System type subnet, there is no delegation
else if (!delegation && typeof root_key !== 'undefined') {
subnetId = principal_1.Principal.selfAuthenticating(new Uint8Array(root_key));
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
// otherwise use default NNS subnet id
else {
subnetId = principal_1.Principal.selfAuthenticating(principal_1.Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array());
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
const canisterInRange = (0, certificate_1.check_canister_ranges)({ canisterId, subnetId, tree });
if (!canisterInRange) {
throw new Error('Canister not in range');
}
const nodeTree = (0, certificate_1.lookup_path)(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree);
const nodeForks = (0, certificate_1.flatten_forks)(nodeTree);
nodeForks.length;
const nodeKeys = new Map();
nodeForks.forEach(fork => {
Object.getPrototypeOf(new Uint8Array(fork[1]));
const node_id = principal_1.Principal.from(new Uint8Array(fork[1])).toText();
const derEncodedPublicKey = (0, certificate_1.lookup_path)(['public_key'], fork[2]);
if (derEncodedPublicKey.byteLength !== 44) {
throw new Error('Invalid public key length');
}
else {
nodeKeys.set(node_id, derEncodedPublicKey);
}
});
return {
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(),
nodeKeys,
};
};
exports.fetchNodeKeys = fetchNodeKeys;
const encodePath = (path, canisterId) => {

@@ -155,0 +228,0 @@ const encoder = new TextEncoder();

@@ -9,10 +9,17 @@ import { AgentError } from './errors';

}
declare const enum NodeId {
Empty = 0,
Fork = 1,
Labeled = 2,
Leaf = 3,
Pruned = 4
export interface Cert {
tree: HashTree;
signature: ArrayBuffer;
delegation?: Delegation;
}
export declare type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer];
declare const NodeId: {
Empty: number;
Fork: number;
Labeled: number;
Leaf: number;
Pruned: number;
};
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId];
export { NodeId };
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer];
/**

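`NodeId` changes from a `const enum` to a plain exported object, and tree helpers such as `flatten_forks` become public. A small sketch against the new declarations, with a hand-built tree whose contents are purely illustrative:

```typescript
import { NodeId, flatten_forks } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';

const buf = (text: string) => new TextEncoder().encode(text).buffer as ArrayBuffer;

// fork( labeled("greeting", leaf("hello")), leaf("world") )
const tree: HashTree = [
  NodeId.Fork,
  [NodeId.Labeled, buf('greeting'), [NodeId.Leaf, buf('hello')]],
  [NodeId.Leaf, buf('world')],
];

// flatten_forks walks fork nodes and returns the non-fork subtrees.
console.log(flatten_forks(tree).length); // 2
```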
@@ -23,2 +30,6 @@ * Make a human readable string out of a hash tree.

export declare function hashTreeToString(tree: HashTree): string;
interface Delegation extends Record<string, any> {
subnet_id: ArrayBuffer;
certificate: ArrayBuffer;
}
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>;

@@ -72,2 +83,3 @@ export interface CreateCertificateOptions {

lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined;
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined;
private verify;

@@ -77,2 +89,8 @@ private _checkDelegationAndGetKey;

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined;
/**
* @param t

@@ -85,3 +103,19 @@ */

*/
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined;
export {};
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export declare function flatten_forks(t: HashTree): HashTree[];
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
export declare function check_canister_ranges(params: {
canisterId: Principal;
subnetId: Principal;
tree: HashTree;
}): boolean;

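`lookup_path` can now return a subtree as well as a buffer, and `lookupResultToBuffer` narrows that union back to a buffer. A tiny sketch of the narrowing behaviour, using the exported helpers and hand-made values:

```typescript
import { lookupResultToBuffer, NodeId } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';

const leafValue = new Uint8Array([1, 2, 3]).buffer as ArrayBuffer;
const subtree: HashTree = [NodeId.Empty];

console.log(lookupResultToBuffer(leafValue)?.byteLength); // 3: buffers pass through
console.log(lookupResultToBuffer(subtree));               // undefined: subtrees are dropped
```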
@@ -26,3 +26,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.lookup_path = exports.reconstruct = exports.Certificate = exports.hashTreeToString = exports.CertificateVerificationError = void 0;
exports.check_canister_ranges = exports.flatten_forks = exports.lookup_path = exports.reconstruct = exports.lookupResultToBuffer = exports.Certificate = exports.hashTreeToString = exports.NodeId = exports.CertificateVerificationError = void 0;
const cbor = __importStar(require("./cbor"));

@@ -44,2 +44,10 @@ const errors_1 = require("./errors");

exports.CertificateVerificationError = CertificateVerificationError;
const NodeId = {
Empty: 0,
Fork: 1,
Labeled: 2,
Leaf: 3,
Pruned: 4,
};
exports.NodeId = NodeId;
/**

@@ -64,18 +72,40 @@ * Make a human readable string out of a hash tree.

switch (tree[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return '()';
case 1 /* NodeId.Fork */: {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
case NodeId.Fork: {
if (tree[1] instanceof Array && tree[2] instanceof ArrayBuffer) {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
}
else {
throw new Error('Invalid tree structure for fork');
}
}
case 2 /* NodeId.Labeled */: {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
case NodeId.Labeled: {
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof ArrayBuffer) {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
}
else {
throw new Error('Invalid tree structure for labeled');
}
}
case 3 /* NodeId.Leaf */: {
case NodeId.Leaf: {
if (!tree[1]) {
throw new Error('Invalid tree structure for leaf');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `leaf(...${tree[1].byteLength} bytes)`;
}
case 4 /* NodeId.Pruned */: {
case NodeId.Pruned: {
if (!tree[1]) {
throw new Error('Invalid tree structure for pruned');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `pruned(${(0, buffer_1.toHex)(new Uint8Array(tree[1]))}`;

@@ -133,4 +163,8 @@ }

lookup(path) {
return lookup_path(path, this.cert.tree);
// constrain the type of the result, so that empty HashTree is undefined
return lookupResultToBuffer(lookup_path(path, this.cert.tree));
}
lookup_label(label) {
return this.lookup([label]);
}
async verify() {

@@ -185,15 +219,10 @@ const rootHash = await reconstruct(this.cert.tree);

blsVerify: this._blsVerify,
// Maximum age of 30 days for delegation certificates
maxAgeInMinutes: 60 * 24 * 30,
// Do not check max age for delegation certificates
maxAgeInMinutes: Infinity,
});
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']);
if (!rangeLookup) {
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
principal_1.Principal.fromUint8Array(v[0]),
principal_1.Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId));
const canisterInRange = check_canister_ranges({
canisterId: this._canisterId,
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(d.subnet_id)),
tree: cert.cert.tree,
});
if (!canisterInRange) {

@@ -224,2 +253,17 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`);

/**
* utility function to constrain the type of a path
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or Undefined
*/
function lookupResultToBuffer(result) {
if (result instanceof ArrayBuffer) {
return result;
}
else if (result instanceof Uint8Array) {
return result.buffer;
}
return undefined;
}
exports.lookupResultToBuffer = lookupResultToBuffer;
/**
* @param t

@@ -229,11 +273,11 @@ */

switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return (0, request_id_1.hash)(domain_sep('ic-hashtree-empty'));
case 4 /* NodeId.Pruned */:
case NodeId.Pruned:
return t[1];
case 3 /* NodeId.Leaf */:
case NodeId.Leaf:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-leaf'), t[1]));
case 2 /* NodeId.Labeled */:
case NodeId.Labeled:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2])));
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2])));

@@ -257,7 +301,20 @@ default:

switch (tree[0]) {
case 3 /* NodeId.Leaf */: {
return new Uint8Array(tree[1]).buffer;
case NodeId.Leaf: {
// should not be undefined
if (!tree[1])
throw new Error('Invalid tree structure for leaf');
if (tree[1] instanceof ArrayBuffer) {
return tree[1];
}
else if (tree[1] instanceof Uint8Array) {
return tree[1].buffer;
}
else
return tree[1];
}
case NodeId.Fork: {
return tree;
}
default: {
return undefined;
return tree;
}

@@ -273,7 +330,12 @@ }

exports.lookup_path = lookup_path;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
function flatten_forks(t) {
switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return [];
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return flatten_forks(t[1]).concat(flatten_forks(t[2]));

@@ -284,2 +346,3 @@ default:

}
exports.flatten_forks = flatten_forks;
function find_label(l, trees) {

@@ -290,3 +353,3 @@ if (trees.length === 0) {

for (const t of trees) {
if (t[0] === 2 /* NodeId.Labeled */) {
if (t[0] === NodeId.Labeled) {
const p = t[1];

@@ -299,2 +362,23 @@ if (isBufferEqual(l, p)) {

}
/**
* Check if a canister falls within a range of canisters
* @param canisterId Principal
* @param ranges [Principal, Principal][]
* @returns
*/
function check_canister_ranges(params) {
const { canisterId, subnetId, tree } = params;
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree);
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) {
throw new Error(`Could not find canister ranges for subnet ${subnetId}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
principal_1.Principal.fromUint8Array(v[0]),
principal_1.Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId));
return canisterInRange;
}
exports.check_canister_ranges = check_canister_ranges;
//# sourceMappingURL=certificate.js.map
import { ActorSubclass } from './actor';
export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';

@@ -11,0 +13,0 @@ export * from './utils/bls';

@@ -32,8 +32,10 @@ "use strict";

__exportStar(require("./agent"), exports);
__exportStar(require("./auth"), exports);
__exportStar(require("./certificate"), exports);
__exportStar(require("./agent/http/transforms"), exports);
__exportStar(require("./agent/http/types"), exports);
__exportStar(require("./auth"), exports);
__exportStar(require("./canisters/asset"), exports);
__exportStar(require("./certificate"), exports);
__exportStar(require("./der"), exports);
__exportStar(require("./fetch_candid"), exports);
__exportStar(require("./public_key"), exports);
__exportStar(require("./request_id"), exports);

@@ -40,0 +42,0 @@ __exportStar(require("./utils/bls"), exports);

@@ -22,1 +22,9 @@ export declare type RequestId = ArrayBuffer & {

export declare function requestIdOf(request: Record<string, any>): RequestId;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @param domainSeparator - optional domain separator
* @returns ArrayBuffer
*/
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer;
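`hashOfMap` becomes a public export alongside `requestIdOf`, which (per the implementation hunk further down) now simply delegates to it. A short sketch; the map's field names are purely illustrative:

```typescript
import { hashOfMap, requestIdOf } from '@dfinity/agent';

// Any flat map of strings, bigints, buffers, or nested objects can be hashed
// with the representation-independent hash.
const map = {
  request_type: 'query',
  method_name: 'greet',
  ingress_expiry: BigInt(Date.now()) * BigInt(1_000_000),
};

const hex = (buf: ArrayBuffer) =>
  [...new Uint8Array(buf)].map(b => b.toString(16).padStart(2, '0')).join('');

console.log(hex(hashOfMap(map)));   // representation-independent hash
console.log(hex(requestIdOf(map))); // same digest: requestIdOf delegates to hashOfMap
```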

@@ -6,3 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.requestIdOf = exports.hashValue = exports.hash = void 0;
exports.hashOfMap = exports.requestIdOf = exports.hashValue = exports.hash = void 0;
const candid_1 = require("@dfinity/candid");

@@ -54,2 +54,5 @@ const borc_1 = __importDefault(require("borc"));

}
else if (typeof value === 'object') {
return hashOfMap(value);
}
else if (typeof value === 'bigint') {

@@ -80,3 +83,14 @@ // Do this check much later than the other bigint check because this one is much less

function requestIdOf(request) {
const hashed = Object.entries(request)
return hashOfMap(request);
}
exports.requestIdOf = requestIdOf;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - Any non-nested object
* @param domainSeparator - optional domain separator
* @returns ArrayBuffer
*/
function hashOfMap(map) {
const hashed = Object.entries(map)
.filter(([, value]) => value !== undefined)

@@ -93,6 +107,6 @@ .map(([key, value]) => {

const concatenated = (0, buffer_1.concat)(...sorted.map(x => (0, buffer_1.concat)(...x)));
const requestId = hash(concatenated);
return requestId;
const result = hash(concatenated);
return result;
}
exports.requestIdOf = requestIdOf;
exports.hashOfMap = hashOfMap;
//# sourceMappingURL=request_id.js.map

@@ -16,4 +16,17 @@ /**

export declare function fromHex(hex: string): ArrayBuffer;
/**
*
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number;
/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -20,0 +33,0 @@ * @param {Uint8Array} arr Uint8Array to convert

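The buffer utilities gain `compare` and a `bufEquals` wrapper built on it. The index diff above does not show `utils/buffer` being re-exported from the package root, so the deep import path below is an assumption; a tiny usage sketch:

```typescript
// Assumed deep import path; adjust if these helpers are re-exported elsewhere.
import { bufEquals, compare } from '@dfinity/agent/lib/cjs/utils/buffer';

const a = new Uint8Array([1, 2, 3]).buffer;
const b = new Uint8Array([1, 2, 3]).buffer;

console.log(compare(a, b));   // 0 when equal; sign gives the ordering otherwise
console.log(bufEquals(a, b)); // true
```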
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.uint8ToBuf = exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0;
exports.uint8ToBuf = exports.bufEquals = exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0;
/**

@@ -45,2 +45,8 @@ * Concatenate multiple array buffers.

exports.fromHex = fromHex;
/**
*
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
function compare(b1, b2) {

@@ -61,2 +67,12 @@ if (b1.byteLength !== b2.byteLength) {

/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
function bufEquals(b1, b2) {
return compare(b1, b2) === 0;
}
exports.bufEquals = bufEquals;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -63,0 +79,0 @@ * @param {Uint8Array} arr Uint8Array to convert

@@ -115,3 +115,3 @@ import { Buffer } from 'buffer/';

if (!config.canisterId)
throw new AgentError(`Canister ID is required, but recieved ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
throw new AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
const canisterId = typeof config.canisterId === 'string'

@@ -136,3 +136,3 @@ ? Principal.fromText(config.canisterId)

if (!configuration.canisterId) {
throw new AgentError(`Canister ID is required, but recieved ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
throw new AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`);
}

@@ -139,0 +139,0 @@ return new (this.createActorClass(interfaceFactory))(configuration);

@@ -42,2 +42,3 @@ import { Principal } from '@dfinity/principal';

httpDetails: HttpDetailsResponse;
requestId: RequestId;
};

@@ -47,2 +48,7 @@ export interface QueryResponseBase {

}
export declare type NodeSignature = {
timestamp: bigint;
signature: Uint8Array;
identity: Uint8Array;
};
export interface QueryResponseReplied extends QueryResponseBase {

@@ -53,2 +59,3 @@ status: QueryResponseStatus.Replied;

};
signatures?: NodeSignature[];
}

@@ -59,2 +66,4 @@ export interface QueryResponseRejected extends QueryResponseBase {

reject_message: string;
error_code: string;
signatures?: NodeSignature[];
}

@@ -61,0 +70,0 @@ /**

@@ -33,14 +33,11 @@ import { JsonObject } from '@dfinity/candid';

/**
* Prevents the agent from providing a unique {@link Nonce} with each call.
* Enabling may cause rate limiting of identical requests
* at the boundary nodes.
* Adds a unique {@link Nonce} with each query.
* Enabling will prevent queries from being answered with a cached response.
*
* To add your own nonce generation logic, you can use the following:
* @example
* import {makeNonceTransform, makeNonce} from '@dfinity/agent';
* const agent = new HttpAgent({ disableNonce: true });
* const agent = new HttpAgent({ useQueryNonces: true });
* agent.addTransform(makeNonceTransform(makeNonce);
* @default false
*/
disableNonce?: boolean;
useQueryNonces?: boolean;
/**

@@ -51,6 +48,11 @@ * Number of times to retry requests before throwing an error

retryTimes?: number;
/**
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet.
* @default true
*/
verifyQuerySignatures?: boolean;
}
export declare class HttpAgent implements Agent {
#private;
rootKey: ArrayBuffer;
private readonly _pipeline;
private _identity;

@@ -68,3 +70,3 @@ private readonly _fetch;

isLocal(): boolean;
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void;
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void;
getPrincipal(): Promise<Principal>;

@@ -89,3 +91,4 @@ call(canisterId: Principal | string, options: {

replaceIdentity(identity: Identity): void;
fetchSubnetKeys(canisterId: Principal | string): Promise<any>;
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>;
}

@@ -0,1 +1,13 @@

var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse;
import { Principal } from '@dfinity/principal';

@@ -5,7 +17,11 @@ import { AgentError } from '../../errors';

import * as cbor from '../../cbor';
import { requestIdOf } from '../../request_id';
import { fromHex } from '../../utils/buffer';
import { hashOfMap, requestIdOf } from '../../request_id';
import { concat, fromHex } from '../../utils/buffer';
import { Expiry, httpHeadersTransform, makeNonceTransform } from './transforms';
import { makeNonce, SubmitRequestType, } from './types';
import { AgentHTTPResponseError } from './errors';
import { request } from '../../canisterStatus';
import { CertificateVerificationError } from '../../certificate';
import { ed25519 } from '@noble/curves/ed25519';
import { Ed25519PublicKey } from '../../public_key';
export * from './transforms';

@@ -90,6 +106,67 @@ export { makeNonce } from './types';

this.rootKey = fromHex(IC_ROOT_KEY);
this._pipeline = [];
this._timeDiffMsecs = 0;
this._rootKeyFetched = false;
this._isAgent = true;
_HttpAgent_queryPipeline.set(this, []);
_HttpAgent_updatePipeline.set(this, []);
_HttpAgent_subnetKeys.set(this, new Map());
_HttpAgent_verifyQuerySignatures.set(this, true);
/**
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation
* @param queryResponse - The response from the query
* @param subnetStatus - The subnet status, including all node keys
* @returns ApiQueryResponse
*/
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => {
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) {
// This should not be called if the user has disabled verification
return queryResponse;
}
if (!subnetStatus) {
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const { status, signatures, requestId } = queryResponse;
const domainSeparator = new TextEncoder().encode('\x0Bic-response');
signatures === null || signatures === void 0 ? void 0 : signatures.forEach(sig => {
const { timestamp, identity } = sig;
const nodeId = Principal.fromUint8Array(identity).toText();
let hash;
// Hash is constructed differently depending on the status
if (status === 'replied') {
const { reply } = queryResponse;
hash = hashOfMap({
status: status,
reply: reply,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else if (status === 'rejected') {
const { reject_code, reject_message, error_code } = queryResponse;
hash = hashOfMap({
status: status,
reject_code: reject_code,
reject_message: reject_message,
error_code: error_code,
timestamp: BigInt(timestamp),
request_id: requestId,
});
}
else {
throw new Error(`Unknown status: ${status}`);
}
const separatorWithHash = concat(domainSeparator, new Uint8Array(hash));
// FIX: check for match without verifying N times
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId);
if (!pubKey) {
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.');
}
const rawKey = Ed25519PublicKey.fromDer(pubKey).rawKey;
const valid = ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey));
if (valid)
return queryResponse;
throw new CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`);
});
return queryResponse;
});
if (options.source) {

@@ -99,3 +176,2 @@ if (!(options.source instanceof HttpAgent)) {

}
this._pipeline = [...options.source._pipeline];
this._identity = options.source._identity;

@@ -130,3 +206,3 @@ this._fetch = options.source._fetch;

// Mainnet and local will have the api route available
const knownHosts = ['ic0.app', 'icp0.io', 'localhost', '127.0.0.1'];
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', '127.0.0.1'];
const hostname = location === null || location === void 0 ? void 0 : location.hostname;

@@ -146,2 +222,5 @@ let knownHost;

}
if (options.verifyQuerySignatures !== undefined) {
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f");
}
// Default is 3, only set from option if greater or equal to 0

@@ -166,4 +245,5 @@ this._retryTimes =

// Add a nonce transform to ensure calls are unique
if (!options.disableNonce) {
this.addTransform(makeNonceTransform(makeNonce));
this.addTransform('update', makeNonceTransform(makeNonce));
if (options.useQueryNonces) {
this.addTransform('query', makeNonceTransform(makeNonce));
}

@@ -173,8 +253,15 @@ }

const hostname = this._host.hostname;
return hostname === '127.0.0.1' || hostname.endsWith('localhost');
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1');
}
addTransform(fn, priority = fn.priority || 0) {
// Keep the pipeline sorted at all time, by priority.
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority);
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority }));
addTransform(type, fn, priority = fn.priority || 0) {
if (type === 'update') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority }));
}
else if (type === 'query') {
// Keep the pipeline sorted at all time, by priority.
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority);
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority }));
}
}

@@ -241,3 +328,15 @@ async getPrincipal() {

async _requestAndRetry(request, tries = 0) {
const response = await request();
let response;
try {
response = await request();
}
catch (error) {
if (this._retryTimes > tries) {
console.warn(`Caught exception while attempting to make request:\n` +
` ${error}\n` +
` Retrying request.`);
return await this._requestAndRetry(request, tries + 1);
}
throw error;
}
if (response.ok) {

@@ -262,37 +361,73 @@ return response;

async query(canisterId, fields, identity) {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = typeof canisterId === 'string' ? Principal.fromText(canisterId) : canisterId;
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
const makeQuery = async () => {
const id = await (identity !== undefined ? await identity : await this._identity);
if (!id) {
throw new IdentityInvalidError("This identity has expired due this application's security policy. Please refresh your authentication.");
}
const canister = Principal.from(canisterId);
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous();
const request = {
request_type: "query" /* ReadRequestType.Query */,
canister_id: canister,
method_name: fields.methodName,
arg: fields.arg,
sender,
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS),
};
const requestId = await requestIdOf(request);
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: httpHeadersTransform(response.headers),
}, requestId });
};
// TODO: remove this any. This can be a Signed or UnSigned request.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let transformedRequest = await this._transform({
request: {
method: 'POST',
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})),
},
endpoint: "read" /* Endpoint.Query */,
body: request,
const queryPromise = new Promise((resolve, reject) => {
makeQuery()
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
});
// Apply transform for identity.
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest));
const body = cbor.encode(transformedRequest.body);
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body })));
const queryResponse = cbor.decode(await response.arrayBuffer());
return Object.assign(Object.assign({}, queryResponse), { httpDetails: {
ok: response.ok,
status: response.status,
statusText: response.statusText,
headers: httpHeadersTransform(response.headers),
} });
const subnetStatusPromise = new Promise((resolve, reject) => {
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
resolve(undefined);
}
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString());
if (subnetStatus) {
resolve(subnetStatus);
}
else {
this.fetchSubnetKeys(canisterId)
.then(response => {
resolve(response);
})
.catch(error => {
reject(error);
});
}
});
const [query, subnetStatus] = await Promise.all([queryPromise, subnetStatusPromise]);
// Skip verification if the user has disabled it
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) {
return query;
}
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus);
}

@@ -387,10 +522,31 @@ async createReadStateRequest(fields, identity) {

}
async fetchSubnetKeys(canisterId) {
const effectiveCanisterId = Principal.from(canisterId);
const response = await request({
canisterId: effectiveCanisterId,
paths: ['subnet'],
agent: this,
});
const subnetResponse = response.get('subnet');
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) {
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse);
}
return subnetResponse;
}
_transform(request) {
let p = Promise.resolve(request);
for (const fn of this._pipeline) {
p = p.then(r => fn(r).then(r2 => r2 || r));
if (request.endpoint === "call" /* Endpoint.Call */) {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
else {
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) {
p = p.then(r => fn(r).then(r2 => r2 || r));
}
}
return p;
}
}
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap();
//# sourceMappingURL=index.js.map

@@ -5,9 +5,14 @@ import { lebEncode } from '@dfinity/candid';

const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;
export class Expiry {
constructor(deltaInMSec) {
// Use bigint because it can overflow the maximum number allowed in a double float.
this._value =
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) *
NANOSECONDS_PER_MILLISECONDS;
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
NANOSECONDS_PER_MILLISECONDS;
// round down to the nearest second
const ingress_as_seconds = raw_value / BigInt(1000000000);
// round down to nearest minute
const ingress_as_minutes = ingress_as_seconds / BigInt(60);
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000);
this._value = rounded_down_nanos;
}

@@ -29,3 +34,2 @@ toCBOR() {

return async (request) => {
const nonce = nonceFn();
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests.

@@ -32,0 +36,0 @@ const headers = request.request.headers;

@@ -27,2 +27,4 @@ import { Principal } from '@dfinity/principal';

toDer(): DerEncodedPublicKey;
rawKey?: ArrayBuffer;
derKey?: DerEncodedPublicKey;
}

@@ -29,0 +31,0 @@ /**

@@ -5,7 +5,26 @@ /** @module CanisterStatus */

import { CreateCertificateOptions } from '../certificate';
import { DerEncodedPublicKey } from '..';
/**
* Represents the useful information about a subnet
* @param {string} subnetId the principal id of the canister's subnet
* @param {string[]} nodeKeys the keys of the individual nodes in the subnet
*/
export declare type SubnetStatus = {
subnetId: string;
nodeKeys: Map<string, DerEncodedPublicKey>;
metrics?: {
num_canisters: bigint;
canister_state_bytes: bigint;
consumed_cycles_total: {
current: bigint;
deleted: bigint;
};
update_transactions_total: bigint;
};
};
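A small hypothetical consumer of the SubnetStatus shape above, included only to show how the nodeKeys map is keyed; the root-level import path is an assumption, since the type is declared in the canisterStatus module.

import type { SubnetStatus } from '@dfinity/agent';

// Hypothetical helper: list the node principal ids recorded for a subnet.
function listNodeIds(status: SubnetStatus): string[] {
  return [...status.nodeKeys.keys()];
}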
/**
* Types of an entry on the canisterStatus map.
* An entry of null indicates that the request failed due to lack of permissions or a missing result.
*/
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null;
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null;
/**

@@ -61,2 +80,3 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder}

}) => Promise<StatusMap>;
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus;
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[];
/** @module CanisterStatus */
import { Principal } from '@dfinity/principal';
import { AgentError } from '../errors';
import { Certificate } from '../certificate';
import { Certificate, flatten_forks, check_canister_ranges, lookupResultToBuffer, lookup_path, } from '../certificate';
import { toHex } from '../utils/buffer';

@@ -24,3 +24,4 @@ import * as Cbor from '../cbor';

export const request = async (options) => {
const { canisterId, agent, paths } = options;
const { agent, paths } = options;
const canisterId = Principal.from(options.canisterId);
const uniquePaths = [...new Set(paths)];

@@ -44,3 +45,19 @@ // Map path options to their correct formats

});
const data = cert.lookup(encodePath(uniquePaths[index], canisterId));
const lookup = (cert, path) => {
if (path === 'subnet') {
const data = fetchNodeKeys(response.certificate, canisterId, agent.rootKey);
return {
path: path,
data,
};
}
else {
return {
path: path,
data: lookupResultToBuffer(cert.lookup(encodePath(path, canisterId))),
};
}
};
// must pass in the rootKey if we have no delegation
const { path, data } = lookup(cert, uniquePaths[index]);
if (!data) {

@@ -70,2 +87,6 @@ // Typically, the cert lookup will throw

}
case 'subnet': {
status.set(path, data);
break;
}
case 'candid': {

@@ -125,2 +146,53 @@ status.set(path, new TextDecoder().decode(data));

};
export const fetchNodeKeys = (certificate, canisterId, root_key) => {
if (!canisterId._isPrincipal) {
throw new Error('Invalid canisterId');
}
const cert = Cbor.decode(new Uint8Array(certificate));
const tree = cert.tree;
let delegation = cert.delegation;
let subnetId;
if (delegation && delegation.subnet_id) {
subnetId = Principal.fromUint8Array(new Uint8Array(delegation.subnet_id));
}
// On a local replica with a system-type subnet, there is no delegation
else if (!delegation && typeof root_key !== 'undefined') {
subnetId = Principal.selfAuthenticating(new Uint8Array(root_key));
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
// otherwise use default NNS subnet id
else {
subnetId = Principal.selfAuthenticating(Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array());
delegation = {
subnet_id: subnetId.toUint8Array(),
certificate: new ArrayBuffer(0),
};
}
const canisterInRange = check_canister_ranges({ canisterId, subnetId, tree });
if (!canisterInRange) {
throw new Error('Canister not in range');
}
const nodeTree = lookup_path(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree);
const nodeForks = flatten_forks(nodeTree);
nodeForks.length;
const nodeKeys = new Map();
nodeForks.forEach(fork => {
Object.getPrototypeOf(new Uint8Array(fork[1]));
const node_id = Principal.from(new Uint8Array(fork[1])).toText();
const derEncodedPublicKey = lookup_path(['public_key'], fork[2]);
if (derEncodedPublicKey.byteLength !== 44) {
throw new Error('Invalid public key length');
}
else {
nodeKeys.set(node_id, derEncodedPublicKey);
}
});
return {
subnetId: Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(),
nodeKeys,
};
};
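A hedged sketch of requesting the new 'subnet' path through CanisterStatus.request and reading the SubnetStatus it now yields; the canister id is illustrative, and the CanisterStatus namespace import is assumed from the module layout shown here.

import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const agent = new HttpAgent({ host: 'https://icp-api.io' });
const status = await CanisterStatus.request({
  agent,
  canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), // illustrative
  paths: ['subnet'],
});
// Per the Status type above, the 'subnet' entry is a SubnetStatus (or null on failure).
const subnet = status.get('subnet');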
export const encodePath = (path, canisterId) => {

@@ -127,0 +199,0 @@ const encoder = new TextEncoder();

@@ -9,10 +9,17 @@ import { AgentError } from './errors';

}
declare const enum NodeId {
Empty = 0,
Fork = 1,
Labeled = 2,
Leaf = 3,
Pruned = 4
export interface Cert {
tree: HashTree;
signature: ArrayBuffer;
delegation?: Delegation;
}
export declare type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer];
declare const NodeId: {
Empty: number;
Fork: number;
Labeled: number;
Leaf: number;
Pruned: number;
};
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId];
export { NodeId };
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer];
/**

@@ -23,2 +30,6 @@ * Make a human readable string out of a hash tree.

export declare function hashTreeToString(tree: HashTree): string;
interface Delegation extends Record<string, any> {
subnet_id: ArrayBuffer;
certificate: ArrayBuffer;
}
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>;

@@ -72,2 +83,3 @@ export interface CreateCertificateOptions {

lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined;
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined;
private verify;

@@ -77,2 +89,8 @@ private _checkDelegationAndGetKey;

/**
* Utility function to constrain the type of a lookup result
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or undefined
*/
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined;
/**
* @param t

@@ -85,3 +103,19 @@ */

*/
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined;
export {};
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined;
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export declare function flatten_forks(t: HashTree): HashTree[];
/**
* Check if a canister falls within the canister ranges of a subnet
* @param params.canisterId Principal - the canister to check
* @param params.subnetId Principal - the subnet the certificate delegates to
* @param params.tree HashTree - the certificate tree containing canister_ranges
* @returns boolean
*/
export declare function check_canister_ranges(params: {
canisterId: Principal;
subnetId: Principal;
tree: HashTree;
}): boolean;

@@ -16,2 +16,10 @@ import * as cbor from './cbor';

}
const NodeId = {
Empty: 0,
Fork: 1,
Labeled: 2,
Leaf: 3,
Pruned: 4,
};
export { NodeId };
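Because NodeId is now a plain runtime object rather than a const enum, downstream code can build and match tree nodes against the exported constants. A minimal sketch; the root-level exports are assumed from the index re-exports in this diff.

import { NodeId, hashTreeToString } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';

// Build a one-leaf tree using the runtime constants instead of literal 0-4 values.
const leaf: HashTree = [NodeId.Leaf, new TextEncoder().encode('hello').buffer];
console.log(leaf[0] === NodeId.Leaf);   // true
console.log(hashTreeToString(leaf));    // "leaf(...5 bytes)"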
/**

@@ -36,18 +44,40 @@ * Make a human readable string out of a hash tree.

switch (tree[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return '()';
case 1 /* NodeId.Fork */: {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
case NodeId.Fork: {
if (tree[1] instanceof Array && tree[2] instanceof ArrayBuffer) {
const left = hashTreeToString(tree[1]);
const right = hashTreeToString(tree[2]);
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`;
}
else {
throw new Error('Invalid tree structure for fork');
}
}
case 2 /* NodeId.Labeled */: {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
case NodeId.Labeled: {
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof ArrayBuffer) {
const label = labelToString(tree[1]);
const sub = hashTreeToString(tree[2]);
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`;
}
else {
throw new Error('Invalid tree structure for labeled');
}
}
case 3 /* NodeId.Leaf */: {
case NodeId.Leaf: {
if (!tree[1]) {
throw new Error('Invalid tree structure for leaf');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `leaf(...${tree[1].byteLength} bytes)`;
}
case 4 /* NodeId.Pruned */: {
case NodeId.Pruned: {
if (!tree[1]) {
throw new Error('Invalid tree structure for pruned');
}
else if (Array.isArray(tree[1])) {
return JSON.stringify(tree[1]);
}
return `pruned(${toHex(new Uint8Array(tree[1]))}`;

@@ -104,4 +134,8 @@ }

lookup(path) {
return lookup_path(path, this.cert.tree);
// constrain the type of the result, so that empty HashTree is undefined
return lookupResultToBuffer(lookup_path(path, this.cert.tree));
}
lookup_label(label) {
return this.lookup([label]);
}
async verify() {

@@ -156,15 +190,10 @@ const rootHash = await reconstruct(this.cert.tree);

blsVerify: this._blsVerify,
// Maximum age of 30 days for delegation certificates
maxAgeInMinutes: 60 * 24 * 30,
// Do not check max age for delegation certificates
maxAgeInMinutes: Infinity,
});
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']);
if (!rangeLookup) {
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${toHex(d.subnet_id)}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
Principal.fromUint8Array(v[0]),
Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId));
const canisterInRange = check_canister_ranges({
canisterId: this._canisterId,
subnetId: Principal.fromUint8Array(new Uint8Array(d.subnet_id)),
tree: cert.cert.tree,
});
if (!canisterInRange) {

@@ -194,2 +223,16 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${toHex(d.subnet_id)}`);

/**
* Utility function to constrain the type of a lookup result
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup
* @returns ArrayBuffer or undefined
*/
export function lookupResultToBuffer(result) {
if (result instanceof ArrayBuffer) {
return result;
}
else if (result instanceof Uint8Array) {
return result.buffer;
}
return undefined;
}
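A short sketch tying the two helpers together: lookup_path can now surface a subtree, and lookupResultToBuffer narrows the result back to raw bytes when that is all the caller needs. Exports are assumed via the certificate module.

import { NodeId, lookup_path, lookupResultToBuffer } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';

const tree: HashTree = [
  NodeId.Labeled,
  new TextEncoder().encode('time').buffer,
  [NodeId.Leaf, new Uint8Array([1, 2, 3]).buffer],
];
const found = lookup_path(['time'], tree); // ArrayBuffer | HashTree | undefined
const bytes = lookupResultToBuffer(found); // ArrayBuffer | undefined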
/**
* @param t

@@ -199,11 +242,11 @@ */

switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return hash(domain_sep('ic-hashtree-empty'));
case 4 /* NodeId.Pruned */:
case NodeId.Pruned:
return t[1];
case 3 /* NodeId.Leaf */:
case NodeId.Leaf:
return hash(concat(domain_sep('ic-hashtree-leaf'), t[1]));
case 2 /* NodeId.Labeled */:
case NodeId.Labeled:
return hash(concat(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2])));
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return hash(concat(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2])));

@@ -226,7 +269,20 @@ default:

switch (tree[0]) {
case 3 /* NodeId.Leaf */: {
return new Uint8Array(tree[1]).buffer;
case NodeId.Leaf: {
// should not be undefined
if (!tree[1])
throw new Error('Invalid tree structure for leaf');
if (tree[1] instanceof ArrayBuffer) {
return tree[1];
}
else if (tree[1] instanceof Uint8Array) {
return tree[1].buffer;
}
else
return tree[1];
}
case NodeId.Fork: {
return tree;
}
default: {
return undefined;
return tree;
}

@@ -241,7 +297,12 @@ }

}
function flatten_forks(t) {
/**
* If the tree is a fork, flatten it into an array of trees
* @param t - the tree to flatten
* @returns HashTree[] - the flattened tree
*/
export function flatten_forks(t) {
switch (t[0]) {
case 0 /* NodeId.Empty */:
case NodeId.Empty:
return [];
case 1 /* NodeId.Fork */:
case NodeId.Fork:
return flatten_forks(t[1]).concat(flatten_forks(t[2]));

@@ -257,3 +318,3 @@ default:

for (const t of trees) {
if (t[0] === 2 /* NodeId.Labeled */) {
if (t[0] === NodeId.Labeled) {
const p = t[1];

@@ -266,2 +327,22 @@ if (isBufferEqual(l, p)) {

}
/**
* Check if a canister falls within the canister ranges of a subnet
* @param params.canisterId Principal - the canister to check
* @param params.subnetId Principal - the subnet the certificate delegates to
* @param params.tree HashTree - the certificate tree containing canister_ranges
* @returns boolean
*/
export function check_canister_ranges(params) {
const { canisterId, subnetId, tree } = params;
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree);
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) {
throw new Error(`Could not find canister ranges for subnet ${subnetId}`);
}
const ranges_arr = cbor.decode(rangeLookup);
const ranges = ranges_arr.map(v => [
Principal.fromUint8Array(v[0]),
Principal.fromUint8Array(v[1]),
]);
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId));
return canisterInRange;
}
//# sourceMappingURL=certificate.js.map
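A hedged sketch of the call shape for check_canister_ranges; certTree and subnetIdBytes are placeholders for values taken from an already-decoded certificate and its delegation, as in the verify() path above.

import { check_canister_ranges } from '@dfinity/agent';
import type { HashTree } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

declare const certTree: HashTree;        // tree of the delegation certificate
declare const subnetIdBytes: Uint8Array; // delegation.subnet_id

const inRange = check_canister_ranges({
  canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), // illustrative
  subnetId: Principal.fromUint8Array(subnetIdBytes),
  tree: certTree,
});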
import { ActorSubclass } from './actor';
export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';

@@ -11,0 +13,0 @@ export * from './utils/bls';

export * from './actor';
export * from './agent';
export * from './auth';
export * from './certificate';
export * from './agent/http/transforms';
export * from './agent/http/types';
export * from './auth';
export * from './canisters/asset';
export * from './certificate';
export * from './der';
export * from './fetch_candid';
export * from './public_key';
export * from './request_id';

@@ -10,0 +12,0 @@ export * from './utils/bls';

@@ -22,1 +22,9 @@ export declare type RequestId = ArrayBuffer & {

export declare function requestIdOf(request: Record<string, any>): RequestId;
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - The object to hash; nested objects are hashed recursively
* @returns ArrayBuffer
*/
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer;

@@ -46,2 +46,5 @@ import { lebEncode } from '@dfinity/candid';

}
else if (typeof value === 'object') {
return hashOfMap(value);
}
else if (typeof value === 'bigint') {

@@ -71,3 +74,13 @@ // Do this check much later than the other bigint check because this one is much less

export function requestIdOf(request) {
const hashed = Object.entries(request)
return hashOfMap(request);
}
/**
* Hash a map into an ArrayBuffer using the representation-independent-hash function.
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map
* @param map - The object to hash; nested objects are hashed recursively
* @returns ArrayBuffer
*/
export function hashOfMap(map) {
const hashed = Object.entries(map)
.filter(([, value]) => value !== undefined)

@@ -84,5 +97,5 @@ .map(([key, value]) => {

const concatenated = concat(...sorted.map(x => concat(...x)));
const requestId = hash(concatenated);
return requestId;
const result = hash(concatenated);
return result;
}
//# sourceMappingURL=request_id.js.map
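To make the refactor above concrete: requestIdOf now delegates to the newly exported hashOfMap, so both produce the same representation-independent hash for a given map. A hedged sketch with purely illustrative fields:

import { requestIdOf, hashOfMap } from '@dfinity/agent';

const fields = { request_type: 'query', method_name: 'greet' }; // illustrative only
const asRequestId = new Uint8Array(requestIdOf(fields));
const asPlainHash = new Uint8Array(hashOfMap(fields));
console.log(asRequestId.every((b, i) => b === asPlainHash[i])); // true - identical bytes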

@@ -16,4 +16,17 @@ /**

export declare function fromHex(hex: string): ArrayBuffer;
/**
* Compares two ArrayBuffers byte by byte.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number;
/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean;
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -20,0 +33,0 @@ * @param {Uint8Array} arr Uint8Array to convert

@@ -39,2 +39,8 @@ /**

}
/**
* Compares two ArrayBuffers byte by byte.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2
*/
export function compare(b1, b2) {

@@ -54,2 +60,11 @@ if (b1.byteLength !== b2.byteLength) {

/**
* Checks two array buffers for equality.
* @param b1 array buffer 1
* @param b2 array buffer 2
* @returns boolean
*/
export function bufEquals(b1, b2) {
return compare(b1, b2) === 0;
}
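A minimal sketch of the two new buffer helpers; whether they are re-exported from the package root is an assumption, so the import may need to point at the utils/buffer module instead.

import { compare, bufEquals } from '@dfinity/agent';

const a = new Uint8Array([1, 2, 3]).buffer;
const b = new Uint8Array([1, 2, 3]).buffer;
console.log(compare(a, b));   // 0 - equal contents
console.log(bufEquals(a, b)); // true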
/**
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe.

@@ -56,0 +71,0 @@ * @param {Uint8Array} arr Uint8Array to convert

{
"name": "@dfinity/agent",
"version": "0.19.3",
"version": "0.20.0-beta.0",
"author": "DFINITY Stiftung <sdk@dfinity.org>",

@@ -46,3 +46,2 @@ "license": "Apache-2.0",

"make:docs/reference": "typedoc src/index.ts --out ../../docs/generated/agent --excludeInternal",
"release": "npm publish",
"test": "jest --verbose",

@@ -54,6 +53,7 @@ "test:coverage": "jest --verbose --collectCoverage",

"peerDependencies": {
"@dfinity/candid": "^0.19.3",
"@dfinity/principal": "^0.19.3"
"@dfinity/candid": "^0.20.0-beta.0",
"@dfinity/principal": "^0.20.0-beta.0"
},
"dependencies": {
"@noble/curves": "^1.2.0",
"@noble/hashes": "^1.3.1",

@@ -60,0 +60,0 @@ "base64-arraybuffer": "^0.2.0",

@@ -89,3 +89,3 @@ # @dfinity/agent

const host = process.env.DFX_NETWORK === 'local' ? 'http://localhost:4943' : 'https://icp-api.io';
const host = process.env.DFX_NETWORK === 'local' ? 'http://127.0.0.1:4943' : 'https://icp-api.io';

@@ -103,3 +103,3 @@ const agent = new HttpAgent({ fetch, host });

const host = process.env.DFX_NETWORK === 'local' ? 'http://localhost:4943' : 'https://ic0.app';
const host = process.env.DFX_NETWORK === 'local' ? 'http://127.0.0.1:4943' : 'https://ic0.app';

@@ -106,0 +106,0 @@ /**
