@dfinity/agent
Comparing version 0.20.2 to 0.21.1
@@ -1,2 +0,2 @@ | ||
import { Agent, HttpDetailsResponse, QueryResponseRejected, SubmitResponse } from './agent'; | ||
import { Agent, QueryResponseRejected, SubmitResponse } from './agent'; | ||
import { AgentError } from './errors'; | ||
@@ -72,7 +72,7 @@ import { IDL } from '@dfinity/candid'; | ||
*/ | ||
export declare type ActorSubclass<T = Record<string, ActorMethod>> = Actor & T; | ||
export type ActorSubclass<T = Record<string, ActorMethod>> = Actor & T; | ||
/** | ||
 * An actor method type, defined for each method of the actor service. | ||
*/ | ||
export interface ActorMethod<Args extends unknown[] = unknown[], Ret = unknown> { | ||
export interface ActorMethod<Args extends unknown[] = unknown[], Ret extends unknown = unknown> { | ||
(...args: Args): Promise<Ret>; | ||
@@ -82,15 +82,2 @@ withOptions(options: CallConfig): (...args: Args) => Promise<Ret>; | ||
/** | ||
 * An actor method type, defined for each method of the actor service. | ||
*/ | ||
export interface ActorMethodWithHttpDetails<Args extends unknown[] = unknown[], Ret = unknown> extends ActorMethod { | ||
(...args: Args): Promise<{ | ||
httpDetails: HttpDetailsResponse; | ||
result: Ret; | ||
}>; | ||
} | ||
export declare type FunctionWithArgsAndReturn<Args extends unknown[] = unknown[], Ret = unknown> = (...args: Args) => Ret; | ||
export declare type ActorMethodMappedWithHttpDetails<T> = { | ||
[K in keyof T]: T[K] extends FunctionWithArgsAndReturn<infer Args, infer Ret> ? ActorMethodWithHttpDetails<Args, Ret> : never; | ||
}; | ||
/** | ||
* The mode used when installing a canister. | ||
@@ -114,5 +101,2 @@ */ | ||
declare const metadataSymbol: unique symbol; | ||
export interface CreateActorClassOpts { | ||
httpDetails?: boolean; | ||
} | ||
/** | ||
@@ -145,11 +129,9 @@ * An actor base class. An actor is an object containing only functions that will | ||
}, config?: CallConfig): Promise<ActorSubclass>; | ||
static createActorClass(interfaceFactory: IDL.InterfaceFactory, options?: CreateActorClassOpts): ActorConstructor; | ||
static createActorClass(interfaceFactory: IDL.InterfaceFactory): ActorConstructor; | ||
static createActor<T = Record<string, ActorMethod>>(interfaceFactory: IDL.InterfaceFactory, configuration: ActorConfig): ActorSubclass<T>; | ||
static createActorWithHttpDetails<T = Record<string, ActorMethod>>(interfaceFactory: IDL.InterfaceFactory, configuration: ActorConfig): ActorSubclass<ActorMethodMappedWithHttpDetails<T>>; | ||
private [metadataSymbol]; | ||
protected constructor(metadata: ActorMetadata); | ||
} | ||
export declare type ActorConstructor = new (config: ActorConfig) => ActorSubclass; | ||
export declare const ACTOR_METHOD_WITH_HTTP_DETAILS = "http-details"; | ||
export declare type ManagementCanisterRecord = _SERVICE; | ||
export type ActorConstructor = new (config: ActorConfig) => ActorSubclass; | ||
export type ManagementCanisterRecord = _SERVICE; | ||
/** | ||
@@ -156,0 +138,0 @@ * Create a management canister actor |
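Note on the actor surface above: `createActorWithHttpDetails`, `ActorMethodWithHttpDetails`, and `CreateActorClassOpts` exist only on the side of this diff that declares them. A minimal TypeScript sketch of how that surface is used, assuming that version is installed; the Candid interface, method name, and canister ID are hypothetical placeholders:

```ts
// Sketch only: assumes the version that exports Actor.createActorWithHttpDetails.
// The Candid interface, method, and canister ID are hypothetical placeholders.
import { Actor, HttpAgent } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';

// Hypothetical service with a single `greet : (text) -> (text) query` method.
const idlFactory: IDL.InterfaceFactory = ({ IDL }) =>
  IDL.Service({ greet: IDL.Func([IDL.Text], [IDL.Text], ['query']) });

interface GreetService {
  greet: (name: string) => Promise<string>;
}

async function main() {
  const agent = new HttpAgent({ host: 'https://icp-api.io' });
  const actor = Actor.createActorWithHttpDetails<GreetService>(idlFactory, {
    agent,
    canisterId: 'aaaaa-aa', // placeholder canister ID
  });
  // Each method resolves to { httpDetails, result } instead of the bare value.
  const { httpDetails, result } = await actor.greet('world');
  console.log(httpDetails.status, result);
}

void main();
```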
@@ -6,3 +6,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getManagementCanister = exports.ACTOR_METHOD_WITH_HTTP_DETAILS = exports.Actor = exports.CanisterInstallMode = exports.UpdateCallRejectedError = exports.QueryCallRejectedError = exports.ActorCallError = void 0; | ||
exports.getManagementCanister = exports.Actor = exports.CanisterInstallMode = exports.UpdateCallRejectedError = exports.QueryCallRejectedError = exports.ActorCallError = void 0; | ||
const buffer_1 = require("buffer/"); | ||
@@ -45,11 +45,7 @@ const agent_1 = require("./agent"); | ||
constructor(canisterId, methodName, requestId, response) { | ||
super(canisterId, methodName, 'update', Object.assign({ 'Request ID': (0, buffer_2.toHex)(requestId) }, (response.body | ||
? Object.assign(Object.assign({}, (response.body.error_code | ||
? { | ||
'Error code': response.body.error_code, | ||
} | ||
: {})), { 'Reject code': String(response.body.reject_code), 'Reject message': response.body.reject_message }) : { | ||
super(canisterId, methodName, 'update', { | ||
'Request ID': (0, buffer_2.toHex)(requestId), | ||
'HTTP status code': response.status.toString(), | ||
'HTTP status text': response.statusText, | ||
}))); | ||
}); | ||
this.requestId = requestId; | ||
@@ -68,3 +64,3 @@ this.response = response; | ||
CanisterInstallMode["Upgrade"] = "upgrade"; | ||
})(CanisterInstallMode = exports.CanisterInstallMode || (exports.CanisterInstallMode = {})); | ||
})(CanisterInstallMode || (exports.CanisterInstallMode = CanisterInstallMode = {})); | ||
const metadataSymbol = Symbol.for('ic-agent-metadata'); | ||
@@ -76,5 +72,2 @@ /** | ||
class Actor { | ||
constructor(metadata) { | ||
this[metadataSymbol] = Object.freeze(metadata); | ||
} | ||
/** | ||
@@ -123,8 +116,6 @@ * Get the Agent class this Actor would call, or undefined if the Actor would use | ||
} | ||
static createActorClass(interfaceFactory, options) { | ||
static createActorClass(interfaceFactory) { | ||
const service = interfaceFactory({ IDL: candid_1.IDL }); | ||
class CanisterActor extends Actor { | ||
constructor(config) { | ||
if (!config.canisterId) | ||
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`); | ||
const canisterId = typeof config.canisterId === 'string' | ||
@@ -138,5 +129,2 @@ ? principal_1.Principal.fromText(config.canisterId) | ||
for (const [methodName, func] of service._fields) { | ||
if (options === null || options === void 0 ? void 0 : options.httpDetails) { | ||
func.annotations.push(exports.ACTOR_METHOD_WITH_HTTP_DETAILS); | ||
} | ||
this[methodName] = _createActorMethod(this, methodName, func, config.blsVerify); | ||
@@ -149,9 +137,6 @@ } | ||
static createActor(interfaceFactory, configuration) { | ||
if (!configuration.canisterId) { | ||
throw new errors_1.AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`); | ||
} | ||
return new (this.createActorClass(interfaceFactory))(configuration); | ||
} | ||
static createActorWithHttpDetails(interfaceFactory, configuration) { | ||
return new (this.createActorClass(interfaceFactory, { httpDetails: true }))(configuration); | ||
constructor(metadata) { | ||
this[metadataSymbol] = Object.freeze(metadata); | ||
} | ||
@@ -177,6 +162,5 @@ } | ||
}; | ||
exports.ACTOR_METHOD_WITH_HTTP_DETAILS = 'http-details'; | ||
function _createActorMethod(actor, methodName, func, blsVerify) { | ||
let caller; | ||
if (func.annotations.includes('query') || func.annotations.includes('composite_query')) { | ||
if (func.annotations.includes('query')) { | ||
caller = async (options, ...args) => { | ||
@@ -194,8 +178,3 @@ var _a, _b; | ||
case "replied" /* QueryResponseStatus.Replied */: | ||
return func.annotations.includes(exports.ACTOR_METHOD_WITH_HTTP_DETAILS) | ||
? { | ||
httpDetails: result.httpDetails, | ||
result: decodeReturnValue(func.retTypes, result.reply.arg), | ||
} | ||
: decodeReturnValue(func.retTypes, result.reply.arg); | ||
return decodeReturnValue(func.retTypes, result.reply.arg); | ||
} | ||
@@ -219,3 +198,3 @@ }; | ||
}); | ||
if (!response.ok || response.body /* IC-1462 */) { | ||
if (!response.ok) { | ||
throw new UpdateCallRejectedError(cid, methodName, requestId, response); | ||
@@ -225,18 +204,7 @@ } | ||
const responseBytes = await (0, polling_1.pollForResponse)(agent, ecid, requestId, pollStrategy, blsVerify); | ||
const shouldIncludeHttpDetails = func.annotations.includes(exports.ACTOR_METHOD_WITH_HTTP_DETAILS); | ||
if (responseBytes !== undefined) { | ||
return shouldIncludeHttpDetails | ||
? { | ||
httpDetails: response, | ||
result: decodeReturnValue(func.retTypes, responseBytes), | ||
} | ||
: decodeReturnValue(func.retTypes, responseBytes); | ||
return decodeReturnValue(func.retTypes, responseBytes); | ||
} | ||
else if (func.retTypes.length === 0) { | ||
return shouldIncludeHttpDetails | ||
? { | ||
httpDetails: response, | ||
result: undefined, | ||
} | ||
: undefined; | ||
return undefined; | ||
} | ||
@@ -243,0 +211,0 @@ else { |
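The compiled `_createActorMethod` above decides per method whether to wrap the decoded reply, based on the `http-details` annotation pushed in `createActorClass`. An illustrative stand-alone sketch of that control flow (simplified stand-in names, not the library's internal API):

```ts
// Illustrative stand-in for the annotation-driven wrapping in _createActorMethod;
// names are simplified, not the library's internal API.
const ACTOR_METHOD_WITH_HTTP_DETAILS = 'http-details';

interface HttpDetails {
  ok: boolean;
  status: number;
  statusText: string;
}

function wrapReply<T>(
  annotations: string[],
  decoded: T,
  httpDetails: HttpDetails,
): T | { httpDetails: HttpDetails; result: T } {
  // Only methods annotated with 'http-details' get the enriched shape.
  return annotations.includes(ACTOR_METHOD_WITH_HTTP_DETAILS)
    ? { httpDetails, result: decoded }
    : decoded;
}
```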
@@ -5,3 +5,2 @@ import { Principal } from '@dfinity/principal'; | ||
import { Identity } from '../auth'; | ||
import { HttpHeaderField } from './http/types'; | ||
/** | ||
@@ -30,3 +29,3 @@ * Codes used by the replica for rejecting a message. | ||
*/ | ||
export declare type QueryResponse = QueryResponseReplied | QueryResponseRejected; | ||
export type QueryResponse = QueryResponseReplied | QueryResponseRejected; | ||
export declare const enum QueryResponseStatus { | ||
@@ -36,20 +35,5 @@ Replied = "replied", | ||
} | ||
export interface HttpDetailsResponse { | ||
ok: boolean; | ||
status: number; | ||
statusText: string; | ||
headers: HttpHeaderField[]; | ||
} | ||
export declare type ApiQueryResponse = QueryResponse & { | ||
httpDetails: HttpDetailsResponse; | ||
requestId: RequestId; | ||
}; | ||
export interface QueryResponseBase { | ||
status: QueryResponseStatus; | ||
} | ||
export declare type NodeSignature = { | ||
timestamp: bigint; | ||
signature: Uint8Array; | ||
identity: Uint8Array; | ||
}; | ||
export interface QueryResponseReplied extends QueryResponseBase { | ||
@@ -60,3 +44,2 @@ status: QueryResponseStatus.Replied; | ||
}; | ||
signatures?: NodeSignature[]; | ||
} | ||
@@ -67,4 +50,2 @@ export interface QueryResponseRejected extends QueryResponseBase { | ||
reject_message: string; | ||
error_code: string; | ||
signatures?: NodeSignature[]; | ||
} | ||
@@ -111,8 +92,2 @@ /** | ||
statusText: string; | ||
body: { | ||
error_code?: string; | ||
reject_code: number; | ||
reject_message: string; | ||
} | null; | ||
headers: HttpHeaderField[]; | ||
}; | ||
@@ -162,3 +137,2 @@ } | ||
* @param options Options to use to create and send the query. | ||
* @param identity Sender principal to use when sending the query. | ||
* @returns The response from the replica. The Promise will only reject when the communication | ||
@@ -168,3 +142,3 @@ * failed. If the query itself failed but no protocol errors happened, the response will | ||
*/ | ||
query(canisterId: Principal | string, options: QueryFields, identity?: Identity | Promise<Identity>): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, options: QueryFields): Promise<QueryResponse>; | ||
/** | ||
@@ -171,0 +145,0 @@ * By default, the agent is configured to talk to the main Internet Computer, |
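On the side of the diff that declares `ApiQueryResponse`, `Agent.query` resolves to the decoded query response plus `httpDetails` and `requestId`. A hedged usage sketch, assuming that version; the canister ID and method name are placeholders:

```ts
// Sketch only: assumes the version whose query() returns ApiQueryResponse.
import { HttpAgent } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';

async function rawQuery() {
  const agent = new HttpAgent({ host: 'https://icp-api.io' });
  const response = await agent.query('ryjl3-tyaaa-aaaaa-aaaba-cai', {
    methodName: 'name',      // hypothetical query method on a placeholder canister
    arg: IDL.encode([], []), // empty Candid argument
  });
  // ApiQueryResponse = QueryResponse & { httpDetails, requestId }
  console.log(response.httpDetails.status, response.requestId);
  if ('reply' in response) {
    console.log('reply bytes:', response.reply.arg.byteLength);
  }
}

void rawQuery();
```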
@@ -15,3 +15,3 @@ "use strict"; | ||
ReplicaRejectCode[ReplicaRejectCode["CanisterError"] = 5] = "CanisterError"; | ||
})(ReplicaRejectCode = exports.ReplicaRejectCode || (exports.ReplicaRejectCode = {})); | ||
})(ReplicaRejectCode || (exports.ReplicaRejectCode = ReplicaRejectCode = {})); | ||
//# sourceMappingURL=api.js.map |
@@ -5,5 +5,4 @@ import { JsonObject } from '@dfinity/candid'; | ||
import { Identity } from '../../auth'; | ||
import { Agent, ApiQueryResponse, QueryFields, ReadStateOptions, ReadStateResponse, SubmitResponse } from '../api'; | ||
import { Agent, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from '../api'; | ||
import { HttpAgentRequest, HttpAgentRequestTransformFn } from './types'; | ||
import { SubnetStatus } from '../../canisterStatus'; | ||
export * from './transforms'; | ||
@@ -35,11 +34,14 @@ export { Nonce, makeNonce } from './types'; | ||
/** | ||
* Adds a unique {@link Nonce} with each query. | ||
* Enabling will prevent queries from being answered with a cached response. | ||
* Prevents the agent from providing a unique {@link Nonce} with each call. | ||
* Enabling may cause rate limiting of identical requests | ||
* at the boundary nodes. | ||
* | ||
* To add your own nonce generation logic, you can use the following: | ||
* @example | ||
* const agent = new HttpAgent({ useQueryNonces: true }); | ||
* import {makeNonceTransform, makeNonce} from '@dfinity/agent'; | ||
* const agent = new HttpAgent({ disableNonce: true }); | ||
 * agent.addTransform(makeNonceTransform(makeNonce)); | ||
* @default false | ||
*/ | ||
useQueryNonces?: boolean; | ||
disableNonce?: boolean; | ||
/** | ||
@@ -50,11 +52,6 @@ * Number of times to retry requests before throwing an error | ||
retryTimes?: number; | ||
/** | ||
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet. | ||
* @default true | ||
*/ | ||
verifyQuerySignatures?: boolean; | ||
} | ||
export declare class HttpAgent implements Agent { | ||
#private; | ||
rootKey: ArrayBuffer; | ||
private readonly _pipeline; | ||
private _identity; | ||
@@ -68,7 +65,7 @@ private readonly _fetch; | ||
private _rootKeyFetched; | ||
private readonly _retryTimes; | ||
private _retryTimes; | ||
readonly _isAgent = true; | ||
constructor(options?: HttpAgentOptions); | ||
isLocal(): boolean; | ||
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void; | ||
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void; | ||
getPrincipal(): Promise<Principal>; | ||
@@ -81,3 +78,3 @@ call(canisterId: Principal | string, options: { | ||
private _requestAndRetry; | ||
query(canisterId: Principal | string, fields: QueryFields, identity?: Identity | Promise<Identity>): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, fields: QueryFields, identity?: Identity | Promise<Identity>): Promise<QueryResponse>; | ||
createReadStateRequest(fields: ReadStateOptions, identity?: Identity | Promise<Identity>): Promise<any>; | ||
@@ -87,3 +84,3 @@ readState(canisterId: Principal | string, fields: ReadStateOptions, identity?: Identity | Promise<Identity>, request?: any): Promise<ReadStateResponse>; | ||
 * Allows agent to sync its time with the network. Can be called during initialization or mid-lifecycle if the device's clock has drifted away from the network time. This is necessary to set the Expiry for a request | ||
* @param {Principal} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
* @param {PrincipalLike} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
*/ | ||
@@ -95,4 +92,3 @@ syncTime(canisterId?: Principal): Promise<void>; | ||
replaceIdentity(identity: Identity): void; | ||
fetchSubnetKeys(canisterId: Principal | string): Promise<SubnetStatus | undefined>; | ||
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>; | ||
} |
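The two sides of this diff accept different `HttpAgentOptions` and `addTransform` signatures. A sketch assuming the side that declares `verifyQuerySignatures`, `useQueryNonces`, and the pipeline-scoped `addTransform`; the other side exposes `disableNonce` and a single-pipeline `addTransform(fn)` instead. The host value is a placeholder:

```ts
// Sketch only: these options exist on the side that declares verifyQuerySignatures
// and useQueryNonces; the other side accepts disableNonce and addTransform(fn).
// The host is a placeholder.
import { HttpAgent, makeNonce, makeNonceTransform } from '@dfinity/agent';

const agent = new HttpAgent({
  host: 'https://icp-api.io',
  retryTimes: 5,               // retry failed requests up to 5 times
  verifyQuerySignatures: true, // verify node signatures on query responses
  useQueryNonces: false,       // leave query caching enabled
});

// The pipeline-scoped overload takes 'update' | 'query' as its first argument.
agent.addTransform('query', makeNonceTransform(makeNonce));
```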
@@ -28,14 +28,2 @@ "use strict"; | ||
}; | ||
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { | ||
if (kind === "m") throw new TypeError("Private method is not writable"); | ||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); | ||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); | ||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; | ||
}; | ||
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { | ||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); | ||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); | ||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); | ||
}; | ||
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
@@ -51,8 +39,2 @@ exports.HttpAgent = exports.IdentityInvalidError = exports.RequestStatusResponseStatus = exports.makeNonce = void 0; | ||
const types_1 = require("./types"); | ||
const errors_2 = require("./errors"); | ||
const canisterStatus_1 = require("../../canisterStatus"); | ||
const certificate_1 = require("../../certificate"); | ||
const ed25519_1 = require("@noble/curves/ed25519"); | ||
const expirableMap_1 = require("../../utils/expirableMap"); | ||
const public_key_1 = require("../../public_key"); | ||
__exportStar(require("./transforms"), exports); | ||
@@ -69,3 +51,3 @@ var types_2 = require("./types"); | ||
RequestStatusResponseStatus["Done"] = "done"; | ||
})(RequestStatusResponseStatus = exports.RequestStatusResponseStatus || (exports.RequestStatusResponseStatus = {})); | ||
})(RequestStatusResponseStatus || (exports.RequestStatusResponseStatus = RequestStatusResponseStatus = {})); | ||
// Default delta for ingress expiry is 5 minutes. | ||
@@ -140,69 +122,7 @@ const DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS = 5 * 60 * 1000; | ||
this.rootKey = (0, buffer_1.fromHex)(IC_ROOT_KEY); | ||
this._pipeline = []; | ||
this._timeDiffMsecs = 0; | ||
this._rootKeyFetched = false; | ||
this._retryTimes = 3; // Retry requests 3 times before erroring by default | ||
this._isAgent = true; | ||
_HttpAgent_queryPipeline.set(this, []); | ||
_HttpAgent_updatePipeline.set(this, []); | ||
_HttpAgent_subnetKeys.set(this, new expirableMap_1.ExpirableMap({ | ||
expirationTime: 5 * 60 * 1000, // 5 minutes | ||
})); | ||
_HttpAgent_verifyQuerySignatures.set(this, true); | ||
/** | ||
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation | ||
* @param queryResponse - The response from the query | ||
* @param subnetStatus - The subnet status, including all node keys | ||
* @returns ApiQueryResponse | ||
*/ | ||
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => { | ||
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) { | ||
// This should not be called if the user has disabled verification | ||
return queryResponse; | ||
} | ||
if (!subnetStatus) { | ||
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
const { status, signatures = [], requestId } = queryResponse; | ||
const domainSeparator = new TextEncoder().encode('\x0Bic-response'); | ||
for (const sig of signatures) { | ||
const { timestamp, identity } = sig; | ||
const nodeId = principal_1.Principal.fromUint8Array(identity).toText(); | ||
let hash; | ||
// Hash is constructed differently depending on the status | ||
if (status === 'replied') { | ||
const { reply } = queryResponse; | ||
hash = (0, request_id_1.hashOfMap)({ | ||
status: status, | ||
reply: reply, | ||
timestamp: BigInt(timestamp), | ||
request_id: requestId, | ||
}); | ||
} | ||
else if (status === 'rejected') { | ||
const { reject_code, reject_message, error_code } = queryResponse; | ||
hash = (0, request_id_1.hashOfMap)({ | ||
status: status, | ||
reject_code: reject_code, | ||
reject_message: reject_message, | ||
error_code: error_code, | ||
timestamp: BigInt(timestamp), | ||
request_id: requestId, | ||
}); | ||
} | ||
else { | ||
throw new Error(`Unknown status: ${status}`); | ||
} | ||
const separatorWithHash = (0, buffer_1.concat)(domainSeparator, new Uint8Array(hash)); | ||
// FIX: check for match without verifying N times | ||
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId); | ||
if (!pubKey) { | ||
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
const rawKey = public_key_1.Ed25519PublicKey.fromDer(pubKey).rawKey; | ||
const valid = ed25519_1.ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey)); | ||
if (valid) | ||
return queryResponse; | ||
throw new certificate_1.CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`); | ||
} | ||
return queryResponse; | ||
}); | ||
if (options.source) { | ||
@@ -212,2 +132,3 @@ if (!(options.source instanceof HttpAgent)) { | ||
} | ||
this._pipeline = [...options.source._pipeline]; | ||
this._identity = options.source._identity; | ||
@@ -238,27 +159,10 @@ this._fetch = options.source._fetch; | ||
if (!location) { | ||
this._host = new URL('https://icp-api.io'); | ||
console.warn('Could not infer host from window.location, defaulting to mainnet gateway of https://icp-api.io. Please provide a host to the HttpAgent constructor to avoid this warning.'); | ||
throw new Error('Must specify a host to connect to.'); | ||
} | ||
// Mainnet and local will have the api route available | ||
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', 'localhost']; | ||
const hostname = location === null || location === void 0 ? void 0 : location.hostname; | ||
let knownHost; | ||
if (hostname && typeof hostname === 'string') { | ||
knownHost = knownHosts.find(host => hostname.endsWith(host)); | ||
} | ||
if (location && knownHost) { | ||
// If the user is on a boundary-node provided host, we can use the same host for the agent | ||
this._host = new URL(`${location.protocol}//${knownHost}${location.port ? ':' + location.port : ''}`); | ||
} | ||
else { | ||
this._host = new URL('https://icp-api.io'); | ||
console.warn('Could not infer host from window.location, defaulting to mainnet gateway of https://icp-api.io. Please provide a host to the HttpAgent constructor to avoid this warning.'); | ||
} | ||
this._host = new URL(location + ''); | ||
} | ||
if (options.verifyQuerySignatures !== undefined) { | ||
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f"); | ||
// Default is 3, only set if option is provided | ||
if (options.retryTimes !== undefined) { | ||
this._retryTimes = options.retryTimes; | ||
} | ||
// Default is 3, only set from option if greater or equal to 0 | ||
this._retryTimes = | ||
options.retryTimes !== undefined && options.retryTimes >= 0 ? options.retryTimes : 3; | ||
// Rewrite to avoid redirects | ||
@@ -280,5 +184,4 @@ if (this._host.hostname.endsWith(IC0_SUB_DOMAIN)) { | ||
// Add a nonce transform to ensure calls are unique | ||
this.addTransform('update', (0, transforms_1.makeNonceTransform)(types_1.makeNonce)); | ||
if (options.useQueryNonces) { | ||
this.addTransform('query', (0, transforms_1.makeNonceTransform)(types_1.makeNonce)); | ||
if (!options.disableNonce) { | ||
this.addTransform((0, transforms_1.makeNonceTransform)(types_1.makeNonce)); | ||
} | ||
@@ -288,15 +191,8 @@ } | ||
const hostname = this._host.hostname; | ||
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1'); | ||
return hostname === '127.0.0.1' || hostname.endsWith('localhost'); | ||
} | ||
addTransform(type, fn, priority = fn.priority || 0) { | ||
if (type === 'update') { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority); | ||
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority })); | ||
} | ||
else if (type === 'query') { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority); | ||
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority })); | ||
} | ||
addTransform(fn, priority = fn.priority || 0) { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority); | ||
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority })); | ||
} | ||
@@ -349,4 +245,2 @@ async getPrincipal() { | ||
const [response, requestId] = await Promise.all([request, (0, request_id_1.requestIdOf)(submit)]); | ||
const responseBuffer = await response.arrayBuffer(); | ||
const responseBody = (response.status === 200 && responseBuffer.byteLength > 0 ? cbor.decode(responseBuffer) : null); | ||
return { | ||
@@ -358,4 +252,2 @@ requestId, | ||
statusText: response.statusText, | ||
body: responseBody, | ||
headers: (0, transforms_1.httpHeadersTransform)(response.headers), | ||
}, | ||
@@ -365,103 +257,51 @@ }; | ||
async _requestAndRetry(request, tries = 0) { | ||
let response; | ||
try { | ||
response = await request(); | ||
if (tries > this._retryTimes && this._retryTimes !== 0) { | ||
throw new Error(`AgentError: Exceeded configured limit of ${this._retryTimes} retry attempts. Please check your network connection or try again in a few moments`); | ||
} | ||
catch (error) { | ||
const response = await request(); | ||
if (!response.ok) { | ||
const responseText = await response.clone().text(); | ||
const errorMessage = `Server returned an error:\n` + | ||
` Code: ${response.status} (${response.statusText})\n` + | ||
` Body: ${responseText}\n`; | ||
if (this._retryTimes > tries) { | ||
console.warn(`Caught exception while attempting to make request:\n` + | ||
` ${error}\n` + | ||
` Retrying request.`); | ||
console.warn(errorMessage + ` Retrying request.`); | ||
return await this._requestAndRetry(request, tries + 1); | ||
} | ||
throw error; | ||
else { | ||
throw new Error(errorMessage); | ||
} | ||
} | ||
if (response.ok) { | ||
return response; | ||
} | ||
const responseText = await response.clone().text(); | ||
const errorMessage = `Server returned an error:\n` + | ||
` Code: ${response.status} (${response.statusText})\n` + | ||
` Body: ${responseText}\n`; | ||
if (this._retryTimes > tries) { | ||
console.warn(errorMessage + ` Retrying request.`); | ||
return await this._requestAndRetry(request, tries + 1); | ||
} | ||
throw new errors_2.AgentHTTPResponseError(errorMessage, { | ||
ok: response.ok, | ||
status: response.status, | ||
statusText: response.statusText, | ||
headers: (0, transforms_1.httpHeadersTransform)(response.headers), | ||
}); | ||
return response; | ||
} | ||
async query(canisterId, fields, identity) { | ||
const makeQuery = async () => { | ||
const id = await (identity !== undefined ? await identity : await this._identity); | ||
if (!id) { | ||
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication."); | ||
} | ||
const canister = principal_1.Principal.from(canisterId); | ||
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous(); | ||
const request = { | ||
request_type: "query" /* ReadRequestType.Query */, | ||
canister_id: canister, | ||
method_name: fields.methodName, | ||
arg: fields.arg, | ||
sender, | ||
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS), | ||
}; | ||
const requestId = await (0, request_id_1.requestIdOf)(request); | ||
// TODO: remove this any. This can be a Signed or UnSigned request. | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let transformedRequest = await this._transform({ | ||
request: { | ||
method: 'POST', | ||
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})), | ||
}, | ||
endpoint: "read" /* Endpoint.Query */, | ||
body: request, | ||
}); | ||
// Apply transform for identity. | ||
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest)); | ||
const body = cbor.encode(transformedRequest.body); | ||
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body }))); | ||
const queryResponse = cbor.decode(await response.arrayBuffer()); | ||
return Object.assign(Object.assign({}, queryResponse), { httpDetails: { | ||
ok: response.ok, | ||
status: response.status, | ||
statusText: response.statusText, | ||
headers: (0, transforms_1.httpHeadersTransform)(response.headers), | ||
}, requestId }); | ||
const id = await (identity !== undefined ? await identity : await this._identity); | ||
if (!id) { | ||
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication."); | ||
} | ||
const canister = typeof canisterId === 'string' ? principal_1.Principal.fromText(canisterId) : canisterId; | ||
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || principal_1.Principal.anonymous(); | ||
const request = { | ||
request_type: "query" /* ReadRequestType.Query */, | ||
canister_id: canister, | ||
method_name: fields.methodName, | ||
arg: fields.arg, | ||
sender, | ||
ingress_expiry: new transforms_1.Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS), | ||
}; | ||
const getSubnetStatus = async () => { | ||
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) { | ||
return undefined; | ||
} | ||
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
if (subnetStatus) { | ||
return subnetStatus; | ||
} | ||
await this.fetchSubnetKeys(canisterId.toString()); | ||
return __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
}; | ||
// Make query and fetch subnet keys in parallel | ||
const [query, subnetStatus] = await Promise.all([makeQuery(), getSubnetStatus()]); | ||
// Skip verification if the user has disabled it | ||
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) { | ||
return query; | ||
} | ||
try { | ||
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus); | ||
} | ||
catch (_) { | ||
// In case the node signatures have changed, refresh the subnet keys and try again | ||
console.warn('Query response verification failed. Retrying with fresh subnet keys.'); | ||
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").delete(canisterId.toString()); | ||
await this.fetchSubnetKeys(canisterId.toString()); | ||
const updatedSubnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
if (!updatedSubnetStatus) { | ||
throw new certificate_1.CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, updatedSubnetStatus); | ||
} | ||
// TODO: remove this any. This can be a Signed or UnSigned request. | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let transformedRequest = await this._transform({ | ||
request: { | ||
method: 'POST', | ||
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})), | ||
}, | ||
endpoint: "read" /* Endpoint.Query */, | ||
body: request, | ||
}); | ||
// Apply transform for identity. | ||
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest)); | ||
const body = cbor.encode(transformedRequest.body); | ||
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body }))); | ||
return cbor.decode(await response.arrayBuffer()); | ||
} | ||
@@ -509,3 +349,3 @@ async createReadStateRequest(fields, identity) { | ||
* Allows agent to sync its time with the network. Can be called during initialization or mid-lifecycle if the device's clock has drifted away from the network time. This is necessary to set the Expiry for a request | ||
* @param {Principal} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
* @param {PrincipalLike} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
*/ | ||
@@ -557,29 +397,7 @@ async syncTime(canisterId) { | ||
} | ||
async fetchSubnetKeys(canisterId) { | ||
const effectiveCanisterId = principal_1.Principal.from(canisterId); | ||
const response = await (0, canisterStatus_1.request)({ | ||
canisterId: effectiveCanisterId, | ||
paths: ['subnet'], | ||
agent: this, | ||
}); | ||
const subnetResponse = response.get('subnet'); | ||
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) { | ||
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse); | ||
return subnetResponse; | ||
} | ||
// If the subnet status is not returned, return undefined | ||
return undefined; | ||
} | ||
_transform(request) { | ||
let p = Promise.resolve(request); | ||
if (request.endpoint === "call" /* Endpoint.Call */) { | ||
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
for (const fn of this._pipeline) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
else { | ||
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
} | ||
return p; | ||
@@ -589,3 +407,2 @@ } | ||
exports.HttpAgent = HttpAgent; | ||
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap(); | ||
//# sourceMappingURL=index.js.map |
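The compiled `_requestAndRetry` above retries both thrown fetch errors and non-ok responses on one side of the diff, and only non-ok responses on the other. A simplified stand-alone sketch of the retry loop (merging the two retry paths; not the library's internal function):

```ts
// Simplified stand-alone sketch of the retry loop in _requestAndRetry above;
// it merges the "thrown error" and "non-ok response" retry paths into one.
async function requestAndRetry(
  request: () => Promise<Response>,
  retryTimes = 3,
  tries = 0,
): Promise<Response> {
  try {
    const response = await request();
    if (response.ok) {
      return response;
    }
    const body = await response.clone().text();
    throw new Error(
      `Server returned an error:\n  Code: ${response.status} (${response.statusText})\n  Body: ${body}\n`,
    );
  } catch (error) {
    if (retryTimes > tries) {
      console.warn(`${error} Retrying request.`);
      return requestAndRetry(request, retryTimes, tries + 1);
    }
    throw error;
  }
}
```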
import * as cbor from 'simple-cbor'; | ||
import { HttpAgentRequestTransformFn, HttpHeaderField, Nonce } from './types'; | ||
import { HttpAgentRequestTransformFn, Nonce } from './types'; | ||
export declare class Expiry { | ||
@@ -21,8 +21,1 @@ private readonly _value; | ||
export declare function makeExpiryTransform(delayInMilliseconds: number): HttpAgentRequestTransformFn; | ||
/** | ||
* Maps the default fetch headers field to the serializable HttpHeaderField. | ||
* | ||
* @param headers Fetch definition of the headers type | ||
* @returns array of header fields | ||
*/ | ||
export declare function httpHeadersTransform(headers: Headers): HttpHeaderField[]; |
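`httpHeadersTransform` is declared only on one side of this diff. A small usage sketch, assuming that version and assuming the helper is re-exported from the package root alongside the other transforms:

```ts
// Sketch only: assumes httpHeadersTransform is re-exported from the package
// root alongside the other transforms.
import { httpHeadersTransform } from '@dfinity/agent';

const headers = new Headers({ 'content-type': 'application/cbor' });
const fields = httpHeadersTransform(headers);
console.log(fields); // [['content-type', 'application/cbor']]
```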
@@ -26,3 +26,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.httpHeadersTransform = exports.makeExpiryTransform = exports.makeNonceTransform = exports.Expiry = void 0; | ||
exports.makeExpiryTransform = exports.makeNonceTransform = exports.Expiry = void 0; | ||
const candid_1 = require("@dfinity/candid"); | ||
@@ -32,14 +32,9 @@ const cbor = __importStar(require("simple-cbor")); | ||
const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000); | ||
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000; | ||
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000); | ||
class Expiry { | ||
constructor(deltaInMSec) { | ||
// Use bigint because it can overflow the maximum number allowed in a double float. | ||
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) * | ||
NANOSECONDS_PER_MILLISECONDS; | ||
// round down to the nearest second | ||
const ingress_as_seconds = raw_value / BigInt(1000000000); | ||
// round down to nearest minute | ||
const ingress_as_minutes = ingress_as_seconds / BigInt(60); | ||
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000); | ||
this._value = rounded_down_nanos; | ||
this._value = | ||
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) * | ||
NANOSECONDS_PER_MILLISECONDS; | ||
} | ||
@@ -62,2 +57,3 @@ toCBOR() { | ||
return async (request) => { | ||
const nonce = nonceFn(); | ||
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests. | ||
@@ -86,16 +82,2 @@ const headers = request.request.headers; | ||
exports.makeExpiryTransform = makeExpiryTransform; | ||
/** | ||
* Maps the default fetch headers field to the serializable HttpHeaderField. | ||
* | ||
* @param headers Fetch definition of the headers type | ||
* @returns array of header fields | ||
*/ | ||
function httpHeadersTransform(headers) { | ||
const headerFields = []; | ||
headers.forEach((value, key) => { | ||
headerFields.push([key, value]); | ||
}); | ||
return headerFields; | ||
} | ||
exports.httpHeadersTransform = httpHeadersTransform; | ||
//# sourceMappingURL=transforms.js.map |
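The two `Expiry` constructors above differ in rounding: one side rounds the ingress expiry down to a whole minute after subtracting the permitted drift, the other multiplies directly. A worked sketch of the rounded computation shown above:

```ts
// Worked sketch of the rounded expiry: (now + delta - permitted drift) in ns,
// rounded down to a whole minute, mirroring the constructor shown above.
const NANOSECONDS_PER_MILLISECOND = BigInt(1_000_000);
const REPLICA_PERMITTED_DRIFT_MS = 60 * 1000;

function roundedExpiryNanos(deltaInMs: number, nowMs = Date.now()): bigint {
  const raw =
    BigInt(Math.floor(nowMs + deltaInMs - REPLICA_PERMITTED_DRIFT_MS)) *
    NANOSECONDS_PER_MILLISECOND;
  const asSeconds = raw / BigInt(1_000_000_000); // drop sub-second precision
  const asMinutes = asSeconds / BigInt(60);      // drop sub-minute precision
  return asMinutes * BigInt(60) * BigInt(1_000_000_000);
}

// e.g. the default 5-minute ingress expiry delta
console.log(roundedExpiryNanos(5 * 60 * 1000));
```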
@@ -11,3 +11,3 @@ import type { Principal } from '@dfinity/principal'; | ||
} | ||
export declare type HttpAgentRequest = HttpAgentQueryRequest | HttpAgentSubmitRequest | HttpAgentReadStateRequest; | ||
export type HttpAgentRequest = HttpAgentQueryRequest | HttpAgentSubmitRequest | HttpAgentReadStateRequest; | ||
export interface HttpAgentBaseRequest { | ||
@@ -17,3 +17,2 @@ readonly endpoint: Endpoint; | ||
} | ||
export declare type HttpHeaderField = [string, string]; | ||
export interface HttpAgentSubmitRequest extends HttpAgentBaseRequest { | ||
@@ -39,3 +38,3 @@ readonly endpoint: Endpoint.Call; | ||
} | ||
export declare type Envelope<T> = Signed<T> | UnSigned<T>; | ||
export type Envelope<T> = Signed<T> | UnSigned<T>; | ||
export interface HttpAgentRequestTransformFn { | ||
@@ -74,9 +73,9 @@ (args: HttpAgentRequest): Promise<HttpAgentRequest | undefined | void>; | ||
} | ||
export declare type ReadRequest = QueryRequest | ReadStateRequest; | ||
export declare type Nonce = Uint8Array & { | ||
export type ReadRequest = QueryRequest | ReadStateRequest; | ||
export type Nonce = Uint8Array & { | ||
__nonce__: void; | ||
}; | ||
/** | ||
* Create a random Nonce, based on random values | ||
* Create a random Nonce, based on date and a random suffix. | ||
*/ | ||
export declare function makeNonce(): Nonce; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.makeNonce = exports.SubmitRequestType = void 0; | ||
const random_1 = require("../../utils/random"); | ||
// tslint:enable:camel-case | ||
// The types of values allowed in the `request_type` field for submit requests. | ||
@@ -9,5 +9,5 @@ var SubmitRequestType; | ||
SubmitRequestType["Call"] = "call"; | ||
})(SubmitRequestType = exports.SubmitRequestType || (exports.SubmitRequestType = {})); | ||
})(SubmitRequestType || (exports.SubmitRequestType = SubmitRequestType = {})); | ||
/** | ||
* Create a random Nonce, based on random values | ||
* Create a random Nonce, based on date and a random suffix. | ||
*/ | ||
@@ -18,10 +18,16 @@ function makeNonce() { | ||
const view = new DataView(buffer); | ||
const rand1 = (0, random_1.randomNumber)(); | ||
const rand2 = (0, random_1.randomNumber)(); | ||
const rand3 = (0, random_1.randomNumber)(); | ||
const rand4 = (0, random_1.randomNumber)(); | ||
view.setUint32(0, rand1); | ||
view.setUint32(4, rand2); | ||
view.setUint32(8, rand3); | ||
view.setUint32(12, rand4); | ||
const now = BigInt(+Date.now()); | ||
const randHi = Math.floor(Math.random() * 0xffffffff); | ||
const randLo = Math.floor(Math.random() * 0xffffffff); | ||
// Fix for IOS < 14.8 setBigUint64 absence | ||
if (typeof view.setBigUint64 === 'function') { | ||
view.setBigUint64(0, now); | ||
} | ||
else { | ||
const TWO_TO_THE_32 = BigInt(1) << BigInt(32); | ||
view.setUint32(0, Number(now >> BigInt(32))); | ||
view.setUint32(4, Number(now % TWO_TO_THE_32)); | ||
} | ||
view.setUint32(8, randHi); | ||
view.setUint32(12, randLo); | ||
return buffer; | ||
@@ -28,0 +34,0 @@ } |
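A minimal usage sketch for `makeNonce` as compiled above; the nonce is a 16-byte buffer on both sides of the diff, and only its derivation (pure random values vs. timestamp plus random suffix) differs:

```ts
// Sketch only: makeNonce is exported from the package root on both sides of
// the diff; only its derivation differs.
import { makeNonce } from '@dfinity/agent';

const nonce = makeNonce();
console.log(nonce.byteLength); // 16-byte nonce either way
```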
import { JsonObject } from '@dfinity/candid'; | ||
import { Agent, ApiQueryResponse, CallOptions, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from './api'; | ||
import { Agent, CallOptions, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from './api'; | ||
import { Principal } from '@dfinity/principal'; | ||
@@ -63,3 +63,3 @@ export declare enum ProxyMessageKind { | ||
} | ||
export declare type ProxyMessage = ProxyMessageError | ProxyMessageGetPrincipal | ProxyMessageGetPrincipalResponse | ProxyMessageQuery | ProxyMessageQueryResponse | ProxyMessageCall | ProxyMessageReadState | ProxyMessageReadStateResponse | ProxyMessageCallResponse | ProxyMessageStatus | ProxyMessageStatusResponse; | ||
export type ProxyMessage = ProxyMessageError | ProxyMessageGetPrincipal | ProxyMessageGetPrincipalResponse | ProxyMessageQuery | ProxyMessageQueryResponse | ProxyMessageCall | ProxyMessageReadState | ProxyMessageReadStateResponse | ProxyMessageCallResponse | ProxyMessageStatus | ProxyMessageStatusResponse; | ||
export declare class ProxyStubAgent { | ||
@@ -82,5 +82,5 @@ private _frontend; | ||
status(): Promise<JsonObject>; | ||
query(canisterId: Principal | string, fields: QueryFields): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, fields: QueryFields): Promise<QueryResponse>; | ||
private _sendAndWait; | ||
fetchRootKey(): Promise<ArrayBuffer>; | ||
} |
@@ -18,3 +18,3 @@ "use strict"; | ||
ProxyMessageKind["StatusResponse"] = "sr"; | ||
})(ProxyMessageKind = exports.ProxyMessageKind || (exports.ProxyMessageKind = {})); | ||
})(ProxyMessageKind || (exports.ProxyMessageKind = ProxyMessageKind = {})); | ||
// A Stub Agent that forwards calls to another Agent implementation. | ||
@@ -21,0 +21,0 @@ class ProxyStubAgent { |
@@ -13,3 +13,3 @@ import { Principal } from '@dfinity/principal'; | ||
*/ | ||
export declare type DerEncodedPublicKey = ArrayBuffer & { | ||
export type DerEncodedPublicKey = ArrayBuffer & { | ||
__derEncodedPublicKey__?: void; | ||
@@ -20,3 +20,3 @@ }; | ||
*/ | ||
export declare type Signature = ArrayBuffer & { | ||
export type Signature = ArrayBuffer & { | ||
__signature__: void; | ||
@@ -29,4 +29,2 @@ }; | ||
toDer(): DerEncodedPublicKey; | ||
rawKey?: ArrayBuffer; | ||
derKey?: DerEncodedPublicKey; | ||
} | ||
@@ -87,3 +85,3 @@ /** | ||
} | ||
export declare type IdentityDescriptor = AnonymousIdentityDescriptor | PublicKeyIdentityDescriptor; | ||
export type IdentityDescriptor = AnonymousIdentityDescriptor | PublicKeyIdentityDescriptor; | ||
/** | ||
@@ -90,0 +88,0 @@ * Create an IdentityDescriptor from a @dfinity/identity Identity |
@@ -6,3 +6,3 @@ "use strict"; | ||
*/ | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
/* tslint:disable */ | ||
// @ts-ignore | ||
@@ -9,0 +9,0 @@ exports.default = ({ IDL }) => { |
@@ -9,2 +9,3 @@ "use strict"; | ||
const asset_idl_1 = __importDefault(require("./asset_idl")); | ||
/* tslint:enable */ | ||
/** | ||
@@ -11,0 +12,0 @@ * Create a management canister actor. |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
/** | ||
* This file is generated from the candid for asset management. | ||
*/ | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
/* tslint:disable */ | ||
// @ts-ignore | ||
@@ -8,0 +8,0 @@ exports.default = ({ IDL }) => { |
@@ -5,3 +5,3 @@ /** | ||
import type { Principal } from '@dfinity/principal'; | ||
export declare type canister_id = Principal; | ||
export type canister_id = Principal; | ||
export interface canister_settings { | ||
@@ -19,4 +19,4 @@ controllers: [] | [Array<Principal>]; | ||
} | ||
export declare type user_id = Principal; | ||
export declare type wasm_module = Array<number>; | ||
export type user_id = Principal; | ||
export type wasm_module = Array<number>; | ||
export default interface _SERVICE { | ||
@@ -23,0 +23,0 @@ canister_status: (arg_0: { |
"use strict"; | ||
/** | ||
* This file is generated from the candid for asset management. | ||
*/ | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
//# sourceMappingURL=management_service.js.map |
@@ -5,26 +5,7 @@ /** @module CanisterStatus */ | ||
import { CreateCertificateOptions } from '../certificate'; | ||
import { DerEncodedPublicKey } from '..'; | ||
/** | ||
* Represents the useful information about a subnet | ||
* @param {string} subnetId the principal id of the canister's subnet | ||
* @param {string[]} nodeKeys the keys of the individual nodes in the subnet | ||
*/ | ||
export declare type SubnetStatus = { | ||
subnetId: string; | ||
nodeKeys: Map<string, DerEncodedPublicKey>; | ||
metrics?: { | ||
num_canisters: bigint; | ||
canister_state_bytes: bigint; | ||
consumed_cycles_total: { | ||
current: bigint; | ||
deleted: bigint; | ||
}; | ||
update_transactions_total: bigint; | ||
}; | ||
}; | ||
/** | ||
* Types of an entry on the canisterStatus map. | ||
* An entry of null indicates that the request failed, due to lack of permissions or the result being missing. | ||
*/ | ||
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null; | ||
export type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null; | ||
/** | ||
@@ -52,5 +33,5 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder} | ||
*/ | ||
export declare type Path = 'time' | 'controllers' | 'subnet' | 'module_hash' | 'candid' | MetaData | CustomPath; | ||
export declare type StatusMap = Map<Path | string, Status>; | ||
export declare type CanisterStatusOptions = { | ||
export type Path = 'time' | 'controllers' | 'subnet' | 'module_hash' | 'candid' | MetaData | CustomPath; | ||
export type StatusMap = Map<Path | string, Status>; | ||
export type CanisterStatusOptions = { | ||
canisterId: Principal; | ||
@@ -81,3 +62,2 @@ agent: HttpAgent; | ||
}) => Promise<StatusMap>; | ||
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus; | ||
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[]; |
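The `subnet` path, `SubnetStatus`, and `fetchNodeKeys` appear only on the side of this diff that declares them. A hedged sketch of requesting canister status paths, assuming that version; the canister ID is a placeholder:

```ts
// Sketch only: the 'subnet' path and SubnetStatus exist on the side of the
// diff that declares them. The canister ID is a placeholder.
import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

async function inspect() {
  const agent = new HttpAgent({ host: 'https://icp-api.io' });
  const status = await CanisterStatus.request({
    agent,
    canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'),
    paths: ['time', 'controllers', 'subnet'],
  });
  console.log('time:', status.get('time'));
  const subnet = status.get('subnet');
  if (subnet && typeof subnet === 'object' && 'nodeKeys' in subnet) {
    console.log('nodes on subnet:', subnet.nodeKeys.size);
  }
}

void inspect();
```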
"use strict"; | ||
/** @module CanisterStatus */ | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
@@ -26,4 +27,4 @@ if (k2 === undefined) k2 = k; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.encodePath = exports.fetchNodeKeys = exports.request = void 0; | ||
/** @module CanisterStatus */ | ||
exports.encodePath = exports.request = void 0; | ||
const candid_1 = require("@dfinity/candid"); | ||
const principal_1 = require("@dfinity/principal"); | ||
@@ -34,3 +35,2 @@ const errors_1 = require("../errors"); | ||
const Cbor = __importStar(require("../cbor")); | ||
const leb_1 = require("../utils/leb"); | ||
/** | ||
@@ -52,4 +52,3 @@ * | ||
const request = async (options) => { | ||
const { agent, paths } = options; | ||
const canisterId = principal_1.Principal.from(options.canisterId); | ||
const { canisterId, agent, paths } = options; | ||
const uniquePaths = [...new Set(paths)]; | ||
@@ -73,19 +72,3 @@ // Map path options to their correct formats | ||
}); | ||
const lookup = (cert, path) => { | ||
if (path === 'subnet') { | ||
const data = (0, exports.fetchNodeKeys)(response.certificate, canisterId, agent.rootKey); | ||
return { | ||
path: path, | ||
data, | ||
}; | ||
} | ||
else { | ||
return { | ||
path: path, | ||
data: (0, certificate_1.lookupResultToBuffer)(cert.lookup((0, exports.encodePath)(path, canisterId))), | ||
}; | ||
} | ||
}; | ||
// must pass in the rootKey if we have no delegation | ||
const { path, data } = lookup(cert, uniquePaths[index]); | ||
const data = cert.lookup((0, exports.encodePath)(uniquePaths[index], canisterId)); | ||
if (!data) { | ||
@@ -104,3 +87,3 @@ // Typically, the cert lookup will throw | ||
case 'time': { | ||
status.set(path, (0, leb_1.decodeTime)(data)); | ||
status.set(path, decodeTime(data)); | ||
break; | ||
@@ -116,6 +99,2 @@ } | ||
} | ||
case 'subnet': { | ||
status.set(path, data); | ||
break; | ||
} | ||
case 'candid': { | ||
@@ -133,3 +112,3 @@ status.set(path, new TextDecoder().decode(data)); | ||
case 'leb128': { | ||
status.set(path.key, (0, leb_1.decodeLeb128)(data)); | ||
status.set(path.key, decodeLeb128(data)); | ||
break; | ||
@@ -177,54 +156,2 @@ } | ||
exports.request = request; | ||
const fetchNodeKeys = (certificate, canisterId, root_key) => { | ||
if (!canisterId._isPrincipal) { | ||
throw new Error('Invalid canisterId'); | ||
} | ||
const cert = Cbor.decode(new Uint8Array(certificate)); | ||
const tree = cert.tree; | ||
let delegation = cert.delegation; | ||
let subnetId; | ||
if (delegation && delegation.subnet_id) { | ||
subnetId = principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)); | ||
} | ||
// On local replica, with System type subnet, there is no delegation | ||
else if (!delegation && typeof root_key !== 'undefined') { | ||
subnetId = principal_1.Principal.selfAuthenticating(new Uint8Array(root_key)); | ||
delegation = { | ||
subnet_id: subnetId.toUint8Array(), | ||
certificate: new ArrayBuffer(0), | ||
}; | ||
} | ||
// otherwise use default NNS subnet id | ||
else { | ||
subnetId = principal_1.Principal.selfAuthenticating(principal_1.Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array()); | ||
delegation = { | ||
subnet_id: subnetId.toUint8Array(), | ||
certificate: new ArrayBuffer(0), | ||
}; | ||
} | ||
const canisterInRange = (0, certificate_1.check_canister_ranges)({ canisterId, subnetId, tree }); | ||
if (!canisterInRange) { | ||
throw new Error('Canister not in range'); | ||
} | ||
const nodeTree = (0, certificate_1.lookup_path)(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree); | ||
const nodeForks = (0, certificate_1.flatten_forks)(nodeTree); | ||
nodeForks.length; | ||
const nodeKeys = new Map(); | ||
nodeForks.forEach(fork => { | ||
Object.getPrototypeOf(new Uint8Array(fork[1])); | ||
const node_id = principal_1.Principal.from(new Uint8Array(fork[1])).toText(); | ||
const derEncodedPublicKey = (0, certificate_1.lookup_path)(['public_key'], fork[2]); | ||
if (derEncodedPublicKey.byteLength !== 44) { | ||
throw new Error('Invalid public key length'); | ||
} | ||
else { | ||
nodeKeys.set(node_id, derEncodedPublicKey); | ||
} | ||
}); | ||
return { | ||
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(), | ||
nodeKeys, | ||
}; | ||
}; | ||
exports.fetchNodeKeys = fetchNodeKeys; | ||
const encodePath = (path, canisterId) => { | ||
@@ -269,2 +196,5 @@ const encoder = new TextEncoder(); | ||
}; | ||
const decodeLeb128 = (buf) => { | ||
return (0, candid_1.lebDecode)(new candid_1.PipeArrayBuffer(buf)); | ||
}; | ||
const decodeCbor = (buf) => { | ||
@@ -276,6 +206,11 @@ return Cbor.decode(buf); | ||
}; | ||
// Controllers are CBOR-encoded buffers | ||
// time is a LEB128-encoded Nat | ||
const decodeTime = (buf) => { | ||
const decoded = decodeLeb128(buf); | ||
return new Date(Number(decoded / BigInt(1000000))); | ||
}; | ||
// Controllers are CBOR-encoded buffers, starting with a Tag we don't need | ||
const decodeControllers = (buf) => { | ||
// eslint-disable-next-line @typescript-eslint/no-unused-vars | ||
const controllersRaw = decodeCbor(buf); | ||
const [tag, ...controllersRaw] = decodeCbor(buf); | ||
return controllersRaw.map((buf) => { | ||
@@ -282,0 +217,0 @@ return principal_1.Principal.fromUint8Array(new Uint8Array(buf)); |
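The compiled helpers above decode the `time` path as a LEB128-encoded nat of nanoseconds since the epoch. An equivalent stand-alone sketch using the same @dfinity/candid primitives the compiled code calls:

```ts
// Stand-alone equivalent of the decodeTime helper compiled above, using the
// same @dfinity/candid primitives (lebDecode, PipeArrayBuffer).
import { lebDecode, PipeArrayBuffer } from '@dfinity/candid';

function decodeTime(buf: ArrayBuffer): Date {
  // The 'time' path is a LEB128-encoded nat of nanoseconds since the epoch.
  const nanoseconds = lebDecode(new PipeArrayBuffer(buf));
  return new Date(Number(nanoseconds / BigInt(1_000_000)));
}
```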
@@ -30,3 +30,5 @@ "use strict"; | ||
exports.decode = exports.encode = exports.CborTag = void 0; | ||
// tslint:disable:max-classes-per-file | ||
// This file is based on: | ||
// tslint:disable-next-line: max-line-length | ||
// https://github.com/dfinity-lab/dfinity/blob/9bca65f8edd65701ea6bdb00e0752f9186bbc893/docs/spec/public/index.adoc#cbor-encoding-of-requests-and-responses | ||
@@ -98,3 +100,3 @@ const borc_1 = __importDefault(require("borc")); | ||
CborTag[CborTag["Semantic"] = 55799] = "Semantic"; | ||
})(CborTag = exports.CborTag || (exports.CborTag = {})); | ||
})(CborTag || (exports.CborTag = CborTag = {})); | ||
/** | ||
@@ -111,2 +113,3 @@ * Encode a JavaScript value into CBOR. | ||
for (let i = 0; i < len; i++) { | ||
// tslint:disable-next-line:no-bitwise | ||
res = res * BigInt(0x100) + BigInt(buf[i]); | ||
@@ -113,0 +116,0 @@ } |
@@ -9,17 +9,10 @@ import { AgentError } from './errors'; | ||
} | ||
export interface Cert { | ||
tree: HashTree; | ||
signature: ArrayBuffer; | ||
delegation?: Delegation; | ||
declare const enum NodeId { | ||
Empty = 0, | ||
Fork = 1, | ||
Labeled = 2, | ||
Leaf = 3, | ||
Pruned = 4 | ||
} | ||
declare const NodeId: { | ||
Empty: number; | ||
Fork: number; | ||
Labeled: number; | ||
Leaf: number; | ||
Pruned: number; | ||
}; | ||
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId]; | ||
export { NodeId }; | ||
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer]; | ||
export type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer]; | ||
/** | ||
@@ -30,7 +23,3 @@ * Make a human readable string out of a hash tree. | ||
export declare function hashTreeToString(tree: HashTree): string; | ||
interface Delegation extends Record<string, any> { | ||
subnet_id: ArrayBuffer; | ||
certificate: ArrayBuffer; | ||
} | ||
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>; | ||
type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>; | ||
export interface CreateCertificateOptions { | ||
@@ -55,9 +44,2 @@ /** | ||
blsVerify?: VerifyFunc; | ||
/** | ||
* The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @default 5 | ||
* This is used to verify the time the certificate was signed, particularly for validating Delegation certificates, which can live for longer than the default window of +/- 5 minutes. If the certificate is | ||
* older than the specified age, it will fail verification. | ||
*/ | ||
maxAgeInMinutes?: number; | ||
} | ||
@@ -68,3 +50,2 @@ export declare class Certificate { | ||
private _blsVerify; | ||
private _maxAgeInMinutes; | ||
private readonly cert; | ||
@@ -74,8 +55,8 @@ /** | ||
* CertificateVerificationError if the certificate cannot be verified. | ||
* @constructs Certificate | ||
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions} | ||
* @constructs {@link AuthClient} | ||
* @param {CreateCertificateOptions} options | ||
* @see {@link CreateCertificateOptions} | ||
* @param {ArrayBuffer} options.certificate The bytes of the certificate | ||
* @param {ArrayBuffer} options.rootKey The root key to verify against | ||
* @param {Principal} options.canisterId The effective or signing canister ID | ||
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @throws {CertificateVerificationError} | ||
@@ -86,3 +67,2 @@ */ | ||
lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined; | ||
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined; | ||
private verify; | ||
@@ -92,8 +72,2 @@ private _checkDelegationAndGetKey; | ||
/** | ||
* utility function to constrain the type of a path | ||
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup | ||
* @returns ArrayBuffer or Undefined | ||
*/ | ||
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined; | ||
/** | ||
* @param t | ||
@@ -106,19 +80,3 @@ */ | ||
*/ | ||
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined; | ||
/** | ||
* If the tree is a fork, flatten it into an array of trees | ||
* @param t - the tree to flatten | ||
* @returns HashTree[] - the flattened tree | ||
*/ | ||
export declare function flatten_forks(t: HashTree): HashTree[]; | ||
/** | ||
* Check if a canister falls within a range of canisters | ||
* @param canisterId Principal | ||
* @param ranges [Principal, Principal][] | ||
* @returns | ||
*/ | ||
export declare function check_canister_ranges(params: { | ||
canisterId: Principal; | ||
subnetId: Principal; | ||
tree: HashTree; | ||
}): boolean; | ||
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined; | ||
export {}; |
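For context on the certificate API diffed above, here is a minimal sketch of creating and querying a certificate through the static `Certificate.create` factory that consumes `CreateCertificateOptions`. The certificate bytes, root key, and canister ID below are placeholders, and `maxAgeInMinutes` is only accepted by the side of this diff that declares it.

```ts
import { Certificate, CertificateVerificationError } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

// Placeholder inputs: in practice these come from a read_state call and the agent's root key.
declare const certificateBytes: ArrayBuffer;
declare const rootKey: ArrayBuffer;

async function readCertifiedTime(): Promise<ArrayBuffer | undefined> {
  try {
    const cert = await Certificate.create({
      certificate: certificateBytes,
      rootKey,
      canisterId: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), // placeholder canister id
      maxAgeInMinutes: 5, // only accepted by the variant that declares this option
    });
    // lookup() resolves a path in the certified state tree to raw bytes, or undefined.
    return cert.lookup(['time']);
  } catch (e) {
    if (e instanceof CertificateVerificationError) {
      // Signature, delegation, or (where supported) freshness checks failed.
      return undefined;
    }
    throw e;
  }
}
```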
@@ -26,3 +26,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.check_canister_ranges = exports.flatten_forks = exports.lookup_path = exports.reconstruct = exports.lookupResultToBuffer = exports.Certificate = exports.hashTreeToString = exports.NodeId = exports.CertificateVerificationError = void 0; | ||
exports.lookup_path = exports.reconstruct = exports.Certificate = exports.hashTreeToString = exports.CertificateVerificationError = void 0; | ||
const cbor = __importStar(require("./cbor")); | ||
@@ -34,3 +34,2 @@ const errors_1 = require("./errors"); | ||
const bls = __importStar(require("./utils/bls")); | ||
const leb_1 = require("./utils/leb"); | ||
/** | ||
@@ -45,10 +44,2 @@ * A certificate may fail verification with respect to the provided public key | ||
exports.CertificateVerificationError = CertificateVerificationError; | ||
const NodeId = { | ||
Empty: 0, | ||
Fork: 1, | ||
Labeled: 2, | ||
Leaf: 3, | ||
Pruned: 4, | ||
}; | ||
exports.NodeId = NodeId; | ||
/** | ||
@@ -73,40 +64,18 @@ * Make a human readable string out of a hash tree. | ||
switch (tree[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return '()'; | ||
case NodeId.Fork: { | ||
if (tree[1] instanceof Array && tree[2] instanceof ArrayBuffer) { | ||
const left = hashTreeToString(tree[1]); | ||
const right = hashTreeToString(tree[2]); | ||
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`; | ||
} | ||
else { | ||
throw new Error('Invalid tree structure for fork'); | ||
} | ||
case 1 /* NodeId.Fork */: { | ||
const left = hashTreeToString(tree[1]); | ||
const right = hashTreeToString(tree[2]); | ||
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`; | ||
} | ||
case NodeId.Labeled: { | ||
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof ArrayBuffer) { | ||
const label = labelToString(tree[1]); | ||
const sub = hashTreeToString(tree[2]); | ||
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`; | ||
} | ||
else { | ||
throw new Error('Invalid tree structure for labeled'); | ||
} | ||
case 2 /* NodeId.Labeled */: { | ||
const label = labelToString(tree[1]); | ||
const sub = hashTreeToString(tree[2]); | ||
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`; | ||
} | ||
case NodeId.Leaf: { | ||
if (!tree[1]) { | ||
throw new Error('Invalid tree structure for leaf'); | ||
} | ||
else if (Array.isArray(tree[1])) { | ||
return JSON.stringify(tree[1]); | ||
} | ||
case 3 /* NodeId.Leaf */: { | ||
return `leaf(...${tree[1].byteLength} bytes)`; | ||
} | ||
case NodeId.Pruned: { | ||
if (!tree[1]) { | ||
throw new Error('Invalid tree structure for pruned'); | ||
} | ||
else if (Array.isArray(tree[1])) { | ||
return JSON.stringify(tree[1]); | ||
} | ||
case 4 /* NodeId.Pruned */: { | ||
return `pruned(${(0, buffer_1.toHex)(new Uint8Array(tree[1]))}`; | ||
@@ -134,20 +103,11 @@ } | ||
class Certificate { | ||
constructor(certificate, _rootKey, _canisterId, _blsVerify, | ||
// Default to 5 minutes | ||
_maxAgeInMinutes = 5) { | ||
this._rootKey = _rootKey; | ||
this._canisterId = _canisterId; | ||
this._blsVerify = _blsVerify; | ||
this._maxAgeInMinutes = _maxAgeInMinutes; | ||
this.cert = cbor.decode(new Uint8Array(certificate)); | ||
} | ||
/** | ||
* Create a new instance of a certificate, automatically verifying it. Throws a | ||
* CertificateVerificationError if the certificate cannot be verified. | ||
* @constructs Certificate | ||
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions} | ||
* @constructs {@link AuthClient} | ||
* @param {CreateCertificateOptions} options | ||
* @see {@link CreateCertificateOptions} | ||
* @param {ArrayBuffer} options.certificate The bytes of the certificate | ||
* @param {ArrayBuffer} options.rootKey The root key to verify against | ||
* @param {Principal} options.canisterId The effective or signing canister ID | ||
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @throws {CertificateVerificationError} | ||
@@ -160,13 +120,15 @@ */ | ||
} | ||
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify, options.maxAgeInMinutes); | ||
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify); | ||
await cert.verify(); | ||
return cert; | ||
} | ||
constructor(certificate, _rootKey, _canisterId, _blsVerify) { | ||
this._rootKey = _rootKey; | ||
this._canisterId = _canisterId; | ||
this._blsVerify = _blsVerify; | ||
this.cert = cbor.decode(new Uint8Array(certificate)); | ||
} | ||
lookup(path) { | ||
// constrain the type of the result, so that empty HashTree is undefined | ||
return lookupResultToBuffer(lookup_path(path, this.cert.tree)); | ||
return lookup_path(path, this.cert.tree); | ||
} | ||
lookup_label(label) { | ||
return this.lookup([label]); | ||
} | ||
async verify() { | ||
@@ -179,25 +141,2 @@ const rootHash = await reconstruct(this.cert.tree); | ||
let sigVer = false; | ||
const lookupTime = this.lookup(['time']); | ||
if (!lookupTime) { | ||
// Should never happen - time is always present in IC certificates | ||
throw new CertificateVerificationError('Certificate does not contain a time'); | ||
} | ||
const FIVE_MINUTES_IN_MSEC = 5 * 60 * 1000; | ||
const MAX_AGE_IN_MSEC = this._maxAgeInMinutes * 60 * 1000; | ||
const now = Date.now(); | ||
const earliestCertificateTime = now - MAX_AGE_IN_MSEC; | ||
const fiveMinutesFromNow = now + FIVE_MINUTES_IN_MSEC; | ||
const certTime = (0, leb_1.decodeTime)(lookupTime); | ||
if (certTime.getTime() < earliestCertificateTime) { | ||
throw new CertificateVerificationError(`Certificate is signed more than ${this._maxAgeInMinutes} minutes in the past. Certificate time: ` + | ||
certTime.toISOString() + | ||
' Current time: ' + | ||
new Date(now).toISOString()); | ||
} | ||
else if (certTime.getTime() > fiveMinutesFromNow) { | ||
throw new CertificateVerificationError('Certificate is signed more than 5 minutes in the future. Certificate time: ' + | ||
certTime.toISOString() + | ||
' Current time: ' + | ||
new Date(now).toISOString()); | ||
} | ||
try { | ||
@@ -221,11 +160,13 @@ sigVer = await this._blsVerify(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg)); | ||
canisterId: this._canisterId, | ||
blsVerify: this._blsVerify, | ||
// Do not check max age for delegation certificates | ||
maxAgeInMinutes: Infinity, | ||
}); | ||
const canisterInRange = check_canister_ranges({ | ||
canisterId: this._canisterId, | ||
subnetId: principal_1.Principal.fromUint8Array(new Uint8Array(d.subnet_id)), | ||
tree: cert.cert.tree, | ||
}); | ||
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']); | ||
if (!rangeLookup) { | ||
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`); | ||
} | ||
const ranges_arr = cbor.decode(rangeLookup); | ||
const ranges = ranges_arr.map(v => [ | ||
principal_1.Principal.fromUint8Array(v[0]), | ||
principal_1.Principal.fromUint8Array(v[1]), | ||
]); | ||
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId)); | ||
if (!canisterInRange) { | ||
@@ -256,17 +197,2 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${(0, buffer_1.toHex)(d.subnet_id)}`); | ||
/** | ||
* utility function to constrain the type of a path | ||
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup | ||
* @returns ArrayBuffer or Undefined | ||
*/ | ||
function lookupResultToBuffer(result) { | ||
if (result instanceof ArrayBuffer) { | ||
return result; | ||
} | ||
else if (result instanceof Uint8Array) { | ||
return result.buffer; | ||
} | ||
return undefined; | ||
} | ||
exports.lookupResultToBuffer = lookupResultToBuffer; | ||
/** | ||
* @param t | ||
@@ -276,11 +202,11 @@ */ | ||
switch (t[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return (0, request_id_1.hash)(domain_sep('ic-hashtree-empty')); | ||
case NodeId.Pruned: | ||
case 4 /* NodeId.Pruned */: | ||
return t[1]; | ||
case NodeId.Leaf: | ||
case 3 /* NodeId.Leaf */: | ||
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-leaf'), t[1])); | ||
case NodeId.Labeled: | ||
case 2 /* NodeId.Labeled */: | ||
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2]))); | ||
case NodeId.Fork: | ||
case 1 /* NodeId.Fork */: | ||
return (0, request_id_1.hash)((0, buffer_1.concat)(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2]))); | ||
@@ -304,20 +230,7 @@ default: | ||
switch (tree[0]) { | ||
case NodeId.Leaf: { | ||
// should not be undefined | ||
if (!tree[1]) | ||
throw new Error('Invalid tree structure for leaf'); | ||
if (tree[1] instanceof ArrayBuffer) { | ||
return tree[1]; | ||
} | ||
else if (tree[1] instanceof Uint8Array) { | ||
return tree[1].buffer; | ||
} | ||
else | ||
return tree[1]; | ||
case 3 /* NodeId.Leaf */: { | ||
return new Uint8Array(tree[1]).buffer; | ||
} | ||
case NodeId.Fork: { | ||
return tree; | ||
} | ||
default: { | ||
return tree; | ||
return undefined; | ||
} | ||
@@ -333,12 +246,7 @@ } | ||
exports.lookup_path = lookup_path; | ||
/** | ||
* If the tree is a fork, flatten it into an array of trees | ||
* @param t - the tree to flatten | ||
* @returns HashTree[] - the flattened tree | ||
*/ | ||
function flatten_forks(t) { | ||
switch (t[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return []; | ||
case NodeId.Fork: | ||
case 1 /* NodeId.Fork */: | ||
return flatten_forks(t[1]).concat(flatten_forks(t[2])); | ||
@@ -349,3 +257,2 @@ default: | ||
} | ||
exports.flatten_forks = flatten_forks; | ||
function find_label(l, trees) { | ||
@@ -356,3 +263,3 @@ if (trees.length === 0) { | ||
for (const t of trees) { | ||
if (t[0] === NodeId.Labeled) { | ||
if (t[0] === 2 /* NodeId.Labeled */) { | ||
const p = t[1]; | ||
@@ -365,23 +272,2 @@ if (isBufferEqual(l, p)) { | ||
} | ||
/** | ||
* Check if a canister falls within a range of canisters | ||
* @param canisterId Principal | ||
* @param ranges [Principal, Principal][] | ||
* @returns | ||
*/ | ||
function check_canister_ranges(params) { | ||
const { canisterId, subnetId, tree } = params; | ||
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree); | ||
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) { | ||
throw new Error(`Could not find canister ranges for subnet ${subnetId}`); | ||
} | ||
const ranges_arr = cbor.decode(rangeLookup); | ||
const ranges = ranges_arr.map(v => [ | ||
principal_1.Principal.fromUint8Array(v[0]), | ||
principal_1.Principal.fromUint8Array(v[1]), | ||
]); | ||
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId)); | ||
return canisterInRange; | ||
} | ||
exports.check_canister_ranges = check_canister_ranges; | ||
//# sourceMappingURL=certificate.js.map |
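Both sides of this diff perform the same delegation check, differing only in where it lives (a standalone `check_canister_ranges` helper versus inline code in `verify`). A minimal standalone sketch of that check, with placeholder principals, looks like this:

```ts
import { Principal } from '@dfinity/principal';

// A canister is covered by a subnet delegation if it falls inside any
// [start, end] principal range listed under 'canister_ranges'.
function canisterInRanges(
  canisterId: Principal,
  ranges: Array<[Principal, Principal]>,
): boolean {
  return ranges.some(([start, end]) => start.ltEq(canisterId) && end.gtEq(canisterId));
}

// Placeholder principals, for illustration only.
const covered = canisterInRanges(Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), [
  [Principal.fromText('aaaaa-aa'), Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai')],
]);
console.log(covered); // true if the canister falls inside any range
```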
import { ActorSubclass } from './actor'; | ||
export * from './actor'; | ||
export * from './agent'; | ||
export * from './auth'; | ||
export * from './certificate'; | ||
export * from './agent/http/transforms'; | ||
export * from './agent/http/types'; | ||
export * from './auth'; | ||
export * from './canisters/asset'; | ||
export * from './certificate'; | ||
export * from './der'; | ||
export * from './fetch_candid'; | ||
export * from './public_key'; | ||
export * from './request_id'; | ||
export * from './utils/bls'; | ||
export * from './utils/buffer'; | ||
export * from './utils/random'; | ||
export * as polling from './polling'; | ||
@@ -17,0 +14,0 @@ /** |
@@ -32,14 +32,11 @@ "use strict"; | ||
__exportStar(require("./agent"), exports); | ||
__exportStar(require("./auth"), exports); | ||
__exportStar(require("./certificate"), exports); | ||
__exportStar(require("./agent/http/transforms"), exports); | ||
__exportStar(require("./agent/http/types"), exports); | ||
__exportStar(require("./auth"), exports); | ||
__exportStar(require("./canisters/asset"), exports); | ||
__exportStar(require("./certificate"), exports); | ||
__exportStar(require("./der"), exports); | ||
__exportStar(require("./fetch_candid"), exports); | ||
__exportStar(require("./public_key"), exports); | ||
__exportStar(require("./request_id"), exports); | ||
__exportStar(require("./utils/bls"), exports); | ||
__exportStar(require("./utils/buffer"), exports); | ||
__exportStar(require("./utils/random"), exports); | ||
exports.polling = __importStar(require("./polling")); | ||
@@ -46,0 +43,0 @@ /** |
@@ -7,4 +7,4 @@ import { Principal } from '@dfinity/principal'; | ||
export { defaultStrategy } from './strategy'; | ||
export declare type PollStrategy = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<void>; | ||
export declare type PollStrategyFactory = () => PollStrategy; | ||
export type PollStrategy = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<void>; | ||
export type PollStrategyFactory = () => PollStrategy; | ||
/** | ||
@@ -11,0 +11,0 @@ * Polls the IC to check the status of the given request then |
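The `PollStrategy` type declared above is just a function that decides how long to wait between polls. A minimal custom strategy (sketched here instead of the exported `defaultStrategy`) could look like this:

```ts
import { polling } from '@dfinity/agent';

// A flat 500 ms delay between polls; the canister id, request id, and status
// arguments are ignored, but any function of this shape is a valid strategy.
const flatDelay: polling.PollStrategy = async (_canisterId, _requestId, _status) => {
  await new Promise<void>(resolve => setTimeout(resolve, 500));
};

// A factory wraps it so each polled call gets a fresh strategy instance.
const flatDelayFactory: polling.PollStrategyFactory = () => flatDelay;
```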
@@ -5,3 +5,3 @@ import { Principal } from '@dfinity/principal'; | ||
import { RequestId } from '../request_id'; | ||
export declare type Predicate<T> = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<T>; | ||
export type Predicate<T> = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<T>; | ||
/** | ||
@@ -8,0 +8,0 @@ * A best practices polling strategy: wait 2 seconds before the first poll, then 1 second |
@@ -1,2 +0,2 @@ | ||
export declare type RequestId = ArrayBuffer & { | ||
export type RequestId = ArrayBuffer & { | ||
__requestId__: void; | ||
@@ -22,8 +22,1 @@ }; | ||
export declare function requestIdOf(request: Record<string, any>): RequestId; | ||
/** | ||
* Hash a map into an ArrayBuffer using the representation-independent-hash function. | ||
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map | ||
* @param map - Any non-nested object | ||
* @returns ArrayBuffer | ||
*/ | ||
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer; |
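`requestIdOf` above hashes a flat map of request fields with the representation-independent hash (exposed separately as `hashOfMap` on one side of this diff). A sketch with illustrative field names and placeholder values:

```ts
import { requestIdOf, toHex } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

// The field names mirror a query request; canister, method, and argument
// bytes are placeholders for illustration.
const requestId = requestIdOf({
  request_type: 'query',
  canister_id: Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'),
  method_name: 'greet',
  arg: new Uint8Array([0x44, 0x49, 0x44, 0x4c]),
});

console.log(toHex(requestId)); // 32-byte sha256 digest, hex encoded
```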
@@ -6,6 +6,6 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.hashOfMap = exports.requestIdOf = exports.hashValue = exports.hash = void 0; | ||
exports.requestIdOf = exports.hashValue = exports.hash = void 0; | ||
const candid_1 = require("@dfinity/candid"); | ||
const borc_1 = __importDefault(require("borc")); | ||
const sha256_1 = require("@noble/hashes/sha256"); | ||
const js_sha256_1 = require("js-sha256"); | ||
const buffer_1 = require("./utils/buffer"); | ||
@@ -17,3 +17,3 @@ /** | ||
function hash(data) { | ||
return (0, buffer_1.uint8ToBuf)(sha256_1.sha256.create().update(new Uint8Array(data)).digest()); | ||
return js_sha256_1.sha256.create().update(new Uint8Array(data)).arrayBuffer(); | ||
} | ||
@@ -28,3 +28,2 @@ exports.hash = hash; | ||
if (value instanceof borc_1.default.Tagged) { | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
return hashValue(value.value); | ||
@@ -57,5 +56,2 @@ } | ||
} | ||
else if (typeof value === 'object') { | ||
return hashOfMap(value); | ||
} | ||
else if (typeof value === 'bigint') { | ||
@@ -86,13 +82,3 @@ // Do this check much later than the other bigint check because this one is much less | ||
function requestIdOf(request) { | ||
return hashOfMap(request); | ||
} | ||
exports.requestIdOf = requestIdOf; | ||
/** | ||
* Hash a map into an ArrayBuffer using the representation-independent-hash function. | ||
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map | ||
* @param map - Any non-nested object | ||
* @returns ArrayBuffer | ||
*/ | ||
function hashOfMap(map) { | ||
const hashed = Object.entries(map) | ||
const hashed = Object.entries(request) | ||
.filter(([, value]) => value !== undefined) | ||
@@ -109,6 +95,6 @@ .map(([key, value]) => { | ||
const concatenated = (0, buffer_1.concat)(...sorted.map(x => (0, buffer_1.concat)(...x))); | ||
const result = hash(concatenated); | ||
return result; | ||
const requestId = hash(concatenated); | ||
return requestId; | ||
} | ||
exports.hashOfMap = hashOfMap; | ||
exports.requestIdOf = requestIdOf; | ||
//# sourceMappingURL=request_id.js.map |
@@ -16,21 +16,2 @@ /** | ||
export declare function fromHex(hex: string): ArrayBuffer; | ||
/** | ||
* | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2 | ||
*/ | ||
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number; | ||
/** | ||
* Checks two array buffers for equality. | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns boolean | ||
*/ | ||
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean; | ||
/** | ||
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe. | ||
* @param {Uint8Array} arr Uint8Array to convert | ||
* @returns ArrayBuffer | ||
*/ | ||
export declare function uint8ToBuf(arr: Uint8Array): ArrayBuffer; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.uint8ToBuf = exports.bufEquals = exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0; | ||
exports.compare = exports.fromHex = exports.toHex = exports.concat = void 0; | ||
/** | ||
@@ -37,2 +37,3 @@ * Concatenate multiple array buffers. | ||
.reduce((acc, curr, i) => { | ||
// tslint:disable-next-line:no-bitwise | ||
acc[(i / 2) | 0] = (acc[(i / 2) | 0] || '') + curr; | ||
@@ -45,8 +46,2 @@ return acc; | ||
exports.fromHex = fromHex; | ||
/** | ||
* | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2 | ||
*/ | ||
function compare(b1, b2) { | ||
@@ -66,21 +61,2 @@ if (b1.byteLength !== b2.byteLength) { | ||
exports.compare = compare; | ||
/** | ||
* Checks two array buffers for equality. | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns boolean | ||
*/ | ||
function bufEquals(b1, b2) { | ||
return compare(b1, b2) === 0; | ||
} | ||
exports.bufEquals = bufEquals; | ||
/** | ||
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe. | ||
* @param {Uint8Array} arr Uint8Array to convert | ||
* @returns ArrayBuffer | ||
*/ | ||
function uint8ToBuf(arr) { | ||
return new DataView(arr.buffer, arr.byteOffset, arr.byteLength).buffer; | ||
} | ||
exports.uint8ToBuf = uint8ToBuf; | ||
//# sourceMappingURL=buffer.js.map |
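The helpers above are small ArrayBuffer utilities; `bufEquals` and `uint8ToBuf` are only exported by the side of this diff that declares them. A short usage sketch:

```ts
import { bufEquals, compare, fromHex, toHex, uint8ToBuf } from '@dfinity/agent';

const a = fromHex('c0ffee');
const b = uint8ToBuf(new Uint8Array([0xc0, 0xff, 0xee]));

console.log(toHex(a));        // 'c0ffee'
console.log(bufEquals(a, b)); // true: same bytes
console.log(compare(a, fromHex('00')) > 0); // true: the three-byte buffer sorts after the one-byte buffer
```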
@@ -13,3 +13,3 @@ /** | ||
declare function init(): Promise<InitOutput>; | ||
export declare type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; | ||
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; | ||
export interface InitOutput { | ||
@@ -16,0 +16,0 @@ readonly memory: WebAssembly.Memory; |
@@ -31,2 +31,4 @@ "use strict"; | ||
const wasm_1 = require("./wasm"); | ||
/* tslint:disable */ | ||
/* eslint-disable */ | ||
let wasm; | ||
@@ -33,0 +35,0 @@ const wasmBytes = base64Arraybuffer.decode(wasm_1.wasmBytesBase64); |
@@ -1,2 +0,2 @@ | ||
import { Agent, HttpDetailsResponse, QueryResponseRejected, SubmitResponse } from './agent'; | ||
import { Agent, QueryResponseRejected, SubmitResponse } from './agent'; | ||
import { AgentError } from './errors'; | ||
@@ -72,7 +72,7 @@ import { IDL } from '@dfinity/candid'; | ||
*/ | ||
export declare type ActorSubclass<T = Record<string, ActorMethod>> = Actor & T; | ||
export type ActorSubclass<T = Record<string, ActorMethod>> = Actor & T; | ||
/** | ||
* An actor method type, defined for each methods of the actor service. | ||
*/ | ||
export interface ActorMethod<Args extends unknown[] = unknown[], Ret = unknown> { | ||
export interface ActorMethod<Args extends unknown[] = unknown[], Ret extends unknown = unknown> { | ||
(...args: Args): Promise<Ret>; | ||
@@ -82,15 +82,2 @@ withOptions(options: CallConfig): (...args: Args) => Promise<Ret>; | ||
/** | ||
* An actor method type, defined for each methods of the actor service. | ||
*/ | ||
export interface ActorMethodWithHttpDetails<Args extends unknown[] = unknown[], Ret = unknown> extends ActorMethod { | ||
(...args: Args): Promise<{ | ||
httpDetails: HttpDetailsResponse; | ||
result: Ret; | ||
}>; | ||
} | ||
export declare type FunctionWithArgsAndReturn<Args extends unknown[] = unknown[], Ret = unknown> = (...args: Args) => Ret; | ||
export declare type ActorMethodMappedWithHttpDetails<T> = { | ||
[K in keyof T]: T[K] extends FunctionWithArgsAndReturn<infer Args, infer Ret> ? ActorMethodWithHttpDetails<Args, Ret> : never; | ||
}; | ||
/** | ||
* The mode used when installing a canister. | ||
@@ -114,5 +101,2 @@ */ | ||
declare const metadataSymbol: unique symbol; | ||
export interface CreateActorClassOpts { | ||
httpDetails?: boolean; | ||
} | ||
/** | ||
@@ -145,11 +129,9 @@ * An actor base class. An actor is an object containing only functions that will | ||
}, config?: CallConfig): Promise<ActorSubclass>; | ||
static createActorClass(interfaceFactory: IDL.InterfaceFactory, options?: CreateActorClassOpts): ActorConstructor; | ||
static createActorClass(interfaceFactory: IDL.InterfaceFactory): ActorConstructor; | ||
static createActor<T = Record<string, ActorMethod>>(interfaceFactory: IDL.InterfaceFactory, configuration: ActorConfig): ActorSubclass<T>; | ||
static createActorWithHttpDetails<T = Record<string, ActorMethod>>(interfaceFactory: IDL.InterfaceFactory, configuration: ActorConfig): ActorSubclass<ActorMethodMappedWithHttpDetails<T>>; | ||
private [metadataSymbol]; | ||
protected constructor(metadata: ActorMetadata); | ||
} | ||
export declare type ActorConstructor = new (config: ActorConfig) => ActorSubclass; | ||
export declare const ACTOR_METHOD_WITH_HTTP_DETAILS = "http-details"; | ||
export declare type ManagementCanisterRecord = _SERVICE; | ||
export type ActorConstructor = new (config: ActorConfig) => ActorSubclass; | ||
export type ManagementCanisterRecord = _SERVICE; | ||
/** | ||
@@ -156,0 +138,0 @@ * Create a management canister actor |
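Where the diff above declares `ActorMethodWithHttpDetails` and `createActorWithHttpDetails`, each actor method resolves to `{ httpDetails, result }` instead of the bare result. A sketch with a hypothetical one-method service and placeholder host/canister values:

```ts
import { Actor, HttpAgent } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';

// Hypothetical candid interface: a single query method greet : (text) -> (text).
const idlFactory: IDL.InterfaceFactory = ({ IDL }) =>
  IDL.Service({ greet: IDL.Func([IDL.Text], [IDL.Text], ['query']) });

interface GreetService {
  greet: (name: string) => Promise<string>;
}

async function main() {
  const agent = new HttpAgent({ host: 'https://icp-api.io' }); // placeholder host
  const actor = Actor.createActorWithHttpDetails<GreetService>(idlFactory, {
    agent,
    canisterId: 'ryjl3-tyaaa-aaaaa-aaaba-cai', // placeholder canister id
  });
  // Each call now also surfaces the HTTP response metadata alongside the decoded result.
  const { httpDetails, result } = await actor.greet('world');
  console.log(httpDetails.status, result);
}
```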
@@ -36,11 +36,7 @@ import { Buffer } from 'buffer/'; | ||
constructor(canisterId, methodName, requestId, response) { | ||
super(canisterId, methodName, 'update', Object.assign({ 'Request ID': toHex(requestId) }, (response.body | ||
? Object.assign(Object.assign({}, (response.body.error_code | ||
? { | ||
'Error code': response.body.error_code, | ||
} | ||
: {})), { 'Reject code': String(response.body.reject_code), 'Reject message': response.body.reject_message }) : { | ||
super(canisterId, methodName, 'update', { | ||
'Request ID': toHex(requestId), | ||
'HTTP status code': response.status.toString(), | ||
'HTTP status text': response.statusText, | ||
}))); | ||
}); | ||
this.requestId = requestId; | ||
@@ -65,5 +61,2 @@ this.response = response; | ||
export class Actor { | ||
constructor(metadata) { | ||
this[metadataSymbol] = Object.freeze(metadata); | ||
} | ||
/** | ||
@@ -112,8 +105,6 @@ * Get the Agent class this Actor would call, or undefined if the Actor would use | ||
} | ||
static createActorClass(interfaceFactory, options) { | ||
static createActorClass(interfaceFactory) { | ||
const service = interfaceFactory({ IDL }); | ||
class CanisterActor extends Actor { | ||
constructor(config) { | ||
if (!config.canisterId) | ||
throw new AgentError(`Canister ID is required, but received ${typeof config.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`); | ||
const canisterId = typeof config.canisterId === 'string' | ||
@@ -127,5 +118,2 @@ ? Principal.fromText(config.canisterId) | ||
for (const [methodName, func] of service._fields) { | ||
if (options === null || options === void 0 ? void 0 : options.httpDetails) { | ||
func.annotations.push(ACTOR_METHOD_WITH_HTTP_DETAILS); | ||
} | ||
this[methodName] = _createActorMethod(this, methodName, func, config.blsVerify); | ||
@@ -138,9 +126,6 @@ } | ||
static createActor(interfaceFactory, configuration) { | ||
if (!configuration.canisterId) { | ||
throw new AgentError(`Canister ID is required, but received ${typeof configuration.canisterId} instead. If you are using automatically generated declarations, this may be because your application is not setting the canister ID in process.env correctly.`); | ||
} | ||
return new (this.createActorClass(interfaceFactory))(configuration); | ||
} | ||
static createActorWithHttpDetails(interfaceFactory, configuration) { | ||
return new (this.createActorClass(interfaceFactory, { httpDetails: true }))(configuration); | ||
constructor(metadata) { | ||
this[metadataSymbol] = Object.freeze(metadata); | ||
} | ||
@@ -165,6 +150,5 @@ } | ||
}; | ||
export const ACTOR_METHOD_WITH_HTTP_DETAILS = 'http-details'; | ||
function _createActorMethod(actor, methodName, func, blsVerify) { | ||
let caller; | ||
if (func.annotations.includes('query') || func.annotations.includes('composite_query')) { | ||
if (func.annotations.includes('query')) { | ||
caller = async (options, ...args) => { | ||
@@ -182,8 +166,3 @@ var _a, _b; | ||
case "replied" /* QueryResponseStatus.Replied */: | ||
return func.annotations.includes(ACTOR_METHOD_WITH_HTTP_DETAILS) | ||
? { | ||
httpDetails: result.httpDetails, | ||
result: decodeReturnValue(func.retTypes, result.reply.arg), | ||
} | ||
: decodeReturnValue(func.retTypes, result.reply.arg); | ||
return decodeReturnValue(func.retTypes, result.reply.arg); | ||
} | ||
@@ -207,3 +186,3 @@ }; | ||
}); | ||
if (!response.ok || response.body /* IC-1462 */) { | ||
if (!response.ok) { | ||
throw new UpdateCallRejectedError(cid, methodName, requestId, response); | ||
@@ -213,18 +192,7 @@ } | ||
const responseBytes = await pollForResponse(agent, ecid, requestId, pollStrategy, blsVerify); | ||
const shouldIncludeHttpDetails = func.annotations.includes(ACTOR_METHOD_WITH_HTTP_DETAILS); | ||
if (responseBytes !== undefined) { | ||
return shouldIncludeHttpDetails | ||
? { | ||
httpDetails: response, | ||
result: decodeReturnValue(func.retTypes, responseBytes), | ||
} | ||
: decodeReturnValue(func.retTypes, responseBytes); | ||
return decodeReturnValue(func.retTypes, responseBytes); | ||
} | ||
else if (func.retTypes.length === 0) { | ||
return shouldIncludeHttpDetails | ||
? { | ||
httpDetails: response, | ||
result: undefined, | ||
} | ||
: undefined; | ||
return undefined; | ||
} | ||
@@ -231,0 +199,0 @@ else { |
@@ -5,3 +5,2 @@ import { Principal } from '@dfinity/principal'; | ||
import { Identity } from '../auth'; | ||
import { HttpHeaderField } from './http/types'; | ||
/** | ||
@@ -30,3 +29,3 @@ * Codes used by the replica for rejecting a message. | ||
*/ | ||
export declare type QueryResponse = QueryResponseReplied | QueryResponseRejected; | ||
export type QueryResponse = QueryResponseReplied | QueryResponseRejected; | ||
export declare const enum QueryResponseStatus { | ||
@@ -36,20 +35,5 @@ Replied = "replied", | ||
} | ||
export interface HttpDetailsResponse { | ||
ok: boolean; | ||
status: number; | ||
statusText: string; | ||
headers: HttpHeaderField[]; | ||
} | ||
export declare type ApiQueryResponse = QueryResponse & { | ||
httpDetails: HttpDetailsResponse; | ||
requestId: RequestId; | ||
}; | ||
export interface QueryResponseBase { | ||
status: QueryResponseStatus; | ||
} | ||
export declare type NodeSignature = { | ||
timestamp: bigint; | ||
signature: Uint8Array; | ||
identity: Uint8Array; | ||
}; | ||
export interface QueryResponseReplied extends QueryResponseBase { | ||
@@ -60,3 +44,2 @@ status: QueryResponseStatus.Replied; | ||
}; | ||
signatures?: NodeSignature[]; | ||
} | ||
@@ -67,4 +50,2 @@ export interface QueryResponseRejected extends QueryResponseBase { | ||
reject_message: string; | ||
error_code: string; | ||
signatures?: NodeSignature[]; | ||
} | ||
@@ -111,8 +92,2 @@ /** | ||
statusText: string; | ||
body: { | ||
error_code?: string; | ||
reject_code: number; | ||
reject_message: string; | ||
} | null; | ||
headers: HttpHeaderField[]; | ||
}; | ||
@@ -162,3 +137,2 @@ } | ||
* @param options Options to use to create and send the query. | ||
* @param identity Sender principal to use when sending the query. | ||
* @returns The response from the replica. The Promise will only reject when the communication | ||
@@ -168,3 +142,3 @@ * failed. If the query itself failed but no protocol errors happened, the response will | ||
*/ | ||
query(canisterId: Principal | string, options: QueryFields, identity?: Identity | Promise<Identity>): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, options: QueryFields): Promise<QueryResponse>; | ||
/** | ||
@@ -171,0 +145,0 @@ * By default, the agent is configured to talk to the main Internet Computer, |
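The `query` declaration above is the low-level entry point the actor methods build on. A sketch of calling it directly and decoding the reply; the canister ID and method name are placeholders, and only the side of this diff that returns `ApiQueryResponse` also exposes `httpDetails` and `requestId` on the response:

```ts
import { HttpAgent } from '@dfinity/agent';
import { IDL } from '@dfinity/candid';
import { Principal } from '@dfinity/principal';

async function rawGreet(): Promise<void> {
  const agent = new HttpAgent({ host: 'https://icp-api.io' });
  const response = await agent.query(Principal.fromText('ryjl3-tyaaa-aaaaa-aaaba-cai'), {
    methodName: 'greet',
    arg: IDL.encode([IDL.Text], ['world']),
  });
  if ('reply' in response) {
    // Replied: decode the candid-encoded return value.
    const [greeting] = IDL.decode([IDL.Text], response.reply.arg);
    console.log(greeting);
  } else {
    console.error(response.reject_code, response.reject_message);
  }
}
```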
@@ -5,5 +5,4 @@ import { JsonObject } from '@dfinity/candid'; | ||
import { Identity } from '../../auth'; | ||
import { Agent, ApiQueryResponse, QueryFields, ReadStateOptions, ReadStateResponse, SubmitResponse } from '../api'; | ||
import { Agent, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from '../api'; | ||
import { HttpAgentRequest, HttpAgentRequestTransformFn } from './types'; | ||
import { SubnetStatus } from '../../canisterStatus'; | ||
export * from './transforms'; | ||
@@ -35,11 +34,14 @@ export { Nonce, makeNonce } from './types'; | ||
/** | ||
* Adds a unique {@link Nonce} with each query. | ||
* Enabling will prevent queries from being answered with a cached response. | ||
* Prevents the agent from providing a unique {@link Nonce} with each call. | ||
* Enabling may cause rate limiting of identical requests | ||
* at the boundary nodes. | ||
* | ||
* To add your own nonce generation logic, you can use the following: | ||
* @example | ||
* const agent = new HttpAgent({ useQueryNonces: true }); | ||
* import {makeNonceTransform, makeNonce} from '@dfinity/agent'; | ||
* const agent = new HttpAgent({ disableNonce: true }); | ||
 * agent.addTransform(makeNonceTransform(makeNonce)); | ||
* @default false | ||
*/ | ||
useQueryNonces?: boolean; | ||
disableNonce?: boolean; | ||
/** | ||
@@ -50,11 +52,6 @@ * Number of times to retry requests before throwing an error | ||
retryTimes?: number; | ||
/** | ||
* Whether the agent should verify signatures signed by node keys on query responses. Increases security, but adds overhead and must make a separate request to cache the node keys for the canister's subnet. | ||
* @default true | ||
*/ | ||
verifyQuerySignatures?: boolean; | ||
} | ||
export declare class HttpAgent implements Agent { | ||
#private; | ||
rootKey: ArrayBuffer; | ||
private readonly _pipeline; | ||
private _identity; | ||
@@ -68,7 +65,7 @@ private readonly _fetch; | ||
private _rootKeyFetched; | ||
private readonly _retryTimes; | ||
private _retryTimes; | ||
readonly _isAgent = true; | ||
constructor(options?: HttpAgentOptions); | ||
isLocal(): boolean; | ||
addTransform(type: 'update' | 'query', fn: HttpAgentRequestTransformFn, priority?: number): void; | ||
addTransform(fn: HttpAgentRequestTransformFn, priority?: number): void; | ||
getPrincipal(): Promise<Principal>; | ||
@@ -81,3 +78,3 @@ call(canisterId: Principal | string, options: { | ||
private _requestAndRetry; | ||
query(canisterId: Principal | string, fields: QueryFields, identity?: Identity | Promise<Identity>): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, fields: QueryFields, identity?: Identity | Promise<Identity>): Promise<QueryResponse>; | ||
createReadStateRequest(fields: ReadStateOptions, identity?: Identity | Promise<Identity>): Promise<any>; | ||
@@ -87,3 +84,3 @@ readState(canisterId: Principal | string, fields: ReadStateOptions, identity?: Identity | Promise<Identity>, request?: any): Promise<ReadStateResponse>; | ||
 * Allows agent to sync its time with the network. Can be called during initialization or mid-lifecycle if the device's clock has drifted away from the network time. This is necessary to set the Expiry for a request | ||
* @param {Principal} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
* @param {PrincipalLike} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
*/ | ||
@@ -95,4 +92,3 @@ syncTime(canisterId?: Principal): Promise<void>; | ||
replaceIdentity(identity: Identity): void; | ||
fetchSubnetKeys(canisterId: Principal | string): Promise<SubnetStatus | undefined>; | ||
protected _transform(request: HttpAgentRequest): Promise<HttpAgentRequest>; | ||
} |
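The options discussed above can be combined when constructing the agent. A sketch against a local replica; the host and port are placeholders, and `verifyQuerySignatures` and `useQueryNonces` exist only on the side of this diff that declares them:

```ts
import { HttpAgent } from '@dfinity/agent';

const agent = new HttpAgent({
  host: 'http://127.0.0.1:4943',  // local replica; placeholder port
  retryTimes: 3,                  // retry failed requests up to 3 times
  verifyQuerySignatures: true,    // check node signatures on query responses
  useQueryNonces: false,          // keep query caching enabled
});

// Local replicas sign with a non-production root key, so fetch it before use.
void agent.fetchRootKey();
```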
@@ -1,13 +0,1 @@ | ||
var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { | ||
if (kind === "m") throw new TypeError("Private method is not writable"); | ||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); | ||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); | ||
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; | ||
}; | ||
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { | ||
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); | ||
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); | ||
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); | ||
}; | ||
var _HttpAgent_queryPipeline, _HttpAgent_updatePipeline, _HttpAgent_subnetKeys, _HttpAgent_verifyQuerySignatures, _HttpAgent_verifyQueryResponse; | ||
import { Principal } from '@dfinity/principal'; | ||
@@ -17,12 +5,6 @@ import { AgentError } from '../../errors'; | ||
import * as cbor from '../../cbor'; | ||
import { hashOfMap, requestIdOf } from '../../request_id'; | ||
import { concat, fromHex } from '../../utils/buffer'; | ||
import { Expiry, httpHeadersTransform, makeNonceTransform } from './transforms'; | ||
import { requestIdOf } from '../../request_id'; | ||
import { fromHex } from '../../utils/buffer'; | ||
import { Expiry, makeNonceTransform } from './transforms'; | ||
import { makeNonce, SubmitRequestType, } from './types'; | ||
import { AgentHTTPResponseError } from './errors'; | ||
import { request } from '../../canisterStatus'; | ||
import { CertificateVerificationError } from '../../certificate'; | ||
import { ed25519 } from '@noble/curves/ed25519'; | ||
import { ExpirableMap } from '../../utils/expirableMap'; | ||
import { Ed25519PublicKey } from '../../public_key'; | ||
export * from './transforms'; | ||
@@ -107,69 +89,7 @@ export { makeNonce } from './types'; | ||
this.rootKey = fromHex(IC_ROOT_KEY); | ||
this._pipeline = []; | ||
this._timeDiffMsecs = 0; | ||
this._rootKeyFetched = false; | ||
this._retryTimes = 3; // Retry requests 3 times before erroring by default | ||
this._isAgent = true; | ||
_HttpAgent_queryPipeline.set(this, []); | ||
_HttpAgent_updatePipeline.set(this, []); | ||
_HttpAgent_subnetKeys.set(this, new ExpirableMap({ | ||
expirationTime: 5 * 60 * 1000, // 5 minutes | ||
})); | ||
_HttpAgent_verifyQuerySignatures.set(this, true); | ||
/** | ||
* See https://internetcomputer.org/docs/current/references/ic-interface-spec/#http-query for details on validation | ||
* @param queryResponse - The response from the query | ||
* @param subnetStatus - The subnet status, including all node keys | ||
* @returns ApiQueryResponse | ||
*/ | ||
_HttpAgent_verifyQueryResponse.set(this, (queryResponse, subnetStatus) => { | ||
if (__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f") === false) { | ||
// This should not be called if the user has disabled verification | ||
return queryResponse; | ||
} | ||
if (!subnetStatus) { | ||
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
const { status, signatures = [], requestId } = queryResponse; | ||
const domainSeparator = new TextEncoder().encode('\x0Bic-response'); | ||
for (const sig of signatures) { | ||
const { timestamp, identity } = sig; | ||
const nodeId = Principal.fromUint8Array(identity).toText(); | ||
let hash; | ||
// Hash is constructed differently depending on the status | ||
if (status === 'replied') { | ||
const { reply } = queryResponse; | ||
hash = hashOfMap({ | ||
status: status, | ||
reply: reply, | ||
timestamp: BigInt(timestamp), | ||
request_id: requestId, | ||
}); | ||
} | ||
else if (status === 'rejected') { | ||
const { reject_code, reject_message, error_code } = queryResponse; | ||
hash = hashOfMap({ | ||
status: status, | ||
reject_code: reject_code, | ||
reject_message: reject_message, | ||
error_code: error_code, | ||
timestamp: BigInt(timestamp), | ||
request_id: requestId, | ||
}); | ||
} | ||
else { | ||
throw new Error(`Unknown status: ${status}`); | ||
} | ||
const separatorWithHash = concat(domainSeparator, new Uint8Array(hash)); | ||
// FIX: check for match without verifying N times | ||
const pubKey = subnetStatus === null || subnetStatus === void 0 ? void 0 : subnetStatus.nodeKeys.get(nodeId); | ||
if (!pubKey) { | ||
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
const rawKey = Ed25519PublicKey.fromDer(pubKey).rawKey; | ||
const valid = ed25519.verify(sig.signature, new Uint8Array(separatorWithHash), new Uint8Array(rawKey)); | ||
if (valid) | ||
return queryResponse; | ||
throw new CertificateVerificationError(`Invalid signature from replica ${nodeId} signed query.`); | ||
} | ||
return queryResponse; | ||
}); | ||
if (options.source) { | ||
@@ -179,2 +99,3 @@ if (!(options.source instanceof HttpAgent)) { | ||
} | ||
this._pipeline = [...options.source._pipeline]; | ||
this._identity = options.source._identity; | ||
@@ -205,27 +126,10 @@ this._fetch = options.source._fetch; | ||
if (!location) { | ||
this._host = new URL('https://icp-api.io'); | ||
console.warn('Could not infer host from window.location, defaulting to mainnet gateway of https://icp-api.io. Please provide a host to the HttpAgent constructor to avoid this warning.'); | ||
throw new Error('Must specify a host to connect to.'); | ||
} | ||
// Mainnet and local will have the api route available | ||
const knownHosts = ['ic0.app', 'icp0.io', '127.0.0.1', 'localhost']; | ||
const hostname = location === null || location === void 0 ? void 0 : location.hostname; | ||
let knownHost; | ||
if (hostname && typeof hostname === 'string') { | ||
knownHost = knownHosts.find(host => hostname.endsWith(host)); | ||
} | ||
if (location && knownHost) { | ||
// If the user is on a boundary-node provided host, we can use the same host for the agent | ||
this._host = new URL(`${location.protocol}//${knownHost}${location.port ? ':' + location.port : ''}`); | ||
} | ||
else { | ||
this._host = new URL('https://icp-api.io'); | ||
console.warn('Could not infer host from window.location, defaulting to mainnet gateway of https://icp-api.io. Please provide a host to the HttpAgent constructor to avoid this warning.'); | ||
} | ||
this._host = new URL(location + ''); | ||
} | ||
if (options.verifyQuerySignatures !== undefined) { | ||
__classPrivateFieldSet(this, _HttpAgent_verifyQuerySignatures, options.verifyQuerySignatures, "f"); | ||
// Default is 3, only set if option is provided | ||
if (options.retryTimes !== undefined) { | ||
this._retryTimes = options.retryTimes; | ||
} | ||
// Default is 3, only set from option if greater or equal to 0 | ||
this._retryTimes = | ||
options.retryTimes !== undefined && options.retryTimes >= 0 ? options.retryTimes : 3; | ||
// Rewrite to avoid redirects | ||
@@ -247,5 +151,4 @@ if (this._host.hostname.endsWith(IC0_SUB_DOMAIN)) { | ||
// Add a nonce transform to ensure calls are unique | ||
this.addTransform('update', makeNonceTransform(makeNonce)); | ||
if (options.useQueryNonces) { | ||
this.addTransform('query', makeNonceTransform(makeNonce)); | ||
if (!options.disableNonce) { | ||
this.addTransform(makeNonceTransform(makeNonce)); | ||
} | ||
@@ -255,15 +158,8 @@ } | ||
const hostname = this._host.hostname; | ||
return hostname === '127.0.0.1' || hostname.endsWith('127.0.0.1'); | ||
return hostname === '127.0.0.1' || hostname.endsWith('localhost'); | ||
} | ||
addTransform(type, fn, priority = fn.priority || 0) { | ||
if (type === 'update') { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").findIndex(x => (x.priority || 0) < priority); | ||
__classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f").length, 0, Object.assign(fn, { priority })); | ||
} | ||
else if (type === 'query') { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").findIndex(x => (x.priority || 0) < priority); | ||
__classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").splice(i >= 0 ? i : __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f").length, 0, Object.assign(fn, { priority })); | ||
} | ||
addTransform(fn, priority = fn.priority || 0) { | ||
// Keep the pipeline sorted at all time, by priority. | ||
const i = this._pipeline.findIndex(x => (x.priority || 0) < priority); | ||
this._pipeline.splice(i >= 0 ? i : this._pipeline.length, 0, Object.assign(fn, { priority })); | ||
} | ||
@@ -316,4 +212,2 @@ async getPrincipal() { | ||
const [response, requestId] = await Promise.all([request, requestIdOf(submit)]); | ||
const responseBuffer = await response.arrayBuffer(); | ||
const responseBody = (response.status === 200 && responseBuffer.byteLength > 0 ? cbor.decode(responseBuffer) : null); | ||
return { | ||
@@ -325,4 +219,2 @@ requestId, | ||
statusText: response.statusText, | ||
body: responseBody, | ||
headers: httpHeadersTransform(response.headers), | ||
}, | ||
@@ -332,103 +224,51 @@ }; | ||
async _requestAndRetry(request, tries = 0) { | ||
let response; | ||
try { | ||
response = await request(); | ||
if (tries > this._retryTimes && this._retryTimes !== 0) { | ||
throw new Error(`AgentError: Exceeded configured limit of ${this._retryTimes} retry attempts. Please check your network connection or try again in a few moments`); | ||
} | ||
catch (error) { | ||
const response = await request(); | ||
if (!response.ok) { | ||
const responseText = await response.clone().text(); | ||
const errorMessage = `Server returned an error:\n` + | ||
` Code: ${response.status} (${response.statusText})\n` + | ||
` Body: ${responseText}\n`; | ||
if (this._retryTimes > tries) { | ||
console.warn(`Caught exception while attempting to make request:\n` + | ||
` ${error}\n` + | ||
` Retrying request.`); | ||
console.warn(errorMessage + ` Retrying request.`); | ||
return await this._requestAndRetry(request, tries + 1); | ||
} | ||
throw error; | ||
else { | ||
throw new Error(errorMessage); | ||
} | ||
} | ||
if (response.ok) { | ||
return response; | ||
} | ||
const responseText = await response.clone().text(); | ||
const errorMessage = `Server returned an error:\n` + | ||
` Code: ${response.status} (${response.statusText})\n` + | ||
` Body: ${responseText}\n`; | ||
if (this._retryTimes > tries) { | ||
console.warn(errorMessage + ` Retrying request.`); | ||
return await this._requestAndRetry(request, tries + 1); | ||
} | ||
throw new AgentHTTPResponseError(errorMessage, { | ||
ok: response.ok, | ||
status: response.status, | ||
statusText: response.statusText, | ||
headers: httpHeadersTransform(response.headers), | ||
}); | ||
return response; | ||
} | ||
async query(canisterId, fields, identity) { | ||
const makeQuery = async () => { | ||
const id = await (identity !== undefined ? await identity : await this._identity); | ||
if (!id) { | ||
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication."); | ||
} | ||
const canister = Principal.from(canisterId); | ||
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous(); | ||
const request = { | ||
request_type: "query" /* ReadRequestType.Query */, | ||
canister_id: canister, | ||
method_name: fields.methodName, | ||
arg: fields.arg, | ||
sender, | ||
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS), | ||
}; | ||
const requestId = await requestIdOf(request); | ||
// TODO: remove this any. This can be a Signed or UnSigned request. | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let transformedRequest = await this._transform({ | ||
request: { | ||
method: 'POST', | ||
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})), | ||
}, | ||
endpoint: "read" /* Endpoint.Query */, | ||
body: request, | ||
}); | ||
// Apply transform for identity. | ||
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest)); | ||
const body = cbor.encode(transformedRequest.body); | ||
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body }))); | ||
const queryResponse = cbor.decode(await response.arrayBuffer()); | ||
return Object.assign(Object.assign({}, queryResponse), { httpDetails: { | ||
ok: response.ok, | ||
status: response.status, | ||
statusText: response.statusText, | ||
headers: httpHeadersTransform(response.headers), | ||
}, requestId }); | ||
const id = await (identity !== undefined ? await identity : await this._identity); | ||
if (!id) { | ||
throw new IdentityInvalidError("This identity has expired due to this application's security policy. Please refresh your authentication."); | ||
} | ||
const canister = typeof canisterId === 'string' ? Principal.fromText(canisterId) : canisterId; | ||
const sender = (id === null || id === void 0 ? void 0 : id.getPrincipal()) || Principal.anonymous(); | ||
const request = { | ||
request_type: "query" /* ReadRequestType.Query */, | ||
canister_id: canister, | ||
method_name: fields.methodName, | ||
arg: fields.arg, | ||
sender, | ||
ingress_expiry: new Expiry(DEFAULT_INGRESS_EXPIRY_DELTA_IN_MSECS), | ||
}; | ||
const getSubnetStatus = async () => { | ||
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) { | ||
return undefined; | ||
} | ||
const subnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
if (subnetStatus) { | ||
return subnetStatus; | ||
} | ||
await this.fetchSubnetKeys(canisterId.toString()); | ||
return __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
}; | ||
// Make query and fetch subnet keys in parallel | ||
const [query, subnetStatus] = await Promise.all([makeQuery(), getSubnetStatus()]); | ||
// Skip verification if the user has disabled it | ||
if (!__classPrivateFieldGet(this, _HttpAgent_verifyQuerySignatures, "f")) { | ||
return query; | ||
} | ||
try { | ||
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, subnetStatus); | ||
} | ||
catch (_) { | ||
// In case the node signatures have changed, refresh the subnet keys and try again | ||
console.warn('Query response verification failed. Retrying with fresh subnet keys.'); | ||
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").delete(canisterId.toString()); | ||
await this.fetchSubnetKeys(canisterId.toString()); | ||
const updatedSubnetStatus = __classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").get(canisterId.toString()); | ||
if (!updatedSubnetStatus) { | ||
throw new CertificateVerificationError('Invalid signature from replica signed query: no matching node key found.'); | ||
} | ||
return __classPrivateFieldGet(this, _HttpAgent_verifyQueryResponse, "f").call(this, query, updatedSubnetStatus); | ||
} | ||
// TODO: remove this any. This can be a Signed or UnSigned request. | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let transformedRequest = await this._transform({ | ||
request: { | ||
method: 'POST', | ||
headers: Object.assign({ 'Content-Type': 'application/cbor' }, (this._credentials ? { Authorization: 'Basic ' + btoa(this._credentials) } : {})), | ||
}, | ||
endpoint: "read" /* Endpoint.Query */, | ||
body: request, | ||
}); | ||
// Apply transform for identity. | ||
transformedRequest = await (id === null || id === void 0 ? void 0 : id.transformRequest(transformedRequest)); | ||
const body = cbor.encode(transformedRequest.body); | ||
const response = await this._requestAndRetry(() => this._fetch('' + new URL(`/api/v2/canister/${canister.toText()}/query`, this._host), Object.assign(Object.assign(Object.assign({}, this._fetchOptions), transformedRequest.request), { body }))); | ||
return cbor.decode(await response.arrayBuffer()); | ||
} | ||
@@ -476,3 +316,3 @@ async createReadStateRequest(fields, identity) { | ||
 * Allows agent to sync its time with the network. Can be called during initialization or mid-lifecycle if the device's clock has drifted away from the network time. This is necessary to set the Expiry for a request | ||
* @param {Principal} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
* @param {PrincipalLike} canisterId - Pass a canister ID if you need to sync the time with a particular replica. Uses the management canister by default | ||
*/ | ||
@@ -524,33 +364,10 @@ async syncTime(canisterId) { | ||
} | ||
async fetchSubnetKeys(canisterId) { | ||
const effectiveCanisterId = Principal.from(canisterId); | ||
const response = await request({ | ||
canisterId: effectiveCanisterId, | ||
paths: ['subnet'], | ||
agent: this, | ||
}); | ||
const subnetResponse = response.get('subnet'); | ||
if (subnetResponse && typeof subnetResponse === 'object' && 'nodeKeys' in subnetResponse) { | ||
__classPrivateFieldGet(this, _HttpAgent_subnetKeys, "f").set(effectiveCanisterId.toText(), subnetResponse); | ||
return subnetResponse; | ||
} | ||
// If the subnet status is not returned, return undefined | ||
return undefined; | ||
} | ||
_transform(request) { | ||
let p = Promise.resolve(request); | ||
if (request.endpoint === "call" /* Endpoint.Call */) { | ||
for (const fn of __classPrivateFieldGet(this, _HttpAgent_updatePipeline, "f")) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
for (const fn of this._pipeline) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
else { | ||
for (const fn of __classPrivateFieldGet(this, _HttpAgent_queryPipeline, "f")) { | ||
p = p.then(r => fn(r).then(r2 => r2 || r)); | ||
} | ||
} | ||
return p; | ||
} | ||
} | ||
_HttpAgent_queryPipeline = new WeakMap(), _HttpAgent_updatePipeline = new WeakMap(), _HttpAgent_subnetKeys = new WeakMap(), _HttpAgent_verifyQuerySignatures = new WeakMap(), _HttpAgent_verifyQueryResponse = new WeakMap(); | ||
//# sourceMappingURL=index.js.map |
import * as cbor from 'simple-cbor'; | ||
import { HttpAgentRequestTransformFn, HttpHeaderField, Nonce } from './types'; | ||
import { HttpAgentRequestTransformFn, Nonce } from './types'; | ||
export declare class Expiry { | ||
@@ -21,8 +21,1 @@ private readonly _value; | ||
export declare function makeExpiryTransform(delayInMilliseconds: number): HttpAgentRequestTransformFn; | ||
/** | ||
* Maps the default fetch headers field to the serializable HttpHeaderField. | ||
* | ||
* @param headers Fetch definition of the headers type | ||
* @returns array of header fields | ||
*/ | ||
export declare function httpHeadersTransform(headers: Headers): HttpHeaderField[]; |
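`httpHeadersTransform` above converts a fetch `Headers` object into the serializable `[name, value]` pairs used by `HttpDetailsResponse`; it only exists on the side of this diff that declares it. A short sketch with placeholder header values:

```ts
import { httpHeadersTransform } from '@dfinity/agent';

const headers = new Headers({ 'content-type': 'application/cbor' });
headers.append('x-example-header', 'demo'); // placeholder header for illustration

const fields = httpHeadersTransform(headers);
// fields is an array of [name, value] tuples, e.g.
// [['content-type', 'application/cbor'], ['x-example-header', 'demo']]
console.log(fields);
```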
import { lebEncode } from '@dfinity/candid'; | ||
import * as cbor from 'simple-cbor'; | ||
import { makeNonce, } from './types'; | ||
import { makeNonce } from './types'; | ||
const NANOSECONDS_PER_MILLISECONDS = BigInt(1000000); | ||
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000; | ||
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = BigInt(60 * 1000); | ||
export class Expiry { | ||
constructor(deltaInMSec) { | ||
// Use bigint because it can overflow the maximum number allowed in a double float. | ||
const raw_value = BigInt(Math.floor(Date.now() + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) * | ||
NANOSECONDS_PER_MILLISECONDS; | ||
// round down to the nearest second | ||
const ingress_as_seconds = raw_value / BigInt(1000000000); | ||
// round down to nearest minute | ||
const ingress_as_minutes = ingress_as_seconds / BigInt(60); | ||
const rounded_down_nanos = ingress_as_minutes * BigInt(60) * BigInt(1000000000); | ||
this._value = rounded_down_nanos; | ||
this._value = | ||
(BigInt(Date.now()) + BigInt(deltaInMSec) - REPLICA_PERMITTED_DRIFT_MILLISECONDS) * | ||
NANOSECONDS_PER_MILLISECONDS; | ||
} | ||
@@ -33,2 +28,3 @@ toCBOR() { | ||
return async (request) => { | ||
const nonce = nonceFn(); | ||
// Nonce needs to be inserted into the header for all requests, to enable logs to be correlated with requests. | ||
@@ -55,15 +51,2 @@ const headers = request.request.headers; | ||
} | ||
/** | ||
* Maps the default fetch headers field to the serializable HttpHeaderField. | ||
* | ||
* @param headers Fetch definition of the headers type | ||
* @returns array of header fields | ||
*/ | ||
export function httpHeadersTransform(headers) { | ||
const headerFields = []; | ||
headers.forEach((value, key) => { | ||
headerFields.push([key, value]); | ||
}); | ||
return headerFields; | ||
} | ||
//# sourceMappingURL=transforms.js.map |
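The two `Expiry` constructors above compute the ingress expiry differently: one simply subtracts the permitted drift, the other also rounds the result down to a whole minute. A standalone sketch of the rounding variant, mirroring the arithmetic shown in the diff:

```ts
const NANOSECONDS_PER_MILLISECOND = BigInt(1_000_000);
const REPLICA_PERMITTED_DRIFT_MILLISECONDS = 60 * 1000;

function roundedExpiryNanos(deltaInMSec: number, nowMs: number = Date.now()): bigint {
  // (now + delta - drift), expressed in nanoseconds.
  const raw =
    BigInt(Math.floor(nowMs + deltaInMSec - REPLICA_PERMITTED_DRIFT_MILLISECONDS)) *
    NANOSECONDS_PER_MILLISECOND;
  const asSeconds = raw / BigInt(1_000_000_000); // round down to the nearest second
  const asMinutes = asSeconds / BigInt(60);      // round down to the nearest minute
  return asMinutes * BigInt(60) * BigInt(1_000_000_000);
}

// Example: a 5-minute delta always lands on a whole-minute boundary.
console.log(roundedExpiryNanos(5 * 60 * 1000) % BigInt(60_000_000_000)); // 0n
```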
@@ -11,3 +11,3 @@ import type { Principal } from '@dfinity/principal'; | ||
} | ||
export declare type HttpAgentRequest = HttpAgentQueryRequest | HttpAgentSubmitRequest | HttpAgentReadStateRequest; | ||
export type HttpAgentRequest = HttpAgentQueryRequest | HttpAgentSubmitRequest | HttpAgentReadStateRequest; | ||
export interface HttpAgentBaseRequest { | ||
@@ -17,3 +17,2 @@ readonly endpoint: Endpoint; | ||
} | ||
export declare type HttpHeaderField = [string, string]; | ||
export interface HttpAgentSubmitRequest extends HttpAgentBaseRequest { | ||
@@ -39,3 +38,3 @@ readonly endpoint: Endpoint.Call; | ||
} | ||
export declare type Envelope<T> = Signed<T> | UnSigned<T>; | ||
export type Envelope<T> = Signed<T> | UnSigned<T>; | ||
export interface HttpAgentRequestTransformFn { | ||
@@ -74,9 +73,9 @@ (args: HttpAgentRequest): Promise<HttpAgentRequest | undefined | void>; | ||
} | ||
export declare type ReadRequest = QueryRequest | ReadStateRequest; | ||
export declare type Nonce = Uint8Array & { | ||
export type ReadRequest = QueryRequest | ReadStateRequest; | ||
export type Nonce = Uint8Array & { | ||
__nonce__: void; | ||
}; | ||
/** | ||
* Create a random Nonce, based on random values | ||
* Create a random Nonce, based on date and a random suffix. | ||
*/ | ||
export declare function makeNonce(): Nonce; |
@@ -1,2 +0,2 @@ | ||
import { randomNumber } from '../../utils/random'; | ||
// tslint:enable:camel-case | ||
// The types of values allowed in the `request_type` field for submit requests. | ||
@@ -8,3 +8,3 @@ export var SubmitRequestType; | ||
/** | ||
* Create a random Nonce, based on random values | ||
* Create a random Nonce, based on date and a random suffix. | ||
*/ | ||
@@ -15,12 +15,18 @@ export function makeNonce() { | ||
const view = new DataView(buffer); | ||
const rand1 = randomNumber(); | ||
const rand2 = randomNumber(); | ||
const rand3 = randomNumber(); | ||
const rand4 = randomNumber(); | ||
view.setUint32(0, rand1); | ||
view.setUint32(4, rand2); | ||
view.setUint32(8, rand3); | ||
view.setUint32(12, rand4); | ||
const now = BigInt(+Date.now()); | ||
const randHi = Math.floor(Math.random() * 0xffffffff); | ||
const randLo = Math.floor(Math.random() * 0xffffffff); | ||
// Fix for IOS < 14.8 setBigUint64 absence | ||
if (typeof view.setBigUint64 === 'function') { | ||
view.setBigUint64(0, now); | ||
} | ||
else { | ||
const TWO_TO_THE_32 = BigInt(1) << BigInt(32); | ||
view.setUint32(0, Number(now >> BigInt(32))); | ||
view.setUint32(4, Number(now % TWO_TO_THE_32)); | ||
} | ||
view.setUint32(8, randHi); | ||
view.setUint32(12, randLo); | ||
return buffer; | ||
} | ||
//# sourceMappingURL=types.js.map |
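A self-contained sketch of the timestamp-plus-random nonce layout shown above, including the manual 64-bit split used when DataView.setBigUint64 is unavailable (older iOS Safari). The function name is illustrative; it is not the package's makeNonce.

function makeNonceSketch(): Uint8Array {
  const buffer = new ArrayBuffer(16);
  const view = new DataView(buffer);
  const now = BigInt(Date.now());
  if (typeof view.setBigUint64 === 'function') {
    // The timestamp fills the first 8 bytes in one call.
    view.setBigUint64(0, now);
  } else {
    // Fallback: split the 64-bit timestamp into two 32-bit halves.
    const TWO_TO_THE_32 = BigInt(1) << BigInt(32);
    view.setUint32(0, Number(now / TWO_TO_THE_32));
    view.setUint32(4, Number(now % TWO_TO_THE_32));
  }
  // A 64-bit random suffix keeps nonces unique within the same millisecond.
  view.setUint32(8, Math.floor(Math.random() * 0xffffffff));
  view.setUint32(12, Math.floor(Math.random() * 0xffffffff));
  return new Uint8Array(buffer);
}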
import { JsonObject } from '@dfinity/candid'; | ||
import { Agent, ApiQueryResponse, CallOptions, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from './api'; | ||
import { Agent, CallOptions, QueryFields, QueryResponse, ReadStateOptions, ReadStateResponse, SubmitResponse } from './api'; | ||
import { Principal } from '@dfinity/principal'; | ||
@@ -63,3 +63,3 @@ export declare enum ProxyMessageKind { | ||
} | ||
export declare type ProxyMessage = ProxyMessageError | ProxyMessageGetPrincipal | ProxyMessageGetPrincipalResponse | ProxyMessageQuery | ProxyMessageQueryResponse | ProxyMessageCall | ProxyMessageReadState | ProxyMessageReadStateResponse | ProxyMessageCallResponse | ProxyMessageStatus | ProxyMessageStatusResponse; | ||
export type ProxyMessage = ProxyMessageError | ProxyMessageGetPrincipal | ProxyMessageGetPrincipalResponse | ProxyMessageQuery | ProxyMessageQueryResponse | ProxyMessageCall | ProxyMessageReadState | ProxyMessageReadStateResponse | ProxyMessageCallResponse | ProxyMessageStatus | ProxyMessageStatusResponse; | ||
export declare class ProxyStubAgent { | ||
@@ -82,5 +82,5 @@ private _frontend; | ||
status(): Promise<JsonObject>; | ||
query(canisterId: Principal | string, fields: QueryFields): Promise<ApiQueryResponse>; | ||
query(canisterId: Principal | string, fields: QueryFields): Promise<QueryResponse>; | ||
private _sendAndWait; | ||
fetchRootKey(): Promise<ArrayBuffer>; | ||
} |
@@ -13,3 +13,3 @@ import { Principal } from '@dfinity/principal'; | ||
*/ | ||
export declare type DerEncodedPublicKey = ArrayBuffer & { | ||
export type DerEncodedPublicKey = ArrayBuffer & { | ||
__derEncodedPublicKey__?: void; | ||
@@ -20,3 +20,3 @@ }; | ||
*/ | ||
export declare type Signature = ArrayBuffer & { | ||
export type Signature = ArrayBuffer & { | ||
__signature__: void; | ||
@@ -29,4 +29,2 @@ }; | ||
toDer(): DerEncodedPublicKey; | ||
rawKey?: ArrayBuffer; | ||
derKey?: DerEncodedPublicKey; | ||
} | ||
@@ -87,3 +85,3 @@ /** | ||
} | ||
export declare type IdentityDescriptor = AnonymousIdentityDescriptor | PublicKeyIdentityDescriptor; | ||
export type IdentityDescriptor = AnonymousIdentityDescriptor | PublicKeyIdentityDescriptor; | ||
/** | ||
@@ -90,0 +88,0 @@ * Create an IdentityDescriptor from a @dfinity/identity Identity |
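The DerEncodedPublicKey and Signature declarations above are branded ArrayBuffers. Below is a hedged sketch of how such brands are typically applied at a boundary; the helper names are placeholders, not part of the package.

import type { DerEncodedPublicKey, Signature } from '@dfinity/agent';

// Cast once where the bytes enter the system; the brand then stops an
// arbitrary ArrayBuffer from being passed where a Signature is expected.
function asSignature(bytes: ArrayBuffer): Signature {
  return bytes as Signature;
}

function asDerEncodedPublicKey(bytes: ArrayBuffer): DerEncodedPublicKey {
  return bytes as DerEncodedPublicKey;
}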
/** | ||
* This file is generated from the candid for asset management. | ||
*/ | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
/* tslint:disable */ | ||
// @ts-ignore | ||
@@ -6,0 +6,0 @@ export default ({ IDL }) => { |
import { Actor } from '../actor'; | ||
import assetCanister from './asset_idl'; | ||
/* tslint:enable */ | ||
/** | ||
@@ -4,0 +5,0 @@ * Create a management canister actor. |
/** | ||
* This file is generated from the candid for asset management. | ||
*/ | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
/* tslint:disable */ | ||
// @ts-ignore | ||
@@ -6,0 +6,0 @@ export default ({ IDL }) => { |
@@ -5,3 +5,3 @@ /** | ||
import type { Principal } from '@dfinity/principal'; | ||
export declare type canister_id = Principal; | ||
export type canister_id = Principal; | ||
export interface canister_settings { | ||
@@ -19,4 +19,4 @@ controllers: [] | [Array<Principal>]; | ||
} | ||
export declare type user_id = Principal; | ||
export declare type wasm_module = Array<number>; | ||
export type user_id = Principal; | ||
export type wasm_module = Array<number>; | ||
export default interface _SERVICE { | ||
@@ -23,0 +23,0 @@ canister_status: (arg_0: { |
@@ -1,5 +0,2 @@ | ||
/** | ||
* This file is generated from the candid for asset management. | ||
*/ | ||
export {}; | ||
//# sourceMappingURL=management_service.js.map |
@@ -5,26 +5,7 @@ /** @module CanisterStatus */ | ||
import { CreateCertificateOptions } from '../certificate'; | ||
import { DerEncodedPublicKey } from '..'; | ||
/** | ||
* Represents the useful information about a subnet | ||
* @param {string} subnetId the principal id of the canister's subnet | ||
* @param {Map<string, DerEncodedPublicKey>} nodeKeys the DER-encoded public keys of the individual nodes in the subnet, keyed by node id | ||
*/ | ||
export declare type SubnetStatus = { | ||
subnetId: string; | ||
nodeKeys: Map<string, DerEncodedPublicKey>; | ||
metrics?: { | ||
num_canisters: bigint; | ||
canister_state_bytes: bigint; | ||
consumed_cycles_total: { | ||
current: bigint; | ||
deleted: bigint; | ||
}; | ||
update_transactions_total: bigint; | ||
}; | ||
}; | ||
/** | ||
* Types of an entry on the canisterStatus map. | ||
* An entry of null indicates that the request failed, due to lack of permissions or the result being missing. | ||
*/ | ||
export declare type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | SubnetStatus | bigint | null; | ||
export type Status = string | ArrayBuffer | Date | ArrayBuffer[] | Principal[] | bigint | null; | ||
/** | ||
@@ -52,5 +33,5 @@ * Interface to define a custom path. Nested paths will be represented as individual buffers, and can be created from text using {@link TextEncoder} | ||
*/ | ||
export declare type Path = 'time' | 'controllers' | 'subnet' | 'module_hash' | 'candid' | MetaData | CustomPath; | ||
export declare type StatusMap = Map<Path | string, Status>; | ||
export declare type CanisterStatusOptions = { | ||
export type Path = 'time' | 'controllers' | 'subnet' | 'module_hash' | 'candid' | MetaData | CustomPath; | ||
export type StatusMap = Map<Path | string, Status>; | ||
export type CanisterStatusOptions = { | ||
canisterId: Principal; | ||
@@ -81,3 +62,2 @@ agent: HttpAgent; | ||
}) => Promise<StatusMap>; | ||
export declare const fetchNodeKeys: (certificate: ArrayBuffer, canisterId: Principal, root_key?: ArrayBuffer | Uint8Array) => SubnetStatus; | ||
export declare const encodePath: (path: Path, canisterId: Principal) => ArrayBuffer[]; |
/** @module CanisterStatus */ | ||
import { lebDecode, PipeArrayBuffer } from '@dfinity/candid'; | ||
import { Principal } from '@dfinity/principal'; | ||
import { AgentError } from '../errors'; | ||
import { Certificate, flatten_forks, check_canister_ranges, lookupResultToBuffer, lookup_path, } from '../certificate'; | ||
import { Certificate } from '../certificate'; | ||
import { toHex } from '../utils/buffer'; | ||
import * as Cbor from '../cbor'; | ||
import { decodeLeb128, decodeTime } from '../utils/leb'; | ||
/** | ||
@@ -24,4 +24,3 @@ * | ||
export const request = async (options) => { | ||
const { agent, paths } = options; | ||
const canisterId = Principal.from(options.canisterId); | ||
const { canisterId, agent, paths } = options; | ||
const uniquePaths = [...new Set(paths)]; | ||
@@ -45,19 +44,3 @@ // Map path options to their correct formats | ||
}); | ||
const lookup = (cert, path) => { | ||
if (path === 'subnet') { | ||
const data = fetchNodeKeys(response.certificate, canisterId, agent.rootKey); | ||
return { | ||
path: path, | ||
data, | ||
}; | ||
} | ||
else { | ||
return { | ||
path: path, | ||
data: lookupResultToBuffer(cert.lookup(encodePath(path, canisterId))), | ||
}; | ||
} | ||
}; | ||
// must pass in the rootKey if we have no delegation | ||
const { path, data } = lookup(cert, uniquePaths[index]); | ||
const data = cert.lookup(encodePath(uniquePaths[index], canisterId)); | ||
if (!data) { | ||
@@ -87,6 +70,2 @@ // Typically, the cert lookup will throw | ||
} | ||
case 'subnet': { | ||
status.set(path, data); | ||
break; | ||
} | ||
case 'candid': { | ||
@@ -146,53 +125,2 @@ status.set(path, new TextDecoder().decode(data)); | ||
}; | ||
export const fetchNodeKeys = (certificate, canisterId, root_key) => { | ||
if (!canisterId._isPrincipal) { | ||
throw new Error('Invalid canisterId'); | ||
} | ||
const cert = Cbor.decode(new Uint8Array(certificate)); | ||
const tree = cert.tree; | ||
let delegation = cert.delegation; | ||
let subnetId; | ||
if (delegation && delegation.subnet_id) { | ||
subnetId = Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)); | ||
} | ||
// On local replica, with System type subnet, there is no delegation | ||
else if (!delegation && typeof root_key !== 'undefined') { | ||
subnetId = Principal.selfAuthenticating(new Uint8Array(root_key)); | ||
delegation = { | ||
subnet_id: subnetId.toUint8Array(), | ||
certificate: new ArrayBuffer(0), | ||
}; | ||
} | ||
// otherwise use default NNS subnet id | ||
else { | ||
subnetId = Principal.selfAuthenticating(Principal.fromText('tdb26-jop6k-aogll-7ltgs-eruif-6kk7m-qpktf-gdiqx-mxtrf-vb5e6-eqe').toUint8Array()); | ||
delegation = { | ||
subnet_id: subnetId.toUint8Array(), | ||
certificate: new ArrayBuffer(0), | ||
}; | ||
} | ||
const canisterInRange = check_canister_ranges({ canisterId, subnetId, tree }); | ||
if (!canisterInRange) { | ||
throw new Error('Canister not in range'); | ||
} | ||
const nodeTree = lookup_path(['subnet', delegation === null || delegation === void 0 ? void 0 : delegation.subnet_id, 'node'], tree); | ||
const nodeForks = flatten_forks(nodeTree); | ||
nodeForks.length; | ||
const nodeKeys = new Map(); | ||
nodeForks.forEach(fork => { | ||
Object.getPrototypeOf(new Uint8Array(fork[1])); | ||
const node_id = Principal.from(new Uint8Array(fork[1])).toText(); | ||
const derEncodedPublicKey = lookup_path(['public_key'], fork[2]); | ||
if (derEncodedPublicKey.byteLength !== 44) { | ||
throw new Error('Invalid public key length'); | ||
} | ||
else { | ||
nodeKeys.set(node_id, derEncodedPublicKey); | ||
} | ||
}); | ||
return { | ||
subnetId: Principal.fromUint8Array(new Uint8Array(delegation.subnet_id)).toText(), | ||
nodeKeys, | ||
}; | ||
}; | ||
export const encodePath = (path, canisterId) => { | ||
@@ -236,2 +164,5 @@ const encoder = new TextEncoder(); | ||
}; | ||
const decodeLeb128 = (buf) => { | ||
return lebDecode(new PipeArrayBuffer(buf)); | ||
}; | ||
const decodeCbor = (buf) => { | ||
@@ -243,6 +174,11 @@ return Cbor.decode(buf); | ||
}; | ||
// Controllers are CBOR-encoded buffers | ||
// time is a LEB128-encoded Nat | ||
const decodeTime = (buf) => { | ||
const decoded = decodeLeb128(buf); | ||
return new Date(Number(decoded / BigInt(1000000))); | ||
}; | ||
// Controllers are CBOR-encoded buffers, starting with a Tag we don't need | ||
const decodeControllers = (buf) => { | ||
// eslint-disable-next-line @typescript-eslint/no-unused-vars | ||
const controllersRaw = decodeCbor(buf); | ||
const [tag, ...controllersRaw] = decodeCbor(buf); | ||
return controllersRaw.map((buf) => { | ||
@@ -249,0 +185,0 @@ return Principal.fromUint8Array(new Uint8Array(buf)); |
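A hedged usage sketch of the canisterStatus request shown above, assuming the module is re-exported from the package root as CanisterStatus (as in contemporary releases) and that an HttpAgent has already been configured; the agent and canister id here are placeholders.

import { CanisterStatus, HttpAgent } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

async function readStatus(agent: HttpAgent, canisterId: Principal) {
  const status = await CanisterStatus.request({
    agent,
    canisterId,
    paths: ['time', 'controllers', 'module_hash'],
  });
  // Each entry is a decoded Status value, or null if that lookup failed.
  console.log('time:', status.get('time'));
  console.log('controllers:', status.get('controllers'));
  console.log('module hash:', status.get('module_hash'));
}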
@@ -0,2 +1,4 @@ | ||
// tslint:disable:max-classes-per-file | ||
// This file is based on: | ||
// tslint:disable-next-line: max-line-length | ||
// https://github.com/dfinity-lab/dfinity/blob/9bca65f8edd65701ea6bdb00e0752f9186bbc893/docs/spec/public/index.adoc#cbor-encoding-of-requests-and-responses | ||
@@ -79,2 +81,3 @@ import borc from 'borc'; | ||
for (let i = 0; i < len; i++) { | ||
// tslint:disable-next-line:no-bitwise | ||
res = res * BigInt(0x100) + BigInt(buf[i]); | ||
@@ -81,0 +84,0 @@ } |
@@ -9,17 +9,10 @@ import { AgentError } from './errors'; | ||
} | ||
export interface Cert { | ||
tree: HashTree; | ||
signature: ArrayBuffer; | ||
delegation?: Delegation; | ||
declare const enum NodeId { | ||
Empty = 0, | ||
Fork = 1, | ||
Labeled = 2, | ||
Leaf = 3, | ||
Pruned = 4 | ||
} | ||
declare const NodeId: { | ||
Empty: number; | ||
Fork: number; | ||
Labeled: number; | ||
Leaf: number; | ||
Pruned: number; | ||
}; | ||
export declare type NodeIdType = typeof NodeId[keyof typeof NodeId]; | ||
export { NodeId }; | ||
export declare type HashTree = [typeof NodeId.Empty] | [typeof NodeId.Fork, HashTree, HashTree] | [typeof NodeId.Labeled, ArrayBuffer, HashTree] | [typeof NodeId.Leaf, ArrayBuffer] | [typeof NodeId.Pruned, ArrayBuffer]; | ||
export type HashTree = [NodeId.Empty] | [NodeId.Fork, HashTree, HashTree] | [NodeId.Labeled, ArrayBuffer, HashTree] | [NodeId.Leaf, ArrayBuffer] | [NodeId.Pruned, ArrayBuffer]; | ||
/** | ||
@@ -30,7 +23,3 @@ * Make a human readable string out of a hash tree. | ||
export declare function hashTreeToString(tree: HashTree): string; | ||
interface Delegation extends Record<string, any> { | ||
subnet_id: ArrayBuffer; | ||
certificate: ArrayBuffer; | ||
} | ||
declare type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>; | ||
type VerifyFunc = (pk: Uint8Array, sig: Uint8Array, msg: Uint8Array) => Promise<boolean>; | ||
export interface CreateCertificateOptions { | ||
@@ -55,9 +44,2 @@ /** | ||
blsVerify?: VerifyFunc; | ||
/** | ||
* The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @default 5 | ||
* This is used to verify the time the certificate was signed, particularly for validating Delegation certificates, which can live for longer than the default window of +/- 5 minutes. If the certificate is | ||
* older than the specified age, it will fail verification. | ||
*/ | ||
maxAgeInMinutes?: number; | ||
} | ||
@@ -68,3 +50,2 @@ export declare class Certificate { | ||
private _blsVerify; | ||
private _maxAgeInMinutes; | ||
private readonly cert; | ||
@@ -74,8 +55,8 @@ /** | ||
* CertificateVerificationError if the certificate cannot be verified. | ||
* @constructs Certificate | ||
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions} | ||
* @constructs {@link AuthClient} | ||
* @param {CreateCertificateOptions} options | ||
* @see {@link CreateCertificateOptions} | ||
* @param {ArrayBuffer} options.certificate The bytes of the certificate | ||
* @param {ArrayBuffer} options.rootKey The root key to verify against | ||
* @param {Principal} options.canisterId The effective or signing canister ID | ||
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @throws {CertificateVerificationError} | ||
@@ -86,3 +67,2 @@ */ | ||
lookup(path: Array<ArrayBuffer | string>): ArrayBuffer | undefined; | ||
lookup_label(label: ArrayBuffer): ArrayBuffer | HashTree | undefined; | ||
private verify; | ||
@@ -92,8 +72,2 @@ private _checkDelegationAndGetKey; | ||
/** | ||
* utility function to constrain the type of a path | ||
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup | ||
* @returns ArrayBuffer or Undefined | ||
*/ | ||
export declare function lookupResultToBuffer(result: ArrayBuffer | HashTree | undefined): ArrayBuffer | undefined; | ||
/** | ||
* @param t | ||
@@ -106,19 +80,3 @@ */ | ||
*/ | ||
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | HashTree | undefined; | ||
/** | ||
* If the tree is a fork, flatten it into an array of trees | ||
* @param t - the tree to flatten | ||
* @returns HashTree[] - the flattened tree | ||
*/ | ||
export declare function flatten_forks(t: HashTree): HashTree[]; | ||
/** | ||
* Check if a canister falls within a range of canisters | ||
* @param canisterId Principal | ||
* @param ranges [Principal, Principal][] | ||
* @returns | ||
*/ | ||
export declare function check_canister_ranges(params: { | ||
canisterId: Principal; | ||
subnetId: Principal; | ||
tree: HashTree; | ||
}): boolean; | ||
export declare function lookup_path(path: Array<ArrayBuffer | string>, tree: HashTree): ArrayBuffer | undefined; | ||
export {}; |
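A hedged usage sketch of the CreateCertificateOptions declared above. Certificate.create is the async factory that runs verification (its declaration falls outside this excerpt); the certificate and root-key bytes are placeholders that would normally come from a read_state response and the agent's root key.

import { Certificate } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

async function verifyAndReadTime(
  certificate: ArrayBuffer, // e.g. the certificate from a readState response
  rootKey: ArrayBuffer, // e.g. agent.rootKey after fetchRootKey()
  canisterId: Principal,
) {
  const cert = await Certificate.create({
    certificate,
    rootKey,
    canisterId,
    maxAgeInMinutes: 5, // only accepted by the variant that declares it above
  });
  // lookup() returns the leaf bytes at the path, or undefined if absent.
  return cert.lookup(['time']);
}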
@@ -7,3 +7,2 @@ import * as cbor from './cbor'; | ||
import * as bls from './utils/bls'; | ||
import { decodeTime } from './utils/leb'; | ||
/** | ||
@@ -17,10 +16,2 @@ * A certificate may fail verification with respect to the provided public key | ||
} | ||
const NodeId = { | ||
Empty: 0, | ||
Fork: 1, | ||
Labeled: 2, | ||
Leaf: 3, | ||
Pruned: 4, | ||
}; | ||
export { NodeId }; | ||
/** | ||
@@ -45,40 +36,18 @@ * Make a human readable string out of a hash tree. | ||
switch (tree[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return '()'; | ||
case NodeId.Fork: { | ||
if (tree[1] instanceof Array && tree[2] instanceof ArrayBuffer) { | ||
const left = hashTreeToString(tree[1]); | ||
const right = hashTreeToString(tree[2]); | ||
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`; | ||
} | ||
else { | ||
throw new Error('Invalid tree structure for fork'); | ||
} | ||
case 1 /* NodeId.Fork */: { | ||
const left = hashTreeToString(tree[1]); | ||
const right = hashTreeToString(tree[2]); | ||
return `sub(\n left:\n${indent(left)}\n---\n right:\n${indent(right)}\n)`; | ||
} | ||
case NodeId.Labeled: { | ||
if (tree[1] instanceof ArrayBuffer && tree[2] instanceof ArrayBuffer) { | ||
const label = labelToString(tree[1]); | ||
const sub = hashTreeToString(tree[2]); | ||
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`; | ||
} | ||
else { | ||
throw new Error('Invalid tree structure for labeled'); | ||
} | ||
case 2 /* NodeId.Labeled */: { | ||
const label = labelToString(tree[1]); | ||
const sub = hashTreeToString(tree[2]); | ||
return `label(\n label:\n${indent(label)}\n sub:\n${indent(sub)}\n)`; | ||
} | ||
case NodeId.Leaf: { | ||
if (!tree[1]) { | ||
throw new Error('Invalid tree structure for leaf'); | ||
} | ||
else if (Array.isArray(tree[1])) { | ||
return JSON.stringify(tree[1]); | ||
} | ||
case 3 /* NodeId.Leaf */: { | ||
return `leaf(...${tree[1].byteLength} bytes)`; | ||
} | ||
case NodeId.Pruned: { | ||
if (!tree[1]) { | ||
throw new Error('Invalid tree structure for pruned'); | ||
} | ||
else if (Array.isArray(tree[1])) { | ||
return JSON.stringify(tree[1]); | ||
} | ||
case 4 /* NodeId.Pruned */: { | ||
return `pruned(${toHex(new Uint8Array(tree[1]))}`; | ||
@@ -105,20 +74,11 @@ } | ||
export class Certificate { | ||
constructor(certificate, _rootKey, _canisterId, _blsVerify, | ||
// Default to 5 minutes | ||
_maxAgeInMinutes = 5) { | ||
this._rootKey = _rootKey; | ||
this._canisterId = _canisterId; | ||
this._blsVerify = _blsVerify; | ||
this._maxAgeInMinutes = _maxAgeInMinutes; | ||
this.cert = cbor.decode(new Uint8Array(certificate)); | ||
} | ||
/** | ||
* Create a new instance of a certificate, automatically verifying it. Throws a | ||
* CertificateVerificationError if the certificate cannot be verified. | ||
* @constructs Certificate | ||
* @param {CreateCertificateOptions} options {@link CreateCertificateOptions} | ||
* @constructs {@link AuthClient} | ||
* @param {CreateCertificateOptions} options | ||
* @see {@link CreateCertificateOptions} | ||
* @param {ArrayBuffer} options.certificate The bytes of the certificate | ||
* @param {ArrayBuffer} options.rootKey The root key to verify against | ||
* @param {Principal} options.canisterId The effective or signing canister ID | ||
* @param {number} options.maxAgeInMinutes The maximum age of the certificate in minutes. Default is 5 minutes. | ||
* @throws {CertificateVerificationError} | ||
@@ -131,13 +91,15 @@ */ | ||
} | ||
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify, options.maxAgeInMinutes); | ||
const cert = new Certificate(options.certificate, options.rootKey, options.canisterId, blsVerify); | ||
await cert.verify(); | ||
return cert; | ||
} | ||
constructor(certificate, _rootKey, _canisterId, _blsVerify) { | ||
this._rootKey = _rootKey; | ||
this._canisterId = _canisterId; | ||
this._blsVerify = _blsVerify; | ||
this.cert = cbor.decode(new Uint8Array(certificate)); | ||
} | ||
lookup(path) { | ||
// constrain the type of the result, so that empty HashTree is undefined | ||
return lookupResultToBuffer(lookup_path(path, this.cert.tree)); | ||
return lookup_path(path, this.cert.tree); | ||
} | ||
lookup_label(label) { | ||
return this.lookup([label]); | ||
} | ||
async verify() { | ||
@@ -150,25 +112,2 @@ const rootHash = await reconstruct(this.cert.tree); | ||
let sigVer = false; | ||
const lookupTime = this.lookup(['time']); | ||
if (!lookupTime) { | ||
// Should never happen - time is always present in IC certificates | ||
throw new CertificateVerificationError('Certificate does not contain a time'); | ||
} | ||
const FIVE_MINUTES_IN_MSEC = 5 * 60 * 1000; | ||
const MAX_AGE_IN_MSEC = this._maxAgeInMinutes * 60 * 1000; | ||
const now = Date.now(); | ||
const earliestCertificateTime = now - MAX_AGE_IN_MSEC; | ||
const fiveMinutesFromNow = now + FIVE_MINUTES_IN_MSEC; | ||
const certTime = decodeTime(lookupTime); | ||
if (certTime.getTime() < earliestCertificateTime) { | ||
throw new CertificateVerificationError(`Certificate is signed more than ${this._maxAgeInMinutes} minutes in the past. Certificate time: ` + | ||
certTime.toISOString() + | ||
' Current time: ' + | ||
new Date(now).toISOString()); | ||
} | ||
else if (certTime.getTime() > fiveMinutesFromNow) { | ||
throw new CertificateVerificationError('Certificate is signed more than 5 minutes in the future. Certificate time: ' + | ||
certTime.toISOString() + | ||
' Current time: ' + | ||
new Date(now).toISOString()); | ||
} | ||
try { | ||
@@ -192,11 +131,13 @@ sigVer = await this._blsVerify(new Uint8Array(key), new Uint8Array(sig), new Uint8Array(msg)); | ||
canisterId: this._canisterId, | ||
blsVerify: this._blsVerify, | ||
// Do not check max age for delegation certificates | ||
maxAgeInMinutes: Infinity, | ||
}); | ||
const canisterInRange = check_canister_ranges({ | ||
canisterId: this._canisterId, | ||
subnetId: Principal.fromUint8Array(new Uint8Array(d.subnet_id)), | ||
tree: cert.cert.tree, | ||
}); | ||
const rangeLookup = cert.lookup(['subnet', d.subnet_id, 'canister_ranges']); | ||
if (!rangeLookup) { | ||
throw new CertificateVerificationError(`Could not find canister ranges for subnet 0x${toHex(d.subnet_id)}`); | ||
} | ||
const ranges_arr = cbor.decode(rangeLookup); | ||
const ranges = ranges_arr.map(v => [ | ||
Principal.fromUint8Array(v[0]), | ||
Principal.fromUint8Array(v[1]), | ||
]); | ||
const canisterInRange = ranges.some(r => r[0].ltEq(this._canisterId) && r[1].gtEq(this._canisterId)); | ||
if (!canisterInRange) { | ||
@@ -226,16 +167,2 @@ throw new CertificateVerificationError(`Canister ${this._canisterId} not in range of delegations for subnet 0x${toHex(d.subnet_id)}`); | ||
/** | ||
* utility function to constrain the type of a path | ||
* @param {ArrayBuffer | HashTree | undefined} result - the result of a lookup | ||
* @returns ArrayBuffer or Undefined | ||
*/ | ||
export function lookupResultToBuffer(result) { | ||
if (result instanceof ArrayBuffer) { | ||
return result; | ||
} | ||
else if (result instanceof Uint8Array) { | ||
return result.buffer; | ||
} | ||
return undefined; | ||
} | ||
/** | ||
* @param t | ||
@@ -245,11 +172,11 @@ */ | ||
switch (t[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return hash(domain_sep('ic-hashtree-empty')); | ||
case NodeId.Pruned: | ||
case 4 /* NodeId.Pruned */: | ||
return t[1]; | ||
case NodeId.Leaf: | ||
case 3 /* NodeId.Leaf */: | ||
return hash(concat(domain_sep('ic-hashtree-leaf'), t[1])); | ||
case NodeId.Labeled: | ||
case 2 /* NodeId.Labeled */: | ||
return hash(concat(domain_sep('ic-hashtree-labeled'), t[1], await reconstruct(t[2]))); | ||
case NodeId.Fork: | ||
case 1 /* NodeId.Fork */: | ||
return hash(concat(domain_sep('ic-hashtree-fork'), await reconstruct(t[1]), await reconstruct(t[2]))); | ||
@@ -272,20 +199,7 @@ default: | ||
switch (tree[0]) { | ||
case NodeId.Leaf: { | ||
// should not be undefined | ||
if (!tree[1]) | ||
throw new Error('Invalid tree structure for leaf'); | ||
if (tree[1] instanceof ArrayBuffer) { | ||
return tree[1]; | ||
} | ||
else if (tree[1] instanceof Uint8Array) { | ||
return tree[1].buffer; | ||
} | ||
else | ||
return tree[1]; | ||
case 3 /* NodeId.Leaf */: { | ||
return new Uint8Array(tree[1]).buffer; | ||
} | ||
case NodeId.Fork: { | ||
return tree; | ||
} | ||
default: { | ||
return tree; | ||
return undefined; | ||
} | ||
@@ -300,12 +214,7 @@ } | ||
} | ||
/** | ||
* If the tree is a fork, flatten it into an array of trees | ||
* @param t - the tree to flatten | ||
* @returns HashTree[] - the flattened tree | ||
*/ | ||
export function flatten_forks(t) { | ||
function flatten_forks(t) { | ||
switch (t[0]) { | ||
case NodeId.Empty: | ||
case 0 /* NodeId.Empty */: | ||
return []; | ||
case NodeId.Fork: | ||
case 1 /* NodeId.Fork */: | ||
return flatten_forks(t[1]).concat(flatten_forks(t[2])); | ||
@@ -321,3 +230,3 @@ default: | ||
for (const t of trees) { | ||
if (t[0] === NodeId.Labeled) { | ||
if (t[0] === 2 /* NodeId.Labeled */) { | ||
const p = t[1]; | ||
@@ -330,22 +239,2 @@ if (isBufferEqual(l, p)) { | ||
} | ||
/** | ||
* Check if a canister falls within a range of canisters | ||
* @param canisterId Principal | ||
* @param ranges [Principal, Principal][] | ||
* @returns | ||
*/ | ||
export function check_canister_ranges(params) { | ||
const { canisterId, subnetId, tree } = params; | ||
const rangeLookup = lookup_path(['subnet', subnetId.toUint8Array(), 'canister_ranges'], tree); | ||
if (!rangeLookup || !(rangeLookup instanceof ArrayBuffer)) { | ||
throw new Error(`Could not find canister ranges for subnet ${subnetId}`); | ||
} | ||
const ranges_arr = cbor.decode(rangeLookup); | ||
const ranges = ranges_arr.map(v => [ | ||
Principal.fromUint8Array(v[0]), | ||
Principal.fromUint8Array(v[1]), | ||
]); | ||
const canisterInRange = ranges.some(r => r[0].ltEq(canisterId) && r[1].gtEq(canisterId)); | ||
return canisterInRange; | ||
} | ||
//# sourceMappingURL=certificate.js.map |
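A minimal sketch of the freshness window that the verify() code above enforces: the certificate's signing time must fall between now minus maxAgeInMinutes and now plus five minutes. This is a standalone illustration, not the package's implementation; certTimeMs stands in for the decoded 'time' leaf.

function checkCertificateFreshness(
  certTimeMs: number,
  maxAgeInMinutes = 5,
  nowMs = Date.now(),
): void {
  const FIVE_MINUTES_IN_MSEC = 5 * 60 * 1000;
  const earliest = nowMs - maxAgeInMinutes * 60 * 1000;
  const latest = nowMs + FIVE_MINUTES_IN_MSEC;
  if (certTimeMs < earliest) {
    throw new Error(`Certificate is signed more than ${maxAgeInMinutes} minutes in the past`);
  }
  if (certTimeMs > latest) {
    throw new Error('Certificate is signed more than 5 minutes in the future');
  }
}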
import { ActorSubclass } from './actor'; | ||
export * from './actor'; | ||
export * from './agent'; | ||
export * from './auth'; | ||
export * from './certificate'; | ||
export * from './agent/http/transforms'; | ||
export * from './agent/http/types'; | ||
export * from './auth'; | ||
export * from './canisters/asset'; | ||
export * from './certificate'; | ||
export * from './der'; | ||
export * from './fetch_candid'; | ||
export * from './public_key'; | ||
export * from './request_id'; | ||
export * from './utils/bls'; | ||
export * from './utils/buffer'; | ||
export * from './utils/random'; | ||
export * as polling from './polling'; | ||
@@ -17,0 +14,0 @@ /** |
export * from './actor'; | ||
export * from './agent'; | ||
export * from './auth'; | ||
export * from './certificate'; | ||
export * from './agent/http/transforms'; | ||
export * from './agent/http/types'; | ||
export * from './auth'; | ||
export * from './canisters/asset'; | ||
export * from './certificate'; | ||
export * from './der'; | ||
export * from './fetch_candid'; | ||
export * from './public_key'; | ||
export * from './request_id'; | ||
export * from './utils/bls'; | ||
export * from './utils/buffer'; | ||
export * from './utils/random'; | ||
export * as polling from './polling'; | ||
@@ -16,0 +13,0 @@ /** |
@@ -7,4 +7,4 @@ import { Principal } from '@dfinity/principal'; | ||
export { defaultStrategy } from './strategy'; | ||
export declare type PollStrategy = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<void>; | ||
export declare type PollStrategyFactory = () => PollStrategy; | ||
export type PollStrategy = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<void>; | ||
export type PollStrategyFactory = () => PollStrategy; | ||
/** | ||
@@ -11,0 +11,0 @@ * Polls the IC to check the status of the given request then |
@@ -5,3 +5,3 @@ import { Principal } from '@dfinity/principal'; | ||
import { RequestId } from '../request_id'; | ||
export declare type Predicate<T> = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<T>; | ||
export type Predicate<T> = (canisterId: Principal, requestId: RequestId, status: RequestStatusResponseStatus) => Promise<T>; | ||
/** | ||
@@ -8,0 +8,0 @@ * A best practices polling strategy: wait 2 seconds before the first poll, then 1 second |
@@ -1,2 +0,2 @@ | ||
export declare type RequestId = ArrayBuffer & { | ||
export type RequestId = ArrayBuffer & { | ||
__requestId__: void; | ||
@@ -22,8 +22,1 @@ }; | ||
export declare function requestIdOf(request: Record<string, any>): RequestId; | ||
/** | ||
* Hash a map into an ArrayBuffer using the representation-independent-hash function. | ||
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map | ||
* @param map - Any non-nested object | ||
* @returns ArrayBuffer | ||
*/ | ||
export declare function hashOfMap(map: Record<string, unknown>): ArrayBuffer; |
import { lebEncode } from '@dfinity/candid'; | ||
import borc from 'borc'; | ||
import { sha256 } from '@noble/hashes/sha256'; | ||
import { compare, concat, uint8ToBuf } from './utils/buffer'; | ||
import { sha256 as jsSha256 } from 'js-sha256'; | ||
import { compare, concat } from './utils/buffer'; | ||
/** | ||
@@ -10,3 +10,3 @@ * sha256 hash the provided Buffer | ||
export function hash(data) { | ||
return uint8ToBuf(sha256.create().update(new Uint8Array(data)).digest()); | ||
return jsSha256.create().update(new Uint8Array(data)).arrayBuffer(); | ||
} | ||
@@ -20,3 +20,2 @@ /** | ||
if (value instanceof borc.Tagged) { | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
return hashValue(value.value); | ||
@@ -49,5 +48,2 @@ } | ||
} | ||
else if (typeof value === 'object') { | ||
return hashOfMap(value); | ||
} | ||
else if (typeof value === 'bigint') { | ||
@@ -77,12 +73,3 @@ // Do this check much later than the other bigint check because this one is much less | ||
export function requestIdOf(request) { | ||
return hashOfMap(request); | ||
} | ||
/** | ||
* Hash a map into an ArrayBuffer using the representation-independent-hash function. | ||
* https://sdk.dfinity.org/docs/interface-spec/index.html#hash-of-map | ||
* @param map - Any non-nested object | ||
* @returns ArrayBuffer | ||
*/ | ||
export function hashOfMap(map) { | ||
const hashed = Object.entries(map) | ||
const hashed = Object.entries(request) | ||
.filter(([, value]) => value !== undefined) | ||
@@ -99,5 +86,5 @@ .map(([key, value]) => { | ||
const concatenated = concat(...sorted.map(x => concat(...x))); | ||
const result = hash(concatenated); | ||
return result; | ||
const requestId = hash(concatenated); | ||
return requestId; | ||
} | ||
//# sourceMappingURL=request_id.js.map |
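A hedged usage sketch of requestIdOf above: each key and value is hashed with SHA-256, the (key hash, value hash) pairs are sorted and concatenated, and the result is hashed again. The field values below are placeholders, not a real call envelope.

import { requestIdOf, toHex } from '@dfinity/agent';
import { Principal } from '@dfinity/principal';

const requestId = requestIdOf({
  request_type: 'call',
  canister_id: Principal.fromText('aaaaa-aa'), // management canister id, used only as an example
  method_name: 'greet',
  arg: new Uint8Array([0x44, 0x49, 0x44, 0x4c]), // 'DIDL' prefix only, for illustration
  ingress_expiry: BigInt(Date.now() + 4 * 60 * 1000) * BigInt(1_000_000), // nanoseconds
});
console.log('request id:', toHex(requestId));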
@@ -16,21 +16,2 @@ /** | ||
export declare function fromHex(hex: string): ArrayBuffer; | ||
/** | ||
* | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2 | ||
*/ | ||
export declare function compare(b1: ArrayBuffer, b2: ArrayBuffer): number; | ||
/** | ||
* Checks two array buffers for equality. | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns boolean | ||
*/ | ||
export declare function bufEquals(b1: ArrayBuffer, b2: ArrayBuffer): boolean; | ||
/** | ||
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe. | ||
* @param {Uint8Array} arr Uint8Array to convert | ||
* @returns ArrayBuffer | ||
*/ | ||
export declare function uint8ToBuf(arr: Uint8Array): ArrayBuffer; |
@@ -32,2 +32,3 @@ /** | ||
.reduce((acc, curr, i) => { | ||
// tslint:disable-next-line:no-bitwise | ||
acc[(i / 2) | 0] = (acc[(i / 2) | 0] || '') + curr; | ||
@@ -39,8 +40,2 @@ return acc; | ||
} | ||
/** | ||
* | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns number - negative if b1 < b2, positive if b1 > b2, 0 if b1 === b2 | ||
*/ | ||
export function compare(b1, b2) { | ||
@@ -59,19 +54,2 @@ if (b1.byteLength !== b2.byteLength) { | ||
} | ||
/** | ||
* Checks two array buffers for equality. | ||
* @param b1 array buffer 1 | ||
* @param b2 array buffer 2 | ||
* @returns boolean | ||
*/ | ||
export function bufEquals(b1, b2) { | ||
return compare(b1, b2) === 0; | ||
} | ||
/** | ||
* Returns a true ArrayBuffer from a Uint8Array, as Uint8Array.buffer is unsafe. | ||
* @param {Uint8Array} arr Uint8Array to convert | ||
* @returns ArrayBuffer | ||
*/ | ||
export function uint8ToBuf(arr) { | ||
return new DataView(arr.buffer, arr.byteOffset, arr.byteLength).buffer; | ||
} | ||
//# sourceMappingURL=buffer.js.map |
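A hedged usage sketch of the buffer helpers above, assuming they are re-exported from the package root (the index shown earlier re-exports './utils/buffer') in the release that ships them.

import { bufEquals, compare, concat, fromHex, toHex } from '@dfinity/agent';

const a = fromHex('deadbeef');
const b = fromHex('deadbeef');

console.log(bufEquals(a, b)); // true: byte-for-byte equal
console.log(compare(a, fromHex('dead'))); // positive here: a has more bytes
console.log(toHex(concat(a, b))); // 'deadbeefdeadbeef'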
@@ -13,3 +13,3 @@ /** | ||
declare function init(): Promise<InitOutput>; | ||
export declare type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; | ||
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module; | ||
export interface InitOutput { | ||
@@ -16,0 +16,0 @@ readonly memory: WebAssembly.Memory; |
@@ -5,2 +5,4 @@ import * as base64Arraybuffer from 'base64-arraybuffer'; | ||
import { wasmBytesBase64 } from './wasm'; | ||
/* tslint:disable */ | ||
/* eslint-disable */ | ||
let wasm; | ||
@@ -7,0 +9,0 @@ const wasmBytes = base64Arraybuffer.decode(wasmBytesBase64); |
{ | ||
"name": "@dfinity/agent", | ||
"version": "0.20.2", | ||
"version": "0.21.1", | ||
"author": "DFINITY Stiftung <sdk@dfinity.org>", | ||
@@ -50,4 +50,4 @@ "license": "Apache-2.0", | ||
"peerDependencies": { | ||
"@dfinity/candid": "^0.20.2", | ||
"@dfinity/principal": "^0.20.2" | ||
"@dfinity/candid": "^0.21.1", | ||
"@dfinity/principal": "^0.21.1" | ||
}, | ||
@@ -54,0 +54,0 @@ "dependencies": { |
Sorry, the diff of this file is not supported yet