@polkadot-api/substrate-bindings
Comparing version 0.0.1-1deba41937c544dfa5d4feb703e5a6a2096af097.1.0 to 0.0.1-1e1c7a915d627138d0404bb9257993297c03fea3.1.0
import * as scale_ts from 'scale-ts'; | ||
import { Codec, Encoder, Decoder, CodecType } from 'scale-ts'; | ||
export * from 'scale-ts'; | ||
import { Codec, Encoder, Decoder, StringRecord, CodecType, EncoderType, DecoderType } from 'scale-ts'; | ||
export { Bytes, Codec, CodecType, Decoder, DecoderType, Encoder, EncoderType, Option, Result, ResultPayload, StringRecord, Struct, Tuple, Vector, _void, bool, compact, createCodec, createDecoder, enhanceCodec, enhanceDecoder, enhanceEncoder, i128, i16, i256, i32, i64, i8, str, u128, u16, u256, u32, u64, u8 } from 'scale-ts'; | ||
type SS58String = string & { | ||
__SS58String: unknown; | ||
__SS58String?: unknown; | ||
}; | ||
type SS58AddressInfo = { | ||
isValid: false; | ||
} | { | ||
isValid: true; | ||
ss58Format: number; | ||
publicKey: Uint8Array; | ||
}; | ||
declare const getSs58AddressInfo: (address: SS58String) => SS58AddressInfo; | ||
declare const fromBufferToBase58: (ss58Format: number) => (publicKey: Uint8Array) => SS58String; | ||
declare const AccountId: (ss58Format?: number, nBytes?: 32 | 33) => scale_ts.Codec<SS58String>; | ||
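These three helpers cover SS58 handling: validation, re-encoding, and a scale-ts codec for addresses. A minimal sketch, assuming the package's published entry point and using the well-known Alice dev address purely as an example:

```ts
import {
  AccountId,
  fromBufferToBase58,
  getSs58AddressInfo,
} from "@polkadot-api/substrate-bindings"

const info = getSs58AddressInfo("5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY")
if (info.isValid) {
  // Re-encode the raw public key with the generic Substrate prefix (0).
  const address = fromBufferToBase58(0)(info.publicKey)

  // AccountId is a scale-ts Codec<SS58String>: it encodes an address as its
  // raw public-key bytes and decodes those bytes back into an SS58 string.
  const codec = AccountId(0)
  const encoded = codec.enc(address)
  const decoded = codec.dec(encoded)
}
```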
type HexString = string & { | ||
__hexString?: unknown; | ||
}; | ||
declare const Hex: { | ||
(nBytes?: number): Codec<HexString>; | ||
enc: (nBytes?: number) => Encoder<HexString>; | ||
dec: (nBytes?: number) => Decoder<HexString>; | ||
}; | ||
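Hex is a codec over raw bytes that keeps values as 0x-prefixed strings; passing nBytes pins the payload to a fixed size (e.g. 32 for hashes). A brief sketch:

```ts
import { Hex } from "@polkadot-api/substrate-bindings"

const hex = Hex()      // variable-length hex payload
const hash = Hex(32)   // fixed 32-byte payload, e.g. a block or state hash

const encoded = hex.enc("0xdeadbeef") // encodes the hex payload as bytes
const back = hex.dec(encoded)          // "0xdeadbeef" (round trip)
```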
declare class Binary { | ||
#private; | ||
constructor(data: Uint8Array); | ||
asText: () => string; | ||
asHex: () => string; | ||
asBytes: () => Uint8Array; | ||
static fromText(input: string): Binary; | ||
static fromHex(input: HexString): Binary; | ||
static fromBytes(input: Uint8Array): Binary; | ||
} | ||
declare const Bin: { | ||
(nBytes?: number): Codec<Binary>; | ||
enc: (nBytes?: number) => Encoder<Binary>; | ||
dec: (nBytes?: number) => Decoder<Binary>; | ||
}; | ||
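Binary wraps a byte payload and lazily exposes it as text, hex, or Uint8Array, while Bin is the matching scale-ts codec. For instance:

```ts
import { Bin, Binary } from "@polkadot-api/substrate-bindings"

const remark = Binary.fromText("hello chain")
remark.asHex()   // "0x68656c6c6f20636861696e"
remark.asBytes() // Uint8Array with the UTF-8 bytes

// Bin() yields a Codec<Binary>; Bin(4) would pin the payload to 4 bytes.
const codec = Bin()
const bytes = codec.enc(remark)
const back = codec.dec(bytes) // Binary with the same underlying bytes
```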
interface BitSequence { | ||
bitsLen: number; | ||
bytes: Uint8Array; | ||
} | ||
declare const bitSequence: scale_ts.Codec<BitSequence>; | ||
declare const char: scale_ts.Codec<string>; | ||
declare const compactNumber: Codec<number>; | ||
declare const compactBn: Codec<bigint>; | ||
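compactNumber and compactBn are the same SCALE compact codec, typed for number and bigint respectively. For example:

```ts
import { compactBn, compactNumber } from "@polkadot-api/substrate-bindings"

compactNumber.enc(42)     // Uint8Array [0xa8]: single-byte mode, value << 2
const big = compactBn.enc(1_000_000_000_000n)
compactBn.dec(big)        // decodes back to the original value (declared as Codec<bigint>)
```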
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<{ | ||
self: T; | ||
}>; | ||
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<{ | ||
self: T; | ||
}>; | ||
declare const Self: <T>(value: () => Codec<T>) => Codec<{ | ||
self: T; | ||
}>; | ||
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>; | ||
type OpaqueValue<T> = { | ||
length: number; | ||
inner: () => T; | ||
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<T>; | ||
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<T>; | ||
declare const Self: <T>(value: () => Codec<T>) => Codec<T>; | ||
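Self builds codecs for self-referential structures by deferring to a thunk, so a codec can reference itself before its definition is complete. A sketch with a hypothetical recursive Node type (the type and field names are illustrative):

```ts
import { Self } from "@polkadot-api/substrate-bindings"
import { Option, Struct, str, type Codec } from "scale-ts"

// Hypothetical recursive structure: each node may carry one child.
type Node = { name: string; child: Node | undefined }

const node: Codec<Node> = Struct({
  name: str,
  // The thunk is only invoked once encoding/decoding actually starts,
  // so referring to `node` here is safe.
  child: Option(Self(() => node)),
})

const bytes = node.enc({ name: "root", child: { name: "leaf", child: undefined } })
const back = node.dec(bytes)
```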
type Tuple<T, N extends number> = readonly [T, ...T[]] & { | ||
length: N; | ||
}; | ||
declare const OpaqueCodec: { | ||
<T>(inner: Codec<T>, len?: Codec<number>): Codec<OpaqueValue<T>>; | ||
enc: <T_1>(inner: Encoder<T_1>, len?: Encoder<number>) => Encoder<OpaqueValue<T_1>>; | ||
dec: <T_2>(inner: Decoder<T_2>, len?: Decoder<number>) => Decoder<OpaqueValue<T_2>>; | ||
type Push<T extends any[], V> = [...T, V]; | ||
type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; | ||
type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R ? R : never; | ||
type TuplifyUnion<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = true extends N ? [] : Push<TuplifyUnion<Exclude<T, L>>, L>; | ||
type RestrictedLenTuple<T, O extends StringRecord<any>> = Tuple<T, TuplifyUnion<keyof O> extends Tuple<any, infer V> ? V : 0>; | ||
type ExtractEnumValue<T extends { | ||
type: string; | ||
value?: any; | ||
}, K extends string> = T extends { | ||
type: K; | ||
value: infer R; | ||
} ? R : never; | ||
interface Discriminant<T extends { | ||
type: string; | ||
value?: any; | ||
}> { | ||
is<K extends T["type"]>(this: Enum<T>, type: K): this is Enum<{ | ||
type: K; | ||
value: ExtractEnumValue<T, K>; | ||
}>; | ||
as<K extends T["type"]>(type: K): ExtractEnumValue<T, K>; | ||
} | ||
declare const _Enum: {}; | ||
type Enum<T extends { | ||
type: string; | ||
value?: any; | ||
}> = T & Discriminant<T>; | ||
declare const Enum: <T extends { | ||
type: string; | ||
value?: any; | ||
}, Key extends T["type"]>(type: Key, ...args: ExtractEnumValue<T, Key> extends undefined ? [] : [value: ExtractEnumValue<T, Key>]) => Enum<ExtractEnumValue<T, Key> extends undefined ? T : ExtractEnumValue<T, Key> extends never ? T : { | ||
type: Key; | ||
value: ExtractEnumValue<T, Key>; | ||
}>; | ||
declare const Variant: { | ||
<O extends StringRecord<Codec<any>>>(inner: O, indexes?: RestrictedLenTuple<number, O> | undefined): Codec<Enum<{ [K in keyof O]: K extends string ? { | ||
type: K; | ||
value: CodecType<O[K]>; | ||
} : never; }[keyof O]>>; | ||
enc: <O_1 extends StringRecord<Encoder<any>>>(inner: O_1, x?: RestrictedLenTuple<number, O_1> | undefined) => Encoder<Enum<{ [K_1 in keyof O_1]: K_1 extends string ? { | ||
type: K_1; | ||
value: EncoderType<O_1[K_1]>; | ||
} : never; }[keyof O_1]>>; | ||
dec: <O_2 extends StringRecord<Decoder<any>>>(inner: O_2, x?: RestrictedLenTuple<number, O_2> | undefined) => Decoder<Enum<{ [K_2 in keyof O_2]: K_2 extends string ? { | ||
type: K_2; | ||
value: DecoderType<O_2[K_2]>; | ||
} : never; }[keyof O_2]>>; | ||
}; | ||
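Variant is the discriminated-union counterpart of scale-ts's Enum codec: decoded values are { type, value } objects enriched with the is/as helpers from Discriminant. A sketch with a hypothetical two-case union (names and bytes are illustrative):

```ts
import { Variant } from "@polkadot-api/substrate-bindings"
import { str, u32, type CodecType } from "scale-ts"

// Hypothetical union: either a textual note or a numeric code.
const event = Variant({ note: str, code: u32 })
type Event = CodecType<typeof event>

// Variant index 1 ("code") followed by a little-endian u32.
const decoded: Event = event.dec(new Uint8Array([1, 7, 0, 0, 0]))
if (decoded.is("code")) {
  decoded.value + 1 // narrowed to number
}
decoded.as("note")  // throws at runtime unless the actual tag is "note"

const bytes = event.enc(decoded) // re-encodes to the same five bytes
```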
declare const blockHeader: Codec<{ | ||
parentHash: HexString; | ||
number: number; | ||
stateRoot: HexString; | ||
extrinsicRoot: HexString; | ||
digests: Enum<{ | ||
type: "consensus"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "seal"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "preRuntime"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "runtimeUpdated"; | ||
value: undefined; | ||
}>[]; | ||
}>; | ||
type BlockHeader = CodecType<typeof blockHeader>; | ||
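blockHeader decodes a SCALE-encoded Substrate header into its parent hash, block number, roots and digest items. A sketch, assuming headerHex holds such a header as a 0x-prefixed hex string (how it is fetched is outside this package):

```ts
import { blockHeader, type BlockHeader } from "@polkadot-api/substrate-bindings"

declare const headerHex: string // assumed: SCALE-encoded header as hex

const header: BlockHeader = blockHeader.dec(headerHex)
header.parentHash // HexString
header.number     // number (compact-decoded)
header.digests.forEach((d) => {
  if (d.is("seal")) console.log(d.value.engine, d.value.payload)
})
```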
declare const lookup: scale_ts.Codec<{ | ||
@@ -38,3 +149,3 @@ id: number; | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -44,5 +155,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -55,5 +166,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -133,5 +244,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -142,70 +250,8 @@ docs: string[]; | ||
type HexString = string & { | ||
__hexString: unknown; | ||
}; | ||
declare const Hex: { | ||
(nBytes?: number): Codec<HexString>; | ||
enc: (nBytes?: number) => Encoder<HexString>; | ||
dec: (nBytes?: number) => Decoder<HexString>; | ||
}; | ||
declare const pallets: scale_ts.Codec<{ | ||
name: string; | ||
storage: void | { | ||
prefix: string; | ||
items: { | ||
name: string; | ||
modifier: number; | ||
type: { | ||
tag: "map"; | ||
value: { | ||
hashers: ({ | ||
tag: "Blake2128"; | ||
value: undefined; | ||
} | { | ||
tag: "Blake2256"; | ||
value: undefined; | ||
} | { | ||
tag: "Blake2128Concat"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox128"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox256"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox64Concat"; | ||
value: undefined; | ||
} | { | ||
tag: "Identity"; | ||
value: undefined; | ||
})[]; | ||
key: number; | ||
value: number; | ||
}; | ||
} | { | ||
tag: "plain"; | ||
value: number; | ||
}; | ||
fallback: number[]; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
constants: { | ||
name: string; | ||
type: number; | ||
value: HexString; | ||
docs: string[]; | ||
}[]; | ||
errors: number | void | undefined; | ||
index: number; | ||
}[]>; | ||
type V14Pallets = CodecType<typeof pallets>; | ||
declare const extrinsic: scale_ts.Codec<{ | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -217,4 +263,4 @@ identifier: string; | ||
}>; | ||
type V14Extrinsic = CodecType<typeof extrinsic>; | ||
declare const v14: scale_ts.Codec<{ | ||
type V15Extrinsic = CodecType<typeof extrinsic>; | ||
declare const v15: scale_ts.Codec<{ | ||
lookup: { | ||
@@ -225,3 +271,3 @@ id: number; | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -231,5 +277,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -242,5 +288,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -320,5 +366,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -329,3 +372,3 @@ docs: string[]; | ||
name: string; | ||
storage: void | { | ||
storage: { | ||
prefix: string; | ||
@@ -367,8 +410,8 @@ items: { | ||
}; | ||
fallback: number[]; | ||
fallback: HexString; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
calls: number | undefined; | ||
events: number | undefined; | ||
constants: { | ||
@@ -380,8 +423,12 @@ name: string; | ||
}[]; | ||
errors: number | void | undefined; | ||
errors: number | undefined; | ||
index: number; | ||
docs: string[]; | ||
}[]; | ||
extrinsic: { | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -394,4 +441,26 @@ identifier: string; | ||
type: number; | ||
apis: { | ||
name: string; | ||
methods: { | ||
name: string; | ||
inputs: { | ||
name: string; | ||
type: number; | ||
}[]; | ||
output: number; | ||
docs: string[]; | ||
}[]; | ||
docs: string[]; | ||
}[]; | ||
outerEnums: { | ||
call: number; | ||
event: number; | ||
error: number; | ||
}; | ||
custom: [string, { | ||
type: number; | ||
value: HexString; | ||
}][]; | ||
}>; | ||
type V14 = CodecType<typeof v14>; | ||
type V15 = CodecType<typeof v15>; | ||
@@ -444,2 +513,5 @@ declare const metadata: Codec<{ | ||
tag: "v14"; | ||
value: unknown; | ||
} | { | ||
tag: "v15"; | ||
value: { | ||
@@ -451,3 +523,3 @@ lookup: { | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -457,5 +529,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -468,5 +540,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -546,5 +618,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -555,3 +624,3 @@ docs: string[]; | ||
name: string; | ||
storage: void | { | ||
storage: { | ||
prefix: string; | ||
@@ -593,8 +662,8 @@ items: { | ||
}; | ||
fallback: number[]; | ||
fallback: HexString; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
calls: number | undefined; | ||
events: number | undefined; | ||
constants: { | ||
@@ -606,8 +675,12 @@ name: string; | ||
}[]; | ||
errors: number | void | undefined; | ||
errors: number | undefined; | ||
index: number; | ||
docs: string[]; | ||
}[]; | ||
extrinsic: { | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -620,2 +693,24 @@ identifier: string; | ||
type: number; | ||
apis: { | ||
name: string; | ||
methods: { | ||
name: string; | ||
inputs: { | ||
name: string; | ||
type: number; | ||
}[]; | ||
output: number; | ||
docs: string[]; | ||
}[]; | ||
docs: string[]; | ||
}[]; | ||
outerEnums: { | ||
call: number; | ||
event: number; | ||
error: number; | ||
}; | ||
custom: [string, { | ||
type: number; | ||
value: HexString; | ||
}][]; | ||
}; | ||
@@ -625,12 +720,2 @@ }; | ||
interface BitSequence { | ||
bitsLen: number; | ||
bytes: Uint8Array; | ||
} | ||
declare const bitSequence: scale_ts.Codec<BitSequence>; | ||
declare const char: scale_ts.Codec<string>; | ||
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>; | ||
declare const Blake2256: (encoded: Uint8Array) => Uint8Array; | ||
@@ -655,74 +740,62 @@ declare const Blake2128: (encoded: Uint8Array) => Uint8Array; | ||
type Tuple<T> = readonly [T, ...T[]]; | ||
interface DescriptorCommon<Pallet extends string, Name extends string> { | ||
checksum: bigint; | ||
pallet: Pallet; | ||
name: Name; | ||
} | ||
interface ArgsWithPayloadCodec<Args extends Array<any>, O> { | ||
len: Args["length"]; | ||
} | ||
interface ArgsWithoutPayloadCodec<Args extends Array<any>> { | ||
len: Args["length"]; | ||
} | ||
interface StorageDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithPayloadCodec<any, any>> { | ||
type: "storage"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface ConstantDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "const"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface EventDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "event"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface ErrorDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "error"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface TxDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>> { | ||
type: "tx"; | ||
props: Common; | ||
codecs: Codecs; | ||
events: Events; | ||
errors: Errors; | ||
} | ||
type Descriptor = ConstantDescriptor<any, any> | EventDescriptor<any, any> | StorageDescriptor<any, any> | ErrorDescriptor<any, any> | TxDescriptor<any, any, any, any>; | ||
declare const createCommonDescriptor: <Pallet extends string, Name extends string>(checksum: bigint, pallet: Pallet, name: Name) => DescriptorCommon<Pallet, Name>; | ||
declare const getDescriptorCreator: <Type extends "const" | "event" | "error", Pallet extends string, Name extends string, Codecs>(type: Type, checksum: bigint, pallet: Pallet, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>; | ||
declare const getPalletCreator: <Pallet extends string>(pallet: Pallet) => { | ||
getPayloadDescriptor: <Type extends "const" | "event" | "error", Name extends string, Codecs>(type: Type, checksum: bigint, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>; | ||
getStorageDescriptor: <Name_1 extends string, Codecs_1 extends ArgsWithPayloadCodec<any[], any>>(checksum: bigint, name: Name_1, codecs: Codecs_1) => StorageDescriptor<DescriptorCommon<Pallet, Name_1>, Codecs_1>; | ||
getTxDescriptor: <Name_2 extends string, Codecs_2 extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>>(checksum: bigint, name: Name_2, events: Events, errors: Errors, codecs: Codecs_2) => TxDescriptor<DescriptorCommon<Pallet, Name_2>, Codecs_2, Events, Errors>; | ||
type PlainDescriptor<T> = string & { | ||
_type?: T; | ||
}; | ||
type EventToObject<E extends EventDescriptor<DescriptorCommon<any, string>, any>> = E extends EventDescriptor<DescriptorCommon<any, infer K>, infer V> ? { | ||
type: K; | ||
value: V; | ||
} : unknown; | ||
type UnionizeTupleEvents<E extends Array<EventDescriptor<any, any>>> = E extends Array<infer Ev> ? Ev extends EventDescriptor<any, any> ? EventToObject<Ev> : unknown : unknown; | ||
type TxDescriptorArgs<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, ArgsWithoutPayloadCodec<infer A>, any, any> ? A : []; | ||
type TxDescriptorEvents<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, infer E, any> ? E : []; | ||
type TxDescriptorErrors<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, any, infer Errors> ? Errors extends Tuple<ErrorDescriptor<any, any>> ? { | ||
[K in keyof Errors]: Errors[K] extends ErrorDescriptor<DescriptorCommon<any, infer Type>, infer Value> ? { | ||
type: Type; | ||
value: Value; | ||
} : unknown; | ||
}[keyof Errors extends number ? keyof Errors : never] : [] : []; | ||
type TxFunction<D extends TxDescriptor<any, any, any, any>> = (...args: TxDescriptorArgs<D>) => Promise<{ | ||
ok: true; | ||
events: Array<UnionizeTupleEvents<TxDescriptorEvents<D>>>; | ||
} | { | ||
ok: false; | ||
error: TxDescriptorErrors<D>; | ||
}>; | ||
type StorageType<T extends StorageDescriptor<any, ArgsWithPayloadCodec<any, any>>> = T extends StorageDescriptor<any, ArgsWithPayloadCodec<infer Args, infer Payload>> ? { | ||
keyArgs: Args; | ||
value: Payload; | ||
} : unknown; | ||
type StorageDescriptor<Args extends Array<any>, T, Optional extends true | false> = string & { | ||
_type: T; | ||
_args: Args; | ||
_optional: Optional; | ||
}; | ||
type TxDescriptor<Args extends {} | undefined> = string & { | ||
___: Args; | ||
}; | ||
type RuntimeDescriptor<Args extends Array<any>, T> = string & { | ||
__: [Args, T]; | ||
}; | ||
type Descriptors = { | ||
pallets: Record<string, [ | ||
Record<string, StorageDescriptor<any, any, any>>, | ||
Record<string, TxDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>> | ||
]>; | ||
apis: Record<string, Record<string, RuntimeDescriptor<any, any>>>; | ||
asset: PlainDescriptor<any>; | ||
}; | ||
type PickDescriptors<Idx extends 0 | 1 | 2 | 3 | 4, T extends Descriptors["pallets"]> = { | ||
[K in keyof T]: T[K][Idx]; | ||
}; | ||
type ExtractStorage<T extends Record<string, Record<string, StorageDescriptor<any, any, any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends StorageDescriptor<infer Key, infer Value, infer Optional> ? { | ||
KeyArgs: Key; | ||
Value: Value; | ||
IsOptional: Optional; | ||
} : unknown; | ||
}; | ||
}; | ||
type ExtractTx<T extends Record<string, Record<string, TxDescriptor<any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends TxDescriptor<infer Args> ? Args : unknown; | ||
}; | ||
}; | ||
type ExtractPlain<T extends Record<string, Record<string, PlainDescriptor<any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends PlainDescriptor<infer Value> ? Value : unknown; | ||
}; | ||
}; | ||
type QueryFromDescriptors<T extends Descriptors> = ExtractStorage<PickDescriptors<0, T["pallets"]>>; | ||
type TxFromDescriptors<T extends Descriptors> = ExtractTx<PickDescriptors<1, T["pallets"]>>; | ||
type EventsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<2, T["pallets"]>>; | ||
type ErrorsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<3, T["pallets"]>>; | ||
type ConstFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<4, T["pallets"]>>; | ||
export { AccountId, ArgsWithPayloadCodec, ArgsWithoutPayloadCodec, BitSequence, Blake2128, Blake2128Concat, Blake2256, ConstantDescriptor, Descriptor, DescriptorCommon, EncoderWithHash, ErrorDescriptor, EventDescriptor, EventToObject, Hex, HexString, Identity, OpaqueCodec, OpaqueValue, SS58String, Self, Storage, StorageDescriptor, StorageType, Twox128, Twox256, Twox64Concat, TxDescriptor, TxDescriptorArgs, TxDescriptorErrors, TxDescriptorEvents, TxFunction, UnionizeTupleEvents, V14, V14Extrinsic, V14Lookup, V14Pallets, bitSequence, char, compactBn, compactNumber, createCommonDescriptor, fixedStr, getDescriptorCreator, getPalletCreator, h64, metadata, selfDecoder, selfEncoder, v14 }; | ||
type GetEnum<T extends Enum<{ | ||
type: string; | ||
value: any; | ||
}>> = { | ||
[K in T["type"]]: (...args: ExtractEnumValue<T, K> extends undefined ? [] : [value: ExtractEnumValue<T, K>]) => T; | ||
}; | ||
export { AccountId, Bin, Binary, type BitSequence, Blake2128, Blake2128Concat, Blake2256, type BlockHeader, type ConstFromDescriptors, type Descriptors, type Discriminant, type EncoderWithHash, Enum, type ErrorsFromDescriptors, type EventsFromDescriptors, type ExtractEnumValue, type GetEnum, Hex, type HexString, Identity, type PlainDescriptor, type QueryFromDescriptors, type RuntimeDescriptor, type SS58AddressInfo, type SS58String, Self, Storage, type StorageDescriptor, Twox128, Twox256, Twox64Concat, type TxDescriptor, type TxFromDescriptors, type V14Lookup, type V15, type V15Extrinsic, Variant, _Enum, bitSequence, blockHeader, char, compactBn, compactNumber, fixedStr, fromBufferToBase58, getSs58AddressInfo, h64, metadata, selfDecoder, selfEncoder, v15 }; |
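GetEnum combined with the _Enum proxy gives typed constructors for every case of an Enum type, which is the pattern used to build values accepted by a Variant codec. A sketch reusing a hypothetical event union:

```ts
import { Variant, _Enum, type GetEnum } from "@polkadot-api/substrate-bindings"
import { str, u32, type CodecType } from "scale-ts"

// Hypothetical union, as in the Variant sketch above.
const event = Variant({ note: str, code: u32 })
type Event = CodecType<typeof event>

// The proxy exposes one constructor per case, typed via GetEnum.
const EventCtor = _Enum as GetEnum<Event>
const e = EventCtor.code(7) // Enum value with type "code", value 7
const bytes = event.enc(e)
```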
@@ -6,2 +6,3 @@ "use strict"; | ||
var __hasOwnProp = Object.prototype.hasOwnProperty; | ||
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; | ||
var __export = (target, all) => { | ||
@@ -19,4 +20,25 @@ for (var name in all) | ||
}; | ||
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default")); | ||
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); | ||
var __publicField = (obj, key, value) => { | ||
__defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value); | ||
return value; | ||
}; | ||
var __accessCheck = (obj, member, msg) => { | ||
if (!member.has(obj)) | ||
throw TypeError("Cannot " + msg); | ||
}; | ||
var __privateGet = (obj, member, getter) => { | ||
__accessCheck(obj, member, "read from private field"); | ||
return getter ? getter.call(obj) : member.get(obj); | ||
}; | ||
var __privateAdd = (obj, member, value) => { | ||
if (member.has(obj)) | ||
throw TypeError("Cannot add the same private member more than once"); | ||
member instanceof WeakSet ? member.add(obj) : member.set(obj, value); | ||
}; | ||
var __privateSet = (obj, member, value, setter) => { | ||
__accessCheck(obj, member, "write to private field"); | ||
setter ? setter.call(obj, value) : member.set(obj, value); | ||
return value; | ||
}; | ||
@@ -27,55 +49,99 @@ // src/index.ts | ||
AccountId: () => AccountId, | ||
Bin: () => Bin, | ||
Binary: () => Binary, | ||
Blake2128: () => Blake2128, | ||
Blake2128Concat: () => Blake2128Concat, | ||
Blake2256: () => Blake2256, | ||
Bytes: () => import_scale_ts8.Bytes, | ||
Enum: () => Enum, | ||
Hex: () => Hex, | ||
Identity: () => Identity, | ||
OpaqueCodec: () => OpaqueCodec, | ||
Option: () => import_scale_ts8.Option, | ||
Result: () => import_scale_ts8.Result, | ||
Self: () => Self, | ||
Storage: () => Storage, | ||
Struct: () => import_scale_ts8.Struct, | ||
Tuple: () => import_scale_ts8.Tuple, | ||
Twox128: () => Twox128, | ||
Twox256: () => Twox256, | ||
Twox64Concat: () => Twox64Concat, | ||
bitSequence: () => bitSequence2, | ||
Variant: () => Variant, | ||
Vector: () => import_scale_ts8.Vector, | ||
_Enum: () => _Enum, | ||
_void: () => import_scale_ts8._void, | ||
bitSequence: () => bitSequence, | ||
blockHeader: () => blockHeader, | ||
bool: () => import_scale_ts8.bool, | ||
char: () => char, | ||
compact: () => import_scale_ts8.compact, | ||
compactBn: () => compactBn, | ||
compactNumber: () => compactNumber, | ||
createCommonDescriptor: () => createCommonDescriptor, | ||
createCodec: () => import_scale_ts8.createCodec, | ||
createDecoder: () => import_scale_ts8.createDecoder, | ||
enhanceCodec: () => import_scale_ts8.enhanceCodec, | ||
enhanceDecoder: () => import_scale_ts8.enhanceDecoder, | ||
enhanceEncoder: () => import_scale_ts8.enhanceEncoder, | ||
fixedStr: () => fixedStr, | ||
getDescriptorCreator: () => getDescriptorCreator, | ||
getPalletCreator: () => getPalletCreator, | ||
fromBufferToBase58: () => fromBufferToBase58, | ||
getSs58AddressInfo: () => getSs58AddressInfo, | ||
h64: () => h64, | ||
i128: () => import_scale_ts8.i128, | ||
i16: () => import_scale_ts8.i16, | ||
i256: () => import_scale_ts8.i256, | ||
i32: () => import_scale_ts8.i32, | ||
i64: () => import_scale_ts8.i64, | ||
i8: () => import_scale_ts8.i8, | ||
metadata: () => metadata, | ||
selfDecoder: () => selfDecoder, | ||
selfEncoder: () => selfEncoder, | ||
v14: () => v14 | ||
str: () => import_scale_ts8.str, | ||
u128: () => import_scale_ts8.u128, | ||
u16: () => import_scale_ts8.u16, | ||
u256: () => import_scale_ts8.u256, | ||
u32: () => import_scale_ts8.u32, | ||
u64: () => import_scale_ts8.u64, | ||
u8: () => import_scale_ts8.u8, | ||
v15: () => v15 | ||
}); | ||
module.exports = __toCommonJS(src_exports); | ||
__reExport(src_exports, require("scale-ts"), module.exports); | ||
// src/codecs/index.ts | ||
var codecs_exports = {}; | ||
__export(codecs_exports, { | ||
AccountId: () => AccountId, | ||
Hex: () => Hex, | ||
OpaqueCodec: () => OpaqueCodec, | ||
Self: () => Self, | ||
bitSequence: () => bitSequence2, | ||
char: () => char, | ||
compactBn: () => compactBn, | ||
compactNumber: () => compactNumber, | ||
fixedStr: () => fixedStr, | ||
metadata: () => metadata, | ||
selfDecoder: () => selfDecoder, | ||
selfEncoder: () => selfEncoder, | ||
v14: () => v14 | ||
}); | ||
__reExport(codecs_exports, require("scale-ts")); | ||
// src/codecs/scale/AccountId.ts | ||
var import_scale_ts = require("scale-ts"); | ||
// src/codecs/AccountId.ts | ||
var import_scale_ts = require("scale-ts"); | ||
// src/utils/ss58-util.ts | ||
var import_base = require("@scure/base"); | ||
var import_blake2b = require("@noble/hashes/blake2b"); | ||
var import_base = require("@scure/base"); | ||
var SS58_PREFIX = new TextEncoder().encode("SS58PRE"); | ||
var CHECKSUM_LENGTH = 2; | ||
var getSs58AddressInfo = (address) => { | ||
try { | ||
const decoded = import_base.base58.decode(address); | ||
const prefixBytes = decoded.subarray(0, decoded[0] & 64 ? 2 : 1); | ||
const publicKey = decoded.subarray( | ||
prefixBytes.length, | ||
decoded.length - CHECKSUM_LENGTH | ||
); | ||
const checksum = decoded.subarray(prefixBytes.length + publicKey.length); | ||
const expectedChecksum = (0, import_blake2b.blake2b)( | ||
Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey), | ||
{ | ||
dkLen: 64 | ||
} | ||
).subarray(0, CHECKSUM_LENGTH); | ||
const isChecksumValid = checksum[0] === expectedChecksum[0] && checksum[1] === expectedChecksum[1]; | ||
if (!isChecksumValid) | ||
return { isValid: false }; | ||
return { | ||
isValid: true, | ||
ss58Format: prefixBytesToNumber(prefixBytes), | ||
publicKey: publicKey.slice() | ||
}; | ||
} catch (_) { | ||
return { isValid: false }; | ||
} | ||
}; | ||
var prefixBytesToNumber = (bytes) => { | ||
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); | ||
return dv.byteLength === 1 ? dv.getUint8(0) : dv.getUint16(0); | ||
}; | ||
var fromBufferToBase58 = (ss58Format) => { | ||
@@ -98,24 +164,13 @@ const prefixBytes = ss58Format < 64 ? Uint8Array.of(ss58Format) : Uint8Array.of( | ||
}; | ||
function fromBase58ToBuffer(nBytes, ss58Format) { | ||
// src/codecs/scale/AccountId.ts | ||
function fromBase58ToBuffer(nBytes, _ss58Format) { | ||
return (address) => { | ||
const decoded = import_base.base58.decode(address); | ||
const prefixBytes = decoded.subarray(0, decoded[0] & 64 ? 2 : 1); | ||
const publicKey = decoded.subarray( | ||
prefixBytes.length, | ||
decoded.length - CHECKSUM_LENGTH | ||
); | ||
const info = getSs58AddressInfo(address); | ||
if (!info.isValid) | ||
throw new Error("Invalid checksum"); | ||
const { publicKey } = info; | ||
if (publicKey.length !== nBytes) | ||
throw new Error("Invalid public key length"); | ||
const checksum = decoded.subarray(prefixBytes.length + publicKey.length); | ||
const expectedChecksum = (0, import_blake2b.blake2b)( | ||
Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey), | ||
{ | ||
dkLen: 64 | ||
} | ||
).subarray(0, CHECKSUM_LENGTH); | ||
if (checksum[0] !== expectedChecksum[0] || checksum[1] !== expectedChecksum[1]) | ||
throw new Error("Invalid checksum"); | ||
if (prefixBytesToNumber(prefixBytes) != ss58Format) | ||
throw new Error("Invalid SS58 prefix"); | ||
return publicKey.slice(); | ||
return publicKey; | ||
}; | ||
@@ -128,17 +183,114 @@ } | ||
); | ||
var prefixBytesToNumber = (bytes) => { | ||
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength); | ||
return dv.byteLength === 1 ? dv.getUint8(0) : dv.getUint16(0); | ||
}; | ||
// src/codecs/compact.ts | ||
// src/codecs/scale/Binary.ts | ||
var import_scale_ts2 = require("scale-ts"); | ||
var compactNumber = import_scale_ts2.compact; | ||
var compactBn = import_scale_ts2.compact; | ||
var import_utils = require("@polkadot-api/utils"); | ||
var textEncoder = new TextEncoder(); | ||
var textDecoder = new TextDecoder(); | ||
var _bytes, _hex, _str; | ||
var _Binary = class _Binary { | ||
constructor(data) { | ||
__privateAdd(this, _bytes, void 0); | ||
__privateAdd(this, _hex, null); | ||
__privateAdd(this, _str, null); | ||
__publicField(this, "asText", () => __privateGet(this, _str) === null ? __privateSet(this, _str, textDecoder.decode(__privateGet(this, _bytes))) : __privateGet(this, _str)); | ||
__publicField(this, "asHex", () => __privateGet(this, _hex) === null ? __privateSet(this, _hex, (0, import_utils.toHex)(__privateGet(this, _bytes))) : __privateGet(this, _hex)); | ||
__publicField(this, "asBytes", () => __privateGet(this, _bytes)); | ||
__privateSet(this, _bytes, data); | ||
} | ||
static fromText(input) { | ||
return new _Binary(textEncoder.encode(input)); | ||
} | ||
static fromHex(input) { | ||
return new _Binary((0, import_utils.fromHex)(input)); | ||
} | ||
static fromBytes(input) { | ||
return new _Binary(input); | ||
} | ||
}; | ||
_bytes = new WeakMap(); | ||
_hex = new WeakMap(); | ||
_str = new WeakMap(); | ||
var Binary = _Binary; | ||
var enc = (nBytes) => { | ||
const _enc = import_scale_ts2.Bytes.enc(nBytes); | ||
return (value) => _enc(value.asBytes()); | ||
}; | ||
var dec = (nBytes) => { | ||
const _dec = import_scale_ts2.Bytes.dec(nBytes); | ||
return (value) => Binary.fromBytes(_dec(value)); | ||
}; | ||
var Bin = (nBytes) => (0, import_scale_ts2.createCodec)(enc(nBytes), dec(nBytes)); | ||
Bin.enc = enc; | ||
Bin.dec = dec; | ||
// src/codecs/Self.ts | ||
// src/codecs/scale/bitSequence.ts | ||
var import_scale_ts4 = require("scale-ts"); | ||
// src/codecs/scale/compact.ts | ||
var import_scale_ts3 = require("scale-ts"); | ||
var compactNumber = import_scale_ts3.compact; | ||
var compactBn = import_scale_ts3.compact; | ||
// src/codecs/scale/bitSequence.ts | ||
var bitSequenceDecoder = (0, import_scale_ts4.createDecoder)((data) => { | ||
const bitsLen = compactNumber.dec(data); | ||
const bytesLen = Math.ceil(bitsLen / 8); | ||
const bytes = (0, import_scale_ts4.Bytes)(bytesLen).dec(data); | ||
return { bytes, bitsLen }; | ||
}); | ||
var bitSequenceEncoder = (input) => { | ||
if (input.bitsLen > input.bytes.length * 8) | ||
throw new Error( | ||
`Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})` | ||
); | ||
const lenEncoded = compactNumber.enc(input.bitsLen); | ||
const result = new Uint8Array(input.bytes.length + lenEncoded.length); | ||
result.set(lenEncoded, 0); | ||
result.set(input.bytes, lenEncoded.length); | ||
return result; | ||
}; | ||
var bitSequence = (0, import_scale_ts4.createCodec)(bitSequenceEncoder, bitSequenceDecoder); | ||
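The bitSequence codec writes a compact bit length followed by the packed bytes, and the encoder checks that enough bytes were supplied for the declared bit length. For example:

```ts
import { bitSequence } from "@polkadot-api/substrate-bindings"

// 10 bits packed into two bytes: compact(10) prefix + the raw bytes.
const encoded = bitSequence.enc({
  bitsLen: 10,
  bytes: Uint8Array.of(0b1010_1010, 0b0000_0011),
})
const decoded = bitSequence.dec(encoded) // { bitsLen: 10, bytes: Uint8Array [0xaa, 0x03] }
```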
// src/codecs/scale/char.ts | ||
var import_scale_ts5 = require("scale-ts"); | ||
var char = (0, import_scale_ts5.enhanceCodec)( | ||
import_scale_ts5.u8, | ||
(str5) => str5.charCodeAt(0), | ||
String.fromCharCode | ||
); | ||
// src/codecs/scale/Hex.ts | ||
var import_utils2 = require("@polkadot-api/utils"); | ||
var import_scale_ts6 = require("scale-ts"); | ||
var enc2 = (nBytes) => { | ||
const _enc = import_scale_ts6.Bytes.enc(nBytes); | ||
return (value) => _enc((0, import_utils2.fromHex)(value)); | ||
}; | ||
var dec2 = (nBytes) => { | ||
const _dec = import_scale_ts6.Bytes.dec(nBytes); | ||
return (value) => (0, import_utils2.toHex)(_dec(value)); | ||
}; | ||
var Hex = (nBytes) => (0, import_scale_ts6.createCodec)(enc2(nBytes), dec2(nBytes)); | ||
Hex.enc = enc2; | ||
Hex.dec = dec2; | ||
// src/codecs/scale/fixed-str.ts | ||
var import_scale_ts7 = require("scale-ts"); | ||
var textEncoder2 = new TextEncoder(); | ||
var textDecoder2 = new TextDecoder(); | ||
var fixedStr = (nBytes) => (0, import_scale_ts7.enhanceCodec)( | ||
(0, import_scale_ts7.Bytes)(nBytes), | ||
(str5) => textEncoder2.encode(str5), | ||
(bytes) => textDecoder2.decode(bytes) | ||
); | ||
// src/codecs/scale/re-exported.ts | ||
var import_scale_ts8 = require("scale-ts"); | ||
// src/codecs/scale/Self.ts | ||
var import_scale_ts9 = require("scale-ts"); | ||
var selfEncoder = (value) => { | ||
let cache = (x) => { | ||
const encoder = import_scale_ts3.Struct.enc({ self: value() }); | ||
const encoder = value(); | ||
cache = encoder; | ||
@@ -151,3 +303,3 @@ return encoder(x); | ||
let cache = (x) => { | ||
const decoder = import_scale_ts3.Struct.dec({ self: value() }); | ||
const decoder = value(); | ||
const result = decoder; | ||
@@ -159,3 +311,3 @@ cache = decoder; | ||
}; | ||
var Self = (value) => (0, import_scale_ts3.createCodec)( | ||
var Self = (value) => (0, import_scale_ts9.createCodec)( | ||
selfEncoder(() => value().enc), | ||
@@ -165,53 +317,103 @@ selfDecoder(() => value().dec) | ||
// src/codecs/Opaque.ts | ||
var import_scale_ts4 = require("scale-ts"); | ||
var OpaqueDecoder = (inner, len = compactNumber.dec) => (0, import_scale_ts4.createDecoder)((bytes) => { | ||
const length = len(bytes); | ||
const innerBytes = (0, import_scale_ts4.Bytes)(length).dec(bytes); | ||
let _cachedValue; | ||
return { | ||
length, | ||
inner: () => _cachedValue = _cachedValue || inner(innerBytes) | ||
}; | ||
// src/codecs/scale/Variant.ts | ||
var import_scale_ts10 = require("scale-ts"); | ||
var import_utils3 = require("@polkadot-api/utils"); | ||
var _Enum = new Proxy( | ||
{}, | ||
{ | ||
get(_, prop) { | ||
return (value) => Enum(prop, value); | ||
} | ||
} | ||
); | ||
var Enum = (_type, _value) => ({ | ||
as: (type) => { | ||
if (type !== _type) | ||
throw new Error(`Enum.as(${type}) used with actual type ${_type}`); | ||
return _value; | ||
}, | ||
is: (type) => type === _type, | ||
type: _type, | ||
value: _value | ||
}); | ||
var OpaqueEncoder = (inner, len = compactNumber.enc) => (input) => { | ||
const lenBytes = len(input.length); | ||
const result = new Uint8Array(lenBytes.length + input.length); | ||
result.set(lenBytes, 0); | ||
result.set(inner(input.inner()), lenBytes.length); | ||
return result; | ||
var VariantEnc = (...args) => { | ||
const enc3 = import_scale_ts10.Enum.enc(...args); | ||
return (v) => enc3({ tag: v.type, value: v.value }); | ||
}; | ||
var OpaqueCodec = (inner, len = compactNumber) => (0, import_scale_ts4.createCodec)( | ||
OpaqueEncoder(inner.enc, len.enc), | ||
OpaqueDecoder(inner.dec, len.dec) | ||
var VariantDec = (...args) => { | ||
const dec3 = import_scale_ts10.Enum.dec(...args); | ||
return (v) => { | ||
const { tag, value } = dec3(v); | ||
return Enum(tag, value); | ||
}; | ||
}; | ||
var Variant = (inner, ...args) => (0, import_scale_ts10.createCodec)( | ||
VariantEnc( | ||
(0, import_utils3.mapObject)(inner, ([encoder]) => encoder), | ||
...args | ||
), | ||
VariantDec( | ||
(0, import_utils3.mapObject)(inner, ([, decoder]) => decoder), | ||
...args | ||
) | ||
); | ||
OpaqueCodec.enc = OpaqueEncoder; | ||
OpaqueCodec.dec = OpaqueDecoder; | ||
Variant.enc = VariantEnc; | ||
Variant.dec = VariantDec; | ||
// src/codecs/metadata/v14/v14.ts | ||
var import_scale_ts8 = require("scale-ts"); | ||
// src/codecs/blockHeader.ts | ||
var textEncoder3 = new TextEncoder(); | ||
var textDecoder3 = new TextDecoder(); | ||
var fourChars = (0, import_scale_ts8.enhanceCodec)( | ||
(0, import_scale_ts8.Bytes)(4), | ||
textEncoder3.encode.bind(textEncoder3), | ||
textDecoder3.decode.bind(textDecoder3) | ||
); | ||
var diggestVal = (0, import_scale_ts8.Struct)({ | ||
engine: fourChars, | ||
payload: Hex() | ||
}); | ||
var diggest = Variant( | ||
{ | ||
consensus: diggestVal, | ||
seal: diggestVal, | ||
preRuntime: diggestVal, | ||
runtimeUpdated: import_scale_ts8._void | ||
}, | ||
[4, 5, 6, 8] | ||
); | ||
var hex32 = Hex(32); | ||
var blockHeader = (0, import_scale_ts8.Struct)({ | ||
parentHash: hex32, | ||
number: compactNumber, | ||
stateRoot: hex32, | ||
extrinsicRoot: hex32, | ||
digests: (0, import_scale_ts8.Vector)(diggest) | ||
}); | ||
// src/codecs/metadata/v14/lookup.ts | ||
var import_scale_ts5 = require("scale-ts"); | ||
var oStr = (0, import_scale_ts5.Option)(import_scale_ts5.str); | ||
var strs = (0, import_scale_ts5.Vector)(import_scale_ts5.str); | ||
var primitive = (0, import_scale_ts5.Enum)({ | ||
bool: import_scale_ts5._void, | ||
char: import_scale_ts5._void, | ||
str: import_scale_ts5._void, | ||
u8: import_scale_ts5._void, | ||
u16: import_scale_ts5._void, | ||
u32: import_scale_ts5._void, | ||
u64: import_scale_ts5._void, | ||
u128: import_scale_ts5._void, | ||
u256: import_scale_ts5._void, | ||
i8: import_scale_ts5._void, | ||
i16: import_scale_ts5._void, | ||
i32: import_scale_ts5._void, | ||
i64: import_scale_ts5._void, | ||
i128: import_scale_ts5._void, | ||
i256: import_scale_ts5._void | ||
// src/codecs/metadata/v15/v15.ts | ||
var import_scale_ts13 = require("scale-ts"); | ||
// src/codecs/metadata/v15/lookup.ts | ||
var import_scale_ts11 = require("scale-ts"); | ||
var oStr = (0, import_scale_ts11.Option)(import_scale_ts11.str); | ||
var strs = (0, import_scale_ts11.Vector)(import_scale_ts11.str); | ||
var primitive = (0, import_scale_ts11.Enum)({ | ||
bool: import_scale_ts11._void, | ||
char: import_scale_ts11._void, | ||
str: import_scale_ts11._void, | ||
u8: import_scale_ts11._void, | ||
u16: import_scale_ts11._void, | ||
u32: import_scale_ts11._void, | ||
u64: import_scale_ts11._void, | ||
u128: import_scale_ts11._void, | ||
u256: import_scale_ts11._void, | ||
i8: import_scale_ts11._void, | ||
i16: import_scale_ts11._void, | ||
i32: import_scale_ts11._void, | ||
i64: import_scale_ts11._void, | ||
i128: import_scale_ts11._void, | ||
i256: import_scale_ts11._void | ||
}); | ||
var fields = (0, import_scale_ts5.Vector)( | ||
(0, import_scale_ts5.Struct)({ | ||
var fields = (0, import_scale_ts11.Vector)( | ||
(0, import_scale_ts11.Struct)({ | ||
name: oStr, | ||
@@ -223,19 +425,19 @@ type: compactNumber, | ||
); | ||
var arr = (0, import_scale_ts5.Struct)({ | ||
len: import_scale_ts5.u32, | ||
var arr = (0, import_scale_ts11.Struct)({ | ||
len: import_scale_ts11.u32, | ||
type: compactNumber | ||
}); | ||
var bitSequence = (0, import_scale_ts5.Struct)({ | ||
var bitSequence2 = (0, import_scale_ts11.Struct)({ | ||
bitStoreType: compactNumber, | ||
bitOrderType: compactNumber | ||
}); | ||
var variant = (0, import_scale_ts5.Vector)( | ||
(0, import_scale_ts5.Struct)({ | ||
name: import_scale_ts5.str, | ||
var variant = (0, import_scale_ts11.Vector)( | ||
(0, import_scale_ts11.Struct)({ | ||
name: import_scale_ts11.str, | ||
fields, | ||
index: import_scale_ts5.u8, | ||
index: import_scale_ts11.u8, | ||
docs: strs | ||
}) | ||
); | ||
var def = (0, import_scale_ts5.Enum)({ | ||
var def = (0, import_scale_ts11.Enum)({ | ||
composite: fields, | ||
@@ -245,14 +447,13 @@ variant, | ||
array: arr, | ||
tuple: (0, import_scale_ts5.Vector)(compactNumber), | ||
tuple: (0, import_scale_ts11.Vector)(compactNumber), | ||
primitive, | ||
compact: compactNumber, | ||
bitSequence, | ||
historicMetaCompat: import_scale_ts5.str | ||
bitSequence: bitSequence2 | ||
}); | ||
var param = (0, import_scale_ts5.Struct)({ | ||
name: import_scale_ts5.str, | ||
type: (0, import_scale_ts5.Option)(compactNumber) | ||
var param = (0, import_scale_ts11.Struct)({ | ||
name: import_scale_ts11.str, | ||
type: (0, import_scale_ts11.Option)(compactNumber) | ||
}); | ||
var params = (0, import_scale_ts5.Vector)(param); | ||
var entry = (0, import_scale_ts5.Struct)({ | ||
var params = (0, import_scale_ts11.Vector)(param); | ||
var entry = (0, import_scale_ts11.Struct)({ | ||
id: compactNumber, | ||
@@ -264,32 +465,17 @@ path: strs, | ||
}); | ||
var lookup = (0, import_scale_ts5.Vector)(entry); | ||
var lookup = (0, import_scale_ts11.Vector)(entry); | ||
// src/codecs/Hex.ts | ||
var import_utils = require("@polkadot-api/utils"); | ||
var import_scale_ts6 = require("scale-ts"); | ||
var enc = (nBytes) => { | ||
const _enc = import_scale_ts6.Bytes.enc(nBytes); | ||
return (value) => _enc((0, import_utils.fromHex)(value)); | ||
}; | ||
var dec = (nBytes) => { | ||
const _dec = import_scale_ts6.Bytes.dec(nBytes); | ||
return (value) => (0, import_utils.toHex)(_dec(value)); | ||
}; | ||
var Hex = (nBytes) => (0, import_scale_ts6.createCodec)(enc(nBytes), dec(nBytes)); | ||
Hex.enc = enc; | ||
Hex.dec = dec; | ||
// src/codecs/metadata/v14/pallets.ts | ||
var import_scale_ts7 = require("scale-ts"); | ||
var hashType = (0, import_scale_ts7.Enum)({ | ||
Blake2128: import_scale_ts7._void, | ||
Blake2256: import_scale_ts7._void, | ||
Blake2128Concat: import_scale_ts7._void, | ||
Twox128: import_scale_ts7._void, | ||
Twox256: import_scale_ts7._void, | ||
Twox64Concat: import_scale_ts7._void, | ||
Identity: import_scale_ts7._void | ||
// src/codecs/metadata/v15/pallets.ts | ||
var import_scale_ts12 = require("scale-ts"); | ||
var hashType = (0, import_scale_ts12.Enum)({ | ||
Blake2128: import_scale_ts12._void, | ||
Blake2256: import_scale_ts12._void, | ||
Blake2128Concat: import_scale_ts12._void, | ||
Twox128: import_scale_ts12._void, | ||
Twox256: import_scale_ts12._void, | ||
Twox64Concat: import_scale_ts12._void, | ||
Identity: import_scale_ts12._void | ||
}); | ||
var hashers = (0, import_scale_ts7.Vector)(hashType); | ||
var storageMap = (0, import_scale_ts7.Struct)({ | ||
var hashers = (0, import_scale_ts12.Vector)(hashType); | ||
var storageMap = (0, import_scale_ts12.Struct)({ | ||
hashers, | ||
@@ -299,44 +485,66 @@ key: compactNumber, | ||
}); | ||
var storageItem = (0, import_scale_ts7.Struct)({ | ||
name: import_scale_ts7.str, | ||
modifier: import_scale_ts7.u8, | ||
type: (0, import_scale_ts7.Enum)({ | ||
var storageItem = (0, import_scale_ts12.Struct)({ | ||
name: import_scale_ts12.str, | ||
modifier: import_scale_ts12.u8, | ||
type: (0, import_scale_ts12.Enum)({ | ||
plain: compactNumber, | ||
map: storageMap | ||
}), | ||
fallback: (0, import_scale_ts7.Vector)(import_scale_ts7.u8), | ||
docs: (0, import_scale_ts7.Vector)(import_scale_ts7.str) | ||
fallback: Hex(), | ||
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str) | ||
}); | ||
var storage = (0, import_scale_ts7.Option)( | ||
(0, import_scale_ts7.Struct)({ | ||
prefix: import_scale_ts7.str, | ||
items: (0, import_scale_ts7.Vector)(storageItem) | ||
var storage = (0, import_scale_ts12.Option)( | ||
(0, import_scale_ts12.Struct)({ | ||
prefix: import_scale_ts12.str, | ||
items: (0, import_scale_ts12.Vector)(storageItem) | ||
}) | ||
); | ||
var pallets = (0, import_scale_ts7.Vector)( | ||
(0, import_scale_ts7.Struct)({ | ||
name: import_scale_ts7.str, | ||
var pallets = (0, import_scale_ts12.Vector)( | ||
(0, import_scale_ts12.Struct)({ | ||
name: import_scale_ts12.str, | ||
storage, | ||
calls: (0, import_scale_ts7.Option)(compactNumber), | ||
events: (0, import_scale_ts7.Option)(compactNumber), | ||
constants: (0, import_scale_ts7.Vector)( | ||
(0, import_scale_ts7.Struct)({ | ||
name: import_scale_ts7.str, | ||
calls: (0, import_scale_ts12.Option)(compactNumber), | ||
events: (0, import_scale_ts12.Option)(compactNumber), | ||
constants: (0, import_scale_ts12.Vector)( | ||
(0, import_scale_ts12.Struct)({ | ||
name: import_scale_ts12.str, | ||
type: compactNumber, | ||
value: Hex(), | ||
docs: (0, import_scale_ts7.Vector)(import_scale_ts7.str) | ||
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str) | ||
}) | ||
), | ||
errors: (0, import_scale_ts7.Option)(compactNumber), | ||
index: import_scale_ts7.u8 | ||
errors: (0, import_scale_ts12.Option)(compactNumber), | ||
index: import_scale_ts12.u8, | ||
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str) | ||
}) | ||
); | ||
// src/codecs/metadata/v14/v14.ts | ||
var extrinsic = (0, import_scale_ts8.Struct)({ | ||
type: compactNumber, | ||
version: import_scale_ts8.u8, | ||
signedExtensions: (0, import_scale_ts8.Vector)( | ||
(0, import_scale_ts8.Struct)({ | ||
identifier: import_scale_ts8.str, | ||
// src/codecs/metadata/v15/v15.ts | ||
var docs = (0, import_scale_ts13.Vector)(import_scale_ts13.str); | ||
var runtimeApi = (0, import_scale_ts13.Struct)({ | ||
name: import_scale_ts13.str, | ||
methods: (0, import_scale_ts13.Vector)( | ||
(0, import_scale_ts13.Struct)({ | ||
name: import_scale_ts13.str, | ||
inputs: (0, import_scale_ts13.Vector)( | ||
(0, import_scale_ts13.Struct)({ | ||
name: import_scale_ts13.str, | ||
type: compactNumber | ||
}) | ||
), | ||
output: compactNumber, | ||
docs | ||
}) | ||
), | ||
docs | ||
}); | ||
var extrinsic = (0, import_scale_ts13.Struct)({ | ||
version: import_scale_ts13.u8, | ||
address: compactNumber, | ||
call: compactNumber, | ||
signature: compactNumber, | ||
extra: compactNumber, | ||
signedExtensions: (0, import_scale_ts13.Vector)( | ||
(0, import_scale_ts13.Struct)({ | ||
identifier: import_scale_ts13.str, | ||
type: compactNumber, | ||
@@ -347,21 +555,28 @@ additionalSigned: compactNumber | ||
}); | ||
var v14 = (0, import_scale_ts8.Struct)({ | ||
var v15 = (0, import_scale_ts13.Struct)({ | ||
lookup, | ||
pallets, | ||
extrinsic, | ||
type: compactNumber | ||
type: compactNumber, | ||
apis: (0, import_scale_ts13.Vector)(runtimeApi), | ||
outerEnums: (0, import_scale_ts13.Struct)({ | ||
call: compactNumber, | ||
event: compactNumber, | ||
error: compactNumber | ||
}), | ||
custom: (0, import_scale_ts13.Vector)((0, import_scale_ts13.Tuple)(import_scale_ts13.str, (0, import_scale_ts13.Struct)({ type: compactNumber, value: Hex() }))) | ||
}); | ||
// src/codecs/metadata/metadata.ts | ||
var import_scale_ts9 = require("scale-ts"); | ||
var import_scale_ts14 = require("scale-ts"); | ||
var unsupportedFn = () => { | ||
throw new Error("Unsupported metadata version!"); | ||
}; | ||
var unsupported = (0, import_scale_ts9.createCodec)( | ||
var unsupported = (0, import_scale_ts14.createCodec)( | ||
unsupportedFn, | ||
unsupportedFn | ||
); | ||
var metadata = (0, import_scale_ts9.Struct)({ | ||
magicNumber: import_scale_ts9.u32, | ||
metadata: (0, import_scale_ts9.Enum)({ | ||
var metadata = (0, import_scale_ts14.Struct)({ | ||
magicNumber: import_scale_ts14.u32, | ||
metadata: (0, import_scale_ts14.Enum)({ | ||
v0: unsupported, | ||
@@ -381,50 +596,9 @@ v1: unsupported, | ||
v13: unsupported, | ||
v14 | ||
v14: unsupported, | ||
v15 | ||
}) | ||
}); | ||
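The metadata codec reads the magic number and dispatches on the version tag; in this snapshot only v15 decodes, every other version throws. A sketch, assuming metadataHex is a hex-encoded metadata blob obtained elsewhere:

```ts
import { metadata, type V15 } from "@polkadot-api/substrate-bindings"

declare const metadataHex: string // assumed: SCALE-encoded metadata as hex

const decoded = metadata.dec(metadataHex)
if (decoded.metadata.tag === "v15") {
  const v15: V15 = decoded.metadata.value
  v15.pallets.forEach((p) => console.log(p.name, p.index))
}
```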
// src/codecs/bitSequence.ts | ||
var import_scale_ts10 = require("scale-ts"); | ||
var bitSequenceDecoder = (0, import_scale_ts10.createDecoder)((data) => { | ||
const bitsLen = compactNumber.dec(data); | ||
const bytesLen = Math.ceil(bitsLen / 8); | ||
const bytes = (0, import_scale_ts10.Bytes)(bytesLen).dec(data); | ||
return { bytes, bitsLen }; | ||
}); | ||
var bitSequenceEncoder = (input) => { | ||
if (input.bitsLen > input.bytes.length * 8) | ||
throw new Error( | ||
`Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})` | ||
); | ||
const lenEncoded = compactNumber.enc(input.bitsLen); | ||
const result = new Uint8Array(input.bytes.length + lenEncoded.length); | ||
result.set(lenEncoded, 0); | ||
result.set(input.bytes, lenEncoded.length); | ||
return result; | ||
}; | ||
var bitSequence2 = (0, import_scale_ts10.createCodec)(bitSequenceEncoder, bitSequenceDecoder); | ||
// src/codecs/char.ts | ||
var import_scale_ts11 = require("scale-ts"); | ||
var char = (0, import_scale_ts11.enhanceCodec)( | ||
import_scale_ts11.u8, | ||
(str4) => str4.charCodeAt(0), | ||
String.fromCharCode | ||
); | ||
// src/codecs/fixed-str.ts | ||
var import_scale_ts12 = require("scale-ts"); | ||
var textEncoder = new TextEncoder(); | ||
var textDecoder = new TextDecoder(); | ||
var fixedStr = (nBytes) => (0, import_scale_ts12.enhanceCodec)( | ||
(0, import_scale_ts12.Bytes)(nBytes), | ||
(str4) => textEncoder.encode(str4), | ||
(bytes) => textDecoder.decode(bytes) | ||
); | ||
// src/index.ts | ||
__reExport(src_exports, codecs_exports, module.exports); | ||
// src/hashes/blake2.ts | ||
var import_utils2 = require("@polkadot-api/utils"); | ||
var import_utils4 = require("@polkadot-api/utils"); | ||
var import_blake2b2 = require("@noble/hashes/blake2b"); | ||
@@ -435,3 +609,3 @@ var len32 = { dkLen: 32 }; | ||
var Blake2128 = (encoded) => (0, import_blake2b2.blake2b)(encoded, len16); | ||
var Blake2128Concat = (encoded) => (0, import_utils2.mergeUint8)(Blake2128(encoded), encoded); | ||
var Blake2128Concat = (encoded) => (0, import_utils4.mergeUint8)(Blake2128(encoded), encoded); | ||
@@ -442,4 +616,4 @@ // src/hashes/identity.ts | ||
// src/hashes/twoX.ts | ||
var import_utils3 = require("@polkadot-api/utils"); | ||
var import_scale_ts13 = require("scale-ts"); | ||
var import_utils5 = require("@polkadot-api/utils"); | ||
var import_scale_ts15 = require("scale-ts"); | ||
@@ -597,15 +771,15 @@ // src/hashes/h64.ts | ||
}; | ||
var Twox64Concat = (encoded) => (0, import_utils3.mergeUint8)(import_scale_ts13.u64.enc(h64(encoded)), encoded); | ||
var Twox64Concat = (encoded) => (0, import_utils5.mergeUint8)(import_scale_ts15.u64.enc(h64(encoded)), encoded); | ||
// src/storage.ts | ||
var import_utils4 = require("@polkadot-api/utils"); | ||
var textEncoder2 = new TextEncoder(); | ||
var import_utils6 = require("@polkadot-api/utils"); | ||
var textEncoder4 = new TextEncoder(); | ||
var Storage = (pallet) => { | ||
const palledEncoded = Twox128(textEncoder2.encode(pallet)); | ||
return (name, dec2, ...encoders) => { | ||
const palletItemEncoded = (0, import_utils4.mergeUint8)( | ||
const palledEncoded = Twox128(textEncoder4.encode(pallet)); | ||
return (name, dec3, ...encoders) => { | ||
const palletItemEncoded = (0, import_utils6.mergeUint8)( | ||
palledEncoded, | ||
Twox128(textEncoder2.encode(name)) | ||
Twox128(textEncoder4.encode(name)) | ||
); | ||
const palletItemEncodedHex = (0, import_utils4.toHex)(palletItemEncoded); | ||
const palletItemEncodedHex = (0, import_utils6.toHex)(palletItemEncoded); | ||
const bytesToSkip = encoders.map((e) => e[1]).map((x) => { | ||
@@ -638,10 +812,10 @@ if (x === Identity) | ||
const fns = encoders.map( | ||
([{ enc: enc3 }, hash]) => (val) => hash(enc3(val)) | ||
([{ enc: enc4 }, hash]) => (val) => hash(enc4(val)) | ||
); | ||
const enc2 = (...args) => (0, import_utils4.toHex)( | ||
(0, import_utils4.mergeUint8)(palletItemEncoded, ...args.map((val, idx) => fns[idx](val))) | ||
const enc3 = (...args) => (0, import_utils6.toHex)( | ||
(0, import_utils6.mergeUint8)(palletItemEncoded, ...args.map((val, idx) => fns[idx](val))) | ||
); | ||
return { | ||
enc: enc2, | ||
dec: dec2, | ||
enc: enc3, | ||
dec: dec3, | ||
keyDecoder | ||
@@ -651,38 +825,2 @@ }; | ||
}; | ||
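Storage builds storage-key encoders: the pallet and entry names are Twox128-hashed and concatenated, and each map key is encoded with its codec and passed through the chosen hasher. A hypothetical sketch for a map keyed by AccountId under Blake2_128Concat (the "Balances.FreeBalance" entry and the address are illustrative):

```ts
import { AccountId, Blake2128Concat, Storage } from "@polkadot-api/substrate-bindings"
import { u128 } from "scale-ts"

const balancesPallet = Storage("Balances")
const freeBalance = balancesPallet(
  "FreeBalance",
  u128.dec,                       // decoder for the stored value
  [AccountId(), Blake2128Concat], // key codec + hasher pair
)

// Produces the 0x-prefixed hex storage key for this account;
// freeBalance.dec decodes a raw storage value with the supplied decoder.
const key = freeBalance.enc("5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY")
```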
// src/descriptors.ts | ||
var createCommonDescriptor = (checksum, pallet, name) => ({ | ||
checksum, | ||
pallet, | ||
name | ||
}); | ||
var getDescriptorCreator = (type, checksum, pallet, name, codecs) => ({ | ||
type, | ||
props: { checksum, pallet, name }, | ||
codecs | ||
}); | ||
var getPalletCreator = (pallet) => { | ||
const getPayloadDescriptor = (type, checksum, name, codecs) => ({ | ||
type, | ||
props: { checksum, pallet, name }, | ||
codecs | ||
}); | ||
const getStorageDescriptor = (checksum, name, codecs) => ({ | ||
type: "storage", | ||
props: { checksum, pallet, name }, | ||
codecs | ||
}); | ||
const getTxDescriptor = (checksum, name, events, errors, codecs) => ({ | ||
type: "tx", | ||
props: { checksum, pallet, name }, | ||
codecs, | ||
events, | ||
errors | ||
}); | ||
return { | ||
getPayloadDescriptor, | ||
getStorageDescriptor, | ||
getTxDescriptor | ||
}; | ||
}; | ||
//# sourceMappingURL=index.js.map |
import * as scale_ts from 'scale-ts'; | ||
import { Codec, Encoder, Decoder, CodecType } from 'scale-ts'; | ||
export * from 'scale-ts'; | ||
import { Codec, Encoder, Decoder, StringRecord, CodecType, EncoderType, DecoderType } from 'scale-ts'; | ||
export { Bytes, Codec, CodecType, Decoder, DecoderType, Encoder, EncoderType, Option, Result, ResultPayload, StringRecord, Struct, Tuple, Vector, _void, bool, compact, createCodec, createDecoder, enhanceCodec, enhanceDecoder, enhanceEncoder, i128, i16, i256, i32, i64, i8, str, u128, u16, u256, u32, u64, u8 } from 'scale-ts'; | ||
type SS58String = string & { | ||
__SS58String: unknown; | ||
__SS58String?: unknown; | ||
}; | ||
type SS58AddressInfo = { | ||
isValid: false; | ||
} | { | ||
isValid: true; | ||
ss58Format: number; | ||
publicKey: Uint8Array; | ||
}; | ||
declare const getSs58AddressInfo: (address: SS58String) => SS58AddressInfo; | ||
declare const fromBufferToBase58: (ss58Format: number) => (publicKey: Uint8Array) => SS58String; | ||
declare const AccountId: (ss58Format?: number, nBytes?: 32 | 33) => scale_ts.Codec<SS58String>; | ||
type HexString = string & { | ||
__hexString?: unknown; | ||
}; | ||
declare const Hex: { | ||
(nBytes?: number): Codec<HexString>; | ||
enc: (nBytes?: number) => Encoder<HexString>; | ||
dec: (nBytes?: number) => Decoder<HexString>; | ||
}; | ||
declare class Binary { | ||
#private; | ||
constructor(data: Uint8Array); | ||
asText: () => string; | ||
asHex: () => string; | ||
asBytes: () => Uint8Array; | ||
static fromText(input: string): Binary; | ||
static fromHex(input: HexString): Binary; | ||
static fromBytes(input: Uint8Array): Binary; | ||
} | ||
declare const Bin: { | ||
(nBytes?: number): Codec<Binary>; | ||
enc: (nBytes?: number) => Encoder<Binary>; | ||
dec: (nBytes?: number) => Decoder<Binary>; | ||
}; | ||
interface BitSequence { | ||
bitsLen: number; | ||
bytes: Uint8Array; | ||
} | ||
declare const bitSequence: scale_ts.Codec<BitSequence>; | ||
declare const char: scale_ts.Codec<string>; | ||
declare const compactNumber: Codec<number>; | ||
declare const compactBn: Codec<bigint>; | ||
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<{ | ||
self: T; | ||
}>; | ||
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<{ | ||
self: T; | ||
}>; | ||
declare const Self: <T>(value: () => Codec<T>) => Codec<{ | ||
self: T; | ||
}>; | ||
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>; | ||
type OpaqueValue<T> = { | ||
length: number; | ||
inner: () => T; | ||
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<T>; | ||
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<T>; | ||
declare const Self: <T>(value: () => Codec<T>) => Codec<T>; | ||
type Tuple<T, N extends number> = readonly [T, ...T[]] & { | ||
length: N; | ||
}; | ||
declare const OpaqueCodec: { | ||
<T>(inner: Codec<T>, len?: Codec<number>): Codec<OpaqueValue<T>>; | ||
enc: <T_1>(inner: Encoder<T_1>, len?: Encoder<number>) => Encoder<OpaqueValue<T_1>>; | ||
dec: <T_2>(inner: Decoder<T_2>, len?: Decoder<number>) => Decoder<OpaqueValue<T_2>>; | ||
type Push<T extends any[], V> = [...T, V]; | ||
type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never; | ||
type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R ? R : never; | ||
type TuplifyUnion<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = true extends N ? [] : Push<TuplifyUnion<Exclude<T, L>>, L>; | ||
type RestrictedLenTuple<T, O extends StringRecord<any>> = Tuple<T, TuplifyUnion<keyof O> extends Tuple<any, infer V> ? V : 0>; | ||
type ExtractEnumValue<T extends { | ||
type: string; | ||
value?: any; | ||
}, K extends string> = T extends { | ||
type: K; | ||
value: infer R; | ||
} ? R : never; | ||
interface Discriminant<T extends { | ||
type: string; | ||
value?: any; | ||
}> { | ||
is<K extends T["type"]>(this: Enum<T>, type: K): this is Enum<{ | ||
type: K; | ||
value: ExtractEnumValue<T, K>; | ||
}>; | ||
as<K extends T["type"]>(type: K): ExtractEnumValue<T, K>; | ||
} | ||
declare const _Enum: {}; | ||
type Enum<T extends { | ||
type: string; | ||
value?: any; | ||
}> = T & Discriminant<T>; | ||
declare const Enum: <T extends { | ||
type: string; | ||
value?: any; | ||
}, Key extends T["type"]>(type: Key, ...args: ExtractEnumValue<T, Key> extends undefined ? [] : [value: ExtractEnumValue<T, Key>]) => Enum<ExtractEnumValue<T, Key> extends undefined ? T : ExtractEnumValue<T, Key> extends never ? T : { | ||
type: Key; | ||
value: ExtractEnumValue<T, Key>; | ||
}>; | ||
declare const Variant: { | ||
<O extends StringRecord<Codec<any>>>(inner: O, indexes?: RestrictedLenTuple<number, O> | undefined): Codec<Enum<{ [K in keyof O]: K extends string ? { | ||
type: K; | ||
value: CodecType<O[K]>; | ||
} : never; }[keyof O]>>; | ||
enc: <O_1 extends StringRecord<Encoder<any>>>(inner: O_1, x?: RestrictedLenTuple<number, O_1> | undefined) => Encoder<Enum<{ [K_1 in keyof O_1]: K_1 extends string ? { | ||
type: K_1; | ||
value: EncoderType<O_1[K_1]>; | ||
} : never; }[keyof O_1]>>; | ||
dec: <O_2 extends StringRecord<Decoder<any>>>(inner: O_2, x?: RestrictedLenTuple<number, O_2> | undefined) => Decoder<Enum<{ [K_2 in keyof O_2]: K_2 extends string ? { | ||
type: K_2; | ||
value: DecoderType<O_2[K_2]>; | ||
} : never; }[keyof O_2]>>; | ||
}; | ||
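Variant produces Enum values carrying the is/as discriminant helpers declared above. A sketch with a hypothetical two-variant codec (index 0 -> ok(u32), index 1 -> err(str)):

import { Variant, u32, str, CodecType } from "@polkadot-api/substrate-bindings"

const myResult = Variant({ ok: u32, err: str })
type MyResult = CodecType<typeof myResult>

// 0x00 selects "ok", followed by a little-endian u32.
const decoded: MyResult = myResult.dec(new Uint8Array([0, 7, 0, 0, 0]))

if (decoded.is("ok")) {
  console.log(decoded.as("ok") + 1) // 8 – `is` narrows the type, `as` extracts the value
}

myResult.enc(decoded) // values round-trip through the codec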
declare const blockHeader: Codec<{ | ||
parentHash: HexString; | ||
number: number; | ||
stateRoot: HexString; | ||
extrinsicRoot: HexString; | ||
digests: Enum<{ | ||
type: "consensus"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "seal"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "preRuntime"; | ||
value: { | ||
engine: string; | ||
payload: HexString; | ||
}; | ||
} | { | ||
type: "runtimeUpdated"; | ||
value: undefined; | ||
}>[]; | ||
}>; | ||
type BlockHeader = CodecType<typeof blockHeader>; | ||
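A sketch of decoding a header with the blockHeader codec. The input here is a placeholder for a SCALE-encoded header fetched from a node (e.g. via the chainHead group of RPCs):

import { blockHeader, type BlockHeader } from "@polkadot-api/substrate-bindings"

declare const rawHeader: string // placeholder: hex of a SCALE-encoded header

const header: BlockHeader = blockHeader.dec(rawHeader)
console.log(header.number, header.parentHash, header.stateRoot)

for (const digest of header.digests) {
  if (digest.is("preRuntime")) console.log(digest.value.engine) // e.g. "BABE"
}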
declare const lookup: scale_ts.Codec<{ | ||
@@ -38,3 +149,3 @@ id: number; | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -44,5 +155,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -55,5 +166,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -133,5 +244,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -142,70 +250,8 @@ docs: string[]; | ||
type HexString = string & { | ||
__hexString: unknown; | ||
}; | ||
declare const Hex: { | ||
(nBytes?: number): Codec<HexString>; | ||
enc: (nBytes?: number) => Encoder<HexString>; | ||
dec: (nBytes?: number) => Decoder<HexString>; | ||
}; | ||
declare const pallets: scale_ts.Codec<{ | ||
name: string; | ||
storage: void | { | ||
prefix: string; | ||
items: { | ||
name: string; | ||
modifier: number; | ||
type: { | ||
tag: "map"; | ||
value: { | ||
hashers: ({ | ||
tag: "Blake2128"; | ||
value: undefined; | ||
} | { | ||
tag: "Blake2256"; | ||
value: undefined; | ||
} | { | ||
tag: "Blake2128Concat"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox128"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox256"; | ||
value: undefined; | ||
} | { | ||
tag: "Twox64Concat"; | ||
value: undefined; | ||
} | { | ||
tag: "Identity"; | ||
value: undefined; | ||
})[]; | ||
key: number; | ||
value: number; | ||
}; | ||
} | { | ||
tag: "plain"; | ||
value: number; | ||
}; | ||
fallback: number[]; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
constants: { | ||
name: string; | ||
type: number; | ||
value: HexString; | ||
docs: string[]; | ||
}[]; | ||
errors: number | void | undefined; | ||
index: number; | ||
}[]>; | ||
type V14Pallets = CodecType<typeof pallets>; | ||
declare const extrinsic: scale_ts.Codec<{ | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -217,4 +263,4 @@ identifier: string; | ||
}>; | ||
type V14Extrinsic = CodecType<typeof extrinsic>; | ||
declare const v14: scale_ts.Codec<{ | ||
type V15Extrinsic = CodecType<typeof extrinsic>; | ||
declare const v15: scale_ts.Codec<{ | ||
lookup: { | ||
@@ -225,3 +271,3 @@ id: number; | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -231,5 +277,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -242,5 +288,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -320,5 +366,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -329,3 +372,3 @@ docs: string[]; | ||
name: string; | ||
storage: void | { | ||
storage: { | ||
prefix: string; | ||
@@ -367,8 +410,8 @@ items: { | ||
}; | ||
fallback: number[]; | ||
fallback: HexString; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
calls: number | undefined; | ||
events: number | undefined; | ||
constants: { | ||
@@ -380,8 +423,12 @@ name: string; | ||
}[]; | ||
errors: number | void | undefined; | ||
errors: number | undefined; | ||
index: number; | ||
docs: string[]; | ||
}[]; | ||
extrinsic: { | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -394,4 +441,26 @@ identifier: string; | ||
type: number; | ||
apis: { | ||
name: string; | ||
methods: { | ||
name: string; | ||
inputs: { | ||
name: string; | ||
type: number; | ||
}[]; | ||
output: number; | ||
docs: string[]; | ||
}[]; | ||
docs: string[]; | ||
}[]; | ||
outerEnums: { | ||
call: number; | ||
event: number; | ||
error: number; | ||
}; | ||
custom: [string, { | ||
type: number; | ||
value: HexString; | ||
}][]; | ||
}>; | ||
type V14 = CodecType<typeof v14>; | ||
type V15 = CodecType<typeof v15>; | ||
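The metadata codec below wraps the versioned enum; only v14/v15 decode to structured values. A sketch, with the raw bytes left as a placeholder for the SCALE-encoded runtime metadata (magic number followed by the versioned enum):

import { metadata } from "@polkadot-api/substrate-bindings"

declare const rawMetadata: Uint8Array // placeholder: fetched from a node

const { magicNumber, metadata: meta } = metadata.dec(rawMetadata)
console.log(magicNumber.toString(16)) // "6174656d" – the "meta" magic number

if (meta.tag === "v15") {
  console.log(meta.value.pallets.map((p) => p.name))
  console.log(meta.value.extrinsic.version)
  console.log(meta.value.apis.map((api) => api.name))
}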
@@ -444,2 +513,5 @@ declare const metadata: Codec<{ | ||
tag: "v14"; | ||
value: unknown; | ||
} | { | ||
tag: "v15"; | ||
value: { | ||
@@ -451,3 +523,3 @@ lookup: { | ||
name: string; | ||
type: number | void | undefined; | ||
type: number | undefined; | ||
}[]; | ||
@@ -457,5 +529,5 @@ def: { | ||
value: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -468,5 +540,5 @@ }[]; | ||
fields: { | ||
name: string | void | undefined; | ||
name: string | undefined; | ||
type: number; | ||
typeName: string | void | undefined; | ||
typeName: string | undefined; | ||
docs: string[]; | ||
@@ -546,5 +618,2 @@ }[]; | ||
}; | ||
} | { | ||
tag: "historicMetaCompat"; | ||
value: string; | ||
}; | ||
@@ -555,3 +624,3 @@ docs: string[]; | ||
name: string; | ||
storage: void | { | ||
storage: { | ||
prefix: string; | ||
@@ -593,8 +662,8 @@ items: { | ||
}; | ||
fallback: number[]; | ||
fallback: HexString; | ||
docs: string[]; | ||
}[]; | ||
} | undefined; | ||
calls: number | void | undefined; | ||
events: number | void | undefined; | ||
calls: number | undefined; | ||
events: number | undefined; | ||
constants: { | ||
@@ -606,8 +675,12 @@ name: string; | ||
}[]; | ||
errors: number | void | undefined; | ||
errors: number | undefined; | ||
index: number; | ||
docs: string[]; | ||
}[]; | ||
extrinsic: { | ||
type: number; | ||
version: number; | ||
address: number; | ||
call: number; | ||
signature: number; | ||
extra: number; | ||
signedExtensions: { | ||
@@ -620,2 +693,24 @@ identifier: string; | ||
type: number; | ||
apis: { | ||
name: string; | ||
methods: { | ||
name: string; | ||
inputs: { | ||
name: string; | ||
type: number; | ||
}[]; | ||
output: number; | ||
docs: string[]; | ||
}[]; | ||
docs: string[]; | ||
}[]; | ||
outerEnums: { | ||
call: number; | ||
event: number; | ||
error: number; | ||
}; | ||
custom: [string, { | ||
type: number; | ||
value: HexString; | ||
}][]; | ||
}; | ||
@@ -625,12 +720,2 @@ }; | ||
interface BitSequence { | ||
bitsLen: number; | ||
bytes: Uint8Array; | ||
} | ||
declare const bitSequence: scale_ts.Codec<BitSequence>; | ||
declare const char: scale_ts.Codec<string>; | ||
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>; | ||
declare const Blake2256: (encoded: Uint8Array) => Uint8Array; | ||
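The exported hashers are the building blocks for storage keys; this sketch mirrors what the Storage helper automates, using mergeUint8/toHex from the @polkadot-api/utils dependency (System.BlockHash is keyed by a u32 block number through Twox64Concat):

import { Twox128, Twox64Concat, u32 } from "@polkadot-api/substrate-bindings"
import { mergeUint8, toHex } from "@polkadot-api/utils"

const text = new TextEncoder()

// Prefix = Twox128(pallet name) ++ Twox128(entry name).
const prefix = mergeUint8(
  Twox128(text.encode("System")),
  Twox128(text.encode("BlockHash")),
)
// toHex(prefix) starts with 0x26aa394eea5630e07c48ae0c9558cef7 – Twox128("System")

// Append the key argument passed through the entry's declared hasher.
const key = toHex(mergeUint8(prefix, Twox64Concat(u32.enc(0))))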
@@ -655,74 +740,62 @@ declare const Blake2128: (encoded: Uint8Array) => Uint8Array; | ||
type Tuple<T> = readonly [T, ...T[]]; | ||
interface DescriptorCommon<Pallet extends string, Name extends string> { | ||
checksum: bigint; | ||
pallet: Pallet; | ||
name: Name; | ||
} | ||
interface ArgsWithPayloadCodec<Args extends Array<any>, O> { | ||
len: Args["length"]; | ||
} | ||
interface ArgsWithoutPayloadCodec<Args extends Array<any>> { | ||
len: Args["length"]; | ||
} | ||
interface StorageDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithPayloadCodec<any, any>> { | ||
type: "storage"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface ConstantDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "const"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface EventDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "event"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface ErrorDescriptor<Common extends DescriptorCommon<string, string>, Codecs> { | ||
type: "error"; | ||
props: Common; | ||
codecs: Codecs; | ||
} | ||
interface TxDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>> { | ||
type: "tx"; | ||
props: Common; | ||
codecs: Codecs; | ||
events: Events; | ||
errors: Errors; | ||
} | ||
type Descriptor = ConstantDescriptor<any, any> | EventDescriptor<any, any> | StorageDescriptor<any, any> | ErrorDescriptor<any, any> | TxDescriptor<any, any, any, any>; | ||
declare const createCommonDescriptor: <Pallet extends string, Name extends string>(checksum: bigint, pallet: Pallet, name: Name) => DescriptorCommon<Pallet, Name>; | ||
declare const getDescriptorCreator: <Type extends "const" | "event" | "error", Pallet extends string, Name extends string, Codecs>(type: Type, checksum: bigint, pallet: Pallet, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>; | ||
declare const getPalletCreator: <Pallet extends string>(pallet: Pallet) => { | ||
getPayloadDescriptor: <Type extends "const" | "event" | "error", Name extends string, Codecs>(type: Type, checksum: bigint, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>; | ||
getStorageDescriptor: <Name_1 extends string, Codecs_1 extends ArgsWithPayloadCodec<any[], any>>(checksum: bigint, name: Name_1, codecs: Codecs_1) => StorageDescriptor<DescriptorCommon<Pallet, Name_1>, Codecs_1>; | ||
getTxDescriptor: <Name_2 extends string, Codecs_2 extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>>(checksum: bigint, name: Name_2, events: Events, errors: Errors, codecs: Codecs_2) => TxDescriptor<DescriptorCommon<Pallet, Name_2>, Codecs_2, Events, Errors>; | ||
type PlainDescriptor<T> = string & { | ||
_type?: T; | ||
}; | ||
type EventToObject<E extends EventDescriptor<DescriptorCommon<any, string>, any>> = E extends EventDescriptor<DescriptorCommon<any, infer K>, infer V> ? { | ||
type: K; | ||
value: V; | ||
} : unknown; | ||
type UnionizeTupleEvents<E extends Array<EventDescriptor<any, any>>> = E extends Array<infer Ev> ? Ev extends EventDescriptor<any, any> ? EventToObject<Ev> : unknown : unknown; | ||
type TxDescriptorArgs<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, ArgsWithoutPayloadCodec<infer A>, any, any> ? A : []; | ||
type TxDescriptorEvents<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, infer E, any> ? E : []; | ||
type TxDescriptorErrors<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, any, infer Errors> ? Errors extends Tuple<ErrorDescriptor<any, any>> ? { | ||
[K in keyof Errors]: Errors[K] extends ErrorDescriptor<DescriptorCommon<any, infer Type>, infer Value> ? { | ||
type: Type; | ||
value: Value; | ||
} : unknown; | ||
}[keyof Errors extends number ? keyof Errors : never] : [] : []; | ||
type TxFunction<D extends TxDescriptor<any, any, any, any>> = (...args: TxDescriptorArgs<D>) => Promise<{ | ||
ok: true; | ||
events: Array<UnionizeTupleEvents<TxDescriptorEvents<D>>>; | ||
} | { | ||
ok: false; | ||
error: TxDescriptorErrors<D>; | ||
}>; | ||
type StorageType<T extends StorageDescriptor<any, ArgsWithPayloadCodec<any, any>>> = T extends StorageDescriptor<any, ArgsWithPayloadCodec<infer Args, infer Payload>> ? { | ||
keyArgs: Args; | ||
value: Payload; | ||
} : unknown; | ||
type StorageDescriptor<Args extends Array<any>, T, Optional extends true | false> = string & { | ||
_type: T; | ||
_args: Args; | ||
_optional: Optional; | ||
}; | ||
type TxDescriptor<Args extends {} | undefined> = string & { | ||
___: Args; | ||
}; | ||
type RuntimeDescriptor<Args extends Array<any>, T> = string & { | ||
__: [Args, T]; | ||
}; | ||
type Descriptors = { | ||
pallets: Record<string, [ | ||
Record<string, StorageDescriptor<any, any, any>>, | ||
Record<string, TxDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>>, | ||
Record<string, PlainDescriptor<any>> | ||
]>; | ||
apis: Record<string, Record<string, RuntimeDescriptor<any, any>>>; | ||
asset: PlainDescriptor<any>; | ||
}; | ||
type PickDescriptors<Idx extends 0 | 1 | 2 | 3 | 4, T extends Descriptors["pallets"]> = { | ||
[K in keyof T]: T[K][Idx]; | ||
}; | ||
type ExtractStorage<T extends Record<string, Record<string, StorageDescriptor<any, any, any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends StorageDescriptor<infer Key, infer Value, infer Optional> ? { | ||
KeyArgs: Key; | ||
Value: Value; | ||
IsOptional: Optional; | ||
} : unknown; | ||
}; | ||
}; | ||
type ExtractTx<T extends Record<string, Record<string, TxDescriptor<any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends TxDescriptor<infer Args> ? Args : unknown; | ||
}; | ||
}; | ||
type ExtractPlain<T extends Record<string, Record<string, PlainDescriptor<any>>>> = { | ||
[K in keyof T]: { | ||
[KK in keyof T[K]]: T[K][KK] extends PlainDescriptor<infer Value> ? Value : unknown; | ||
}; | ||
}; | ||
type QueryFromDescriptors<T extends Descriptors> = ExtractStorage<PickDescriptors<0, T["pallets"]>>; | ||
type TxFromDescriptors<T extends Descriptors> = ExtractTx<PickDescriptors<1, T["pallets"]>>; | ||
type EventsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<2, T["pallets"]>>; | ||
type ErrorsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<3, T["pallets"]>>; | ||
type ConstFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<4, T["pallets"]>>; | ||
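The descriptor types above are purely type-level; clients use them to recover key args, payloads and call arguments from generated descriptors. A sketch with a hypothetical single "System" pallet (the 5-tuple holds storage, tx, event, error and constant descriptors, in that order):

import type {
  StorageDescriptor,
  TxDescriptor,
  PlainDescriptor,
  RuntimeDescriptor,
  QueryFromDescriptors,
  TxFromDescriptors,
} from "@polkadot-api/substrate-bindings"

type MyDescriptors = {
  pallets: {
    System: [
      { Account: StorageDescriptor<[address: string], { nonce: number }, false> },
      { remark: TxDescriptor<{ remark: string }> },
      {},
      {},
      { Version: PlainDescriptor<string> },
    ]
  }
  apis: { Core: { version: RuntimeDescriptor<[], string> } }
  asset: PlainDescriptor<undefined>
}

type SystemAccount = QueryFromDescriptors<MyDescriptors>["System"]["Account"]
// -> { KeyArgs: [address: string]; Value: { nonce: number }; IsOptional: false }
type SystemRemarkArgs = TxFromDescriptors<MyDescriptors>["System"]["remark"]
// -> { remark: string }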
export { AccountId, ArgsWithPayloadCodec, ArgsWithoutPayloadCodec, BitSequence, Blake2128, Blake2128Concat, Blake2256, ConstantDescriptor, Descriptor, DescriptorCommon, EncoderWithHash, ErrorDescriptor, EventDescriptor, EventToObject, Hex, HexString, Identity, OpaqueCodec, OpaqueValue, SS58String, Self, Storage, StorageDescriptor, StorageType, Twox128, Twox256, Twox64Concat, TxDescriptor, TxDescriptorArgs, TxDescriptorErrors, TxDescriptorEvents, TxFunction, UnionizeTupleEvents, V14, V14Extrinsic, V14Lookup, V14Pallets, bitSequence, char, compactBn, compactNumber, createCommonDescriptor, fixedStr, getDescriptorCreator, getPalletCreator, h64, metadata, selfDecoder, selfEncoder, v14 }; | ||
type GetEnum<T extends Enum<{ | ||
type: string; | ||
value: any; | ||
}>> = { | ||
[K in T["type"]]: (...args: ExtractEnumValue<T, K> extends undefined ? [] : [value: ExtractEnumValue<T, K>]) => T; | ||
}; | ||
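GetEnum pairs with the _Enum proxy to give an enum type a value-side constructor, which is how descriptor codegen typically uses it. A sketch with a hypothetical Command enum:

import { _Enum, type Enum, type GetEnum } from "@polkadot-api/substrate-bindings"

type Command = Enum<
  { type: "start"; value: undefined } | { type: "setSpeed"; value: number }
>
const Command = _Enum as unknown as GetEnum<Command>

const a = Command.start()      // { type: "start", value: undefined, is, as }
const b = Command.setSpeed(42) // { type: "setSpeed", value: 42, is, as }
b.is("setSpeed")               // true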
export { AccountId, Bin, Binary, type BitSequence, Blake2128, Blake2128Concat, Blake2256, type BlockHeader, type ConstFromDescriptors, type Descriptors, type Discriminant, type EncoderWithHash, Enum, type ErrorsFromDescriptors, type EventsFromDescriptors, type ExtractEnumValue, type GetEnum, Hex, type HexString, Identity, type PlainDescriptor, type QueryFromDescriptors, type RuntimeDescriptor, type SS58AddressInfo, type SS58String, Self, Storage, type StorageDescriptor, Twox128, Twox256, Twox64Concat, type TxDescriptor, type TxFromDescriptors, type V14Lookup, type V15, type V15Extrinsic, Variant, _Enum, bitSequence, blockHeader, char, compactBn, compactNumber, fixedStr, fromBufferToBase58, getSs58AddressInfo, h64, metadata, selfDecoder, selfEncoder, v15 }; |
@@ -1,2 +0,2 @@ | ||
"use strict";var J=Object.defineProperty;var Be=Object.getOwnPropertyDescriptor;var Ve=Object.getOwnPropertyNames;var _e=Object.prototype.hasOwnProperty;var ae=(e,r)=>{for(var t in r)J(e,t,{get:r[t],enumerable:!0})},X=(e,r,t,i)=>{if(r&&typeof r=="object"||typeof r=="function")for(let a of Ve(r))!_e.call(e,a)&&a!==t&&J(e,a,{get:()=>r[a],enumerable:!(i=Be(r,a))||i.enumerable});return e},m=(e,r,t)=>(X(e,r,"default"),t&&X(t,r,"default"));var He=e=>X(J({},"__esModule",{value:!0}),e);var T={};ae(T,{AccountId:()=>de,Blake2128:()=>we,Blake2128Concat:()=>oe,Blake2256:()=>nr,Hex:()=>V,Identity:()=>ne,OpaqueCodec:()=>O,Self:()=>me,Storage:()=>ar,Twox128:()=>G,Twox256:()=>cr,Twox64Concat:()=>ce,bitSequence:()=>be,char:()=>he,compactBn:()=>pe,compactNumber:()=>d,createCommonDescriptor:()=>ir,fixedStr:()=>ve,getDescriptorCreator:()=>dr,getPalletCreator:()=>pr,h64:()=>k,metadata:()=>Te,selfDecoder:()=>re,selfEncoder:()=>ee,v14:()=>$});module.exports=He(T);m(T,require("scale-ts"),module.exports);var l={};ae(l,{AccountId:()=>de,Hex:()=>V,OpaqueCodec:()=>O,Self:()=>me,bitSequence:()=>be,char:()=>he,compactBn:()=>pe,compactNumber:()=>d,fixedStr:()=>ve,metadata:()=>Te,selfDecoder:()=>re,selfEncoder:()=>ee,v14:()=>$});m(l,require("scale-ts"));var K=require("scale-ts"),Y=require("@noble/hashes/blake2b"),Z=require("@scure/base"),ie=new TextEncoder().encode("SS58PRE"),Q=2,Ie=e=>{let r=e<64?Uint8Array.of(e):Uint8Array.of((e&252)>>2|64,e>>8|(e&3)<<6);return t=>{let i=(0,Y.blake2b)(Uint8Array.of(...ie,...r,...t),{dkLen:64}).subarray(0,Q);return Z.base58.encode(Uint8Array.of(...r,...t,...i))}};function Le(e,r){return t=>{let i=Z.base58.decode(t),a=i.subarray(0,i[0]&64?2:1),y=i.subarray(a.length,i.length-Q);if(y.length!==e)throw new Error("Invalid public key length");let x=i.subarray(a.length+y.length),f=(0,Y.blake2b)(Uint8Array.of(...ie,...a,...y),{dkLen:64}).subarray(0,Q);if(x[0]!==f[0]||x[1]!==f[1])throw new Error("Invalid checksum");if(We(a)!=r)throw new Error("Invalid SS58 prefix");return y.slice()}}var de=(e=42,r=32)=>(0,K.enhanceCodec)((0,K.Bytes)(r),Le(r,e),Ie(e)),We=e=>{let r=new DataView(e.buffer,e.byteOffset,e.byteLength);return r.byteLength===1?r.getUint8(0):r.getUint16(0)};var F=require("scale-ts"),d=F.compact,pe=F.compact;var L=require("scale-ts"),ee=e=>{let r=t=>{let i=L.Struct.enc({self:e()});return r=i,i(t)};return t=>r(t)},re=e=>{let r=t=>{let i=L.Struct.dec({self:e()}),a=i;return r=i,a(t)};return t=>r(t)},me=e=>(0,L.createCodec)(ee(()=>e().enc),re(()=>e().dec));var B=require("scale-ts");var ye=(e,r=d.dec)=>(0,B.createDecoder)(t=>{let i=r(t),a=(0,B.Bytes)(i).dec(t),y;return{length:i,inner:()=>y=y||e(a)}}),le=(e,r=d.enc)=>t=>{let i=r(t.length),a=new Uint8Array(i.length+t.length);return a.set(i,0),a.set(e(t.inner()),i.length),a},O=(e,r=d)=>(0,B.createCodec)(le(e.enc,r.enc),ye(e.dec,r.dec));O.enc=le;O.dec=ye;var S=require("scale-ts");var 
n=require("scale-ts"),xe=(0,n.Option)(n.str),M=(0,n.Vector)(n.str),qe=(0,n.Enum)({bool:n._void,char:n._void,str:n._void,u8:n._void,u16:n._void,u32:n._void,u64:n._void,u128:n._void,u256:n._void,i8:n._void,i16:n._void,i32:n._void,i64:n._void,i128:n._void,i256:n._void}),ge=(0,n.Vector)((0,n.Struct)({name:xe,type:d,typeName:xe,docs:M})),Ke=(0,n.Struct)({len:n.u32,type:d}),Oe=(0,n.Struct)({bitStoreType:d,bitOrderType:d}),Me=(0,n.Vector)((0,n.Struct)({name:n.str,fields:ge,index:n.u8,docs:M})),Re=(0,n.Enum)({composite:ge,variant:Me,sequence:d,array:Ke,tuple:(0,n.Vector)(d),primitive:qe,compact:d,bitSequence:Oe,historicMetaCompat:n.str}),$e=(0,n.Struct)({name:n.str,type:(0,n.Option)(d)}),ze=(0,n.Vector)($e),je=(0,n.Struct)({id:d,path:M,params:ze,def:Re,docs:M}),fe=(0,n.Vector)(je);var R=require("@polkadot-api/utils"),W=require("scale-ts"),ue=e=>{let r=W.Bytes.enc(e);return t=>r((0,R.fromHex)(t))},Ce=e=>{let r=W.Bytes.dec(e);return t=>(0,R.toHex)(r(t))},V=e=>(0,W.createCodec)(ue(e),Ce(e));V.enc=ue;V.dec=Ce;var c=require("scale-ts"),Ge=(0,c.Enum)({Blake2128:c._void,Blake2256:c._void,Blake2128Concat:c._void,Twox128:c._void,Twox256:c._void,Twox64Concat:c._void,Identity:c._void}),Xe=(0,c.Vector)(Ge),Je=(0,c.Struct)({hashers:Xe,key:d,value:d}),Qe=(0,c.Struct)({name:c.str,modifier:c.u8,type:(0,c.Enum)({plain:d,map:Je}),fallback:(0,c.Vector)(c.u8),docs:(0,c.Vector)(c.str)}),Ye=(0,c.Option)((0,c.Struct)({prefix:c.str,items:(0,c.Vector)(Qe)})),De=(0,c.Vector)((0,c.Struct)({name:c.str,storage:Ye,calls:(0,c.Option)(d),events:(0,c.Option)(d),constants:(0,c.Vector)((0,c.Struct)({name:c.str,type:d,value:V(),docs:(0,c.Vector)(c.str)})),errors:(0,c.Option)(d),index:c.u8}));var Ze=(0,S.Struct)({type:d,version:S.u8,signedExtensions:(0,S.Vector)((0,S.Struct)({identifier:S.str,type:d,additionalSigned:d}))}),$=(0,S.Struct)({lookup:fe,pallets:De,extrinsic:Ze,type:d});var P=require("scale-ts");var Ee=()=>{throw new Error("Unsupported metadata version!")},h=(0,P.createCodec)(Ee,Ee),Te=(0,P.Struct)({magicNumber:P.u32,metadata:(0,P.Enum)({v0:h,v1:h,v2:h,v3:h,v4:h,v5:h,v6:h,v7:h,v8:h,v9:h,v10:h,v11:h,v12:h,v13:h,v14:$})});var _=require("scale-ts");var Fe=(0,_.createDecoder)(e=>{let r=d.dec(e),t=Math.ceil(r/8);return{bytes:(0,_.Bytes)(t).dec(e),bitsLen:r}}),er=e=>{if(e.bitsLen>e.bytes.length*8)throw new Error(`Not enough bytes. 
(bitsLen:${e.bitsLen}, bytesLen:${e.bytes.length})`);let r=d.enc(e.bitsLen),t=new Uint8Array(e.bytes.length+r.length);return t.set(r,0),t.set(e.bytes,r.length),t},be=(0,_.createCodec)(er,Fe);var z=require("scale-ts"),he=(0,z.enhanceCodec)(z.u8,e=>e.charCodeAt(0),String.fromCharCode);var j=require("scale-ts"),rr=new TextEncoder,tr=new TextDecoder,ve=e=>(0,j.enhanceCodec)((0,j.Bytes)(e),r=>rr.encode(r),r=>tr.decode(r));m(T,l,module.exports);var Ae=require("@polkadot-api/utils"),te=require("@noble/hashes/blake2b"),or={dkLen:32},nr=e=>(0,te.blake2b)(e,or),sr={dkLen:16},we=e=>(0,te.blake2b)(e,sr),oe=e=>(0,Ae.mergeUint8)(we(e),e);var ne=e=>e;var Ue=require("@polkadot-api/utils"),Pe=require("scale-ts");var N=(e,r,t,i)=>new DataView(new Uint16Array([e,r,t,i]).buffer).getBigUint64(0,!0),se=2n**64n-1n,E=(e,r)=>e<<r&se|e>>64n-r,p=(e,r)=>e*r&se,C=(e,r)=>e+r&se,D=11400714785074694791n,v=14029467366897019727n,Se=1609587929392839161n,q=9650029242287828579n,ke=2870177450012600261n;function k(e,r=0n){let t=C(C(r,D),v),i=C(r,v),a=r,y=r-D,x=e.length,f=0,U=null;(function(){let o=0,A=o+x;if(x){if(U=new Uint8Array(32),x<32){U.set(e.subarray(0,x),f),f+=x;return}if(o<=A-32){let I=A-32;do{let w;w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),t=p(E(C(t,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),i=p(E(C(i,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),a=p(E(C(a,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),y=p(E(C(y,p(w,v)),31n),D),o+=8}while(o<=I)}o<A&&(U.set(e.subarray(o,A),f),f=A-o)}})(),e=U||e;let s,g=0;for(x>=32?(s=E(t,1n),s=C(s,E(i,7n)),s=C(s,E(a,12n)),s=C(s,E(y,18n)),t=p(E(p(t,v),31n),D),s=s^t,s=C(p(s,D),q),i=p(E(p(i,v),31n),D),s=s^i,s=C(p(s,D),q),a=p(E(p(a,v),31n),D),s=s^a,s=C(p(s,D),q),y=p(E(p(y,v),31n),D),s=s^y,s=C(p(s,D),q)):s=C(r,ke),s=C(s,BigInt(x));g<=f-8;){let b=N(e[g+1]<<8|e[g],e[g+3]<<8|e[g+2],e[g+5]<<8|e[g+4],e[g+7]<<8|e[g+6]);b=p(E(p(b,v),31n),D),s=C(p(E(s^b,27n),D),q),g+=8}if(g+4<=f){let b=p(N(e[g+1]<<8|e[g],e[g+3]<<8|e[g+2],0,0),D);s=C(p(E(s^b,23n),v),Se),g+=4}for(;g<f;){let b=p(N(e[g++],0,0,0),ke);s=p(E(s^b,11n),D)}let u=s>>33n;return s=p(s^u,v),u=s>>29n,s=p(s^u,Se),u=s>>32n,s^=u,s}var G=e=>{let r=new Uint8Array(16),t=new DataView(r.buffer);return t.setBigUint64(0,k(e),!0),t.setBigUint64(8,k(e,1n),!0),r},cr=e=>{let r=new Uint8Array(32),t=new DataView(r.buffer);return t.setBigUint64(0,k(e),!0),t.setBigUint64(8,k(e,1n),!0),t.setBigUint64(16,k(e,2n),!0),t.setBigUint64(24,k(e,3n),!0),r},ce=e=>(0,Ue.mergeUint8)(Pe.u64.enc(k(e)),e);var H=require("@polkadot-api/utils");var Ne=new TextEncoder,ar=e=>{let r=G(Ne.encode(e));return(t,i,...a)=>{let y=(0,H.mergeUint8)(r,G(Ne.encode(t))),x=(0,H.toHex)(y),f=a.map(u=>u[1]).map(u=>u===ne?0:u===ce?8:u===oe?16:null).filter(Boolean),U=u=>{if(!u.startsWith(x))throw new Error(`key does not match this storage (${e}.${t})`);if(f.length!==a.length)throw new Error("Impossible to decode this key");if(a.length===0)return[];let b=u.slice(x.length),o=new Array(a.length);for(let A=0,I=0;A<f.length;A++){let w=a[A][0];I+=f[A],o[A]=w.dec(b.slice(I*2)),I+=w.enc(o[A]).length}return o},s=a.map(([{enc:u},b])=>o=>b(u(o)));return{enc:(...u)=>(0,H.toHex)((0,H.mergeUint8)(y,...u.map((b,o)=>s[o](b)))),dec:i,keyDecoder:U}}};var 
ir=(e,r,t)=>({checksum:e,pallet:r,name:t}),dr=(e,r,t,i,a)=>({type:e,props:{checksum:r,pallet:t,name:i},codecs:a}),pr=e=>({getPayloadDescriptor:(a,y,x,f)=>({type:a,props:{checksum:y,pallet:e,name:x},codecs:f}),getStorageDescriptor:(a,y,x)=>({type:"storage",props:{checksum:a,pallet:e,name:y},codecs:x}),getTxDescriptor:(a,y,x,f,U)=>({type:"tx",props:{checksum:a,pallet:e,name:y},codecs:U,events:x,errors:f})}); | ||
"use strict";var $=Object.defineProperty;var je=Object.getOwnPropertyDescriptor;var ze=Object.getOwnPropertyNames;var Fe=Object.prototype.hasOwnProperty;var Ge=(e,t,n)=>t in e?$(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n;var Xe=(e,t)=>{for(var n in t)$(e,n,{get:t[n],enumerable:!0})},Je=(e,t,n,d)=>{if(t&&typeof t=="object"||typeof t=="function")for(let y of ze(t))!Fe.call(e,y)&&y!==n&&$(e,y,{get:()=>t[y],enumerable:!(d=je(t,y))||d.enumerable});return e};var Qe=e=>Je($({},"__esModule",{value:!0}),e);var j=(e,t,n)=>(Ge(e,typeof t!="symbol"?t+"":t,n),n),be=(e,t,n)=>{if(!t.has(e))throw TypeError("Cannot "+n)};var A=(e,t,n)=>(be(e,t,"read from private field"),n?n.call(e):t.get(e)),z=(e,t,n)=>{if(t.has(e))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(e):t.set(e,n)},F=(e,t,n,d)=>(be(e,t,"write to private field"),d?d.call(e,n):t.set(e,n),n);var _t={};Xe(_t,{AccountId:()=>et,Bin:()=>ie,Binary:()=>X,Blake2128:()=>Ne,Blake2128Concat:()=>fe,Blake2256:()=>Ot,Bytes:()=>r.Bytes,Enum:()=>ye,Hex:()=>C,Identity:()=>xe,Option:()=>r.Option,Result:()=>r.Result,Self:()=>mt,Storage:()=>Lt,Struct:()=>r.Struct,Tuple:()=>r.Tuple,Twox128:()=>te,Twox256:()=>Rt,Twox64Concat:()=>Te,Variant:()=>q,Vector:()=>r.Vector,_Enum:()=>ut,_void:()=>r._void,bitSequence:()=>st,blockHeader:()=>ft,bool:()=>r.bool,char:()=>it,compact:()=>r.compact,compactBn:()=>rt,compactNumber:()=>a,createCodec:()=>r.createCodec,createDecoder:()=>r.createDecoder,enhanceCodec:()=>r.enhanceCodec,enhanceDecoder:()=>r.enhanceDecoder,enhanceEncoder:()=>r.enhanceEncoder,fixedStr:()=>yt,fromBufferToBase58:()=>se,getSs58AddressInfo:()=>ce,h64:()=>U,i128:()=>r.i128,i16:()=>r.i16,i256:()=>r.i256,i32:()=>r.i32,i64:()=>r.i64,i8:()=>r.i8,metadata:()=>Bt,selfDecoder:()=>Ue,selfEncoder:()=>Ce,str:()=>r.str,u128:()=>r.u128,u16:()=>r.u16,u256:()=>r.u256,u32:()=>r.u32,u64:()=>r.u64,u8:()=>r.u8,v15:()=>le});module.exports=Qe(_t);var G=require("scale-ts");var re=require("@scure/base"),oe=require("@noble/hashes/blake2b"),he=new TextEncoder().encode("SS58PRE"),ne=2,ce=e=>{try{let t=re.base58.decode(e),n=t.subarray(0,t[0]&64?2:1),d=t.subarray(n.length,t.length-ne),y=t.subarray(n.length+d.length),E=(0,oe.blake2b)(Uint8Array.of(...he,...n,...d),{dkLen:64}).subarray(0,ne);return y[0]===E[0]&&y[1]===E[1]?{isValid:!0,ss58Format:Ye(n),publicKey:d.slice()}:{isValid:!1}}catch{return{isValid:!1}}},Ye=e=>{let t=new DataView(e.buffer,e.byteOffset,e.byteLength);return t.byteLength===1?t.getUint8(0):t.getUint16(0)},se=e=>{let t=e<64?Uint8Array.of(e):Uint8Array.of((e&252)>>2|64,e>>8|(e&3)<<6);return n=>{let d=(0,oe.blake2b)(Uint8Array.of(...he,...t,...n),{dkLen:64}).subarray(0,ne);return re.base58.encode(Uint8Array.of(...t,...n,...d))}};function Ze(e,t){return n=>{let d=ce(n);if(!d.isValid)throw new Error("Invalid checksum");let{publicKey:y}=d;if(y.length!==e)throw new Error("Invalid public key length");return y}}var et=(e=42,t=32)=>(0,G.enhanceCodec)((0,G.Bytes)(t),Ze(t,e),se(e));var N=require("scale-ts"),J=require("@polkadot-api/utils");var tt=new TextEncoder,nt=new TextDecoder,V,O,H,I=class I{constructor(t){z(this,V,void 0);z(this,O,null);z(this,H,null);j(this,"asText",()=>A(this,H)===null?F(this,H,nt.decode(A(this,V))):A(this,H));j(this,"asHex",()=>A(this,O)===null?F(this,O,(0,J.toHex)(A(this,V))):A(this,O));j(this,"asBytes",()=>A(this,V));F(this,V,t)}static fromText(t){return new I(tt.encode(t))}static fromHex(t){return new I((0,J.fromHex)(t))}static fromBytes(t){return new I(t)}};V=new WeakMap,O=new 
WeakMap,H=new WeakMap;var X=I,Ee=e=>{let t=N.Bytes.enc(e);return n=>t(n.asBytes())},ve=e=>{let t=N.Bytes.dec(e);return n=>X.fromBytes(t(n))},ie=e=>(0,N.createCodec)(Ee(e),ve(e));ie.enc=Ee;ie.dec=ve;var R=require("scale-ts");var ae=require("scale-ts"),a=ae.compact,rt=ae.compact;var ot=(0,R.createDecoder)(e=>{let t=a.dec(e),n=Math.ceil(t/8);return{bytes:(0,R.Bytes)(n).dec(e),bitsLen:t}}),ct=e=>{if(e.bitsLen>e.bytes.length*8)throw new Error(`Not enough bytes. (bitsLen:${e.bitsLen}, bytesLen:${e.bytes.length})`);let t=a.enc(e.bitsLen),n=new Uint8Array(e.bytes.length+t.length);return n.set(t,0),n.set(e.bytes,t.length),n},st=(0,R.createCodec)(ct,ot);var Q=require("scale-ts"),it=(0,Q.enhanceCodec)(Q.u8,e=>e.charCodeAt(0),String.fromCharCode);var Y=require("@polkadot-api/utils"),P=require("scale-ts"),Se=e=>{let t=P.Bytes.enc(e);return n=>t((0,Y.fromHex)(n))},ke=e=>{let t=P.Bytes.dec(e);return n=>(0,Y.toHex)(t(n))},C=e=>(0,P.createCodec)(Se(e),ke(e));C.enc=Se;C.dec=ke;var Z=require("scale-ts"),at=new TextEncoder,dt=new TextDecoder,yt=e=>(0,Z.enhanceCodec)((0,Z.Bytes)(e),t=>at.encode(t),t=>dt.decode(t));var r=require("scale-ts");var we=require("scale-ts"),Ce=e=>{let t=n=>{let d=e();return t=d,d(n)};return n=>t(n)},Ue=e=>{let t=n=>{let d=e(),y=d;return t=d,y(n)};return n=>t(n)},mt=e=>(0,we.createCodec)(Ce(()=>e().enc),Ue(()=>e().dec));var M=require("scale-ts"),de=require("@polkadot-api/utils"),ut=new Proxy({},{get(e,t){return n=>ye(t,n)}}),ye=(e,t)=>({as:n=>{if(n!==e)throw new Error(`Enum.as(${n}) used with actual type ${e}`);return t},is:n=>n===e,type:e,value:t}),Ae=(...e)=>{let t=M.Enum.enc(...e);return n=>t({tag:n.type,value:n.value})},Ke=(...e)=>{let t=M.Enum.dec(...e);return n=>{let{tag:d,value:y}=t(n);return ye(d,y)}},q=(e,...t)=>(0,M.createCodec)(Ae((0,de.mapObject)(e,([n])=>n),...t),Ke((0,de.mapObject)(e,([,n])=>n),...t));q.enc=Ae;q.dec=Ke;var Ve=new TextEncoder,Be=new TextDecoder,lt=(0,r.enhanceCodec)((0,r.Bytes)(4),Ve.encode.bind(Ve),Be.decode.bind(Be)),me=(0,r.Struct)({engine:lt,payload:C()}),pt=q({consensus:me,seal:me,preRuntime:me,runtimeUpdated:r._void},[4,5,6,8]),ue=C(32),ft=(0,r.Struct)({parentHash:ue,number:a,stateRoot:ue,extrinsicRoot:ue,digests:(0,r.Vector)(pt)});var u=require("scale-ts");var c=require("scale-ts"),De=(0,c.Option)(c.str),ee=(0,c.Vector)(c.str),xt=(0,c.Enum)({bool:c._void,char:c._void,str:c._void,u8:c._void,u16:c._void,u32:c._void,u64:c._void,u128:c._void,u256:c._void,i8:c._void,i16:c._void,i32:c._void,i64:c._void,i128:c._void,i256:c._void}),Oe=(0,c.Vector)((0,c.Struct)({name:De,type:a,typeName:De,docs:ee})),gt=(0,c.Struct)({len:c.u32,type:a}),Tt=(0,c.Struct)({bitStoreType:a,bitOrderType:a}),bt=(0,c.Vector)((0,c.Struct)({name:c.str,fields:Oe,index:c.u8,docs:ee})),ht=(0,c.Enum)({composite:Oe,variant:bt,sequence:a,array:gt,tuple:(0,c.Vector)(a),primitive:xt,compact:a,bitSequence:Tt}),Et=(0,c.Struct)({name:c.str,type:(0,c.Option)(a)}),vt=(0,c.Vector)(Et),St=(0,c.Struct)({id:a,path:ee,params:vt,def:ht,docs:ee}),He=(0,c.Vector)(St);var 
i=require("scale-ts"),kt=(0,i.Enum)({Blake2128:i._void,Blake2256:i._void,Blake2128Concat:i._void,Twox128:i._void,Twox256:i._void,Twox64Concat:i._void,Identity:i._void}),wt=(0,i.Vector)(kt),Ct=(0,i.Struct)({hashers:wt,key:a,value:a}),Ut=(0,i.Struct)({name:i.str,modifier:i.u8,type:(0,i.Enum)({plain:a,map:Ct}),fallback:C(),docs:(0,i.Vector)(i.str)}),At=(0,i.Option)((0,i.Struct)({prefix:i.str,items:(0,i.Vector)(Ut)})),Re=(0,i.Vector)((0,i.Struct)({name:i.str,storage:At,calls:(0,i.Option)(a),events:(0,i.Option)(a),constants:(0,i.Vector)((0,i.Struct)({name:i.str,type:a,value:C(),docs:(0,i.Vector)(i.str)})),errors:(0,i.Option)(a),index:i.u8,docs:(0,i.Vector)(i.str)}));var Le=(0,u.Vector)(u.str),Kt=(0,u.Struct)({name:u.str,methods:(0,u.Vector)((0,u.Struct)({name:u.str,inputs:(0,u.Vector)((0,u.Struct)({name:u.str,type:a})),output:a,docs:Le})),docs:Le}),Vt=(0,u.Struct)({version:u.u8,address:a,call:a,signature:a,extra:a,signedExtensions:(0,u.Vector)((0,u.Struct)({identifier:u.str,type:a,additionalSigned:a}))}),le=(0,u.Struct)({lookup:He,pallets:Re,extrinsic:Vt,type:a,apis:(0,u.Vector)(Kt),outerEnums:(0,u.Struct)({call:a,event:a,error:a}),custom:(0,u.Vector)((0,u.Tuple)(u.str,(0,u.Struct)({type:a,value:C()})))});var K=require("scale-ts");var _e=()=>{throw new Error("Unsupported metadata version!")},T=(0,K.createCodec)(_e,_e),Bt=(0,K.Struct)({magicNumber:K.u32,metadata:(0,K.Enum)({v0:T,v1:T,v2:T,v3:T,v4:T,v5:T,v6:T,v7:T,v8:T,v9:T,v10:T,v11:T,v12:T,v13:T,v14:T,v15:le})});var Ie=require("@polkadot-api/utils"),pe=require("@noble/hashes/blake2b"),Dt={dkLen:32},Ot=e=>(0,pe.blake2b)(e,Dt),Ht={dkLen:16},Ne=e=>(0,pe.blake2b)(e,Ht),fe=e=>(0,Ie.mergeUint8)(Ne(e),e);var xe=e=>e;var qe=require("@polkadot-api/utils"),We=require("scale-ts");var B=(e,t,n,d)=>new DataView(new Uint16Array([e,t,n,d]).buffer).getBigUint64(0,!0),ge=2n**64n-1n,g=(e,t)=>e<<t&ge|e>>64n-t,m=(e,t)=>e*t&ge,f=(e,t)=>e+t&ge,x=11400714785074694791n,h=14029467366897019727n,Pe=1609587929392839161n,W=9650029242287828579n,Me=2870177450012600261n;function U(e,t=0n){let n=f(f(t,x),h),d=f(t,h),y=t,E=t-x,S=e.length,k=0,D=null;(function(){let o=0,v=o+S;if(S){if(D=new Uint8Array(32),S<32){D.set(e.subarray(0,S),k),k+=S;return}if(o<=v-32){let _=v-32;do{let w;w=B(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),n=m(g(f(n,m(w,h)),31n),x),o+=8,w=B(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),d=m(g(f(d,m(w,h)),31n),x),o+=8,w=B(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),y=m(g(f(y,m(w,h)),31n),x),o+=8,w=B(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),E=m(g(f(E,m(w,h)),31n),x),o+=8}while(o<=_)}o<v&&(D.set(e.subarray(o,v),k),k=v-o)}})(),e=D||e;let s,l=0;for(S>=32?(s=g(n,1n),s=f(s,g(d,7n)),s=f(s,g(y,12n)),s=f(s,g(E,18n)),n=m(g(m(n,h),31n),x),s=s^n,s=f(m(s,x),W),d=m(g(m(d,h),31n),x),s=s^d,s=f(m(s,x),W),y=m(g(m(y,h),31n),x),s=s^y,s=f(m(s,x),W),E=m(g(m(E,h),31n),x),s=s^E,s=f(m(s,x),W)):s=f(t,Me),s=f(s,BigInt(S));l<=k-8;){let b=B(e[l+1]<<8|e[l],e[l+3]<<8|e[l+2],e[l+5]<<8|e[l+4],e[l+7]<<8|e[l+6]);b=m(g(m(b,h),31n),x),s=f(m(g(s^b,27n),x),W),l+=8}if(l+4<=k){let b=m(B(e[l+1]<<8|e[l],e[l+3]<<8|e[l+2],0,0),x);s=f(m(g(s^b,23n),h),Pe),l+=4}for(;l<k;){let b=m(B(e[l++],0,0,0),Me);s=m(g(s^b,11n),x)}let p=s>>33n;return s=m(s^p,h),p=s>>29n,s=m(s^p,Pe),p=s>>32n,s^=p,s}var te=e=>{let t=new Uint8Array(16),n=new DataView(t.buffer);return n.setBigUint64(0,U(e),!0),n.setBigUint64(8,U(e,1n),!0),t},Rt=e=>{let t=new Uint8Array(32),n=new DataView(t.buffer);return 
n.setBigUint64(0,U(e),!0),n.setBigUint64(8,U(e,1n),!0),n.setBigUint64(16,U(e,2n),!0),n.setBigUint64(24,U(e,3n),!0),t},Te=e=>(0,qe.mergeUint8)(We.u64.enc(U(e)),e);var L=require("@polkadot-api/utils");var $e=new TextEncoder,Lt=e=>{let t=te($e.encode(e));return(n,d,...y)=>{let E=(0,L.mergeUint8)(t,te($e.encode(n))),S=(0,L.toHex)(E),k=y.map(p=>p[1]).map(p=>p===xe?0:p===Te?8:p===fe?16:null).filter(Boolean),D=p=>{if(!p.startsWith(S))throw new Error(`key does not match this storage (${e}.${n})`);if(k.length!==y.length)throw new Error("Impossible to decode this key");if(y.length===0)return[];let b=p.slice(S.length),o=new Array(y.length);for(let v=0,_=0;v<k.length;v++){let w=y[v][0];_+=k[v],o[v]=w.dec(b.slice(_*2)),_+=w.enc(o[v]).length}return o},s=y.map(([{enc:p},b])=>o=>b(p(o)));return{enc:(...p)=>(0,L.toHex)((0,L.mergeUint8)(E,...p.map((b,o)=>s[o](b)))),dec:d,keyDecoder:D}}}; | ||
//# sourceMappingURL=index.js.map |
{ | ||
"name": "@polkadot-api/substrate-bindings", | ||
"version": "0.0.1-1deba41937c544dfa5d4feb703e5a6a2096af097.1.0", | ||
"version": "0.0.1-1e1c7a915d627138d0404bb9257993297c03fea3.1.0", | ||
"author": "Josep M Sobrepere (https://github.com/josepot)", | ||
@@ -45,4 +45,4 @@ "repository": { | ||
"@scure/base": "^1.1.1", | ||
"scale-ts": "^1.4.0", | ||
"@polkadot-api/utils": "0.0.1-1deba41937c544dfa5d4feb703e5a6a2096af097.1.0" | ||
"scale-ts": "^1.6.0", | ||
"@polkadot-api/utils": "0.0.1-1e1c7a915d627138d0404bb9257993297c03fea3.1.0" | ||
}, | ||
@@ -49,0 +49,0 @@ "devDependencies": { |