Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

@polkadot-api/substrate-bindings

Package Overview
Dependencies
Maintainers
2
Versions
604
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@polkadot-api/substrate-bindings - npm Package Compare versions

Comparing version 0.0.1-8167145a6504fc77f310715b5208be71992108b5.1.0 to 0.0.1-81aa776246f63e7c9d431e89d66d4a7f8651a0ae.1.0

474

dist/index.d.ts
import * as scale_ts from 'scale-ts';
import { Codec, Encoder, Decoder, CodecType } from 'scale-ts';
export * from 'scale-ts';
import { Codec, Encoder, Decoder, StringRecord, CodecType, EncoderType, DecoderType } from 'scale-ts';
export { Bytes, Codec, CodecType, Decoder, DecoderType, Encoder, EncoderType, Option, Result, ResultPayload, StringRecord, Struct, Tuple, Vector, _void, bool, compact, createCodec, createDecoder, enhanceCodec, enhanceDecoder, enhanceEncoder, i128, i16, i256, i32, i64, i8, str, u128, u16, u256, u32, u64, u8 } from 'scale-ts';
import * as scale_ts_dist_types from 'scale-ts/dist/types';
type SS58String = string & {
__SS58String: unknown;
__SS58String?: unknown;
};
declare const AccountId: (ss58Format?: number, nBytes?: 32 | 33) => scale_ts.Codec<SS58String>;
type HexString = string & {
__hexString?: unknown;
};
declare const Hex: {
(nBytes?: number): Codec<HexString>;
enc: (nBytes?: number) => Encoder<HexString>;
dec: (nBytes?: number) => Decoder<HexString>;
};
declare class Binary {
#private;
constructor(data: Uint8Array);
asText: () => string;
asHex: () => string;
asBytes: () => Uint8Array;
static fromText(input: string): Binary;
static fromHex(input: HexString): Binary;
static fromBytes(input: Uint8Array): Binary;
}
declare const Bin: {
(nBytes?: number): Codec<Binary>;
enc: (nBytes?: number) => Encoder<Binary>;
dec: (nBytes?: number) => Decoder<Binary>;
};
interface BitSequence {
bitsLen: number;
bytes: Uint8Array;
}
declare const bitSequence: scale_ts.Codec<BitSequence>;
declare const char: scale_ts.Codec<string>;
declare const compactNumber: Codec<number>;
declare const compactBn: Codec<bigint>;
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<{
self: T;
}>;
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<{
self: T;
}>;
declare const Self: <T>(value: () => Codec<T>) => Codec<{
self: T;
}>;
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>;
type OpaqueValue<T> = {
length: number;
inner: () => T;
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<T>;
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<T>;
declare const Self: <T>(value: () => Codec<T>) => Codec<T>;
type Tuple<T, N extends number> = readonly [T, ...T[]] & {
length: N;
};
declare const OpaqueCodec: {
<T>(inner: Codec<T>, len?: Codec<number>): Codec<OpaqueValue<T>>;
enc: <T_1>(inner: Encoder<T_1>, len?: Encoder<number>) => Encoder<OpaqueValue<T_1>>;
dec: <T_2>(inner: Decoder<T_2>, len?: Decoder<number>) => Decoder<OpaqueValue<T_2>>;
type Push<T extends any[], V> = [...T, V];
type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R ? R : never;
type TuplifyUnion<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = true extends N ? [] : Push<TuplifyUnion<Exclude<T, L>>, L>;
type RestrictedLenTuple<T, O extends StringRecord<any>> = Tuple<T, TuplifyUnion<keyof O> extends Tuple<any, infer V> ? V : 0>;
type ExtractEnumValue<T extends {
type: string;
value?: any;
}, K extends string> = T extends {
type: K;
value: infer R;
} ? R : never;
interface Discriminant<T extends {
type: string;
value?: any;
}> {
is<K extends T["type"]>(this: Enum<T>, type: K): this is Enum<{
type: K;
value: ExtractEnumValue<T, K>;
}>;
as<K extends T["type"]>(type: K): ExtractEnumValue<T, K>;
}
declare const _Enum: {};
type Enum<T extends {
type: string;
value?: any;
}> = T & Discriminant<T>;
declare const Enum: <T extends {
type: string;
value?: any;
}, Key extends T["type"]>(type: Key, ...args: ExtractEnumValue<T, Key> extends undefined ? [] : [value: ExtractEnumValue<T, Key>]) => Enum<ExtractEnumValue<T, Key> extends undefined ? T : ExtractEnumValue<T, Key> extends never ? T : {
type: Key;
value: ExtractEnumValue<T, Key>;
}>;
declare const Variant: {
<O extends StringRecord<Codec<any>>>(inner: O, indexes?: RestrictedLenTuple<number, O> | undefined): Codec<Enum<{ [K in keyof O]: K extends string ? {
type: K;
value: CodecType<O[K]>;
} : never; }[keyof O]>>;
enc: <O_1 extends StringRecord<Encoder<any>>>(inner: O_1, x?: RestrictedLenTuple<number, O_1> | undefined) => Encoder<Enum<{ [K_1 in keyof O_1]: K_1 extends string ? {
type: K_1;
value: EncoderType<O_1[K_1]>;
} : never; }[keyof O_1]>>;
dec: <O_2 extends StringRecord<Decoder<any>>>(inner: O_2, x?: RestrictedLenTuple<number, O_2> | undefined) => Decoder<Enum<{ [K_2 in keyof O_2]: K_2 extends string ? {
type: K_2;
value: DecoderType<O_2[K_2]>;
} : never; }[keyof O_2]>>;
};
declare const blockHeader: scale_ts_dist_types.Codec<{
parentHash: HexString;
number: number;
stateRoot: HexString;
extrinsicRoot: HexString;
digests: Enum<{
type: "consensus";
value: {
engine: string;
payload: HexString;
};
} | {
type: "seal";
value: {
engine: string;
payload: HexString;
};
} | {
type: "preRuntime";
value: {
engine: string;
payload: HexString;
};
} | {
type: "runtimeUpdated";
value: undefined;
}>[];
}>;
type BlockHeader = CodecType<typeof blockHeader>;
declare const lookup: scale_ts.Codec<{

@@ -38,3 +140,3 @@ id: number;

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -44,5 +146,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -55,5 +157,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -133,5 +235,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -142,70 +241,8 @@ docs: string[];

type HexString = string & {
__hexString: unknown;
};
declare const Hex: {
(nBytes?: number): Codec<HexString>;
enc: (nBytes?: number) => Encoder<HexString>;
dec: (nBytes?: number) => Decoder<HexString>;
};
declare const pallets: scale_ts.Codec<{
name: string;
storage: void | {
prefix: string;
items: {
name: string;
modifier: number;
type: {
tag: "map";
value: {
hashers: ({
tag: "Blake2128";
value: undefined;
} | {
tag: "Blake2256";
value: undefined;
} | {
tag: "Blake2128Concat";
value: undefined;
} | {
tag: "Twox128";
value: undefined;
} | {
tag: "Twox256";
value: undefined;
} | {
tag: "Twox64Concat";
value: undefined;
} | {
tag: "Identity";
value: undefined;
})[];
key: number;
value: number;
};
} | {
tag: "plain";
value: number;
};
fallback: number[];
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
constants: {
name: string;
type: number;
value: HexString;
docs: string[];
}[];
errors: number | void | undefined;
index: number;
}[]>;
type V14Pallets = CodecType<typeof pallets>;
declare const extrinsic: scale_ts.Codec<{
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -217,4 +254,4 @@ identifier: string;

}>;
type V14Extrinsic = CodecType<typeof extrinsic>;
declare const v14: scale_ts.Codec<{
type V15Extrinsic = CodecType<typeof extrinsic>;
declare const v15: scale_ts.Codec<{
lookup: {

@@ -225,3 +262,3 @@ id: number;

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -231,5 +268,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -242,5 +279,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -320,5 +357,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -329,3 +363,3 @@ docs: string[];

name: string;
storage: void | {
storage: {
prefix: string;

@@ -367,8 +401,8 @@ items: {

};
fallback: number[];
fallback: HexString;
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
calls: number | undefined;
events: number | undefined;
constants: {

@@ -380,8 +414,12 @@ name: string;

}[];
errors: number | void | undefined;
errors: number | undefined;
index: number;
docs: string[];
}[];
extrinsic: {
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -394,4 +432,26 @@ identifier: string;

type: number;
apis: {
name: string;
methods: {
name: string;
inputs: {
name: string;
type: number;
}[];
output: number;
docs: string[];
}[];
docs: string[];
}[];
outerEnums: {
call: number;
event: number;
error: number;
};
custom: [string, {
type: number;
value: HexString;
}][];
}>;
type V14 = CodecType<typeof v14>;
type V15 = CodecType<typeof v15>;

@@ -444,2 +504,5 @@ declare const metadata: Codec<{

tag: "v14";
value: unknown;
} | {
tag: "v15";
value: {

@@ -451,3 +514,3 @@ lookup: {

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -457,5 +520,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -468,5 +531,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -546,5 +609,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -555,3 +615,3 @@ docs: string[];

name: string;
storage: void | {
storage: {
prefix: string;

@@ -593,8 +653,8 @@ items: {

};
fallback: number[];
fallback: HexString;
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
calls: number | undefined;
events: number | undefined;
constants: {

@@ -606,8 +666,12 @@ name: string;

}[];
errors: number | void | undefined;
errors: number | undefined;
index: number;
docs: string[];
}[];
extrinsic: {
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -620,2 +684,24 @@ identifier: string;

type: number;
apis: {
name: string;
methods: {
name: string;
inputs: {
name: string;
type: number;
}[];
output: number;
docs: string[];
}[];
docs: string[];
}[];
outerEnums: {
call: number;
event: number;
error: number;
};
custom: [string, {
type: number;
value: HexString;
}][];
};

@@ -625,12 +711,2 @@ };

interface BitSequence {
bitsLen: number;
bytes: Uint8Array;
}
declare const bitSequence: scale_ts.Codec<BitSequence>;
declare const char: scale_ts.Codec<string>;
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>;
declare const Blake2256: (encoded: Uint8Array) => Uint8Array;

@@ -655,74 +731,62 @@ declare const Blake2128: (encoded: Uint8Array) => Uint8Array;

type Tuple<T> = readonly [T, ...T[]];
interface DescriptorCommon<Pallet extends string, Name extends string> {
checksum: bigint;
pallet: Pallet;
name: Name;
}
interface ArgsWithPayloadCodec<Args extends Array<any>, O> {
len: Args["length"];
}
interface ArgsWithoutPayloadCodec<Args extends Array<any>> {
len: Args["length"];
}
interface StorageDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithPayloadCodec<any, any>> {
type: "storage";
props: Common;
codecs: Codecs;
}
interface ConstantDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "const";
props: Common;
codecs: Codecs;
}
interface EventDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "event";
props: Common;
codecs: Codecs;
}
interface ErrorDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "error";
props: Common;
codecs: Codecs;
}
interface TxDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>> {
type: "tx";
props: Common;
codecs: Codecs;
events: Events;
errors: Errors;
}
type Descriptor = ConstantDescriptor<any, any> | EventDescriptor<any, any> | StorageDescriptor<any, any> | ErrorDescriptor<any, any> | TxDescriptor<any, any, any, any>;
declare const createCommonDescriptor: <Pallet extends string, Name extends string>(checksum: bigint, pallet: Pallet, name: Name) => DescriptorCommon<Pallet, Name>;
declare const getDescriptorCreator: <Type extends "const" | "event" | "error", Pallet extends string, Name extends string, Codecs>(type: Type, checksum: bigint, pallet: Pallet, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>;
declare const getPalletCreator: <Pallet extends string>(pallet: Pallet) => {
getPayloadDescriptor: <Type extends "const" | "event" | "error", Name extends string, Codecs>(type: Type, checksum: bigint, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>;
getStorageDescriptor: <Name_1 extends string, Codecs_1 extends ArgsWithPayloadCodec<any[], any>>(checksum: bigint, name: Name_1, codecs: Codecs_1) => StorageDescriptor<DescriptorCommon<Pallet, Name_1>, Codecs_1>;
getTxDescriptor: <Name_2 extends string, Codecs_2 extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>>(checksum: bigint, name: Name_2, events: Events, errors: Errors, codecs: Codecs_2) => TxDescriptor<DescriptorCommon<Pallet, Name_2>, Codecs_2, Events, Errors>;
type PlainDescriptor<T> = string & {
_type?: T;
};
type EventToObject<E extends EventDescriptor<DescriptorCommon<any, string>, any>> = E extends EventDescriptor<DescriptorCommon<any, infer K>, infer V> ? {
type: K;
value: V;
} : unknown;
type UnionizeTupleEvents<E extends Array<EventDescriptor<any, any>>> = E extends Array<infer Ev> ? Ev extends EventDescriptor<any, any> ? EventToObject<Ev> : unknown : unknown;
type TxDescriptorArgs<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, ArgsWithoutPayloadCodec<infer A>, any, any> ? A : [];
type TxDescriptorEvents<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, infer E, any> ? E : [];
type TxDescriptorErrors<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, any, infer Errors> ? Errors extends Tuple<ErrorDescriptor<any, any>> ? {
[K in keyof Errors]: Errors[K] extends ErrorDescriptor<DescriptorCommon<any, infer Type>, infer Value> ? {
type: Type;
value: Value;
} : unknown;
}[keyof Errors extends number ? keyof Errors : never] : [] : [];
type TxFunction<D extends TxDescriptor<any, any, any, any>> = (...args: TxDescriptorArgs<D>) => Promise<{
ok: true;
events: Array<UnionizeTupleEvents<TxDescriptorEvents<D>>>;
} | {
ok: false;
error: TxDescriptorErrors<D>;
}>;
type StorageType<T extends StorageDescriptor<any, ArgsWithPayloadCodec<any, any>>> = T extends StorageDescriptor<any, ArgsWithPayloadCodec<infer Args, infer Payload>> ? {
keyArgs: Args;
value: Payload;
} : unknown;
type StorageDescriptor<Args extends Array<any>, T, Optional extends true | false> = string & {
_type: T;
_args: Args;
_optional: Optional;
};
type TxDescriptor<Args extends {} | undefined> = string & {
___: Args;
};
type RuntimeDescriptor<Args extends Array<any>, T> = string & {
__: [Args, T];
};
type Descriptors = {
pallets: Record<string, [
Record<string, StorageDescriptor<any, any, any>>,
Record<string, TxDescriptor<any>>,
Record<string, PlainDescriptor<any>>,
Record<string, PlainDescriptor<any>>,
Record<string, PlainDescriptor<any>>
]>;
apis: Record<string, Record<string, RuntimeDescriptor<any, any>>>;
asset: PlainDescriptor<any>;
};
type PickDescriptors<Idx extends 0 | 1 | 2 | 3 | 4, T extends Descriptors["pallets"]> = {
[K in keyof T]: T[K][Idx];
};
type ExtractStorage<T extends Record<string, Record<string, StorageDescriptor<any, any, any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends StorageDescriptor<infer Key, infer Value, infer Optional> ? {
KeyArgs: Key;
Value: Value;
IsOptional: Optional;
} : unknown;
};
};
type ExtractTx<T extends Record<string, Record<string, TxDescriptor<any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends TxDescriptor<infer Args> ? Args : unknown;
};
};
type ExtractPlain<T extends Record<string, Record<string, PlainDescriptor<any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends PlainDescriptor<infer Value> ? Value : unknown;
};
};
type QueryFromDescriptors<T extends Descriptors> = ExtractStorage<PickDescriptors<0, T["pallets"]>>;
type TxFromDescriptors<T extends Descriptors> = ExtractTx<PickDescriptors<1, T["pallets"]>>;
type EventsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<2, T["pallets"]>>;
type ErrorsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<3, T["pallets"]>>;
type ConstFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<4, T["pallets"]>>;
export { AccountId, ArgsWithPayloadCodec, ArgsWithoutPayloadCodec, BitSequence, Blake2128, Blake2128Concat, Blake2256, ConstantDescriptor, Descriptor, DescriptorCommon, EncoderWithHash, ErrorDescriptor, EventDescriptor, EventToObject, Hex, HexString, Identity, OpaqueCodec, OpaqueValue, SS58String, Self, Storage, StorageDescriptor, StorageType, Twox128, Twox256, Twox64Concat, TxDescriptor, TxDescriptorArgs, TxDescriptorErrors, TxDescriptorEvents, TxFunction, UnionizeTupleEvents, V14, V14Extrinsic, V14Lookup, V14Pallets, bitSequence, char, compactBn, compactNumber, createCommonDescriptor, fixedStr, getDescriptorCreator, getPalletCreator, h64, metadata, selfDecoder, selfEncoder, v14 };
type GetEnum<T extends Enum<{
type: string;
value: any;
}>> = {
[K in T["type"]]: (...args: ExtractEnumValue<T, K> extends undefined ? [] : [value: ExtractEnumValue<T, K>]) => T;
};
export { AccountId, Bin, Binary, type BitSequence, Blake2128, Blake2128Concat, Blake2256, type BlockHeader, type ConstFromDescriptors, type Descriptors, type Discriminant, type EncoderWithHash, Enum, type ErrorsFromDescriptors, type EventsFromDescriptors, type ExtractEnumValue, type GetEnum, Hex, type HexString, Identity, type PlainDescriptor, type QueryFromDescriptors, type RuntimeDescriptor, type SS58String, Self, Storage, type StorageDescriptor, Twox128, Twox256, Twox64Concat, type TxDescriptor, type TxFromDescriptors, type V14Lookup, type V15, type V15Extrinsic, Variant, _Enum, bitSequence, blockHeader, char, compactBn, compactNumber, fixedStr, h64, metadata, selfDecoder, selfEncoder, v15 };

@@ -6,2 +6,3 @@ "use strict";

var __hasOwnProp = Object.prototype.hasOwnProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __export = (target, all) => {

@@ -19,4 +20,25 @@ for (var name in all)

};
var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "default"), secondTarget && __copyProps(secondTarget, mod, "default"));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var __publicField = (obj, key, value) => {
__defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
return value;
};
var __accessCheck = (obj, member, msg) => {
if (!member.has(obj))
throw TypeError("Cannot " + msg);
};
var __privateGet = (obj, member, getter) => {
__accessCheck(obj, member, "read from private field");
return getter ? getter.call(obj) : member.get(obj);
};
var __privateAdd = (obj, member, value) => {
if (member.has(obj))
throw TypeError("Cannot add the same private member more than once");
member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
};
var __privateSet = (obj, member, value, setter) => {
__accessCheck(obj, member, "write to private field");
setter ? setter.call(obj, value) : member.set(obj, value);
return value;
};

@@ -27,50 +49,59 @@ // src/index.ts

AccountId: () => AccountId,
Bin: () => Bin,
Binary: () => Binary,
Blake2128: () => Blake2128,
Blake2128Concat: () => Blake2128Concat,
Blake2256: () => Blake2256,
Bytes: () => import_scale_ts8.Bytes,
Enum: () => Enum,
Hex: () => Hex,
Identity: () => Identity,
OpaqueCodec: () => OpaqueCodec,
Option: () => import_scale_ts8.Option,
Result: () => import_scale_ts8.Result,
Self: () => Self,
Storage: () => Storage,
Struct: () => import_scale_ts8.Struct,
Tuple: () => import_scale_ts8.Tuple,
Twox128: () => Twox128,
Twox256: () => Twox256,
Twox64Concat: () => Twox64Concat,
bitSequence: () => bitSequence2,
Variant: () => Variant,
Vector: () => import_scale_ts8.Vector,
_Enum: () => _Enum,
_void: () => import_scale_ts8._void,
bitSequence: () => bitSequence,
blockHeader: () => blockHeader,
bool: () => import_scale_ts8.bool,
char: () => char,
compact: () => import_scale_ts8.compact,
compactBn: () => compactBn,
compactNumber: () => compactNumber,
createCommonDescriptor: () => createCommonDescriptor,
createCodec: () => import_scale_ts8.createCodec,
createDecoder: () => import_scale_ts8.createDecoder,
enhanceCodec: () => import_scale_ts8.enhanceCodec,
enhanceDecoder: () => import_scale_ts8.enhanceDecoder,
enhanceEncoder: () => import_scale_ts8.enhanceEncoder,
fixedStr: () => fixedStr,
getDescriptorCreator: () => getDescriptorCreator,
getPalletCreator: () => getPalletCreator,
h64: () => h64,
i128: () => import_scale_ts8.i128,
i16: () => import_scale_ts8.i16,
i256: () => import_scale_ts8.i256,
i32: () => import_scale_ts8.i32,
i64: () => import_scale_ts8.i64,
i8: () => import_scale_ts8.i8,
metadata: () => metadata,
selfDecoder: () => selfDecoder,
selfEncoder: () => selfEncoder,
v14: () => v14
str: () => import_scale_ts8.str,
u128: () => import_scale_ts8.u128,
u16: () => import_scale_ts8.u16,
u256: () => import_scale_ts8.u256,
u32: () => import_scale_ts8.u32,
u64: () => import_scale_ts8.u64,
u8: () => import_scale_ts8.u8,
v15: () => v15
});
module.exports = __toCommonJS(src_exports);
__reExport(src_exports, require("scale-ts"), module.exports);
// src/codecs/index.ts
var codecs_exports = {};
__export(codecs_exports, {
AccountId: () => AccountId,
Hex: () => Hex,
OpaqueCodec: () => OpaqueCodec,
Self: () => Self,
bitSequence: () => bitSequence2,
char: () => char,
compactBn: () => compactBn,
compactNumber: () => compactNumber,
fixedStr: () => fixedStr,
metadata: () => metadata,
selfDecoder: () => selfDecoder,
selfEncoder: () => selfEncoder,
v14: () => v14
});
__reExport(codecs_exports, require("scale-ts"));
// src/codecs/AccountId.ts
// src/codecs/scale/AccountId.ts
var import_scale_ts = require("scale-ts");

@@ -132,12 +163,113 @@ var import_blake2b = require("@noble/hashes/blake2b");

// src/codecs/compact.ts
// src/codecs/scale/Binary.ts
var import_scale_ts2 = require("scale-ts");
var compactNumber = import_scale_ts2.compact;
var compactBn = import_scale_ts2.compact;
var import_utils = require("@polkadot-api/utils");
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
var _bytes, _hex, _str;
var _Binary = class _Binary {
constructor(data) {
__privateAdd(this, _bytes, void 0);
__privateAdd(this, _hex, null);
__privateAdd(this, _str, null);
__publicField(this, "asText", () => __privateGet(this, _str) === null ? __privateSet(this, _str, textDecoder.decode(__privateGet(this, _bytes))) : __privateGet(this, _str));
__publicField(this, "asHex", () => __privateGet(this, _hex) === null ? __privateSet(this, _hex, (0, import_utils.toHex)(__privateGet(this, _bytes))) : __privateGet(this, _hex));
__publicField(this, "asBytes", () => __privateGet(this, _bytes));
__privateSet(this, _bytes, data);
}
static fromText(input) {
return new _Binary(textEncoder.encode(input));
}
static fromHex(input) {
return new _Binary((0, import_utils.fromHex)(input));
}
static fromBytes(input) {
return new _Binary(input);
}
};
_bytes = new WeakMap();
_hex = new WeakMap();
_str = new WeakMap();
var Binary = _Binary;
var enc = (nBytes) => {
const _enc = import_scale_ts2.Bytes.enc(nBytes);
return (value) => _enc(value.asBytes());
};
var dec = (nBytes) => {
const _dec = import_scale_ts2.Bytes.dec(nBytes);
return (value) => Binary.fromBytes(_dec(value));
};
var Bin = (nBytes) => (0, import_scale_ts2.createCodec)(enc(nBytes), dec(nBytes));
Bin.enc = enc;
Bin.dec = dec;
// src/codecs/Self.ts
// src/codecs/scale/bitSequence.ts
var import_scale_ts4 = require("scale-ts");
// src/codecs/scale/compact.ts
var import_scale_ts3 = require("scale-ts");
var compactNumber = import_scale_ts3.compact;
var compactBn = import_scale_ts3.compact;
// src/codecs/scale/bitSequence.ts
var bitSequenceDecoder = (0, import_scale_ts4.createDecoder)((data) => {
const bitsLen = compactNumber.dec(data);
const bytesLen = Math.ceil(bitsLen / 8);
const bytes = (0, import_scale_ts4.Bytes)(bytesLen).dec(data);
return { bytes, bitsLen };
});
var bitSequenceEncoder = (input) => {
if (input.bitsLen > input.bytes.length * 8)
throw new Error(
`Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})`
);
const lenEncoded = compactNumber.enc(input.bitsLen);
const result = new Uint8Array(input.bytes.length + lenEncoded.length);
result.set(lenEncoded, 0);
result.set(input.bytes, lenEncoded.length);
return result;
};
var bitSequence = (0, import_scale_ts4.createCodec)(bitSequenceEncoder, bitSequenceDecoder);
// src/codecs/scale/char.ts
var import_scale_ts5 = require("scale-ts");
var char = (0, import_scale_ts5.enhanceCodec)(
import_scale_ts5.u8,
(str5) => str5.charCodeAt(0),
String.fromCharCode
);
// src/codecs/scale/Hex.ts
var import_utils2 = require("@polkadot-api/utils");
var import_scale_ts6 = require("scale-ts");
var enc2 = (nBytes) => {
const _enc = import_scale_ts6.Bytes.enc(nBytes);
return (value) => _enc((0, import_utils2.fromHex)(value));
};
var dec2 = (nBytes) => {
const _dec = import_scale_ts6.Bytes.dec(nBytes);
return (value) => (0, import_utils2.toHex)(_dec(value));
};
var Hex = (nBytes) => (0, import_scale_ts6.createCodec)(enc2(nBytes), dec2(nBytes));
Hex.enc = enc2;
Hex.dec = dec2;
// src/codecs/scale/fixed-str.ts
var import_scale_ts7 = require("scale-ts");
var textEncoder2 = new TextEncoder();
var textDecoder2 = new TextDecoder();
var fixedStr = (nBytes) => (0, import_scale_ts7.enhanceCodec)(
(0, import_scale_ts7.Bytes)(nBytes),
(str5) => textEncoder2.encode(str5),
(bytes) => textDecoder2.decode(bytes)
);
// src/codecs/scale/re-exported.ts
var import_scale_ts8 = require("scale-ts");
// src/codecs/scale/Self.ts
var import_scale_ts9 = require("scale-ts");
var selfEncoder = (value) => {
let cache = (x) => {
const encoder = import_scale_ts3.Struct.enc({ self: value() });
const encoder = value();
cache = encoder;

@@ -150,3 +282,3 @@ return encoder(x);

let cache = (x) => {
const decoder = import_scale_ts3.Struct.dec({ self: value() });
const decoder = value();
const result = decoder;

@@ -158,3 +290,3 @@ cache = decoder;

};
var Self = (value) => (0, import_scale_ts3.createCodec)(
var Self = (value) => (0, import_scale_ts9.createCodec)(
selfEncoder(() => value().enc),

@@ -164,53 +296,103 @@ selfDecoder(() => value().dec)

// src/codecs/Opaque.ts
var import_scale_ts4 = require("scale-ts");
var OpaqueDecoder = (inner, len = compactNumber.dec) => (0, import_scale_ts4.createDecoder)((bytes) => {
const length = len(bytes);
const innerBytes = (0, import_scale_ts4.Bytes)(length).dec(bytes);
let _cachedValue;
return {
length,
inner: () => _cachedValue = _cachedValue || inner(innerBytes)
};
// src/codecs/scale/Variant.ts
var import_scale_ts10 = require("scale-ts");
var import_utils3 = require("@polkadot-api/utils");
var _Enum = new Proxy(
{},
{
get(_, prop) {
return (value) => Enum(prop, value);
}
}
);
var Enum = (_type, _value) => ({
as: (type) => {
if (type !== _type)
throw new Error(`Enum.as(${type}) used with actual type ${_type}`);
return _value;
},
is: (type) => type === _type,
type: _type,
value: _value
});
var OpaqueEncoder = (inner, len = compactNumber.enc) => (input) => {
const lenBytes = len(input.length);
const result = new Uint8Array(lenBytes.length + input.length);
result.set(lenBytes, 0);
result.set(inner(input.inner()), lenBytes.length);
return result;
var VariantEnc = (...args) => {
const enc3 = import_scale_ts10.Enum.enc(...args);
return (v) => enc3({ tag: v.type, value: v.value });
};
var OpaqueCodec = (inner, len = compactNumber) => (0, import_scale_ts4.createCodec)(
OpaqueEncoder(inner.enc, len.enc),
OpaqueDecoder(inner.dec, len.dec)
var VariantDec = (...args) => {
const dec3 = import_scale_ts10.Enum.dec(...args);
return (v) => {
const { tag, value } = dec3(v);
return Enum(tag, value);
};
};
var Variant = (inner, ...args) => (0, import_scale_ts10.createCodec)(
VariantEnc(
(0, import_utils3.mapObject)(inner, ([encoder]) => encoder),
...args
),
VariantDec(
(0, import_utils3.mapObject)(inner, ([, decoder]) => decoder),
...args
)
);
OpaqueCodec.enc = OpaqueEncoder;
OpaqueCodec.dec = OpaqueDecoder;
Variant.enc = VariantEnc;
Variant.dec = VariantDec;
// src/codecs/metadata/v14/v14.ts
var import_scale_ts8 = require("scale-ts");
// src/codecs/blockHeader.ts
var textEncoder3 = new TextEncoder();
var textDecoder3 = new TextDecoder();
var fourChars = (0, import_scale_ts8.enhanceCodec)(
(0, import_scale_ts8.Bytes)(4),
textEncoder3.encode.bind(textEncoder3),
textDecoder3.decode.bind(textDecoder3)
);
var diggestVal = (0, import_scale_ts8.Struct)({
engine: fourChars,
payload: Hex()
});
var diggest = Variant(
{
consensus: diggestVal,
seal: diggestVal,
preRuntime: diggestVal,
runtimeUpdated: import_scale_ts8._void
},
[4, 5, 6, 8]
);
var hex32 = Hex(32);
var blockHeader = (0, import_scale_ts8.Struct)({
parentHash: hex32,
number: compactNumber,
stateRoot: hex32,
extrinsicRoot: hex32,
digests: (0, import_scale_ts8.Vector)(diggest)
});
// src/codecs/metadata/v14/lookup.ts
var import_scale_ts5 = require("scale-ts");
var oStr = (0, import_scale_ts5.Option)(import_scale_ts5.str);
var strs = (0, import_scale_ts5.Vector)(import_scale_ts5.str);
var primitive = (0, import_scale_ts5.Enum)({
bool: import_scale_ts5._void,
char: import_scale_ts5._void,
str: import_scale_ts5._void,
u8: import_scale_ts5._void,
u16: import_scale_ts5._void,
u32: import_scale_ts5._void,
u64: import_scale_ts5._void,
u128: import_scale_ts5._void,
u256: import_scale_ts5._void,
i8: import_scale_ts5._void,
i16: import_scale_ts5._void,
i32: import_scale_ts5._void,
i64: import_scale_ts5._void,
i128: import_scale_ts5._void,
i256: import_scale_ts5._void
// src/codecs/metadata/v15/v15.ts
var import_scale_ts13 = require("scale-ts");
// src/codecs/metadata/v15/lookup.ts
var import_scale_ts11 = require("scale-ts");
var oStr = (0, import_scale_ts11.Option)(import_scale_ts11.str);
var strs = (0, import_scale_ts11.Vector)(import_scale_ts11.str);
var primitive = (0, import_scale_ts11.Enum)({
bool: import_scale_ts11._void,
char: import_scale_ts11._void,
str: import_scale_ts11._void,
u8: import_scale_ts11._void,
u16: import_scale_ts11._void,
u32: import_scale_ts11._void,
u64: import_scale_ts11._void,
u128: import_scale_ts11._void,
u256: import_scale_ts11._void,
i8: import_scale_ts11._void,
i16: import_scale_ts11._void,
i32: import_scale_ts11._void,
i64: import_scale_ts11._void,
i128: import_scale_ts11._void,
i256: import_scale_ts11._void
});
var fields = (0, import_scale_ts5.Vector)(
(0, import_scale_ts5.Struct)({
var fields = (0, import_scale_ts11.Vector)(
(0, import_scale_ts11.Struct)({
name: oStr,

@@ -222,19 +404,19 @@ type: compactNumber,

);
var arr = (0, import_scale_ts5.Struct)({
len: import_scale_ts5.u32,
var arr = (0, import_scale_ts11.Struct)({
len: import_scale_ts11.u32,
type: compactNumber
});
var bitSequence = (0, import_scale_ts5.Struct)({
var bitSequence2 = (0, import_scale_ts11.Struct)({
bitStoreType: compactNumber,
bitOrderType: compactNumber
});
var variant = (0, import_scale_ts5.Vector)(
(0, import_scale_ts5.Struct)({
name: import_scale_ts5.str,
var variant = (0, import_scale_ts11.Vector)(
(0, import_scale_ts11.Struct)({
name: import_scale_ts11.str,
fields,
index: import_scale_ts5.u8,
index: import_scale_ts11.u8,
docs: strs
})
);
var def = (0, import_scale_ts5.Enum)({
var def = (0, import_scale_ts11.Enum)({
composite: fields,

@@ -244,14 +426,13 @@ variant,

array: arr,
tuple: (0, import_scale_ts5.Vector)(compactNumber),
tuple: (0, import_scale_ts11.Vector)(compactNumber),
primitive,
compact: compactNumber,
bitSequence,
historicMetaCompat: import_scale_ts5.str
bitSequence: bitSequence2
});
var param = (0, import_scale_ts5.Struct)({
name: import_scale_ts5.str,
type: (0, import_scale_ts5.Option)(compactNumber)
var param = (0, import_scale_ts11.Struct)({
name: import_scale_ts11.str,
type: (0, import_scale_ts11.Option)(compactNumber)
});
var params = (0, import_scale_ts5.Vector)(param);
var entry = (0, import_scale_ts5.Struct)({
var params = (0, import_scale_ts11.Vector)(param);
var entry = (0, import_scale_ts11.Struct)({
id: compactNumber,

@@ -263,32 +444,17 @@ path: strs,

});
var lookup = (0, import_scale_ts5.Vector)(entry);
var lookup = (0, import_scale_ts11.Vector)(entry);
// src/codecs/Hex.ts
var import_utils = require("@polkadot-api/utils");
var import_scale_ts6 = require("scale-ts");
var enc = (nBytes) => {
const _enc = import_scale_ts6.Bytes.enc(nBytes);
return (value) => _enc((0, import_utils.fromHex)(value));
};
var dec = (nBytes) => {
const _dec = import_scale_ts6.Bytes.dec(nBytes);
return (value) => (0, import_utils.toHex)(_dec(value));
};
var Hex = (nBytes) => (0, import_scale_ts6.createCodec)(enc(nBytes), dec(nBytes));
Hex.enc = enc;
Hex.dec = dec;
// src/codecs/metadata/v14/pallets.ts
var import_scale_ts7 = require("scale-ts");
var hashType = (0, import_scale_ts7.Enum)({
Blake2128: import_scale_ts7._void,
Blake2256: import_scale_ts7._void,
Blake2128Concat: import_scale_ts7._void,
Twox128: import_scale_ts7._void,
Twox256: import_scale_ts7._void,
Twox64Concat: import_scale_ts7._void,
Identity: import_scale_ts7._void
// src/codecs/metadata/v15/pallets.ts
var import_scale_ts12 = require("scale-ts");
var hashType = (0, import_scale_ts12.Enum)({
Blake2128: import_scale_ts12._void,
Blake2256: import_scale_ts12._void,
Blake2128Concat: import_scale_ts12._void,
Twox128: import_scale_ts12._void,
Twox256: import_scale_ts12._void,
Twox64Concat: import_scale_ts12._void,
Identity: import_scale_ts12._void
});
var hashers = (0, import_scale_ts7.Vector)(hashType);
var storageMap = (0, import_scale_ts7.Struct)({
var hashers = (0, import_scale_ts12.Vector)(hashType);
var storageMap = (0, import_scale_ts12.Struct)({
hashers,

@@ -298,44 +464,66 @@ key: compactNumber,

});
var storageItem = (0, import_scale_ts7.Struct)({
name: import_scale_ts7.str,
modifier: import_scale_ts7.u8,
type: (0, import_scale_ts7.Enum)({
var storageItem = (0, import_scale_ts12.Struct)({
name: import_scale_ts12.str,
modifier: import_scale_ts12.u8,
type: (0, import_scale_ts12.Enum)({
plain: compactNumber,
map: storageMap
}),
fallback: (0, import_scale_ts7.Vector)(import_scale_ts7.u8),
docs: (0, import_scale_ts7.Vector)(import_scale_ts7.str)
fallback: Hex(),
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str)
});
var storage = (0, import_scale_ts7.Option)(
(0, import_scale_ts7.Struct)({
prefix: import_scale_ts7.str,
items: (0, import_scale_ts7.Vector)(storageItem)
var storage = (0, import_scale_ts12.Option)(
(0, import_scale_ts12.Struct)({
prefix: import_scale_ts12.str,
items: (0, import_scale_ts12.Vector)(storageItem)
})
);
var pallets = (0, import_scale_ts7.Vector)(
(0, import_scale_ts7.Struct)({
name: import_scale_ts7.str,
var pallets = (0, import_scale_ts12.Vector)(
(0, import_scale_ts12.Struct)({
name: import_scale_ts12.str,
storage,
calls: (0, import_scale_ts7.Option)(compactNumber),
events: (0, import_scale_ts7.Option)(compactNumber),
constants: (0, import_scale_ts7.Vector)(
(0, import_scale_ts7.Struct)({
name: import_scale_ts7.str,
calls: (0, import_scale_ts12.Option)(compactNumber),
events: (0, import_scale_ts12.Option)(compactNumber),
constants: (0, import_scale_ts12.Vector)(
(0, import_scale_ts12.Struct)({
name: import_scale_ts12.str,
type: compactNumber,
value: Hex(),
docs: (0, import_scale_ts7.Vector)(import_scale_ts7.str)
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str)
})
),
errors: (0, import_scale_ts7.Option)(compactNumber),
index: import_scale_ts7.u8
errors: (0, import_scale_ts12.Option)(compactNumber),
index: import_scale_ts12.u8,
docs: (0, import_scale_ts12.Vector)(import_scale_ts12.str)
})
);
// src/codecs/metadata/v14/v14.ts
var extrinsic = (0, import_scale_ts8.Struct)({
type: compactNumber,
version: import_scale_ts8.u8,
signedExtensions: (0, import_scale_ts8.Vector)(
(0, import_scale_ts8.Struct)({
identifier: import_scale_ts8.str,
// src/codecs/metadata/v15/v15.ts
var docs = (0, import_scale_ts13.Vector)(import_scale_ts13.str);
var runtimeApi = (0, import_scale_ts13.Struct)({
name: import_scale_ts13.str,
methods: (0, import_scale_ts13.Vector)(
(0, import_scale_ts13.Struct)({
name: import_scale_ts13.str,
inputs: (0, import_scale_ts13.Vector)(
(0, import_scale_ts13.Struct)({
name: import_scale_ts13.str,
type: compactNumber
})
),
output: compactNumber,
docs
})
),
docs
});
var extrinsic = (0, import_scale_ts13.Struct)({
version: import_scale_ts13.u8,
address: compactNumber,
call: compactNumber,
signature: compactNumber,
extra: compactNumber,
signedExtensions: (0, import_scale_ts13.Vector)(
(0, import_scale_ts13.Struct)({
identifier: import_scale_ts13.str,
type: compactNumber,

@@ -346,21 +534,28 @@ additionalSigned: compactNumber

});
var v14 = (0, import_scale_ts8.Struct)({
var v15 = (0, import_scale_ts13.Struct)({
lookup,
pallets,
extrinsic,
type: compactNumber
type: compactNumber,
apis: (0, import_scale_ts13.Vector)(runtimeApi),
outerEnums: (0, import_scale_ts13.Struct)({
call: compactNumber,
event: compactNumber,
error: compactNumber
}),
custom: (0, import_scale_ts13.Vector)((0, import_scale_ts13.Tuple)(import_scale_ts13.str, (0, import_scale_ts13.Struct)({ type: compactNumber, value: Hex() })))
});
// src/codecs/metadata/metadata.ts
var import_scale_ts9 = require("scale-ts");
var import_scale_ts14 = require("scale-ts");
var unsupportedFn = () => {
throw new Error("Unsupported metadata version!");
};
var unsupported = (0, import_scale_ts9.createCodec)(
var unsupported = (0, import_scale_ts14.createCodec)(
unsupportedFn,
unsupportedFn
);
var metadata = (0, import_scale_ts9.Struct)({
magicNumber: import_scale_ts9.u32,
metadata: (0, import_scale_ts9.Enum)({
var metadata = (0, import_scale_ts14.Struct)({
magicNumber: import_scale_ts14.u32,
metadata: (0, import_scale_ts14.Enum)({
v0: unsupported,

@@ -380,50 +575,9 @@ v1: unsupported,

v13: unsupported,
v14
v14: unsupported,
v15
})
});
// src/codecs/bitSequence.ts
var import_scale_ts10 = require("scale-ts");
var bitSequenceDecoder = (0, import_scale_ts10.createDecoder)((data) => {
const bitsLen = compactNumber.dec(data);
const bytesLen = Math.ceil(bitsLen / 8);
const bytes = (0, import_scale_ts10.Bytes)(bytesLen).dec(data);
return { bytes, bitsLen };
});
var bitSequenceEncoder = (input) => {
if (input.bitsLen > input.bytes.length * 8)
throw new Error(
`Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})`
);
const lenEncoded = compactNumber.enc(input.bitsLen);
const result = new Uint8Array(input.bytes.length + lenEncoded.length);
result.set(lenEncoded, 0);
result.set(input.bytes, lenEncoded.length);
return result;
};
var bitSequence2 = (0, import_scale_ts10.createCodec)(bitSequenceEncoder, bitSequenceDecoder);
// src/codecs/char.ts
var import_scale_ts11 = require("scale-ts");
var char = (0, import_scale_ts11.enhanceCodec)(
import_scale_ts11.u8,
(str4) => str4.charCodeAt(0),
String.fromCharCode
);
// src/codecs/fixed-str.ts
var import_scale_ts12 = require("scale-ts");
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
var fixedStr = (nBytes) => (0, import_scale_ts12.enhanceCodec)(
(0, import_scale_ts12.Bytes)(nBytes),
(str4) => textEncoder.encode(str4),
(bytes) => textDecoder.decode(bytes)
);
// src/index.ts
__reExport(src_exports, codecs_exports, module.exports);
// src/hashes/blake2.ts
var import_utils2 = require("@polkadot-api/utils");
var import_utils4 = require("@polkadot-api/utils");
var import_blake2b2 = require("@noble/hashes/blake2b");

@@ -434,3 +588,3 @@ var len32 = { dkLen: 32 };

var Blake2128 = (encoded) => (0, import_blake2b2.blake2b)(encoded, len16);
var Blake2128Concat = (encoded) => (0, import_utils2.mergeUint8)(Blake2128(encoded), encoded);
var Blake2128Concat = (encoded) => (0, import_utils4.mergeUint8)(Blake2128(encoded), encoded);

@@ -441,4 +595,4 @@ // src/hashes/identity.ts

// src/hashes/twoX.ts
var import_utils3 = require("@polkadot-api/utils");
var import_scale_ts13 = require("scale-ts");
var import_utils5 = require("@polkadot-api/utils");
var import_scale_ts15 = require("scale-ts");

@@ -596,15 +750,15 @@ // src/hashes/h64.ts

};
var Twox64Concat = (encoded) => (0, import_utils3.mergeUint8)(import_scale_ts13.u64.enc(h64(encoded)), encoded);
var Twox64Concat = (encoded) => (0, import_utils5.mergeUint8)(import_scale_ts15.u64.enc(h64(encoded)), encoded);
// src/storage.ts
var import_utils4 = require("@polkadot-api/utils");
var textEncoder2 = new TextEncoder();
var import_utils6 = require("@polkadot-api/utils");
var textEncoder4 = new TextEncoder();
var Storage = (pallet) => {
const palledEncoded = Twox128(textEncoder2.encode(pallet));
return (name, dec2, ...encoders) => {
const palletItemEncoded = (0, import_utils4.mergeUint8)(
const palledEncoded = Twox128(textEncoder4.encode(pallet));
return (name, dec3, ...encoders) => {
const palletItemEncoded = (0, import_utils6.mergeUint8)(
palledEncoded,
Twox128(textEncoder2.encode(name))
Twox128(textEncoder4.encode(name))
);
const palletItemEncodedHex = (0, import_utils4.toHex)(palletItemEncoded);
const palletItemEncodedHex = (0, import_utils6.toHex)(palletItemEncoded);
const bytesToSkip = encoders.map((e) => e[1]).map((x) => {

@@ -637,10 +791,10 @@ if (x === Identity)

const fns = encoders.map(
([{ enc: enc3 }, hash]) => (val) => hash(enc3(val))
([{ enc: enc4 }, hash]) => (val) => hash(enc4(val))
);
const enc2 = (...args) => (0, import_utils4.toHex)(
(0, import_utils4.mergeUint8)(palletItemEncoded, ...args.map((val, idx) => fns[idx](val)))
const enc3 = (...args) => (0, import_utils6.toHex)(
(0, import_utils6.mergeUint8)(palletItemEncoded, ...args.map((val, idx) => fns[idx](val)))
);
return {
enc: enc2,
dec: dec2,
enc: enc3,
dec: dec3,
keyDecoder

@@ -650,38 +804,2 @@ };

};
// src/descriptors.ts
var createCommonDescriptor = (checksum, pallet, name) => ({
checksum,
pallet,
name
});
var getDescriptorCreator = (type, checksum, pallet, name, codecs) => ({
type,
props: { checksum, pallet, name },
codecs
});
var getPalletCreator = (pallet) => {
const getPayloadDescriptor = (type, checksum, name, codecs) => ({
type,
props: { checksum, pallet, name },
codecs
});
const getStorageDescriptor = (checksum, name, codecs) => ({
type: "storage",
props: { checksum, pallet, name },
codecs
});
const getTxDescriptor = (checksum, name, events, errors, codecs) => ({
type: "tx",
props: { checksum, pallet, name },
codecs,
events,
errors
});
return {
getPayloadDescriptor,
getStorageDescriptor,
getTxDescriptor
};
};
//# sourceMappingURL=index.js.map
import * as scale_ts from 'scale-ts';
import { Codec, Encoder, Decoder, CodecType } from 'scale-ts';
export * from 'scale-ts';
import { Codec, Encoder, Decoder, StringRecord, CodecType, EncoderType, DecoderType } from 'scale-ts';
export { Bytes, Codec, CodecType, Decoder, DecoderType, Encoder, EncoderType, Option, Result, ResultPayload, StringRecord, Struct, Tuple, Vector, _void, bool, compact, createCodec, createDecoder, enhanceCodec, enhanceDecoder, enhanceEncoder, i128, i16, i256, i32, i64, i8, str, u128, u16, u256, u32, u64, u8 } from 'scale-ts';
import * as scale_ts_dist_types from 'scale-ts/dist/types';
type SS58String = string & {
__SS58String: unknown;
__SS58String?: unknown;
};
declare const AccountId: (ss58Format?: number, nBytes?: 32 | 33) => scale_ts.Codec<SS58String>;
type HexString = string & {
__hexString?: unknown;
};
declare const Hex: {
(nBytes?: number): Codec<HexString>;
enc: (nBytes?: number) => Encoder<HexString>;
dec: (nBytes?: number) => Decoder<HexString>;
};
declare class Binary {
#private;
constructor(data: Uint8Array);
asText: () => string;
asHex: () => string;
asBytes: () => Uint8Array;
static fromText(input: string): Binary;
static fromHex(input: HexString): Binary;
static fromBytes(input: Uint8Array): Binary;
}
declare const Bin: {
(nBytes?: number): Codec<Binary>;
enc: (nBytes?: number) => Encoder<Binary>;
dec: (nBytes?: number) => Decoder<Binary>;
};
interface BitSequence {
bitsLen: number;
bytes: Uint8Array;
}
declare const bitSequence: scale_ts.Codec<BitSequence>;
declare const char: scale_ts.Codec<string>;
declare const compactNumber: Codec<number>;
declare const compactBn: Codec<bigint>;
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<{
self: T;
}>;
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<{
self: T;
}>;
declare const Self: <T>(value: () => Codec<T>) => Codec<{
self: T;
}>;
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>;
type OpaqueValue<T> = {
length: number;
inner: () => T;
declare const selfEncoder: <T>(value: () => Encoder<T>) => Encoder<T>;
declare const selfDecoder: <T>(value: () => Decoder<T>) => Decoder<T>;
declare const Self: <T>(value: () => Codec<T>) => Codec<T>;
type Tuple<T, N extends number> = readonly [T, ...T[]] & {
length: N;
};
declare const OpaqueCodec: {
<T>(inner: Codec<T>, len?: Codec<number>): Codec<OpaqueValue<T>>;
enc: <T_1>(inner: Encoder<T_1>, len?: Encoder<number>) => Encoder<OpaqueValue<T_1>>;
dec: <T_2>(inner: Decoder<T_2>, len?: Decoder<number>) => Decoder<OpaqueValue<T_2>>;
type Push<T extends any[], V> = [...T, V];
type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends (k: infer I) => void ? I : never;
type LastOf<T> = UnionToIntersection<T extends any ? () => T : never> extends () => infer R ? R : never;
type TuplifyUnion<T, L = LastOf<T>, N = [T] extends [never] ? true : false> = true extends N ? [] : Push<TuplifyUnion<Exclude<T, L>>, L>;
type RestrictedLenTuple<T, O extends StringRecord<any>> = Tuple<T, TuplifyUnion<keyof O> extends Tuple<any, infer V> ? V : 0>;
type ExtractEnumValue<T extends {
type: string;
value?: any;
}, K extends string> = T extends {
type: K;
value: infer R;
} ? R : never;
interface Discriminant<T extends {
type: string;
value?: any;
}> {
is<K extends T["type"]>(this: Enum<T>, type: K): this is Enum<{
type: K;
value: ExtractEnumValue<T, K>;
}>;
as<K extends T["type"]>(type: K): ExtractEnumValue<T, K>;
}
declare const _Enum: {};
type Enum<T extends {
type: string;
value?: any;
}> = T & Discriminant<T>;
declare const Enum: <T extends {
type: string;
value?: any;
}, Key extends T["type"]>(type: Key, ...args: ExtractEnumValue<T, Key> extends undefined ? [] : [value: ExtractEnumValue<T, Key>]) => Enum<ExtractEnumValue<T, Key> extends undefined ? T : ExtractEnumValue<T, Key> extends never ? T : {
type: Key;
value: ExtractEnumValue<T, Key>;
}>;
declare const Variant: {
<O extends StringRecord<Codec<any>>>(inner: O, indexes?: RestrictedLenTuple<number, O> | undefined): Codec<Enum<{ [K in keyof O]: K extends string ? {
type: K;
value: CodecType<O[K]>;
} : never; }[keyof O]>>;
enc: <O_1 extends StringRecord<Encoder<any>>>(inner: O_1, x?: RestrictedLenTuple<number, O_1> | undefined) => Encoder<Enum<{ [K_1 in keyof O_1]: K_1 extends string ? {
type: K_1;
value: EncoderType<O_1[K_1]>;
} : never; }[keyof O_1]>>;
dec: <O_2 extends StringRecord<Decoder<any>>>(inner: O_2, x?: RestrictedLenTuple<number, O_2> | undefined) => Decoder<Enum<{ [K_2 in keyof O_2]: K_2 extends string ? {
type: K_2;
value: DecoderType<O_2[K_2]>;
} : never; }[keyof O_2]>>;
};
declare const blockHeader: scale_ts_dist_types.Codec<{
parentHash: HexString;
number: number;
stateRoot: HexString;
extrinsicRoot: HexString;
digests: Enum<{
type: "consensus";
value: {
engine: string;
payload: HexString;
};
} | {
type: "seal";
value: {
engine: string;
payload: HexString;
};
} | {
type: "preRuntime";
value: {
engine: string;
payload: HexString;
};
} | {
type: "runtimeUpdated";
value: undefined;
}>[];
}>;
type BlockHeader = CodecType<typeof blockHeader>;
declare const lookup: scale_ts.Codec<{

@@ -38,3 +140,3 @@ id: number;

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -44,5 +146,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -55,5 +157,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -133,5 +235,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -142,70 +241,8 @@ docs: string[];

type HexString = string & {
__hexString: unknown;
};
declare const Hex: {
(nBytes?: number): Codec<HexString>;
enc: (nBytes?: number) => Encoder<HexString>;
dec: (nBytes?: number) => Decoder<HexString>;
};
declare const pallets: scale_ts.Codec<{
name: string;
storage: void | {
prefix: string;
items: {
name: string;
modifier: number;
type: {
tag: "map";
value: {
hashers: ({
tag: "Blake2128";
value: undefined;
} | {
tag: "Blake2256";
value: undefined;
} | {
tag: "Blake2128Concat";
value: undefined;
} | {
tag: "Twox128";
value: undefined;
} | {
tag: "Twox256";
value: undefined;
} | {
tag: "Twox64Concat";
value: undefined;
} | {
tag: "Identity";
value: undefined;
})[];
key: number;
value: number;
};
} | {
tag: "plain";
value: number;
};
fallback: number[];
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
constants: {
name: string;
type: number;
value: HexString;
docs: string[];
}[];
errors: number | void | undefined;
index: number;
}[]>;
type V14Pallets = CodecType<typeof pallets>;
declare const extrinsic: scale_ts.Codec<{
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -217,4 +254,4 @@ identifier: string;

}>;
type V14Extrinsic = CodecType<typeof extrinsic>;
declare const v14: scale_ts.Codec<{
type V15Extrinsic = CodecType<typeof extrinsic>;
declare const v15: scale_ts.Codec<{
lookup: {

@@ -225,3 +262,3 @@ id: number;

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -231,5 +268,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -242,5 +279,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -320,5 +357,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -329,3 +363,3 @@ docs: string[];

name: string;
storage: void | {
storage: {
prefix: string;

@@ -367,8 +401,8 @@ items: {

};
fallback: number[];
fallback: HexString;
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
calls: number | undefined;
events: number | undefined;
constants: {

@@ -380,8 +414,12 @@ name: string;

}[];
errors: number | void | undefined;
errors: number | undefined;
index: number;
docs: string[];
}[];
extrinsic: {
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -394,4 +432,26 @@ identifier: string;

type: number;
apis: {
name: string;
methods: {
name: string;
inputs: {
name: string;
type: number;
}[];
output: number;
docs: string[];
}[];
docs: string[];
}[];
outerEnums: {
call: number;
event: number;
error: number;
};
custom: [string, {
type: number;
value: HexString;
}][];
}>;
type V14 = CodecType<typeof v14>;
type V15 = CodecType<typeof v15>;

@@ -444,2 +504,5 @@ declare const metadata: Codec<{

tag: "v14";
value: unknown;
} | {
tag: "v15";
value: {

@@ -451,3 +514,3 @@ lookup: {

name: string;
type: number | void | undefined;
type: number | undefined;
}[];

@@ -457,5 +520,5 @@ def: {

value: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -468,5 +531,5 @@ }[];

fields: {
name: string | void | undefined;
name: string | undefined;
type: number;
typeName: string | void | undefined;
typeName: string | undefined;
docs: string[];

@@ -546,5 +609,2 @@ }[];

};
} | {
tag: "historicMetaCompat";
value: string;
};

@@ -555,3 +615,3 @@ docs: string[];

name: string;
storage: void | {
storage: {
prefix: string;

@@ -593,8 +653,8 @@ items: {

};
fallback: number[];
fallback: HexString;
docs: string[];
}[];
} | undefined;
calls: number | void | undefined;
events: number | void | undefined;
calls: number | undefined;
events: number | undefined;
constants: {

@@ -606,8 +666,12 @@ name: string;

}[];
errors: number | void | undefined;
errors: number | undefined;
index: number;
docs: string[];
}[];
extrinsic: {
type: number;
version: number;
address: number;
call: number;
signature: number;
extra: number;
signedExtensions: {

@@ -620,2 +684,24 @@ identifier: string;

type: number;
apis: {
name: string;
methods: {
name: string;
inputs: {
name: string;
type: number;
}[];
output: number;
docs: string[];
}[];
docs: string[];
}[];
outerEnums: {
call: number;
event: number;
error: number;
};
custom: [string, {
type: number;
value: HexString;
}][];
};

@@ -625,12 +711,2 @@ };

interface BitSequence {
bitsLen: number;
bytes: Uint8Array;
}
declare const bitSequence: scale_ts.Codec<BitSequence>;
declare const char: scale_ts.Codec<string>;
declare const fixedStr: (nBytes: number) => scale_ts.Codec<string>;
declare const Blake2256: (encoded: Uint8Array) => Uint8Array;

@@ -655,74 +731,62 @@ declare const Blake2128: (encoded: Uint8Array) => Uint8Array;

type Tuple<T> = readonly [T, ...T[]];
interface DescriptorCommon<Pallet extends string, Name extends string> {
checksum: bigint;
pallet: Pallet;
name: Name;
}
interface ArgsWithPayloadCodec<Args extends Array<any>, O> {
len: Args["length"];
}
interface ArgsWithoutPayloadCodec<Args extends Array<any>> {
len: Args["length"];
}
interface StorageDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithPayloadCodec<any, any>> {
type: "storage";
props: Common;
codecs: Codecs;
}
interface ConstantDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "const";
props: Common;
codecs: Codecs;
}
interface EventDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "event";
props: Common;
codecs: Codecs;
}
interface ErrorDescriptor<Common extends DescriptorCommon<string, string>, Codecs> {
type: "error";
props: Common;
codecs: Codecs;
}
interface TxDescriptor<Common extends DescriptorCommon<string, string>, Codecs extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>> {
type: "tx";
props: Common;
codecs: Codecs;
events: Events;
errors: Errors;
}
type Descriptor = ConstantDescriptor<any, any> | EventDescriptor<any, any> | StorageDescriptor<any, any> | ErrorDescriptor<any, any> | TxDescriptor<any, any, any, any>;
declare const createCommonDescriptor: <Pallet extends string, Name extends string>(checksum: bigint, pallet: Pallet, name: Name) => DescriptorCommon<Pallet, Name>;
declare const getDescriptorCreator: <Type extends "const" | "event" | "error", Pallet extends string, Name extends string, Codecs>(type: Type, checksum: bigint, pallet: Pallet, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>;
declare const getPalletCreator: <Pallet extends string>(pallet: Pallet) => {
getPayloadDescriptor: <Type extends "const" | "event" | "error", Name extends string, Codecs>(type: Type, checksum: bigint, name: Name, codecs: Codecs) => Type extends "const" ? ConstantDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : Type extends "event" ? EventDescriptor<DescriptorCommon<Pallet, Name>, Codecs> : ErrorDescriptor<DescriptorCommon<Pallet, Name>, Codecs>;
getStorageDescriptor: <Name_1 extends string, Codecs_1 extends ArgsWithPayloadCodec<any[], any>>(checksum: bigint, name: Name_1, codecs: Codecs_1) => StorageDescriptor<DescriptorCommon<Pallet, Name_1>, Codecs_1>;
getTxDescriptor: <Name_2 extends string, Codecs_2 extends ArgsWithoutPayloadCodec<any>, Events extends Tuple<EventDescriptor<any, any>>, Errors extends Tuple<ErrorDescriptor<any, any>>>(checksum: bigint, name: Name_2, events: Events, errors: Errors, codecs: Codecs_2) => TxDescriptor<DescriptorCommon<Pallet, Name_2>, Codecs_2, Events, Errors>;
type PlainDescriptor<T> = string & {
_type?: T;
};
type EventToObject<E extends EventDescriptor<DescriptorCommon<any, string>, any>> = E extends EventDescriptor<DescriptorCommon<any, infer K>, infer V> ? {
type: K;
value: V;
} : unknown;
type UnionizeTupleEvents<E extends Array<EventDescriptor<any, any>>> = E extends Array<infer Ev> ? Ev extends EventDescriptor<any, any> ? EventToObject<Ev> : unknown : unknown;
type TxDescriptorArgs<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, ArgsWithoutPayloadCodec<infer A>, any, any> ? A : [];
type TxDescriptorEvents<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, infer E, any> ? E : [];
type TxDescriptorErrors<D extends TxDescriptor<any, any, any, any>> = D extends TxDescriptor<any, any, any, infer Errors> ? Errors extends Tuple<ErrorDescriptor<any, any>> ? {
[K in keyof Errors]: Errors[K] extends ErrorDescriptor<DescriptorCommon<any, infer Type>, infer Value> ? {
type: Type;
value: Value;
} : unknown;
}[keyof Errors extends number ? keyof Errors : never] : [] : [];
type TxFunction<D extends TxDescriptor<any, any, any, any>> = (...args: TxDescriptorArgs<D>) => Promise<{
ok: true;
events: Array<UnionizeTupleEvents<TxDescriptorEvents<D>>>;
} | {
ok: false;
error: TxDescriptorErrors<D>;
}>;
type StorageType<T extends StorageDescriptor<any, ArgsWithPayloadCodec<any, any>>> = T extends StorageDescriptor<any, ArgsWithPayloadCodec<infer Args, infer Payload>> ? {
keyArgs: Args;
value: Payload;
} : unknown;
type StorageDescriptor<Args extends Array<any>, T, Optional extends true | false> = string & {
_type: T;
_args: Args;
_optional: Optional;
};
type TxDescriptor<Args extends {} | undefined> = string & {
___: Args;
};
type RuntimeDescriptor<Args extends Array<any>, T> = string & {
__: [Args, T];
};
type Descriptors = {
pallets: Record<string, [
Record<string, StorageDescriptor<any, any, any>>,
Record<string, TxDescriptor<any>>,
Record<string, PlainDescriptor<any>>,
Record<string, PlainDescriptor<any>>,
Record<string, PlainDescriptor<any>>
]>;
apis: Record<string, Record<string, RuntimeDescriptor<any, any>>>;
asset: PlainDescriptor<any>;
};
type PickDescriptors<Idx extends 0 | 1 | 2 | 3 | 4, T extends Descriptors["pallets"]> = {
[K in keyof T]: T[K][Idx];
};
type ExtractStorage<T extends Record<string, Record<string, StorageDescriptor<any, any, any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends StorageDescriptor<infer Key, infer Value, infer Optional> ? {
KeyArgs: Key;
Value: Value;
IsOptional: Optional;
} : unknown;
};
};
type ExtractTx<T extends Record<string, Record<string, TxDescriptor<any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends TxDescriptor<infer Args> ? Args : unknown;
};
};
type ExtractPlain<T extends Record<string, Record<string, PlainDescriptor<any>>>> = {
[K in keyof T]: {
[KK in keyof T[K]]: T[K][KK] extends PlainDescriptor<infer Value> ? Value : unknown;
};
};
type QueryFromDescriptors<T extends Descriptors> = ExtractStorage<PickDescriptors<0, T["pallets"]>>;
type TxFromDescriptors<T extends Descriptors> = ExtractTx<PickDescriptors<1, T["pallets"]>>;
type EventsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<2, T["pallets"]>>;
type ErrorsFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<3, T["pallets"]>>;
type ConstFromDescriptors<T extends Descriptors> = ExtractPlain<PickDescriptors<4, T["pallets"]>>;
export { AccountId, ArgsWithPayloadCodec, ArgsWithoutPayloadCodec, BitSequence, Blake2128, Blake2128Concat, Blake2256, ConstantDescriptor, Descriptor, DescriptorCommon, EncoderWithHash, ErrorDescriptor, EventDescriptor, EventToObject, Hex, HexString, Identity, OpaqueCodec, OpaqueValue, SS58String, Self, Storage, StorageDescriptor, StorageType, Twox128, Twox256, Twox64Concat, TxDescriptor, TxDescriptorArgs, TxDescriptorErrors, TxDescriptorEvents, TxFunction, UnionizeTupleEvents, V14, V14Extrinsic, V14Lookup, V14Pallets, bitSequence, char, compactBn, compactNumber, createCommonDescriptor, fixedStr, getDescriptorCreator, getPalletCreator, h64, metadata, selfDecoder, selfEncoder, v14 };
type GetEnum<T extends Enum<{
type: string;
value: any;
}>> = {
[K in T["type"]]: (...args: ExtractEnumValue<T, K> extends undefined ? [] : [value: ExtractEnumValue<T, K>]) => T;
};
export { AccountId, Bin, Binary, type BitSequence, Blake2128, Blake2128Concat, Blake2256, type BlockHeader, type ConstFromDescriptors, type Descriptors, type Discriminant, type EncoderWithHash, Enum, type ErrorsFromDescriptors, type EventsFromDescriptors, type ExtractEnumValue, type GetEnum, Hex, type HexString, Identity, type PlainDescriptor, type QueryFromDescriptors, type RuntimeDescriptor, type SS58String, Self, Storage, type StorageDescriptor, Twox128, Twox256, Twox64Concat, type TxDescriptor, type TxFromDescriptors, type V14Lookup, type V15, type V15Extrinsic, Variant, _Enum, bitSequence, blockHeader, char, compactBn, compactNumber, fixedStr, h64, metadata, selfDecoder, selfEncoder, v15 };

@@ -1,2 +0,2 @@

"use strict";var J=Object.defineProperty;var Be=Object.getOwnPropertyDescriptor;var Ve=Object.getOwnPropertyNames;var _e=Object.prototype.hasOwnProperty;var ae=(e,r)=>{for(var t in r)J(e,t,{get:r[t],enumerable:!0})},X=(e,r,t,i)=>{if(r&&typeof r=="object"||typeof r=="function")for(let a of Ve(r))!_e.call(e,a)&&a!==t&&J(e,a,{get:()=>r[a],enumerable:!(i=Be(r,a))||i.enumerable});return e},m=(e,r,t)=>(X(e,r,"default"),t&&X(t,r,"default"));var He=e=>X(J({},"__esModule",{value:!0}),e);var T={};ae(T,{AccountId:()=>de,Blake2128:()=>we,Blake2128Concat:()=>oe,Blake2256:()=>nr,Hex:()=>V,Identity:()=>ne,OpaqueCodec:()=>O,Self:()=>me,Storage:()=>ar,Twox128:()=>G,Twox256:()=>cr,Twox64Concat:()=>ce,bitSequence:()=>be,char:()=>he,compactBn:()=>pe,compactNumber:()=>d,createCommonDescriptor:()=>ir,fixedStr:()=>ve,getDescriptorCreator:()=>dr,getPalletCreator:()=>pr,h64:()=>k,metadata:()=>Te,selfDecoder:()=>re,selfEncoder:()=>ee,v14:()=>$});module.exports=He(T);m(T,require("scale-ts"),module.exports);var l={};ae(l,{AccountId:()=>de,Hex:()=>V,OpaqueCodec:()=>O,Self:()=>me,bitSequence:()=>be,char:()=>he,compactBn:()=>pe,compactNumber:()=>d,fixedStr:()=>ve,metadata:()=>Te,selfDecoder:()=>re,selfEncoder:()=>ee,v14:()=>$});m(l,require("scale-ts"));var K=require("scale-ts"),Y=require("@noble/hashes/blake2b"),Z=require("@scure/base"),ie=new TextEncoder().encode("SS58PRE"),Q=2,Ie=e=>{let r=e<64?Uint8Array.of(e):Uint8Array.of((e&252)>>2|64,e>>8|(e&3)<<6);return t=>{let i=(0,Y.blake2b)(Uint8Array.of(...ie,...r,...t),{dkLen:64}).subarray(0,Q);return Z.base58.encode(Uint8Array.of(...r,...t,...i))}};function Le(e,r){return t=>{let i=Z.base58.decode(t),a=i.subarray(0,i[0]&64?2:1),y=i.subarray(a.length,i.length-Q);if(y.length!==e)throw new Error("Invalid public key length");let x=i.subarray(a.length+y.length),f=(0,Y.blake2b)(Uint8Array.of(...ie,...a,...y),{dkLen:64}).subarray(0,Q);if(x[0]!==f[0]||x[1]!==f[1])throw new Error("Invalid checksum");if(We(a)!=r)throw new Error("Invalid SS58 prefix");return 
y.slice()}}var de=(e=42,r=32)=>(0,K.enhanceCodec)((0,K.Bytes)(r),Le(r,e),Ie(e)),We=e=>{let r=new DataView(e.buffer,e.byteOffset,e.byteLength);return r.byteLength===1?r.getUint8(0):r.getUint16(0)};var F=require("scale-ts"),d=F.compact,pe=F.compact;var L=require("scale-ts"),ee=e=>{let r=t=>{let i=L.Struct.enc({self:e()});return r=i,i(t)};return t=>r(t)},re=e=>{let r=t=>{let i=L.Struct.dec({self:e()}),a=i;return r=i,a(t)};return t=>r(t)},me=e=>(0,L.createCodec)(ee(()=>e().enc),re(()=>e().dec));var B=require("scale-ts");var ye=(e,r=d.dec)=>(0,B.createDecoder)(t=>{let i=r(t),a=(0,B.Bytes)(i).dec(t),y;return{length:i,inner:()=>y=y||e(a)}}),le=(e,r=d.enc)=>t=>{let i=r(t.length),a=new Uint8Array(i.length+t.length);return a.set(i,0),a.set(e(t.inner()),i.length),a},O=(e,r=d)=>(0,B.createCodec)(le(e.enc,r.enc),ye(e.dec,r.dec));O.enc=le;O.dec=ye;var S=require("scale-ts");var n=require("scale-ts"),xe=(0,n.Option)(n.str),M=(0,n.Vector)(n.str),qe=(0,n.Enum)({bool:n._void,char:n._void,str:n._void,u8:n._void,u16:n._void,u32:n._void,u64:n._void,u128:n._void,u256:n._void,i8:n._void,i16:n._void,i32:n._void,i64:n._void,i128:n._void,i256:n._void}),ge=(0,n.Vector)((0,n.Struct)({name:xe,type:d,typeName:xe,docs:M})),Ke=(0,n.Struct)({len:n.u32,type:d}),Oe=(0,n.Struct)({bitStoreType:d,bitOrderType:d}),Me=(0,n.Vector)((0,n.Struct)({name:n.str,fields:ge,index:n.u8,docs:M})),Re=(0,n.Enum)({composite:ge,variant:Me,sequence:d,array:Ke,tuple:(0,n.Vector)(d),primitive:qe,compact:d,bitSequence:Oe,historicMetaCompat:n.str}),$e=(0,n.Struct)({name:n.str,type:(0,n.Option)(d)}),ze=(0,n.Vector)($e),je=(0,n.Struct)({id:d,path:M,params:ze,def:Re,docs:M}),fe=(0,n.Vector)(je);var R=require("@polkadot-api/utils"),W=require("scale-ts"),ue=e=>{let r=W.Bytes.enc(e);return t=>r((0,R.fromHex)(t))},Ce=e=>{let r=W.Bytes.dec(e);return t=>(0,R.toHex)(r(t))},V=e=>(0,W.createCodec)(ue(e),Ce(e));V.enc=ue;V.dec=Ce;var 
c=require("scale-ts"),Ge=(0,c.Enum)({Blake2128:c._void,Blake2256:c._void,Blake2128Concat:c._void,Twox128:c._void,Twox256:c._void,Twox64Concat:c._void,Identity:c._void}),Xe=(0,c.Vector)(Ge),Je=(0,c.Struct)({hashers:Xe,key:d,value:d}),Qe=(0,c.Struct)({name:c.str,modifier:c.u8,type:(0,c.Enum)({plain:d,map:Je}),fallback:(0,c.Vector)(c.u8),docs:(0,c.Vector)(c.str)}),Ye=(0,c.Option)((0,c.Struct)({prefix:c.str,items:(0,c.Vector)(Qe)})),De=(0,c.Vector)((0,c.Struct)({name:c.str,storage:Ye,calls:(0,c.Option)(d),events:(0,c.Option)(d),constants:(0,c.Vector)((0,c.Struct)({name:c.str,type:d,value:V(),docs:(0,c.Vector)(c.str)})),errors:(0,c.Option)(d),index:c.u8}));var Ze=(0,S.Struct)({type:d,version:S.u8,signedExtensions:(0,S.Vector)((0,S.Struct)({identifier:S.str,type:d,additionalSigned:d}))}),$=(0,S.Struct)({lookup:fe,pallets:De,extrinsic:Ze,type:d});var P=require("scale-ts");var Ee=()=>{throw new Error("Unsupported metadata version!")},h=(0,P.createCodec)(Ee,Ee),Te=(0,P.Struct)({magicNumber:P.u32,metadata:(0,P.Enum)({v0:h,v1:h,v2:h,v3:h,v4:h,v5:h,v6:h,v7:h,v8:h,v9:h,v10:h,v11:h,v12:h,v13:h,v14:$})});var _=require("scale-ts");var Fe=(0,_.createDecoder)(e=>{let r=d.dec(e),t=Math.ceil(r/8);return{bytes:(0,_.Bytes)(t).dec(e),bitsLen:r}}),er=e=>{if(e.bitsLen>e.bytes.length*8)throw new Error(`Not enough bytes. 
(bitsLen:${e.bitsLen}, bytesLen:${e.bytes.length})`);let r=d.enc(e.bitsLen),t=new Uint8Array(e.bytes.length+r.length);return t.set(r,0),t.set(e.bytes,r.length),t},be=(0,_.createCodec)(er,Fe);var z=require("scale-ts"),he=(0,z.enhanceCodec)(z.u8,e=>e.charCodeAt(0),String.fromCharCode);var j=require("scale-ts"),rr=new TextEncoder,tr=new TextDecoder,ve=e=>(0,j.enhanceCodec)((0,j.Bytes)(e),r=>rr.encode(r),r=>tr.decode(r));m(T,l,module.exports);var Ae=require("@polkadot-api/utils"),te=require("@noble/hashes/blake2b"),or={dkLen:32},nr=e=>(0,te.blake2b)(e,or),sr={dkLen:16},we=e=>(0,te.blake2b)(e,sr),oe=e=>(0,Ae.mergeUint8)(we(e),e);var ne=e=>e;var Ue=require("@polkadot-api/utils"),Pe=require("scale-ts");var N=(e,r,t,i)=>new DataView(new Uint16Array([e,r,t,i]).buffer).getBigUint64(0,!0),se=2n**64n-1n,E=(e,r)=>e<<r&se|e>>64n-r,p=(e,r)=>e*r&se,C=(e,r)=>e+r&se,D=11400714785074694791n,v=14029467366897019727n,Se=1609587929392839161n,q=9650029242287828579n,ke=2870177450012600261n;function k(e,r=0n){let t=C(C(r,D),v),i=C(r,v),a=r,y=r-D,x=e.length,f=0,U=null;(function(){let o=0,A=o+x;if(x){if(U=new Uint8Array(32),x<32){U.set(e.subarray(0,x),f),f+=x;return}if(o<=A-32){let I=A-32;do{let w;w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),t=p(E(C(t,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),i=p(E(C(i,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),a=p(E(C(a,p(w,v)),31n),D),o+=8,w=N(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),y=p(E(C(y,p(w,v)),31n),D),o+=8}while(o<=I)}o<A&&(U.set(e.subarray(o,A),f),f=A-o)}})(),e=U||e;let s,g=0;for(x>=32?(s=E(t,1n),s=C(s,E(i,7n)),s=C(s,E(a,12n)),s=C(s,E(y,18n)),t=p(E(p(t,v),31n),D),s=s^t,s=C(p(s,D),q),i=p(E(p(i,v),31n),D),s=s^i,s=C(p(s,D),q),a=p(E(p(a,v),31n),D),s=s^a,s=C(p(s,D),q),y=p(E(p(y,v),31n),D),s=s^y,s=C(p(s,D),q)):s=C(r,ke),s=C(s,BigInt(x));g<=f-8;){let 
b=N(e[g+1]<<8|e[g],e[g+3]<<8|e[g+2],e[g+5]<<8|e[g+4],e[g+7]<<8|e[g+6]);b=p(E(p(b,v),31n),D),s=C(p(E(s^b,27n),D),q),g+=8}if(g+4<=f){let b=p(N(e[g+1]<<8|e[g],e[g+3]<<8|e[g+2],0,0),D);s=C(p(E(s^b,23n),v),Se),g+=4}for(;g<f;){let b=p(N(e[g++],0,0,0),ke);s=p(E(s^b,11n),D)}let u=s>>33n;return s=p(s^u,v),u=s>>29n,s=p(s^u,Se),u=s>>32n,s^=u,s}var G=e=>{let r=new Uint8Array(16),t=new DataView(r.buffer);return t.setBigUint64(0,k(e),!0),t.setBigUint64(8,k(e,1n),!0),r},cr=e=>{let r=new Uint8Array(32),t=new DataView(r.buffer);return t.setBigUint64(0,k(e),!0),t.setBigUint64(8,k(e,1n),!0),t.setBigUint64(16,k(e,2n),!0),t.setBigUint64(24,k(e,3n),!0),r},ce=e=>(0,Ue.mergeUint8)(Pe.u64.enc(k(e)),e);var H=require("@polkadot-api/utils");var Ne=new TextEncoder,ar=e=>{let r=G(Ne.encode(e));return(t,i,...a)=>{let y=(0,H.mergeUint8)(r,G(Ne.encode(t))),x=(0,H.toHex)(y),f=a.map(u=>u[1]).map(u=>u===ne?0:u===ce?8:u===oe?16:null).filter(Boolean),U=u=>{if(!u.startsWith(x))throw new Error(`key does not match this storage (${e}.${t})`);if(f.length!==a.length)throw new Error("Impossible to decode this key");if(a.length===0)return[];let b=u.slice(x.length),o=new Array(a.length);for(let A=0,I=0;A<f.length;A++){let w=a[A][0];I+=f[A],o[A]=w.dec(b.slice(I*2)),I+=w.enc(o[A]).length}return o},s=a.map(([{enc:u},b])=>o=>b(u(o)));return{enc:(...u)=>(0,H.toHex)((0,H.mergeUint8)(y,...u.map((b,o)=>s[o](b)))),dec:i,keyDecoder:U}}};var ir=(e,r,t)=>({checksum:e,pallet:r,name:t}),dr=(e,r,t,i,a)=>({type:e,props:{checksum:r,pallet:t,name:i},codecs:a}),pr=e=>({getPayloadDescriptor:(a,y,x,f)=>({type:a,props:{checksum:y,pallet:e,name:x},codecs:f}),getStorageDescriptor:(a,y,x)=>({type:"storage",props:{checksum:a,pallet:e,name:y},codecs:x}),getTxDescriptor:(a,y,x,f,U)=>({type:"tx",props:{checksum:a,pallet:e,name:y},codecs:U,events:x,errors:f})});
"use strict";var $=Object.defineProperty;var We=Object.getOwnPropertyDescriptor;var $e=Object.getOwnPropertyNames;var je=Object.prototype.hasOwnProperty;var ze=(e,t,n)=>t in e?$(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n;var Ge=(e,t)=>{for(var n in t)$(e,n,{get:t[n],enumerable:!0})},Xe=(e,t,n,d)=>{if(t&&typeof t=="object"||typeof t=="function")for(let m of $e(t))!je.call(e,m)&&m!==n&&$(e,m,{get:()=>t[m],enumerable:!(d=We(t,m))||d.enumerable});return e};var Je=e=>Xe($({},"__esModule",{value:!0}),e);var j=(e,t,n)=>(ze(e,typeof t!="symbol"?t+"":t,n),n),ge=(e,t,n)=>{if(!t.has(e))throw TypeError("Cannot "+n)};var A=(e,t,n)=>(ge(e,t,"read from private field"),n?n.call(e):t.get(e)),z=(e,t,n)=>{if(t.has(e))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(e):t.set(e,n)},G=(e,t,n,d)=>(ge(e,t,"write to private field"),d?d.call(e,n):t.set(e,n),n);var Lt={};Ge(Lt,{AccountId:()=>Ze,Bin:()=>ce,Binary:()=>J,Blake2128:()=>_e,Blake2128Concat:()=>le,Blake2256:()=>Ot,Bytes:()=>r.Bytes,Enum:()=>ie,Hex:()=>C,Identity:()=>pe,Option:()=>r.Option,Result:()=>r.Result,Self:()=>yt,Storage:()=>Rt,Struct:()=>r.Struct,Tuple:()=>r.Tuple,Twox128:()=>te,Twox256:()=>Ht,Twox64Concat:()=>fe,Variant:()=>q,Vector:()=>r.Vector,_Enum:()=>mt,_void:()=>r._void,bitSequence:()=>ct,blockHeader:()=>pt,bool:()=>r.bool,char:()=>st,compact:()=>r.compact,compactBn:()=>nt,compactNumber:()=>i,createCodec:()=>r.createCodec,createDecoder:()=>r.createDecoder,enhanceCodec:()=>r.enhanceCodec,enhanceDecoder:()=>r.enhanceDecoder,enhanceEncoder:()=>r.enhanceEncoder,fixedStr:()=>dt,h64:()=>U,i128:()=>r.i128,i16:()=>r.i16,i256:()=>r.i256,i32:()=>r.i32,i64:()=>r.i64,i8:()=>r.i8,metadata:()=>Bt,selfDecoder:()=>Se,selfEncoder:()=>ke,str:()=>r.str,u128:()=>r.u128,u16:()=>r.u16,u256:()=>r.u256,u32:()=>r.u32,u64:()=>r.u64,u8:()=>r.u8,v15:()=>me});module.exports=Je(Lt);var X=require("scale-ts"),re=require("@noble/hashes/blake2b"),oe=require("@scure/base"),Te=new 
TextEncoder().encode("SS58PRE"),ne=2,Qe=e=>{let t=e<64?Uint8Array.of(e):Uint8Array.of((e&252)>>2|64,e>>8|(e&3)<<6);return n=>{let d=(0,re.blake2b)(Uint8Array.of(...Te,...t,...n),{dkLen:64}).subarray(0,ne);return oe.base58.encode(Uint8Array.of(...t,...n,...d))}};function Ye(e,t){return n=>{let d=oe.base58.decode(n),m=d.subarray(0,d[0]&64?2:1),b=d.subarray(m.length,d.length-ne);if(b.length!==e)throw new Error("Invalid public key length");let v=d.subarray(m.length+b.length),h=(0,re.blake2b)(Uint8Array.of(...Te,...m,...b),{dkLen:64}).subarray(0,ne);if(v[0]!==h[0]||v[1]!==h[1])throw new Error("Invalid checksum");if(Fe(m)!=t)throw new Error("Invalid SS58 prefix");return b.slice()}}var Ze=(e=42,t=32)=>(0,X.enhanceCodec)((0,X.Bytes)(t),Ye(t,e),Qe(e)),Fe=e=>{let t=new DataView(e.buffer,e.byteOffset,e.byteLength);return t.byteLength===1?t.getUint8(0):t.getUint16(0)};var N=require("scale-ts"),Q=require("@polkadot-api/utils");var et=new TextEncoder,tt=new TextDecoder,B,V,H,I=class I{constructor(t){z(this,B,void 0);z(this,V,null);z(this,H,null);j(this,"asText",()=>A(this,H)===null?G(this,H,tt.decode(A(this,B))):A(this,H));j(this,"asHex",()=>A(this,V)===null?G(this,V,(0,Q.toHex)(A(this,B))):A(this,V));j(this,"asBytes",()=>A(this,B));G(this,B,t)}static fromText(t){return new I(et.encode(t))}static fromHex(t){return new I((0,Q.fromHex)(t))}static fromBytes(t){return new I(t)}};B=new WeakMap,V=new WeakMap,H=new WeakMap;var J=I,be=e=>{let t=N.Bytes.enc(e);return n=>t(n.asBytes())},he=e=>{let t=N.Bytes.dec(e);return n=>J.fromBytes(t(n))},ce=e=>(0,N.createCodec)(be(e),he(e));ce.enc=be;ce.dec=he;var R=require("scale-ts");var se=require("scale-ts"),i=se.compact,nt=se.compact;var rt=(0,R.createDecoder)(e=>{let t=i.dec(e),n=Math.ceil(t/8);return{bytes:(0,R.Bytes)(n).dec(e),bitsLen:t}}),ot=e=>{if(e.bitsLen>e.bytes.length*8)throw new Error(`Not enough bytes. 
(bitsLen:${e.bitsLen}, bytesLen:${e.bytes.length})`);let t=i.enc(e.bitsLen),n=new Uint8Array(e.bytes.length+t.length);return n.set(t,0),n.set(e.bytes,t.length),n},ct=(0,R.createCodec)(ot,rt);var Y=require("scale-ts"),st=(0,Y.enhanceCodec)(Y.u8,e=>e.charCodeAt(0),String.fromCharCode);var Z=require("@polkadot-api/utils"),P=require("scale-ts"),Ee=e=>{let t=P.Bytes.enc(e);return n=>t((0,Z.fromHex)(n))},ve=e=>{let t=P.Bytes.dec(e);return n=>(0,Z.toHex)(t(n))},C=e=>(0,P.createCodec)(Ee(e),ve(e));C.enc=Ee;C.dec=ve;var F=require("scale-ts"),at=new TextEncoder,it=new TextDecoder,dt=e=>(0,F.enhanceCodec)((0,F.Bytes)(e),t=>at.encode(t),t=>it.decode(t));var r=require("scale-ts");var we=require("scale-ts"),ke=e=>{let t=n=>{let d=e();return t=d,d(n)};return n=>t(n)},Se=e=>{let t=n=>{let d=e(),m=d;return t=d,m(n)};return n=>t(n)},yt=e=>(0,we.createCodec)(ke(()=>e().enc),Se(()=>e().dec));var M=require("scale-ts"),ae=require("@polkadot-api/utils"),mt=new Proxy({},{get(e,t){return n=>ie(t,n)}}),ie=(e,t)=>({as:n=>{if(n!==e)throw new Error(`Enum.as(${n}) used with actual type ${e}`);return t},is:n=>n===e,type:e,value:t}),Ce=(...e)=>{let t=M.Enum.enc(...e);return n=>t({tag:n.type,value:n.value})},Ue=(...e)=>{let t=M.Enum.dec(...e);return n=>{let{tag:d,value:m}=t(n);return ie(d,m)}},q=(e,...t)=>(0,M.createCodec)(Ce((0,ae.mapObject)(e,([n])=>n),...t),Ue((0,ae.mapObject)(e,([,n])=>n),...t));q.enc=Ce;q.dec=Ue;var Ae=new TextEncoder,Ke=new TextDecoder,ut=(0,r.enhanceCodec)((0,r.Bytes)(4),Ae.encode.bind(Ae),Ke.decode.bind(Ke)),de=(0,r.Struct)({engine:ut,payload:C()}),lt=q({consensus:de,seal:de,preRuntime:de,runtimeUpdated:r._void},[4,5,6,8]),ye=C(32),pt=(0,r.Struct)({parentHash:ye,number:i,stateRoot:ye,extrinsicRoot:ye,digests:(0,r.Vector)(lt)});var u=require("scale-ts");var 
c=require("scale-ts"),Be=(0,c.Option)(c.str),ee=(0,c.Vector)(c.str),xt=(0,c.Enum)({bool:c._void,char:c._void,str:c._void,u8:c._void,u16:c._void,u32:c._void,u64:c._void,u128:c._void,u256:c._void,i8:c._void,i16:c._void,i32:c._void,i64:c._void,i128:c._void,i256:c._void}),De=(0,c.Vector)((0,c.Struct)({name:Be,type:i,typeName:Be,docs:ee})),ft=(0,c.Struct)({len:c.u32,type:i}),gt=(0,c.Struct)({bitStoreType:i,bitOrderType:i}),Tt=(0,c.Vector)((0,c.Struct)({name:c.str,fields:De,index:c.u8,docs:ee})),bt=(0,c.Enum)({composite:De,variant:Tt,sequence:i,array:ft,tuple:(0,c.Vector)(i),primitive:xt,compact:i,bitSequence:gt}),ht=(0,c.Struct)({name:c.str,type:(0,c.Option)(i)}),Et=(0,c.Vector)(ht),vt=(0,c.Struct)({id:i,path:ee,params:Et,def:bt,docs:ee}),Oe=(0,c.Vector)(vt);var a=require("scale-ts"),wt=(0,a.Enum)({Blake2128:a._void,Blake2256:a._void,Blake2128Concat:a._void,Twox128:a._void,Twox256:a._void,Twox64Concat:a._void,Identity:a._void}),kt=(0,a.Vector)(wt),St=(0,a.Struct)({hashers:kt,key:i,value:i}),Ct=(0,a.Struct)({name:a.str,modifier:a.u8,type:(0,a.Enum)({plain:i,map:St}),fallback:C(),docs:(0,a.Vector)(a.str)}),Ut=(0,a.Option)((0,a.Struct)({prefix:a.str,items:(0,a.Vector)(Ct)})),Ve=(0,a.Vector)((0,a.Struct)({name:a.str,storage:Ut,calls:(0,a.Option)(i),events:(0,a.Option)(i),constants:(0,a.Vector)((0,a.Struct)({name:a.str,type:i,value:C(),docs:(0,a.Vector)(a.str)})),errors:(0,a.Option)(i),index:a.u8,docs:(0,a.Vector)(a.str)}));var He=(0,u.Vector)(u.str),At=(0,u.Struct)({name:u.str,methods:(0,u.Vector)((0,u.Struct)({name:u.str,inputs:(0,u.Vector)((0,u.Struct)({name:u.str,type:i})),output:i,docs:He})),docs:He}),Kt=(0,u.Struct)({version:u.u8,address:i,call:i,signature:i,extra:i,signedExtensions:(0,u.Vector)((0,u.Struct)({identifier:u.str,type:i,additionalSigned:i}))}),me=(0,u.Struct)({lookup:Oe,pallets:Ve,extrinsic:Kt,type:i,apis:(0,u.Vector)(At),outerEnums:(0,u.Struct)({call:i,event:i,error:i}),custom:(0,u.Vector)((0,u.Tuple)(u.str,(0,u.Struct)({type:i,value:C()})))});var 
K=require("scale-ts");var Re=()=>{throw new Error("Unsupported metadata version!")},T=(0,K.createCodec)(Re,Re),Bt=(0,K.Struct)({magicNumber:K.u32,metadata:(0,K.Enum)({v0:T,v1:T,v2:T,v3:T,v4:T,v5:T,v6:T,v7:T,v8:T,v9:T,v10:T,v11:T,v12:T,v13:T,v14:T,v15:me})});var Le=require("@polkadot-api/utils"),ue=require("@noble/hashes/blake2b"),Dt={dkLen:32},Ot=e=>(0,ue.blake2b)(e,Dt),Vt={dkLen:16},_e=e=>(0,ue.blake2b)(e,Vt),le=e=>(0,Le.mergeUint8)(_e(e),e);var pe=e=>e;var Pe=require("@polkadot-api/utils"),Me=require("scale-ts");var D=(e,t,n,d)=>new DataView(new Uint16Array([e,t,n,d]).buffer).getBigUint64(0,!0),xe=2n**64n-1n,g=(e,t)=>e<<t&xe|e>>64n-t,y=(e,t)=>e*t&xe,x=(e,t)=>e+t&xe,f=11400714785074694791n,w=14029467366897019727n,Ie=1609587929392839161n,W=9650029242287828579n,Ne=2870177450012600261n;function U(e,t=0n){let n=x(x(t,f),w),d=x(t,w),m=t,b=t-f,v=e.length,h=0,O=null;(function(){let o=0,k=o+v;if(v){if(O=new Uint8Array(32),v<32){O.set(e.subarray(0,v),h),h+=v;return}if(o<=k-32){let _=k-32;do{let S;S=D(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),n=y(g(x(n,y(S,w)),31n),f),o+=8,S=D(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),d=y(g(x(d,y(S,w)),31n),f),o+=8,S=D(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),m=y(g(x(m,y(S,w)),31n),f),o+=8,S=D(e[o+1]<<8|e[o],e[o+3]<<8|e[o+2],e[o+5]<<8|e[o+4],e[o+7]<<8|e[o+6]),b=y(g(x(b,y(S,w)),31n),f),o+=8}while(o<=_)}o<k&&(O.set(e.subarray(o,k),h),h=k-o)}})(),e=O||e;let s,l=0;for(v>=32?(s=g(n,1n),s=x(s,g(d,7n)),s=x(s,g(m,12n)),s=x(s,g(b,18n)),n=y(g(y(n,w),31n),f),s=s^n,s=x(y(s,f),W),d=y(g(y(d,w),31n),f),s=s^d,s=x(y(s,f),W),m=y(g(y(m,w),31n),f),s=s^m,s=x(y(s,f),W),b=y(g(y(b,w),31n),f),s=s^b,s=x(y(s,f),W)):s=x(t,Ne),s=x(s,BigInt(v));l<=h-8;){let E=D(e[l+1]<<8|e[l],e[l+3]<<8|e[l+2],e[l+5]<<8|e[l+4],e[l+7]<<8|e[l+6]);E=y(g(y(E,w),31n),f),s=x(y(g(s^E,27n),f),W),l+=8}if(l+4<=h){let E=y(D(e[l+1]<<8|e[l],e[l+3]<<8|e[l+2],0,0),f);s=x(y(g(s^E,23n),w),Ie),l+=4}for(;l<h;){let 
E=y(D(e[l++],0,0,0),Ne);s=y(g(s^E,11n),f)}let p=s>>33n;return s=y(s^p,w),p=s>>29n,s=y(s^p,Ie),p=s>>32n,s^=p,s}var te=e=>{let t=new Uint8Array(16),n=new DataView(t.buffer);return n.setBigUint64(0,U(e),!0),n.setBigUint64(8,U(e,1n),!0),t},Ht=e=>{let t=new Uint8Array(32),n=new DataView(t.buffer);return n.setBigUint64(0,U(e),!0),n.setBigUint64(8,U(e,1n),!0),n.setBigUint64(16,U(e,2n),!0),n.setBigUint64(24,U(e,3n),!0),t},fe=e=>(0,Pe.mergeUint8)(Me.u64.enc(U(e)),e);var L=require("@polkadot-api/utils");var qe=new TextEncoder,Rt=e=>{let t=te(qe.encode(e));return(n,d,...m)=>{let b=(0,L.mergeUint8)(t,te(qe.encode(n))),v=(0,L.toHex)(b),h=m.map(p=>p[1]).map(p=>p===pe?0:p===fe?8:p===le?16:null).filter(Boolean),O=p=>{if(!p.startsWith(v))throw new Error(`key does not match this storage (${e}.${n})`);if(h.length!==m.length)throw new Error("Impossible to decode this key");if(m.length===0)return[];let E=p.slice(v.length),o=new Array(m.length);for(let k=0,_=0;k<h.length;k++){let S=m[k][0];_+=h[k],o[k]=S.dec(E.slice(_*2)),_+=S.enc(o[k]).length}return o},s=m.map(([{enc:p},E])=>o=>E(p(o)));return{enc:(...p)=>(0,L.toHex)((0,L.mergeUint8)(b,...p.map((E,o)=>s[o](E)))),dec:d,keyDecoder:O}}};
//# sourceMappingURL=index.js.map
{
"name": "@polkadot-api/substrate-bindings",
"version": "0.0.1-8167145a6504fc77f310715b5208be71992108b5.1.0",
"version": "0.0.1-81aa776246f63e7c9d431e89d66d4a7f8651a0ae.1.0",
"author": "Josep M Sobrepere (https://github.com/josepot)",

@@ -45,4 +45,4 @@ "repository": {

"@scure/base": "^1.1.1",
"scale-ts": "^1.4.0",
"@polkadot-api/utils": "0.0.1-8167145a6504fc77f310715b5208be71992108b5.1.0"
"scale-ts": "^1.6.0",
"@polkadot-api/utils": "0.0.1-81aa776246f63e7c9d431e89d66d4a7f8651a0ae.1.0"
},

@@ -49,0 +49,0 @@ "devDependencies": {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc