New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@polkadot-api/substrate-bindings

Package Overview
Dependencies
Maintainers
2
Versions
604
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@polkadot-api/substrate-bindings - npm Package Compare versions

Comparing version 0.6.0 to 0.6.1

dist/esm/codecs/blockHeader.mjs

8

dist/index.d.ts

@@ -119,4 +119,4 @@ import * as scale_ts from 'scale-ts';

<O extends StringRecord<Codec<any>>>(inner: O, indexes?: RestrictedLenTuple<number, O> | undefined): Codec<Enum<{ [K in keyof O]: CodecType<O[K]>; }>>;
enc: <O_1 extends StringRecord<Encoder<any>>>(inner: O_1, x?: RestrictedLenTuple<number, O_1> | undefined) => Encoder<Enum<{ [K_1 in keyof O_1]: EncoderType<O_1[K_1]>; }>>;
dec: <O_2 extends StringRecord<Decoder<any>>>(inner: O_2, x?: RestrictedLenTuple<number, O_2> | undefined) => Decoder<Enum<{ [K_2 in keyof O_2]: DecoderType<O_2[K_2]>; }>>;
enc: <O extends StringRecord<Encoder<any>>>(inner: O, x?: RestrictedLenTuple<number, O> | undefined) => Encoder<Enum<{ [K in keyof O]: EncoderType<O[K]>; }>>;
dec: <O extends StringRecord<Decoder<any>>>(inner: O, x?: RestrictedLenTuple<number, O> | undefined) => Decoder<Enum<{ [K in keyof O]: DecoderType<O[K]>; }>>;
};

@@ -1089,8 +1089,8 @@

type EncoderWithHash<T> = [Codec<T>, (input: Uint8Array) => Uint8Array];
declare const Storage: (pallet: string) => <T, A extends EncoderWithHash<any>[]>(name: string, dec: Decoder<T>, ...encoders_0: A) => {
declare const Storage: (pallet: string) => <T, A extends Array<EncoderWithHash<any>>>(name: string, dec: Decoder<T>, ...encoders_0: A) => {
enc: (...args: { [K in keyof A]: A[K] extends EncoderWithHash<infer V> ? V : unknown; }) => string;
dec: Decoder<T>;
keyDecoder: (value: string) => { [K_1 in keyof A]: A[K_1] extends EncoderWithHash<infer V_1> ? V_1 : unknown; };
keyDecoder: (value: string) => { [K in keyof A]: A[K] extends EncoderWithHash<infer V> ? V : unknown; };
};
export { AccountId, Bin, Binary, type BitSequence, Blake2128, Blake2128Concat, Blake2256, Blake3256, Blake3256Concat, type BlockHeader, type EncoderWithHash, Enum, type EnumVariant, type ExtractEnumValue, FixedSizeBinary, type GetEnum, Hex, type HexString, Identity, type SS58AddressInfo, type SS58String, Self, Storage, Twox128, Twox256, Twox64Concat, type V14, type V14Extrinsic, type V14Lookup, type V15, type V15Extrinsic, Variant, _Enum, bitSequence, blockHeader, char, compactBn, compactNumber, ethAccount, fixedStr, fromBufferToBase58, getSs58AddressInfo, h64, metadata, selfDecoder, selfEncoder, v14, v15 };

@@ -1,108 +0,15 @@

"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __typeError = (msg) => {
throw TypeError(msg);
};
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __export = (target, all) => {
for (var name in all)
__defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), setter ? setter.call(obj, value) : member.set(obj, value), value);
'use strict';
// src/index.ts
var src_exports = {};
__export(src_exports, {
AccountId: () => AccountId,
Bin: () => Bin,
Binary: () => Binary,
Blake2128: () => Blake2128,
Blake2128Concat: () => Blake2128Concat,
Blake2256: () => Blake2256,
Blake3256: () => Blake3256,
Blake3256Concat: () => Blake3256Concat,
Bytes: () => import_scale_ts8.Bytes,
Enum: () => Enum2,
FixedSizeBinary: () => FixedSizeBinary,
Hex: () => Hex,
Identity: () => Identity,
Option: () => import_scale_ts8.Option,
Result: () => import_scale_ts8.Result,
ScaleEnum: () => import_scale_ts8.Enum,
Self: () => Self,
Storage: () => Storage,
Struct: () => import_scale_ts8.Struct,
Tuple: () => import_scale_ts8.Tuple,
Twox128: () => Twox128,
Twox256: () => Twox256,
Twox64Concat: () => Twox64Concat,
Variant: () => Variant,
Vector: () => import_scale_ts8.Vector,
_Enum: () => _Enum,
_void: () => import_scale_ts8._void,
bitSequence: () => bitSequence,
blockHeader: () => blockHeader,
bool: () => import_scale_ts8.bool,
char: () => char,
compact: () => import_scale_ts8.compact,
compactBn: () => compactBn,
compactNumber: () => compactNumber,
createCodec: () => import_scale_ts8.createCodec,
createDecoder: () => import_scale_ts8.createDecoder,
enhanceCodec: () => import_scale_ts8.enhanceCodec,
enhanceDecoder: () => import_scale_ts8.enhanceDecoder,
enhanceEncoder: () => import_scale_ts8.enhanceEncoder,
ethAccount: () => ethAccount,
fixedStr: () => fixedStr,
fromBufferToBase58: () => fromBufferToBase58,
getSs58AddressInfo: () => getSs58AddressInfo,
h64: () => h64,
i128: () => import_scale_ts8.i128,
i16: () => import_scale_ts8.i16,
i256: () => import_scale_ts8.i256,
i32: () => import_scale_ts8.i32,
i64: () => import_scale_ts8.i64,
i8: () => import_scale_ts8.i8,
metadata: () => metadata,
selfDecoder: () => selfDecoder,
selfEncoder: () => selfEncoder,
str: () => import_scale_ts8.str,
u128: () => import_scale_ts8.u128,
u16: () => import_scale_ts8.u16,
u256: () => import_scale_ts8.u256,
u32: () => import_scale_ts8.u32,
u64: () => import_scale_ts8.u64,
u8: () => import_scale_ts8.u8,
v14: () => v14,
v15: () => v15
});
module.exports = __toCommonJS(src_exports);
var scaleTs = require('scale-ts');
var base = require('@scure/base');
var blake2b = require('@noble/hashes/blake2b');
var utils = require('@polkadot-api/utils');
var sha3 = require('@noble/hashes/sha3');
var blake3 = require('@noble/hashes/blake3');
// src/codecs/scale/AccountId.ts
var import_scale_ts = require("scale-ts");
// src/utils/ss58-util.ts
var import_base = require("@scure/base");
var import_blake2b = require("@noble/hashes/blake2b");
var SS58_PREFIX = new TextEncoder().encode("SS58PRE");
var CHECKSUM_LENGTH = 2;
var getSs58AddressInfo = (address) => {
const SS58_PREFIX = new TextEncoder().encode("SS58PRE");
const CHECKSUM_LENGTH = 2;
const getSs58AddressInfo = (address) => {
try {
const decoded = import_base.base58.decode(address);
const decoded = base.base58.decode(address);
const prefixBytes = decoded.subarray(0, decoded[0] & 64 ? 2 : 1);

@@ -114,3 +21,3 @@ const publicKey = decoded.subarray(

const checksum = decoded.subarray(prefixBytes.length + publicKey.length);
const expectedChecksum = (0, import_blake2b.blake2b)(
const expectedChecksum = blake2b.blake2b(
Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey),

@@ -132,7 +39,7 @@ {

};
var prefixBytesToNumber = (bytes) => {
const prefixBytesToNumber = (bytes) => {
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
return dv.byteLength === 1 ? dv.getUint8(0) : dv.getUint16(0);
};
var fromBufferToBase58 = (ss58Format) => {
const fromBufferToBase58 = (ss58Format) => {
const prefixBytes = ss58Format < 64 ? Uint8Array.of(ss58Format) : Uint8Array.of(

@@ -143,3 +50,3 @@ (ss58Format & 252) >> 2 | 64,

return (publicKey) => {
const checksum = (0, import_blake2b.blake2b)(
const checksum = blake2b.blake2b(
Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey),

@@ -150,3 +57,3 @@ {

).subarray(0, CHECKSUM_LENGTH);
return import_base.base58.encode(
return base.base58.encode(
Uint8Array.of(...prefixBytes, ...publicKey, ...checksum)

@@ -157,3 +64,2 @@ );

// src/codecs/scale/AccountId.ts
function fromBase58ToBuffer(nBytes, _ss58Format) {

@@ -169,15 +75,22 @@ return (address) => {

}
var AccountId = (ss58Format = 42, nBytes = 32) => (0, import_scale_ts.enhanceCodec)(
(0, import_scale_ts.Bytes)(nBytes),
fromBase58ToBuffer(nBytes, ss58Format),
const AccountId = (ss58Format = 42, nBytes = 32) => scaleTs.enhanceCodec(
scaleTs.Bytes(nBytes),
fromBase58ToBuffer(nBytes),
fromBufferToBase58(ss58Format)
);
// src/codecs/scale/Binary.ts
var import_scale_ts2 = require("scale-ts");
var import_utils = require("@polkadot-api/utils");
var textEncoder = new TextEncoder();
var textDecoder = new TextDecoder();
var __defProp = Object.defineProperty;
var __typeError = (msg) => {
throw TypeError(msg);
};
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
var _bytes, _hex, _str;
var _Binary = class _Binary {
const textEncoder$3 = new TextEncoder();
const textDecoder$2 = new TextDecoder();
const _Binary = class _Binary {
constructor(data) {

@@ -187,4 +100,4 @@ __privateAdd(this, _bytes);

__privateAdd(this, _str, null);
__publicField(this, "asText", () => __privateGet(this, _str) === null ? __privateSet(this, _str, textDecoder.decode(__privateGet(this, _bytes))) : __privateGet(this, _str));
__publicField(this, "asHex", () => __privateGet(this, _hex) === null ? __privateSet(this, _hex, (0, import_utils.toHex)(__privateGet(this, _bytes))) : __privateGet(this, _hex));
__publicField(this, "asText", () => __privateGet(this, _str) === null ? __privateSet(this, _str, textDecoder$2.decode(__privateGet(this, _bytes))) : __privateGet(this, _str));
__publicField(this, "asHex", () => __privateGet(this, _hex) === null ? __privateSet(this, _hex, utils.toHex(__privateGet(this, _bytes))) : __privateGet(this, _hex));
__publicField(this, "asBytes", () => __privateGet(this, _bytes));

@@ -194,6 +107,6 @@ __privateSet(this, _bytes, data);

static fromText(input) {
return new _Binary(textEncoder.encode(input));
return new _Binary(textEncoder$3.encode(input));
}
static fromHex(input) {
return new _Binary((0, import_utils.fromHex)(input));
return new _Binary(utils.fromHex(input));
}

@@ -207,4 +120,4 @@ static fromBytes(input) {

_str = new WeakMap();
var Binary = _Binary;
var FixedSizeBinary = class _FixedSizeBinary extends Binary {
let Binary = _Binary;
class FixedSizeBinary extends Binary {
constructor(data) {

@@ -214,33 +127,27 @@ super(data);

static fromArray(input) {
return new _FixedSizeBinary(new Uint8Array(input));
return new FixedSizeBinary(new Uint8Array(input));
}
};
var enc = (nBytes) => {
const _enc = import_scale_ts2.Bytes.enc(nBytes);
}
const enc$1 = (nBytes) => {
const _enc = scaleTs.Bytes.enc(nBytes);
return (value) => _enc(value.asBytes());
};
var dec = (nBytes) => {
const _dec = import_scale_ts2.Bytes.dec(nBytes);
const dec$1 = (nBytes) => {
const _dec = scaleTs.Bytes.dec(nBytes);
return (value) => Binary.fromBytes(_dec(value));
};
var Bin = (nBytes) => (0, import_scale_ts2.createCodec)(enc(nBytes), dec(nBytes));
Bin.enc = enc;
Bin.dec = dec;
const Bin = (nBytes) => scaleTs.createCodec(enc$1(nBytes), dec$1(nBytes));
Bin.enc = enc$1;
Bin.dec = dec$1;
// src/codecs/scale/bitSequence.ts
var import_scale_ts4 = require("scale-ts");
const compactNumber = scaleTs.compact;
const compactBn = scaleTs.compact;
// src/codecs/scale/compact.ts
var import_scale_ts3 = require("scale-ts");
var compactNumber = import_scale_ts3.compact;
var compactBn = import_scale_ts3.compact;
// src/codecs/scale/bitSequence.ts
var bitSequenceDecoder = (0, import_scale_ts4.createDecoder)((data) => {
const bitSequenceDecoder = scaleTs.createDecoder((data) => {
const bitsLen = compactNumber.dec(data);
const bytesLen = Math.ceil(bitsLen / 8);
const bytes = (0, import_scale_ts4.Bytes)(bytesLen).dec(data);
const bytes = scaleTs.Bytes(bytesLen).dec(data);
return { bytes, bitsLen };
});
var bitSequenceEncoder = (input) => {
const bitSequenceEncoder = (input) => {
if (input.bitsLen > input.bytes.length * 8)

@@ -256,43 +163,31 @@ throw new Error(

};
var bitSequence = (0, import_scale_ts4.createCodec)(bitSequenceEncoder, bitSequenceDecoder);
const bitSequence$1 = scaleTs.createCodec(bitSequenceEncoder, bitSequenceDecoder);
// src/codecs/scale/char.ts
var import_scale_ts5 = require("scale-ts");
var char = (0, import_scale_ts5.enhanceCodec)(
import_scale_ts5.u8,
(str8) => str8.charCodeAt(0),
const char = scaleTs.enhanceCodec(
scaleTs.u8,
(str) => str.charCodeAt(0),
String.fromCharCode
);
// src/codecs/scale/Hex.ts
var import_utils2 = require("@polkadot-api/utils");
var import_scale_ts6 = require("scale-ts");
var enc2 = (nBytes) => {
const _enc = import_scale_ts6.Bytes.enc(nBytes);
return (value) => _enc((0, import_utils2.fromHex)(value));
const enc = (nBytes) => {
const _enc = scaleTs.Bytes.enc(nBytes);
return (value) => _enc(utils.fromHex(value));
};
var dec2 = (nBytes) => {
const _dec = import_scale_ts6.Bytes.dec(nBytes);
return (value) => (0, import_utils2.toHex)(_dec(value));
const dec = (nBytes) => {
const _dec = scaleTs.Bytes.dec(nBytes);
return (value) => utils.toHex(_dec(value));
};
var Hex = (nBytes) => (0, import_scale_ts6.createCodec)(enc2(nBytes), dec2(nBytes));
Hex.enc = enc2;
Hex.dec = dec2;
const Hex = (nBytes) => scaleTs.createCodec(enc(nBytes), dec(nBytes));
Hex.enc = enc;
Hex.dec = dec;
// src/codecs/scale/fixed-str.ts
var import_scale_ts7 = require("scale-ts");
var textEncoder2 = new TextEncoder();
var textDecoder2 = new TextDecoder();
var fixedStr = (nBytes) => (0, import_scale_ts7.enhanceCodec)(
(0, import_scale_ts7.Bytes)(nBytes),
(str8) => textEncoder2.encode(str8),
(bytes) => textDecoder2.decode(bytes)
const textEncoder$2 = new TextEncoder();
const textDecoder$1 = new TextDecoder();
const fixedStr = (nBytes) => scaleTs.enhanceCodec(
scaleTs.Bytes(nBytes),
(str) => textEncoder$2.encode(str),
(bytes) => textDecoder$1.decode(bytes)
);
// src/codecs/scale/re-exported.ts
var import_scale_ts8 = require("scale-ts");
// src/codecs/scale/Self.ts
var import_scale_ts9 = require("scale-ts");
var selfEncoder = (value) => {
const selfEncoder = (value) => {
let cache = (x) => {

@@ -305,3 +200,3 @@ const encoder = value();

};
var selfDecoder = (value) => {
const selfDecoder = (value) => {
let cache = (x) => {

@@ -315,3 +210,3 @@ const decoder = value();

};
var Self = (value) => (0, import_scale_ts9.createCodec)(
const Self = (value) => scaleTs.createCodec(
selfEncoder(() => value().enc),

@@ -321,8 +216,3 @@ selfDecoder(() => value().dec)

// src/codecs/scale/Variant.ts
var import_scale_ts10 = require("scale-ts");
var import_utils3 = require("@polkadot-api/utils");
// src/types/enum.ts
var discriminant = {
const discriminant = {
is(value, type) {

@@ -339,3 +229,3 @@ return value.type === type;

};
var Enum2 = Object.assign((type, value) => {
const Enum = Object.assign((type, value) => {
return {

@@ -346,7 +236,7 @@ type,

}, discriminant);
var _Enum = new Proxy(
const _Enum = new Proxy(
{},
{
get(_, prop) {
return (value) => Enum2(prop, value);
return (value) => Enum(prop, value);
}

@@ -356,21 +246,20 @@ }

// src/codecs/scale/Variant.ts
var VariantEnc = (...args) => {
const enc3 = import_scale_ts10.Enum.enc(...args);
return (v) => enc3({ tag: v.type, value: v.value });
const VariantEnc = (...args) => {
const enc = scaleTs.Enum.enc(...args);
return (v) => enc({ tag: v.type, value: v.value });
};
var VariantDec = (...args) => {
const dec3 = import_scale_ts10.Enum.dec(...args);
const VariantDec = (...args) => {
const dec = scaleTs.Enum.dec(...args);
return (v) => {
const { tag, value } = dec3(v);
return Enum2(tag, value);
const { tag, value } = dec(v);
return Enum(tag, value);
};
};
var Variant = (inner, ...args) => (0, import_scale_ts10.createCodec)(
const Variant = (inner, ...args) => scaleTs.createCodec(
VariantEnc(
(0, import_utils3.mapObject)(inner, ([encoder]) => encoder),
utils.mapObject(inner, ([encoder]) => encoder),
...args
),
VariantDec(
(0, import_utils3.mapObject)(inner, ([, decoder]) => decoder),
utils.mapObject(inner, ([, decoder]) => decoder),
...args

@@ -382,24 +271,20 @@ )

// src/codecs/scale/ethAccount.ts
var import_utils4 = require("@polkadot-api/utils");
var import_scale_ts11 = require("scale-ts");
var import_sha3 = require("@noble/hashes/sha3");
var getFormattedAddress = (hexAddress) => {
const getFormattedAddress = (hexAddress) => {
const nonChecksum = hexAddress.slice(2);
const hashedAddress = (0, import_utils4.toHex)((0, import_sha3.keccak_256)(nonChecksum)).slice(2);
const hashedAddress = utils.toHex(sha3.keccak_256(nonChecksum)).slice(2);
const result = new Array(40);
for (let i = 0; i < 40; i++) {
const checksumVal = parseInt(hashedAddress[i], 16);
const char2 = nonChecksum[i];
result[i] = checksumVal > 7 ? char2.toUpperCase() : char2;
const char = nonChecksum[i];
result[i] = checksumVal > 7 ? char.toUpperCase() : char;
}
return `0x${result.join("")}`;
};
var bytes20Dec = (0, import_scale_ts11.Bytes)(20)[1];
var ethAccount = (0, import_scale_ts11.createCodec)(
const bytes20Dec = scaleTs.Bytes(20)[1];
const ethAccount = scaleTs.createCodec(
(input) => {
const bytes = (0, import_utils4.fromHex)(input);
const bytes = utils.fromHex(input);
if (bytes.length !== 20)
throw new Error(`Invalid length found on EthAddress(${input})`);
const hexAddress = (0, import_utils4.toHex)(bytes);
const hexAddress = utils.toHex(bytes);
if (input === hexAddress || input === hexAddress.toUpperCase()) return bytes;

@@ -410,18 +295,17 @@ if (getFormattedAddress(hexAddress) !== input)

},
(0, import_scale_ts11.createDecoder)((bytes) => getFormattedAddress((0, import_utils4.toHex)(bytes20Dec(bytes))))
scaleTs.createDecoder((bytes) => getFormattedAddress(utils.toHex(bytes20Dec(bytes))))
);
// src/codecs/blockHeader.ts
var textEncoder3 = new TextEncoder();
var textDecoder3 = new TextDecoder();
var fourChars = (0, import_scale_ts8.enhanceCodec)(
(0, import_scale_ts8.Bytes)(4),
textEncoder3.encode.bind(textEncoder3),
textDecoder3.decode.bind(textDecoder3)
const textEncoder$1 = new TextEncoder();
const textDecoder = new TextDecoder();
const fourChars = scaleTs.enhanceCodec(
scaleTs.Bytes(4),
textEncoder$1.encode.bind(textEncoder$1),
textDecoder.decode.bind(textDecoder)
);
var diggestVal = (0, import_scale_ts8.Struct)({
const diggestVal = scaleTs.Struct({
engine: fourChars,
payload: Hex()
});
var diggest = Variant(
const diggest = Variant(
{

@@ -431,8 +315,8 @@ consensus: diggestVal,

preRuntime: diggestVal,
runtimeUpdated: import_scale_ts8._void
runtimeUpdated: scaleTs._void
},
[4, 5, 6, 8]
);
var hex32 = Hex(32);
var blockHeader = (0, import_scale_ts8.Struct)({
const hex32 = Hex(32);
const blockHeader = scaleTs.Struct({
parentHash: hex32,

@@ -442,39 +326,27 @@ number: compactNumber,

extrinsicRoot: hex32,
digests: (0, import_scale_ts8.Vector)(diggest)
digests: scaleTs.Vector(diggest)
});
// src/codecs/metadata/metadata.ts
var import_scale_ts18 = require("scale-ts");
const docs = scaleTs.Vector(scaleTs.str);
// src/codecs/metadata/v15.ts
var import_scale_ts16 = require("scale-ts");
// src/codecs/metadata/lookup.ts
var import_scale_ts13 = require("scale-ts");
// src/codecs/metadata/docs.ts
var import_scale_ts12 = require("scale-ts");
var docs = (0, import_scale_ts12.Vector)(import_scale_ts12.str);
// src/codecs/metadata/lookup.ts
var oStr = (0, import_scale_ts13.Option)(import_scale_ts13.str);
var primitive = (0, import_scale_ts13.Enum)({
bool: import_scale_ts13._void,
char: import_scale_ts13._void,
str: import_scale_ts13._void,
u8: import_scale_ts13._void,
u16: import_scale_ts13._void,
u32: import_scale_ts13._void,
u64: import_scale_ts13._void,
u128: import_scale_ts13._void,
u256: import_scale_ts13._void,
i8: import_scale_ts13._void,
i16: import_scale_ts13._void,
i32: import_scale_ts13._void,
i64: import_scale_ts13._void,
i128: import_scale_ts13._void,
i256: import_scale_ts13._void
const oStr = scaleTs.Option(scaleTs.str);
const primitive = scaleTs.Enum({
bool: scaleTs._void,
char: scaleTs._void,
str: scaleTs._void,
u8: scaleTs._void,
u16: scaleTs._void,
u32: scaleTs._void,
u64: scaleTs._void,
u128: scaleTs._void,
u256: scaleTs._void,
i8: scaleTs._void,
i16: scaleTs._void,
i32: scaleTs._void,
i64: scaleTs._void,
i128: scaleTs._void,
i256: scaleTs._void
});
var fields = (0, import_scale_ts13.Vector)(
(0, import_scale_ts13.Struct)({
const fields = scaleTs.Vector(
scaleTs.Struct({
name: oStr,

@@ -486,19 +358,19 @@ type: compactNumber,

);
var arr = (0, import_scale_ts13.Struct)({
len: import_scale_ts13.u32,
const arr = scaleTs.Struct({
len: scaleTs.u32,
type: compactNumber
});
var bitSequence2 = (0, import_scale_ts13.Struct)({
const bitSequence = scaleTs.Struct({
bitStoreType: compactNumber,
bitOrderType: compactNumber
});
var variant = (0, import_scale_ts13.Vector)(
(0, import_scale_ts13.Struct)({
name: import_scale_ts13.str,
const variant = scaleTs.Vector(
scaleTs.Struct({
name: scaleTs.str,
fields,
index: import_scale_ts13.u8,
index: scaleTs.u8,
docs
})
);
var def = (0, import_scale_ts13.Enum)({
const def = scaleTs.Enum({
composite: fields,

@@ -508,13 +380,13 @@ variant,

array: arr,
tuple: (0, import_scale_ts13.Vector)(compactNumber),
tuple: scaleTs.Vector(compactNumber),
primitive,
compact: compactNumber,
bitSequence: bitSequence2
bitSequence
});
var param = (0, import_scale_ts13.Struct)({
name: import_scale_ts13.str,
type: (0, import_scale_ts13.Option)(compactNumber)
const param = scaleTs.Struct({
name: scaleTs.str,
type: scaleTs.Option(compactNumber)
});
var params = (0, import_scale_ts13.Vector)(param);
var entry = (0, import_scale_ts13.Struct)({
const params = scaleTs.Vector(param);
const entry = scaleTs.Struct({
id: compactNumber,

@@ -526,17 +398,15 @@ path: docs,

});
var lookup = (0, import_scale_ts13.Vector)(entry);
const lookup = scaleTs.Vector(entry);
// src/codecs/metadata/pallets.ts
var import_scale_ts14 = require("scale-ts");
var hashType = (0, import_scale_ts14.Enum)({
Blake2128: import_scale_ts14._void,
Blake2256: import_scale_ts14._void,
Blake2128Concat: import_scale_ts14._void,
Twox128: import_scale_ts14._void,
Twox256: import_scale_ts14._void,
Twox64Concat: import_scale_ts14._void,
Identity: import_scale_ts14._void
const hashType = scaleTs.Enum({
Blake2128: scaleTs._void,
Blake2256: scaleTs._void,
Blake2128Concat: scaleTs._void,
Twox128: scaleTs._void,
Twox256: scaleTs._void,
Twox64Concat: scaleTs._void,
Identity: scaleTs._void
});
var hashers = (0, import_scale_ts14.Vector)(hashType);
var storageMap = (0, import_scale_ts14.Struct)({
const hashers = scaleTs.Vector(hashType);
const storageMap = scaleTs.Struct({
hashers,

@@ -546,6 +416,6 @@ key: compactNumber,

});
var storageItem = (0, import_scale_ts14.Struct)({
name: import_scale_ts14.str,
modifier: import_scale_ts14.u8,
type: (0, import_scale_ts14.Enum)({
const storageItem = scaleTs.Struct({
name: scaleTs.str,
modifier: scaleTs.u8,
type: scaleTs.Enum({
plain: compactNumber,

@@ -557,16 +427,16 @@ map: storageMap

});
var storage = (0, import_scale_ts14.Option)(
(0, import_scale_ts14.Struct)({
prefix: import_scale_ts14.str,
items: (0, import_scale_ts14.Vector)(storageItem)
const storage = scaleTs.Option(
scaleTs.Struct({
prefix: scaleTs.str,
items: scaleTs.Vector(storageItem)
})
);
var v14Pallet = {
name: import_scale_ts14.str,
const v14Pallet = {
name: scaleTs.str,
storage,
calls: (0, import_scale_ts14.Option)(compactNumber),
events: (0, import_scale_ts14.Option)(compactNumber),
constants: (0, import_scale_ts14.Vector)(
(0, import_scale_ts14.Struct)({
name: import_scale_ts14.str,
calls: scaleTs.Option(compactNumber),
events: scaleTs.Option(compactNumber),
constants: scaleTs.Vector(
scaleTs.Struct({
name: scaleTs.str,
type: compactNumber,

@@ -577,6 +447,6 @@ value: Hex(),

),
errors: (0, import_scale_ts14.Option)(compactNumber),
index: import_scale_ts14.u8
errors: scaleTs.Option(compactNumber),
index: scaleTs.u8
};
var v15Pallet = {
const v15Pallet = {
...v14Pallet,

@@ -586,12 +456,10 @@ docs

// src/codecs/metadata/runtime-api.ts
var import_scale_ts15 = require("scale-ts");
var runtimeApi = (0, import_scale_ts15.Struct)({
name: import_scale_ts15.str,
methods: (0, import_scale_ts15.Vector)(
(0, import_scale_ts15.Struct)({
name: import_scale_ts15.str,
inputs: (0, import_scale_ts15.Vector)(
(0, import_scale_ts15.Struct)({
name: import_scale_ts15.str,
const runtimeApi = scaleTs.Struct({
name: scaleTs.str,
methods: scaleTs.Vector(
scaleTs.Struct({
name: scaleTs.str,
inputs: scaleTs.Vector(
scaleTs.Struct({
name: scaleTs.str,
type: compactNumber

@@ -607,5 +475,4 @@ })

// src/codecs/metadata/v15.ts
var extrinsic = (0, import_scale_ts16.Struct)({
version: import_scale_ts16.u8,
const extrinsic$1 = scaleTs.Struct({
version: scaleTs.u8,
address: compactNumber,

@@ -615,5 +482,5 @@ call: compactNumber,

extra: compactNumber,
signedExtensions: (0, import_scale_ts16.Vector)(
(0, import_scale_ts16.Struct)({
identifier: import_scale_ts16.str,
signedExtensions: scaleTs.Vector(
scaleTs.Struct({
identifier: scaleTs.str,
type: compactNumber,

@@ -624,9 +491,9 @@ additionalSigned: compactNumber

});
var v15 = (0, import_scale_ts16.Struct)({
const v15 = scaleTs.Struct({
lookup,
pallets: (0, import_scale_ts16.Vector)((0, import_scale_ts16.Struct)(v15Pallet)),
extrinsic,
pallets: scaleTs.Vector(scaleTs.Struct(v15Pallet)),
extrinsic: extrinsic$1,
type: compactNumber,
apis: (0, import_scale_ts16.Vector)(runtimeApi),
outerEnums: (0, import_scale_ts16.Struct)({
apis: scaleTs.Vector(runtimeApi),
outerEnums: scaleTs.Struct({
call: compactNumber,

@@ -636,18 +503,16 @@ event: compactNumber,

}),
custom: (0, import_scale_ts16.Vector)((0, import_scale_ts16.Tuple)(import_scale_ts16.str, (0, import_scale_ts16.Struct)({ type: compactNumber, value: Hex() })))
custom: scaleTs.Vector(scaleTs.Tuple(scaleTs.str, scaleTs.Struct({ type: compactNumber, value: Hex() })))
});
// src/codecs/metadata/v14.ts
var import_scale_ts17 = require("scale-ts");
var empty = new Uint8Array();
var Always = (value) => (0, import_scale_ts17.createCodec)(
const empty = new Uint8Array();
const Always = (value) => scaleTs.createCodec(
() => empty,
() => value
);
var extrinsic2 = (0, import_scale_ts17.Struct)({
const extrinsic = scaleTs.Struct({
type: compactNumber,
version: import_scale_ts17.u8,
signedExtensions: (0, import_scale_ts17.Vector)(
(0, import_scale_ts17.Struct)({
identifier: import_scale_ts17.str,
version: scaleTs.u8,
signedExtensions: scaleTs.Vector(
scaleTs.Struct({
identifier: scaleTs.str,
type: compactNumber,

@@ -658,6 +523,6 @@ additionalSigned: compactNumber

});
var v14 = (0, import_scale_ts17.Struct)({
const v14 = scaleTs.Struct({
lookup,
pallets: (0, import_scale_ts17.Vector)((0, import_scale_ts17.Struct)({ ...v14Pallet, docs: Always([]) })),
extrinsic: extrinsic2,
pallets: scaleTs.Vector(scaleTs.Struct({ ...v14Pallet, docs: Always([]) })),
extrinsic,
type: compactNumber,

@@ -667,13 +532,12 @@ apis: Always([])

// src/codecs/metadata/metadata.ts
var unsupportedFn = () => {
const unsupportedFn = () => {
throw new Error("Unsupported metadata version!");
};
var unsupported = (0, import_scale_ts18.createCodec)(
const unsupported = scaleTs.createCodec(
unsupportedFn,
unsupportedFn
);
var metadata = (0, import_scale_ts18.Struct)({
magicNumber: import_scale_ts18.u32,
metadata: (0, import_scale_ts18.Enum)({
const metadata = scaleTs.Struct({
magicNumber: scaleTs.u32,
metadata: scaleTs.Enum({
v0: unsupported,

@@ -698,36 +562,24 @@ v1: unsupported,

// src/hashes/blake2.ts
var import_utils5 = require("@polkadot-api/utils");
var import_blake2b2 = require("@noble/hashes/blake2b");
var len32 = { dkLen: 32 };
var Blake2256 = (encoded) => (0, import_blake2b2.blake2b)(encoded, len32);
var len16 = { dkLen: 16 };
var Blake2128 = (encoded) => (0, import_blake2b2.blake2b)(encoded, len16);
var Blake2128Concat = (encoded) => (0, import_utils5.mergeUint8)(Blake2128(encoded), encoded);
const len32$1 = { dkLen: 32 };
const Blake2256 = (encoded) => blake2b.blake2b(encoded, len32$1);
const len16 = { dkLen: 16 };
const Blake2128 = (encoded) => blake2b.blake2b(encoded, len16);
const Blake2128Concat = (encoded) => utils.mergeUint8(Blake2128(encoded), encoded);
// src/hashes/blake3.ts
var import_utils6 = require("@polkadot-api/utils");
var import_blake3 = require("@noble/hashes/blake3");
var len322 = { dkLen: 32 };
var Blake3256 = (encoded) => (0, import_blake3.blake3)(encoded, len322);
var Blake3256Concat = (encoded) => (0, import_utils6.mergeUint8)(Blake3256(encoded), encoded);
const len32 = { dkLen: 32 };
const Blake3256 = (encoded) => blake3.blake3(encoded, len32);
const Blake3256Concat = (encoded) => utils.mergeUint8(Blake3256(encoded), encoded);
// src/hashes/identity.ts
var Identity = (encoded) => encoded;
const Identity = (encoded) => encoded;
// src/hashes/twoX.ts
var import_utils7 = require("@polkadot-api/utils");
var import_scale_ts19 = require("scale-ts");
// src/hashes/h64.ts
var bigintFromU16 = (v0, v1, v2, v3) => new DataView(new Uint16Array([v0, v1, v2, v3]).buffer).getBigUint64(0, true);
var MASK_64 = 2n ** 64n - 1n;
var rotl = (input, nBits) => input << nBits & MASK_64 | input >> 64n - nBits;
var multiply = (a, b) => a * b & MASK_64;
var add = (a, b) => a + b & MASK_64;
var PRIME64_1 = 11400714785074694791n;
var PRIME64_2 = 14029467366897019727n;
var PRIME64_3 = 1609587929392839161n;
var PRIME64_4 = 9650029242287828579n;
var PRIME64_5 = 2870177450012600261n;
const bigintFromU16 = (v0, v1, v2, v3) => new DataView(new Uint16Array([v0, v1, v2, v3]).buffer).getBigUint64(0, true);
const MASK_64 = 2n ** 64n - 1n;
const rotl = (input, nBits) => input << nBits & MASK_64 | input >> 64n - nBits;
const multiply = (a, b) => a * b & MASK_64;
const add = (a, b) => a + b & MASK_64;
const PRIME64_1 = 11400714785074694791n;
const PRIME64_2 = 14029467366897019727n;
const PRIME64_3 = 1609587929392839161n;
const PRIME64_4 = 9650029242287828579n;
const PRIME64_5 = 2870177450012600261n;
function h64(input, seed = 0n) {

@@ -855,4 +707,3 @@ let v1 = add(add(seed, PRIME64_1), PRIME64_2);

// src/hashes/twoX.ts
var Twox128 = (input) => {
const Twox128 = (input) => {
const result = new Uint8Array(16);

@@ -864,3 +715,3 @@ const dv = new DataView(result.buffer);

};
var Twox256 = (input) => {
const Twox256 = (input) => {
const result = new Uint8Array(32);

@@ -874,15 +725,13 @@ const dv = new DataView(result.buffer);

};
var Twox64Concat = (encoded) => (0, import_utils7.mergeUint8)(import_scale_ts19.u64.enc(h64(encoded)), encoded);
const Twox64Concat = (encoded) => utils.mergeUint8(scaleTs.u64.enc(h64(encoded)), encoded);
// src/storage.ts
var import_utils8 = require("@polkadot-api/utils");
var textEncoder4 = new TextEncoder();
var Storage = (pallet) => {
const palledEncoded = Twox128(textEncoder4.encode(pallet));
return (name, dec3, ...encoders) => {
const palletItemEncoded = (0, import_utils8.mergeUint8)(
const textEncoder = new TextEncoder();
const Storage = (pallet) => {
const palledEncoded = Twox128(textEncoder.encode(pallet));
return (name, dec, ...encoders) => {
const palletItemEncoded = utils.mergeUint8(
palledEncoded,
Twox128(textEncoder4.encode(name))
Twox128(textEncoder.encode(name))
);
const palletItemEncodedHex = (0, import_utils8.toHex)(palletItemEncoded);
const palletItemEncodedHex = utils.toHex(palletItemEncoded);
const bytesToSkip = encoders.map((e) => e[1]).map((x) => {

@@ -911,10 +760,10 @@ if (x === Identity) return 0;

const fns = encoders.map(
([{ enc: enc4 }, hash]) => (val) => hash(enc4(val))
([{ enc: enc2 }, hash]) => (val) => hash(enc2(val))
);
const enc3 = (...args) => (0, import_utils8.toHex)(
(0, import_utils8.mergeUint8)(palletItemEncoded, ...args.map((val, idx) => fns[idx](val)))
const enc = (...args) => utils.toHex(
utils.mergeUint8(palletItemEncoded, ...args.map((val, idx) => fns[idx](val)))
);
return {
enc: enc3,
dec: dec3,
enc,
dec,
keyDecoder

@@ -924,2 +773,149 @@ };

};
//# sourceMappingURL=index.js.map
// Generated CJS export glue.
// First: re-exports from scale-ts, exposed as accessor properties (live
// getters) so each binding always reflects the upstream module. Note that
// scale-ts's `Enum` codec is re-exported under the name `ScaleEnum`,
// presumably to avoid clashing with this package's own `Enum` — confirm
// against the public API docs.
Object.defineProperty(exports, "Bytes", {
enumerable: true,
get: function () { return scaleTs.Bytes; }
});
Object.defineProperty(exports, "Option", {
enumerable: true,
get: function () { return scaleTs.Option; }
});
Object.defineProperty(exports, "Result", {
enumerable: true,
get: function () { return scaleTs.Result; }
});
Object.defineProperty(exports, "ScaleEnum", {
enumerable: true,
get: function () { return scaleTs.Enum; }
});
Object.defineProperty(exports, "Struct", {
enumerable: true,
get: function () { return scaleTs.Struct; }
});
Object.defineProperty(exports, "Tuple", {
enumerable: true,
get: function () { return scaleTs.Tuple; }
});
Object.defineProperty(exports, "Vector", {
enumerable: true,
get: function () { return scaleTs.Vector; }
});
Object.defineProperty(exports, "_void", {
enumerable: true,
get: function () { return scaleTs._void; }
});
Object.defineProperty(exports, "bool", {
enumerable: true,
get: function () { return scaleTs.bool; }
});
Object.defineProperty(exports, "compact", {
enumerable: true,
get: function () { return scaleTs.compact; }
});
Object.defineProperty(exports, "createCodec", {
enumerable: true,
get: function () { return scaleTs.createCodec; }
});
Object.defineProperty(exports, "createDecoder", {
enumerable: true,
get: function () { return scaleTs.createDecoder; }
});
Object.defineProperty(exports, "enhanceCodec", {
enumerable: true,
get: function () { return scaleTs.enhanceCodec; }
});
Object.defineProperty(exports, "enhanceDecoder", {
enumerable: true,
get: function () { return scaleTs.enhanceDecoder; }
});
Object.defineProperty(exports, "enhanceEncoder", {
enumerable: true,
get: function () { return scaleTs.enhanceEncoder; }
});
Object.defineProperty(exports, "i128", {
enumerable: true,
get: function () { return scaleTs.i128; }
});
Object.defineProperty(exports, "i16", {
enumerable: true,
get: function () { return scaleTs.i16; }
});
Object.defineProperty(exports, "i256", {
enumerable: true,
get: function () { return scaleTs.i256; }
});
Object.defineProperty(exports, "i32", {
enumerable: true,
get: function () { return scaleTs.i32; }
});
Object.defineProperty(exports, "i64", {
enumerable: true,
get: function () { return scaleTs.i64; }
});
Object.defineProperty(exports, "i8", {
enumerable: true,
get: function () { return scaleTs.i8; }
});
Object.defineProperty(exports, "str", {
enumerable: true,
get: function () { return scaleTs.str; }
});
Object.defineProperty(exports, "u128", {
enumerable: true,
get: function () { return scaleTs.u128; }
});
Object.defineProperty(exports, "u16", {
enumerable: true,
get: function () { return scaleTs.u16; }
});
Object.defineProperty(exports, "u256", {
enumerable: true,
get: function () { return scaleTs.u256; }
});
Object.defineProperty(exports, "u32", {
enumerable: true,
get: function () { return scaleTs.u32; }
});
Object.defineProperty(exports, "u64", {
enumerable: true,
get: function () { return scaleTs.u64; }
});
Object.defineProperty(exports, "u8", {
enumerable: true,
get: function () { return scaleTs.u8; }
});
// Second: direct exports of this package's own module-local bindings
// (codecs, hashers, storage helpers, metadata definitions).
exports.AccountId = AccountId;
exports.Bin = Bin;
exports.Binary = Binary;
exports.Blake2128 = Blake2128;
exports.Blake2128Concat = Blake2128Concat;
exports.Blake2256 = Blake2256;
exports.Blake3256 = Blake3256;
exports.Blake3256Concat = Blake3256Concat;
exports.Enum = Enum;
exports.FixedSizeBinary = FixedSizeBinary;
exports.Hex = Hex;
exports.Identity = Identity;
exports.Self = Self;
exports.Storage = Storage;
exports.Twox128 = Twox128;
exports.Twox256 = Twox256;
exports.Twox64Concat = Twox64Concat;
exports.Variant = Variant;
exports._Enum = _Enum;
exports.bitSequence = bitSequence$1;
exports.blockHeader = blockHeader;
exports.char = char;
exports.compactBn = compactBn;
exports.compactNumber = compactNumber;
exports.ethAccount = ethAccount;
exports.fixedStr = fixedStr;
exports.fromBufferToBase58 = fromBufferToBase58;
exports.getSs58AddressInfo = getSs58AddressInfo;
exports.h64 = h64;
exports.metadata = metadata;
exports.selfDecoder = selfDecoder;
exports.selfEncoder = selfEncoder;
exports.v14 = v14;
exports.v15 = v15;
//# sourceMappingURL=index.js.map
{
"name": "@polkadot-api/substrate-bindings",
"version": "0.6.0",
"version": "0.6.1",
"author": "Josep M Sobrepere (https://github.com/josepot)",

@@ -15,12 +15,12 @@ "repository": {

"production": {
"import": "./dist/index.mjs",
"import": "./dist/esm/index.mjs",
"require": "./dist/min/index.js",
"default": "./dist/index.js"
},
"import": "./dist/index.mjs",
"import": "./dist/esm/index.mjs",
"require": "./dist/index.js",
"default": "./dist/index.js"
},
"module": "./dist/index.mjs",
"import": "./dist/index.mjs",
"module": "./dist/esm/index.mjs",
"import": "./dist/esm/index.mjs",
"require": "./dist/index.js",

@@ -32,4 +32,4 @@ "default": "./dist/index.js"

"main": "./dist/index.js",
"module": "./dist/index.mjs",
"browser": "./dist/index.mjs",
"module": "./dist/esm/index.mjs",
"browser": "./dist/esm/index.mjs",
"types": "./dist/index.d.ts",

@@ -43,3 +43,3 @@ "files": [

"scale-ts": "^1.6.0",
"@polkadot-api/utils": "0.1.0"
"@polkadot-api/utils": "0.1.1"
},

@@ -51,3 +51,3 @@ "devDependencies": {

"scripts": {
"build": "tsc --noEmit && tsup-node src/index.ts --clean --sourcemap --platform neutral --target=es2020 --format esm,cjs --dts && tsup-node src/index.ts --clean --sourcemap --platform neutral --target=es2020 --format cjs --dts --minify --out-dir dist/min",
"build": "tsc --noEmit && rollup -c ../../rollup.config.js",
"test": "vitest",

@@ -54,0 +54,0 @@ "lint": "prettier --check README.md \"src/**/*.{js,jsx,ts,tsx,json,md}\"",

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc