ripple-binary-codec - npm package version comparison

Comparing version 1.6.0-beta.0 to 1.6.0

dist/enums/bytes.d.ts


dist/binary.d.ts

@@ -5,2 +5,3 @@ import { BinaryParser } from './serdes/binary-parser';

import { sha512Half, transactionID } from './hashes';
import { type XrplDefinitionsBase } from './enums';
import { JsonObject } from './types/serialized-type';

@@ -12,5 +13,7 @@ import { Buffer } from 'buffer/';

* @param bytes hex-string to construct BinaryParser from
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns A BinaryParser
*/
declare const makeParser: (bytes: string) => BinaryParser;
declare const makeParser: (bytes: string, definitions?: XrplDefinitionsBase) => BinaryParser;
/**

@@ -20,5 +23,7 @@ * Parse BinaryParser into JSON

* @param parser BinaryParser object
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON for the bytes in the BinaryParser
*/
declare const readJSON: (parser: BinaryParser) => JsonObject;
declare const readJSON: (parser: BinaryParser, definitions?: XrplDefinitionsBase) => JsonObject;
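A minimal internal-module sketch of the two helpers above with the new optional XrplDefinitionsBase parameter; the hex blob and the sidechain definitions instance are hypothetical placeholders, not part of this diff:

```ts
import { makeParser, readJSON } from './binary'
import { XrplDefinitionsBase } from './enums'

declare const hex: string                        // hypothetical hex-encoded serialized object
declare const sidechainDefs: XrplDefinitionsBase // hypothetical custom definitions

// Default rippled definitions
const json = readJSON(makeParser(hex))

// Custom definitions: pass the same instance to both steps
const customJson = readJSON(makeParser(hex, sidechainDefs), sidechainDefs)
```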
/**

@@ -28,5 +33,7 @@ * Parse a hex-string into its JSON interpretation

* @param bytes hex-string to parse into JSON
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON
*/
declare const binaryToJSON: (bytes: string) => JsonObject;
declare const binaryToJSON: (bytes: string, definitions?: XrplDefinitionsBase) => JsonObject;
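binaryToJSON is the one-call form of the pair above; per the new implementation further down, it forwards the same optional definitions to both makeParser and readJSON. Sketch with the same hypothetical inputs:

```ts
import { binaryToJSON } from './binary'
import { XrplDefinitionsBase } from './enums'

declare const hex: string
declare const sidechainDefs: XrplDefinitionsBase

const json = binaryToJSON(hex)                      // default definitions
const customJson = binaryToJSON(hex, sidechainDefs) // sidechain/amendment definitions
```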
/**

@@ -41,2 +48,3 @@ * Interface for passing parameters to SerializeObject

signingFieldsOnly?: boolean;
definitions?: XrplDefinitionsBase;
}
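The options interface gains a `definitions` entry alongside the existing prefix/suffix/signingFieldsOnly members. A sketch of serializeObject with the full options bag (the transaction JSON and custom definitions are placeholders):

```ts
import { serializeObject } from './binary'
import { HashPrefix } from './hash-prefixes'
import { XrplDefinitionsBase } from './enums'

declare const tx: Record<string, unknown>
declare const sidechainDefs: XrplDefinitionsBase

const bytes = serializeObject(tx, {
  prefix: HashPrefix.transactionSig, // optional bytes written before the payload
  signingFieldsOnly: true,           // keep only fields flagged isSigningField
  definitions: sidechainDefs,        // optional custom rippled definitions
})
```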

@@ -47,3 +55,3 @@ /**

* @param object JSON object to serialize
* @param opts options for serializing, including optional prefix, suffix, and signingFieldOnly
* @param opts options for serializing, including optional prefix, suffix, signingFieldOnly, and definitions
* @returns A Buffer containing the serialized object

@@ -57,5 +65,8 @@ */

* @param prefix Prefix bytes to put before the serialized object
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns A Buffer with the serialized object
*/
declare function signingData(transaction: JsonObject, prefix?: Buffer): Buffer;
declare function signingData(transaction: JsonObject, prefix?: Buffer, opts?: {
definitions?: XrplDefinitionsBase;
}): Buffer;
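signingData keeps its defaulted prefix (HashPrefix.transactionSig, per the implementation below) and adds a trailing opts bag whose definitions are forwarded to serializeObject. Sketch with hypothetical inputs:

```ts
import { signingData } from './binary'
import { XrplDefinitionsBase } from './enums'

declare const tx: Record<string, unknown>
declare const sidechainDefs: XrplDefinitionsBase

const toSign = signingData(tx) // default prefix and definitions
const toSignCustom = signingData(tx, undefined, { definitions: sidechainDefs })
```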
/**

@@ -72,2 +83,3 @@ * Interface describing fields required for a Claim

* @param claim A claim object to serialize
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the serialized object with appropriate prefix

@@ -81,5 +93,8 @@ */

* @param signingAccount Account to sign the transaction with
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns serialized transaction with appropriate prefix and suffix
*/
declare function multiSigningData(transaction: JsonObject, signingAccount: string | AccountID): Buffer;
declare function multiSigningData(transaction: JsonObject, signingAccount: string | AccountID, opts?: {
definitions: XrplDefinitionsBase;
}): Buffer;
export { BinaryParser, BinarySerializer, BytesList, ClaimObject, makeParser, serializeObject, readJSON, multiSigningData, signingData, signingClaimData, binaryToJSON, sha512Half, transactionID, };
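multiSigningData accepts the same kind of opts bag, defaulting definitions to DEFAULT_DEFINITIONS, and still appends the signer's AccountID as a suffix. Sketch (the account address and definitions instance are placeholders):

```ts
import { multiSigningData } from './binary'
import { XrplDefinitionsBase } from './enums'

declare const tx: Record<string, unknown>
declare const sidechainDefs: XrplDefinitionsBase

const signer = 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh' // placeholder classic address

const data = multiSigningData(tx, signer)
const dataCustom = multiSigningData(tx, signer, { definitions: sidechainDefs })
```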

@@ -5,13 +5,14 @@ "use strict";

exports.transactionID = exports.sha512Half = exports.binaryToJSON = exports.signingClaimData = exports.signingData = exports.multiSigningData = exports.readJSON = exports.serializeObject = exports.makeParser = exports.BytesList = exports.BinarySerializer = exports.BinaryParser = void 0;
var types_1 = require("./types");
var binary_parser_1 = require("./serdes/binary-parser");
const types_1 = require("./types");
const binary_parser_1 = require("./serdes/binary-parser");
Object.defineProperty(exports, "BinaryParser", { enumerable: true, get: function () { return binary_parser_1.BinaryParser; } });
var hash_prefixes_1 = require("./hash-prefixes");
var binary_serializer_1 = require("./serdes/binary-serializer");
const hash_prefixes_1 = require("./hash-prefixes");
const binary_serializer_1 = require("./serdes/binary-serializer");
Object.defineProperty(exports, "BinarySerializer", { enumerable: true, get: function () { return binary_serializer_1.BinarySerializer; } });
Object.defineProperty(exports, "BytesList", { enumerable: true, get: function () { return binary_serializer_1.BytesList; } });
var hashes_1 = require("./hashes");
const hashes_1 = require("./hashes");
Object.defineProperty(exports, "sha512Half", { enumerable: true, get: function () { return hashes_1.sha512Half; } });
Object.defineProperty(exports, "transactionID", { enumerable: true, get: function () { return hashes_1.transactionID; } });
var bigInt = require("big-integer");
const enums_1 = require("./enums");
const bigInt = require("big-integer");
/**

@@ -21,5 +22,7 @@ * Construct a BinaryParser

* @param bytes hex-string to construct BinaryParser from
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns A BinaryParser
*/
var makeParser = function (bytes) { return new binary_parser_1.BinaryParser(bytes); };
const makeParser = (bytes, definitions) => new binary_parser_1.BinaryParser(bytes, definitions);
exports.makeParser = makeParser;

@@ -30,7 +33,7 @@ /**

* @param parser BinaryParser object
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON for the bytes in the BinaryParser
*/
var readJSON = function (parser) {
return parser.readType(types_1.coreTypes.STObject).toJSON();
};
const readJSON = (parser, definitions = enums_1.DEFAULT_DEFINITIONS) => parser.readType(types_1.coreTypes.STObject).toJSON(definitions);
exports.readJSON = readJSON;

@@ -41,5 +44,7 @@ /**

* @param bytes hex-string to parse into JSON
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON
*/
var binaryToJSON = function (bytes) { return readJSON(makeParser(bytes)); };
const binaryToJSON = (bytes, definitions) => readJSON(makeParser(bytes, definitions), definitions);
exports.binaryToJSON = binaryToJSON;

@@ -50,16 +55,17 @@ /**

* @param object JSON object to serialize
* @param opts options for serializing, including optional prefix, suffix, and signingFieldOnly
* @param opts options for serializing, including optional prefix, suffix, signingFieldOnly, and definitions
* @returns A Buffer containing the serialized object
*/
function serializeObject(object, opts) {
if (opts === void 0) { opts = {}; }
var prefix = opts.prefix, suffix = opts.suffix, _a = opts.signingFieldsOnly, signingFieldsOnly = _a === void 0 ? false : _a;
var bytesList = new binary_serializer_1.BytesList();
function serializeObject(object, opts = {}) {
const { prefix, suffix, signingFieldsOnly = false, definitions } = opts;
const bytesList = new binary_serializer_1.BytesList();
if (prefix) {
bytesList.put(prefix);
}
var filter = signingFieldsOnly
? function (f) { return f.isSigningField; }
const filter = signingFieldsOnly
? (f) => f.isSigningField
: undefined;
types_1.coreTypes.STObject.from(object, filter).toBytesSink(bytesList);
types_1.coreTypes.STObject
.from(object, filter, definitions)
.toBytesSink(bytesList);
if (suffix) {

@@ -76,7 +82,11 @@ bytesList.put(suffix);

* @param prefix Prefix bytes to put before the serialized object
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns A Buffer with the serialized object
*/
function signingData(transaction, prefix) {
if (prefix === void 0) { prefix = hash_prefixes_1.HashPrefix.transactionSig; }
return serializeObject(transaction, { prefix: prefix, signingFieldsOnly: true });
function signingData(transaction, prefix = hash_prefixes_1.HashPrefix.transactionSig, opts = {}) {
return serializeObject(transaction, {
prefix,
signingFieldsOnly: true,
definitions: opts.definitions,
});
}

@@ -88,10 +98,11 @@ exports.signingData = signingData;

* @param claim A claim object to serialize
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the serialized object with appropriate prefix
*/
function signingClaimData(claim) {
var num = bigInt(String(claim.amount));
var prefix = hash_prefixes_1.HashPrefix.paymentChannelClaim;
var channel = types_1.coreTypes.Hash256.from(claim.channel).toBytes();
var amount = types_1.coreTypes.UInt64.from(num).toBytes();
var bytesList = new binary_serializer_1.BytesList();
const num = bigInt(String(claim.amount));
const prefix = hash_prefixes_1.HashPrefix.paymentChannelClaim;
const channel = types_1.coreTypes.Hash256.from(claim.channel).toBytes();
const amount = types_1.coreTypes.UInt64.from(num).toBytes();
const bytesList = new binary_serializer_1.BytesList();
bytesList.put(prefix);

@@ -108,11 +119,15 @@ bytesList.put(channel);

* @param signingAccount Account to sign the transaction with
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns serialized transaction with appropriate prefix and suffix
*/
function multiSigningData(transaction, signingAccount) {
var prefix = hash_prefixes_1.HashPrefix.transactionMultiSig;
var suffix = types_1.coreTypes.AccountID.from(signingAccount).toBytes();
function multiSigningData(transaction, signingAccount, opts = {
definitions: enums_1.DEFAULT_DEFINITIONS,
}) {
const prefix = hash_prefixes_1.HashPrefix.transactionMultiSig;
const suffix = types_1.coreTypes.AccountID.from(signingAccount).toBytes();
return serializeObject(transaction, {
prefix: prefix,
suffix: suffix,
prefix,
suffix,
signingFieldsOnly: true,
definitions: opts.definitions,
});

@@ -119,0 +134,0 @@ }

@@ -1,2 +0,2 @@

import { Field, TransactionType, LedgerEntryType, Type, TransactionResult } from './enums';
import { DEFAULT_DEFINITIONS, Field, TransactionType, LedgerEntryType, Type, TransactionResult } from './enums';
import * as types from './types';

@@ -9,2 +9,2 @@ import * as binary from './binary';

import { HashPrefix } from './hash-prefixes';
export { hashes, binary, ledgerHashes, Field, TransactionType, LedgerEntryType, Type, TransactionResult, quality, HashPrefix, ShaMap, types, };
export { hashes, binary, ledgerHashes, DEFAULT_DEFINITIONS, Field, TransactionType, LedgerEntryType, Type, TransactionResult, quality, HashPrefix, ShaMap, types, };
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.types = exports.ShaMap = exports.HashPrefix = exports.quality = exports.TransactionResult = exports.Type = exports.LedgerEntryType = exports.TransactionType = exports.Field = exports.ledgerHashes = exports.binary = exports.hashes = void 0;
var enums_1 = require("./enums");
exports.types = exports.ShaMap = exports.HashPrefix = exports.quality = exports.TransactionResult = exports.Type = exports.LedgerEntryType = exports.TransactionType = exports.Field = exports.DEFAULT_DEFINITIONS = exports.ledgerHashes = exports.binary = exports.hashes = void 0;
const enums_1 = require("./enums");
Object.defineProperty(exports, "DEFAULT_DEFINITIONS", { enumerable: true, get: function () { return enums_1.DEFAULT_DEFINITIONS; } });
Object.defineProperty(exports, "Field", { enumerable: true, get: function () { return enums_1.Field; } });

@@ -10,16 +34,16 @@ Object.defineProperty(exports, "TransactionType", { enumerable: true, get: function () { return enums_1.TransactionType; } });

Object.defineProperty(exports, "TransactionResult", { enumerable: true, get: function () { return enums_1.TransactionResult; } });
var types = require("./types");
const types = __importStar(require("./types"));
exports.types = types;
var binary = require("./binary");
const binary = __importStar(require("./binary"));
exports.binary = binary;
var shamap_1 = require("./shamap");
const shamap_1 = require("./shamap");
Object.defineProperty(exports, "ShaMap", { enumerable: true, get: function () { return shamap_1.ShaMap; } });
var ledgerHashes = require("./ledger-hashes");
const ledgerHashes = __importStar(require("./ledger-hashes"));
exports.ledgerHashes = ledgerHashes;
var hashes = require("./hashes");
const hashes = __importStar(require("./hashes"));
exports.hashes = hashes;
var quality_1 = require("./quality");
const quality_1 = require("./quality");
Object.defineProperty(exports, "quality", { enumerable: true, get: function () { return quality_1.quality; } });
var hash_prefixes_1 = require("./hash-prefixes");
const hash_prefixes_1 = require("./hash-prefixes");
Object.defineProperty(exports, "HashPrefix", { enumerable: true, get: function () { return hash_prefixes_1.HashPrefix; } });
//# sourceMappingURL=coretypes.js.map

@@ -1,48 +0,12 @@

import { SerializedType } from '../types/serialized-type';
import { Buffer } from 'buffer/';
import { BytesList } from '../binary';
export declare const TRANSACTION_TYPES: string[];
export declare class Bytes {
readonly name: string;
readonly ordinal: number;
readonly ordinalWidth: number;
readonly bytes: Buffer;
constructor(name: string, ordinal: number, ordinalWidth: number);
toJSON(): string;
toBytesSink(sink: BytesList): void;
toBytes(): Uint8Array;
}
declare class BytesLookup {
readonly ordinalWidth: number;
constructor(types: Record<string, number>, ordinalWidth: number);
from(value: Bytes | string): Bytes;
fromParser(parser: any): Bytes;
}
interface FieldInfo {
nth: number;
isVLEncoded: boolean;
isSerialized: boolean;
isSigningField: boolean;
type: string;
}
interface FieldInstance {
readonly nth: number;
readonly isVariableLengthEncoded: boolean;
readonly isSerialized: boolean;
readonly isSigningField: boolean;
readonly type: Bytes;
readonly ordinal: number;
readonly name: string;
readonly header: Buffer;
readonly associatedType: typeof SerializedType;
}
declare class FieldLookup {
constructor(fields: Array<[string, FieldInfo]>);
fromString(value: string): FieldInstance;
}
declare const Type: BytesLookup;
declare const LedgerEntryType: BytesLookup;
declare const TransactionType: BytesLookup;
declare const TransactionResult: BytesLookup;
declare const Field: FieldLookup;
export { Field, FieldInstance, Type, LedgerEntryType, TransactionResult, TransactionType, };
import { XrplDefinitionsBase, FieldInstance, Bytes } from './xrpl-definitions-base';
/**
* By default, coreTypes from the `types` folder is where known type definitions are initialized to avoid import cycles.
*/
declare const DEFAULT_DEFINITIONS: XrplDefinitionsBase;
declare const Type: import("./bytes").BytesLookup;
declare const LedgerEntryType: import("./bytes").BytesLookup;
declare const TransactionType: import("./bytes").BytesLookup;
declare const TransactionResult: import("./bytes").BytesLookup;
declare const Field: import("./field").FieldLookup;
declare const TRANSACTION_TYPES: string[];
export { Bytes, XrplDefinitionsBase, DEFAULT_DEFINITIONS, Field, FieldInstance, Type, LedgerEntryType, TransactionResult, TransactionType, TRANSACTION_TYPES, };
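The enums module now re-exports DEFAULT_DEFINITIONS, the XrplDefinitionsBase instance built from the bundled definitions.json; the lookups that used to be standalone constants (Field, Type, TransactionType, TransactionResult, TRANSACTION_TYPES) now hang off it, as the compiled enums/index.js below shows. A short sketch of what that object exposes, based only on the accessors visible in this diff:

```ts
import { DEFAULT_DEFINITIONS, TRANSACTION_TYPES } from './enums'

// transactionNames backs the exported TRANSACTION_TYPES list
console.log(TRANSACTION_TYPES.includes('Payment')) // true

// field/type/transactionType/transactionResult are the old lookup tables
const accountField = DEFAULT_DEFINITIONS.field.fromString('Account')
console.log(accountField.name, accountField.isSigningField)
```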
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransactionType = exports.TransactionResult = exports.LedgerEntryType = exports.Type = exports.Field = exports.Bytes = exports.TRANSACTION_TYPES = void 0;
var enums = require("./definitions.json");
var serialized_type_1 = require("../types/serialized-type");
var buffer_1 = require("buffer/");
/*
* @brief: All valid transaction types
exports.TRANSACTION_TYPES = exports.TransactionType = exports.TransactionResult = exports.LedgerEntryType = exports.Type = exports.Field = exports.DEFAULT_DEFINITIONS = exports.XrplDefinitionsBase = exports.Bytes = void 0;
const enums = __importStar(require("./definitions.json"));
const xrpl_definitions_base_1 = require("./xrpl-definitions-base");
Object.defineProperty(exports, "XrplDefinitionsBase", { enumerable: true, get: function () { return xrpl_definitions_base_1.XrplDefinitionsBase; } });
Object.defineProperty(exports, "Bytes", { enumerable: true, get: function () { return xrpl_definitions_base_1.Bytes; } });
/**
* By default, coreTypes from the `types` folder is where known type definitions are initialized to avoid import cycles.
*/
exports.TRANSACTION_TYPES = Object.entries(enums.TRANSACTION_TYPES)
.filter(function (_a) {
var _key = _a[0], value = _a[1];
return value >= 0;
})
.map(function (_a) {
var key = _a[0], _value = _a[1];
return key;
});
var TYPE_WIDTH = 2;
var LEDGER_ENTRY_WIDTH = 2;
var TRANSACTION_TYPE_WIDTH = 2;
var TRANSACTION_RESULT_WIDTH = 1;
/*
* @brief: Serialize a field based on type_code and Field.nth
*/
function fieldHeader(type, nth) {
var header = [];
if (type < 16) {
if (nth < 16) {
header.push((type << 4) | nth);
}
else {
header.push(type << 4, nth);
}
}
else if (nth < 16) {
header.push(nth, type);
}
else {
header.push(0, type, nth);
}
return buffer_1.Buffer.from(header);
}
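For reference, the header layout produced by fieldHeader above packs the type code and the field's nth into one to three bytes. A standalone restatement of the same logic with worked values (illustrative only):

```ts
import { Buffer } from 'buffer/'

function fieldHeader(type: number, nth: number): Buffer {
  const header: number[] = []
  if (type < 16) {
    if (nth < 16) header.push((type << 4) | nth) // both fit in one nibble each
    else header.push(type << 4, nth)             // nth needs its own byte
  } else if (nth < 16) header.push(nth, type)    // type needs its own byte
  else header.push(0, type, nth)                 // leading zero, then type, then nth
  return Buffer.from(header)
}

fieldHeader(1, 2)   // <12>
fieldHeader(1, 33)  // <10 21>
fieldHeader(17, 2)  // <02 11>
fieldHeader(17, 33) // <00 11 21>
```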
/*
* @brief: Bytes, name, and ordinal representing one type, ledger_type, transaction type, or result
*/
var Bytes = /** @class */ (function () {
function Bytes(name, ordinal, ordinalWidth) {
this.name = name;
this.ordinal = ordinal;
this.ordinalWidth = ordinalWidth;
this.bytes = buffer_1.Buffer.alloc(ordinalWidth);
for (var i = 0; i < ordinalWidth; i++) {
this.bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff;
}
}
Bytes.prototype.toJSON = function () {
return this.name;
};
Bytes.prototype.toBytesSink = function (sink) {
sink.put(this.bytes);
};
Bytes.prototype.toBytes = function () {
return this.bytes;
};
return Bytes;
}());
exports.Bytes = Bytes;
/*
* @brief: Collection of Bytes objects, mapping bidirectionally
*/
var BytesLookup = /** @class */ (function () {
function BytesLookup(types, ordinalWidth) {
var _this = this;
this.ordinalWidth = ordinalWidth;
Object.entries(types).forEach(function (_a) {
var k = _a[0], v = _a[1];
_this[k] = new Bytes(k, v, ordinalWidth);
_this[v.toString()] = _this[k];
});
}
BytesLookup.prototype.from = function (value) {
return value instanceof Bytes ? value : this[value];
};
BytesLookup.prototype.fromParser = function (parser) {
return this.from(parser.readUIntN(this.ordinalWidth).toString());
};
return BytesLookup;
}());
function buildField(_a) {
var name = _a[0], info = _a[1];
var typeOrdinal = enums.TYPES[info.type];
var field = fieldHeader(typeOrdinal, info.nth);
return {
name: name,
nth: info.nth,
isVariableLengthEncoded: info.isVLEncoded,
isSerialized: info.isSerialized,
isSigningField: info.isSigningField,
ordinal: (typeOrdinal << 16) | info.nth,
type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH),
header: field,
associatedType: serialized_type_1.SerializedType, // For later assignment in ./types/index.js
};
}
/*
* @brief: The collection of all fields as defined in definitions.json
*/
var FieldLookup = /** @class */ (function () {
function FieldLookup(fields) {
var _this = this;
fields.forEach(function (_a) {
var k = _a[0], v = _a[1];
_this[k] = buildField([k, v]);
_this[_this[k].ordinal.toString()] = _this[k];
});
}
FieldLookup.prototype.fromString = function (value) {
return this[value];
};
return FieldLookup;
}());
var Type = new BytesLookup(enums.TYPES, TYPE_WIDTH);
const DEFAULT_DEFINITIONS = new xrpl_definitions_base_1.XrplDefinitionsBase(enums, {});
exports.DEFAULT_DEFINITIONS = DEFAULT_DEFINITIONS;
const Type = DEFAULT_DEFINITIONS.type;
exports.Type = Type;
var LedgerEntryType = new BytesLookup(enums.LEDGER_ENTRY_TYPES, LEDGER_ENTRY_WIDTH);
const LedgerEntryType = DEFAULT_DEFINITIONS.ledgerEntryType;
exports.LedgerEntryType = LedgerEntryType;
var TransactionType = new BytesLookup(enums.TRANSACTION_TYPES, TRANSACTION_TYPE_WIDTH);
const TransactionType = DEFAULT_DEFINITIONS.transactionType;
exports.TransactionType = TransactionType;
var TransactionResult = new BytesLookup(enums.TRANSACTION_RESULTS, TRANSACTION_RESULT_WIDTH);
const TransactionResult = DEFAULT_DEFINITIONS.transactionResult;
exports.TransactionResult = TransactionResult;
var Field = new FieldLookup(enums.FIELDS);
const Field = DEFAULT_DEFINITIONS.field;
exports.Field = Field;
/*
* @brief: All valid transaction types
*/
const TRANSACTION_TYPES = DEFAULT_DEFINITIONS.transactionNames;
exports.TRANSACTION_TYPES = TRANSACTION_TYPES;
//# sourceMappingURL=index.js.map

@@ -24,5 +24,2 @@ {

"UInt512": 23,
"Issue": 24,
"XChainBridge": 25,
"XChainAttestationBatch": 26,
"Transaction": 10001,

@@ -41,7 +38,4 @@ "LedgerEntry": 10002,

"Offer": 111,
"Bridge": 105,
"LedgerHashes": 104,
"Amendments": 102,
"XChainClaimID": 113,
"XChainCreateAccountClaimID": 116,
"FeeSettings": 115,

@@ -55,3 +49,2 @@ "Escrow": 117,

"NFTokenOffer": 55,
"AMM": 121,
"Any": -3,

@@ -245,12 +238,2 @@ "Child": -2,

[
"WasLockingChainSend",
{
"nth": 19,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt8"
}
],
[
"LedgerEntryType",

@@ -296,12 +279,2 @@ {

[
"TradingFee",
{
"nth": 5,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt16"
}
],
[
"Version",

@@ -797,22 +770,2 @@ {

[
"VoteWeight",
{
"nth": 47,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt32"
}
],
[
"DiscountedFee",
{
"nth": 48,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt32"
}
],
[
"IndexNext",

@@ -988,32 +941,2 @@ {

[
"XChainClaimID",
{
"nth": 20,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt64"
}
],
[
"XChainAccountCreateCount",
{
"nth": 21,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt64"
}
],
[
"XChainAccountClaimCount",
{
"nth": 22,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "UInt64"
}
],
[
"EmailHash",

@@ -1199,12 +1122,2 @@ {

[
"AMMID",
{
"nth": 14,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Hash256"
}
],
[
"BookDirectory",

@@ -1480,32 +1393,2 @@ {

[
"Amount2",
{
"nth": 11,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"BidMin",
{
"nth": 12,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"BidMax",
{
"nth": 13,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"MinimumOffer",

@@ -1551,82 +1434,2 @@ {

[
"LPTokenOut",
{
"nth": 20,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"LPTokenIn",
{
"nth": 21,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"EPrice",
{
"nth": 22,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"Price",
{
"nth": 23,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"LPTokenBalance",
{
"nth": 24,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"XChainFee",
{
"nth": 28,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"SignatureReward",
{
"nth": 29,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"MinAccountCreateAmount",
{
"nth": 30,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Amount"
}
],
[
"PublicKey",

@@ -1962,12 +1765,2 @@ {

[
"AMMAccount",
{
"nth": 11,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"HookAccount",

@@ -1983,72 +1776,2 @@ {

[
"ThisChainAccount",
{
"nth": 17,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"OtherChainSource",
{
"nth": 18,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"OtherChainDestination",
{
"nth": 19,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"AttestationSignerAccount",
{
"nth": 20,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"AttestationRewardAccount",
{
"nth": 21,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"LockingChainDoor",
{
"nth": 22,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"IssuingChainDoor",
{
"nth": 23,
"isVLEncoded": true,
"isSerialized": true,
"isSigningField": true,
"type": "AccountID"
}
],
[
"Indexes",

@@ -2104,62 +1827,2 @@ {

[
"LockingChainIssue",
{
"nth": 1,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Issue"
}
],
[
"IssuingChainIssue",
{
"nth": 2,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Issue"
}
],
[
"Asset",
{
"nth": 3,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Issue"
}
],
[
"Asset2",
{
"nth": 4,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "Issue"
}
],
[
"XChainBridge",
{
"nth": 1,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "XChainBridge"
}
],
[
"XChainAttestationBatch",
{
"nth": 1,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "XChainAttestationBatch"
}
],
[
"TransactionMetaData",

@@ -2375,122 +2038,2 @@ {

[
"VoteEntry",
{
"nth": 25,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"AuctionSlot",
{
"nth": 27,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"AuthAccount",
{
"nth": 28,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"AMMToken",
{
"nth": 29,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"Token1",
{
"nth": 30,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"Token2",
{
"nth": 31,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainClaimProofSig",
{
"nth": 32,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainCreateAccountProofSig",
{
"nth": 33,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainAttestationBatchElement",
{
"nth": 34,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainClaimAttestationBatchElement",
{
"nth": 35,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainCreateAccountAttestationBatchElement",
{
"nth": 36,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"XChainAttestationBatchInner",
{
"nth": 37,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STObject"
}
],
[
"Signers",

@@ -2586,12 +2129,2 @@ {

[
"VoteSlots",
{
"nth": 14,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"Majorities",

@@ -2645,62 +2178,2 @@ {

}
],
[
"XChainProofSigs",
{
"nth": 21,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"XChainClaimAttestationBatch",
{
"nth": 22,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"XChainCreateAccountAttestationBatch",
{
"nth": 23,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"XChainClaimAttestations",
{
"nth": 24,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"XChainCreateAccountAttestations",
{
"nth": 25,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
],
[
"AuthAccounts",
{
"nth": 26,
"isVLEncoded": false,
"isSerialized": true,
"isSigningField": true,
"type": "STArray"
}
]

@@ -2761,10 +2234,2 @@ ],

"temBAD_NFTOKEN_TRANSFER_FEE": -262,
"temAMM_BAD_TOKENS": -261,
"temEQUAL_DOOR_ACCOUNTS": -259,
"temBAD_XCHAIN_PROOF": -258,
"temSIDECHAIN_BAD_ISSUES": -257,
"temSIDECHAIN_NONDOOR_OWNER": -256,
"temXCHAIN_BRIDGE_BAD_MIN_ACCOUNT_CREATE_AMOUNT": -255,
"temXCHAIN_BRIDGE_BAD_REWARD_AMOUNT": -254,
"temXCHAIN_TOO_MANY_ATTESTATIONS": -253,

@@ -2805,3 +2270,2 @@ "tefFAILURE": -199,

"terPRE_TICKET": -88,
"terNO_AMM": -87,

@@ -2855,26 +2319,3 @@ "tesSUCCESS": 0,

"tecOBJECT_NOT_FOUND": 160,
"tecINSUFFICIENT_PAYMENT": 161,
"tecAMM_UNFUNDED": 162,
"tecAMM_BALANCE": 163,
"tecAMM_FAILED_DEPOSIT": 164,
"tecAMM_FAILED_WITHDRAW": 165,
"tecAMM_INVALID_TOKENS": 166,
"tecAMM_FAILED_BID": 167,
"tecAMM_FAILED_VOTE": 168,
"tecBAD_XCHAIN_TRANSFER_ISSUE": 171,
"tecXCHAIN_NO_CLAIM_ID": 172,
"tecXCHAIN_BAD_CLAIM_ID": 173,
"tecXCHAIN_CLAIM_NO_QUORUM": 174,
"tecXCHAIN_PROOF_UNKNOWN_KEY": 175,
"tecXCHAIN_CREATE_ACCOUNT_NONXRP_ISSUE": 176,
"tecXCHAIN_CLAIM_ACCOUNT_DST_EXISTS": 177,
"tecXCHAIN_WRONG_CHAIN": 178,
"tecXCHAIN_REWARD_MISMATCH": 179,
"tecXCHAIN_NO_SIGNERS_LIST": 180,
"tecXCHAIN_SENDING_ACCOUNT_MISMATCH": 181,
"tecXCHAIN_INSUFF_CREATE_AMOUNT": 182,
"tecXCHAIN_ACCOUNT_CREATE_PAST": 183,
"tecXCHAIN_ACCOUNT_CREATE_TOO_MANY": 184,
"tecXCHAIN_PAYMENT_FAILED": 185,
"tecXCHAIN_SELF_COMMIT": 186
"tecINSUFFICIENT_PAYMENT": 161
},

@@ -2911,14 +2352,2 @@ "TRANSACTION_TYPES": {

"NFTokenAcceptOffer": 29,
"AMMCreate": 35,
"AMMDeposit": 36,
"AMMWithdraw": 37,
"AMMVote": 38,
"AMMBid": 39,
"XChainCreateBridge": 40,
"XChainCreateClaimID": 41,
"XChainCommit": 42,
"XChainClaim": 43,
"XChainAccountCreateCommit": 44,
"XChainAddAttestation": 45,
"XChainModifyBridge": 46,
"EnableAmendment": 100,

@@ -2925,0 +2354,0 @@ "SetFee": 101,

@@ -5,3 +5,3 @@ "use strict";

*/
var input = {
const input = {
temBAD_SEND_XRP_PATHS: -283,

@@ -97,10 +97,10 @@ temBAD_SEQUENCE: -282,

};
var startingFromTemBADSENDXRPPATHS = -284;
var startingFromTefFAILURE = -199;
var startingFromTerRETRY = -99;
var tesSUCCESS = 0;
var startingFromTecCLAIM = 100;
var startingFromTecDIRFULL = 121;
var previousKey = 'tem';
Object.keys(input).forEach(function (key) {
let startingFromTemBADSENDXRPPATHS = -284;
let startingFromTefFAILURE = -199;
let startingFromTerRETRY = -99;
const tesSUCCESS = 0;
let startingFromTecCLAIM = 100;
const startingFromTecDIRFULL = 121;
let previousKey = 'tem';
Object.keys(input).forEach((key) => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) {

@@ -111,12 +111,12 @@ console.log();

if (key.substring(0, 3) === 'tem') {
console.log(" \"".concat(key, "\": ").concat(startingFromTemBADSENDXRPPATHS++, ","));
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`);
}
else if (key.substring(0, 3) === 'tef') {
console.log(" \"".concat(key, "\": ").concat(startingFromTefFAILURE++, ","));
console.log(` "${key}": ${startingFromTefFAILURE++},`);
}
else if (key.substring(0, 3) === 'ter') {
console.log(" \"".concat(key, "\": ").concat(startingFromTerRETRY++, ","));
console.log(` "${key}": ${startingFromTerRETRY++},`);
}
else if (key.substring(0, 3) === 'tes') {
console.log(" \"".concat(key, "\": ").concat(tesSUCCESS, ","));
console.log(` "${key}": ${tesSUCCESS},`);
}

@@ -127,5 +127,5 @@ else if (key.substring(0, 3) === 'tec') {

}
console.log(" \"".concat(key, "\": ").concat(startingFromTecCLAIM++, ","));
console.log(` "${key}": ${startingFromTecCLAIM++},`);
}
});
//# sourceMappingURL=utils-renumber.js.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HashPrefix = void 0;
var buffer_1 = require("buffer/");
const buffer_1 = require("buffer/");
/**

@@ -12,3 +12,3 @@ * Write a 32 bit integer to a Buffer

function bytes(uint32) {
var result = buffer_1.Buffer.alloc(4);
const result = buffer_1.Buffer.alloc(4);
result.writeUInt32BE(uint32, 0);

@@ -20,3 +20,3 @@ return result;

*/
var HashPrefix = {
const HashPrefix = {
transactionID: bytes(0x54584e00),

@@ -23,0 +23,0 @@ // transaction plus metadata

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.transactionID = exports.sha512Half = exports.Sha512Half = void 0;
var hash_prefixes_1 = require("./hash-prefixes");
var createHash = require("create-hash");
var hash_256_1 = require("./types/hash-256");
var binary_serializer_1 = require("./serdes/binary-serializer");
var buffer_1 = require("buffer/");
const hash_prefixes_1 = require("./hash-prefixes");
const createHash = require("create-hash");
const hash_256_1 = require("./types/hash-256");
const binary_serializer_1 = require("./serdes/binary-serializer");
const buffer_1 = require("buffer/");
/**

@@ -28,8 +13,6 @@ * Class for hashing with SHA512

*/
var Sha512Half = /** @class */ (function (_super) {
__extends(Sha512Half, _super);
function Sha512Half() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.hash = createHash('sha512');
return _this;
class Sha512Half extends binary_serializer_1.BytesList {
constructor() {
super(...arguments);
this.hash = createHash('sha512');
}

@@ -42,5 +25,5 @@ /**

*/
Sha512Half.put = function (bytes) {
static put(bytes) {
return new Sha512Half().put(bytes);
};
}
/**

@@ -52,6 +35,6 @@ * Write bytes to an existing Sha512Hash

*/
Sha512Half.prototype.put = function (bytes) {
put(bytes) {
this.hash.update(bytes);
return this;
};
}
/**

@@ -62,5 +45,5 @@ * Compute SHA512 hash and slice in half

*/
Sha512Half.prototype.finish256 = function () {
finish256() {
return buffer_1.Buffer.from(this.hash.digest().slice(0, 32));
};
}
/**

@@ -71,7 +54,6 @@ * Constructs a Hash256 from the Sha512Half object

*/
Sha512Half.prototype.finish = function () {
finish() {
return new hash_256_1.Hash256(this.finish256());
};
return Sha512Half;
}(binary_serializer_1.BytesList));
}
}
exports.Sha512Half = Sha512Half;

@@ -84,9 +66,5 @@ /**

*/
function sha512Half() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
var hash = new Sha512Half();
args.forEach(function (a) { return hash.put(a); });
function sha512Half(...args) {
const hash = new Sha512Half();
args.forEach((a) => hash.put(a));
return hash.finish256();
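sha512Half, rewritten above with rest parameters, is SHA-512 truncated to its first 32 bytes over the concatenation of its arguments. A standalone sketch using Node's crypto module for illustration:

```ts
import { createHash } from 'crypto'

function sha512Half(...args: Buffer[]): Buffer {
  const hash = createHash('sha512')
  args.forEach((a) => hash.update(a)) // hash the concatenation of all inputs
  return hash.digest().slice(0, 32)   // keep only the first half (32 bytes)
}
```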

@@ -93,0 +71,0 @@ }

import { decodeLedgerData } from './ledger-hashes';
import { JsonObject } from './types/serialized-type';
import { XrplDefinitionsBase, TRANSACTION_TYPES, DEFAULT_DEFINITIONS } from './enums';
import { XrplDefinitions } from './enums/xrpl-definitions';
import { coreTypes } from './types';
/**

@@ -7,5 +10,6 @@ * Decode a transaction

* @param binary hex-string of the encoded transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the JSON representation of the transaction
*/
declare function decode(binary: string): JsonObject;
declare function decode(binary: string, definitions?: XrplDefinitionsBase): JsonObject;
/**

@@ -15,5 +19,7 @@ * Encode a transaction

* @param json The JSON representation of a transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
*
* @returns A hex-string of the encoded transaction
*/
declare function encode(json: object): string;
declare function encode(json: object, definitions?: XrplDefinitionsBase): string;
/**

@@ -24,5 +30,6 @@ * Encode a transaction and prepare for signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
declare function encodeForSigning(json: object): string;
declare function encodeForSigning(json: object, definitions?: XrplDefinitionsBase): string;
/**

@@ -33,2 +40,3 @@ * Encode a transaction and prepare for signing with a claim

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction

@@ -42,5 +50,6 @@ */

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
declare function encodeForMultisigning(json: object, signer: string): string;
declare function encodeForMultisigning(json: object, signer: string, definitions?: XrplDefinitionsBase): string;
/**

@@ -60,13 +69,2 @@ * Encode a quality value

declare function decodeQuality(value: string): string;
declare const _default: {
decode: typeof decode;
encode: typeof encode;
encodeForSigning: typeof encodeForSigning;
encodeForSigningClaim: typeof encodeForSigningClaim;
encodeForMultisigning: typeof encodeForMultisigning;
encodeQuality: typeof encodeQuality;
decodeQuality: typeof decodeQuality;
decodeLedgerData: typeof decodeLedgerData;
TRANSACTION_TYPES: string[];
};
export = _default;
export { decode, encode, encodeForSigning, encodeForSigningClaim, encodeForMultisigning, encodeQuality, decodeQuality, decodeLedgerData, TRANSACTION_TYPES, XrplDefinitions, XrplDefinitionsBase, DEFAULT_DEFINITIONS, coreTypes, };
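The package entry point switches from a single CommonJS export object to named exports, threads the optional definitions through every helper, and exposes the new XrplDefinitions class for building custom definition sets. A usage sketch: the Payment JSON is a minimal hypothetical example, and the shape of customDefinitionsJson is assumed to match definitions.json:

```ts
import { encode, decode, XrplDefinitions } from 'ripple-binary-codec'

const tx = {
  TransactionType: 'Payment',
  Account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',      // placeholder addresses
  Destination: 'rPT1Sjq2YGrBMTttX4GZHjKu9dyfzbpAYe',
  Amount: '1000000',
  Fee: '12',
  Sequence: 1,
}

const blob = encode(tx)        // uppercase hex string, default definitions
const roundTrip = decode(blob) // back to JSON

// Sidechain/amendment definitions (constructor argument shape is assumed)
declare const customDefinitionsJson: any
const defs = new XrplDefinitions(customDefinitionsJson)
const customBlob = encode(tx, defs)
const customJson = decode(customBlob, defs)
```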
"use strict";
var assert = require("assert");
var coretypes_1 = require("./coretypes");
var ledger_hashes_1 = require("./ledger-hashes");
var enums_1 = require("./enums");
var signingData = coretypes_1.binary.signingData, signingClaimData = coretypes_1.binary.signingClaimData, multiSigningData = coretypes_1.binary.multiSigningData, binaryToJSON = coretypes_1.binary.binaryToJSON, serializeObject = coretypes_1.binary.serializeObject;
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.coreTypes = exports.DEFAULT_DEFINITIONS = exports.XrplDefinitionsBase = exports.XrplDefinitions = exports.TRANSACTION_TYPES = exports.decodeLedgerData = exports.decodeQuality = exports.encodeQuality = exports.encodeForMultisigning = exports.encodeForSigningClaim = exports.encodeForSigning = exports.encode = exports.decode = void 0;
const assert = __importStar(require("assert"));
const coretypes_1 = require("./coretypes");
const ledger_hashes_1 = require("./ledger-hashes");
Object.defineProperty(exports, "decodeLedgerData", { enumerable: true, get: function () { return ledger_hashes_1.decodeLedgerData; } });
const enums_1 = require("./enums");
Object.defineProperty(exports, "XrplDefinitionsBase", { enumerable: true, get: function () { return enums_1.XrplDefinitionsBase; } });
Object.defineProperty(exports, "TRANSACTION_TYPES", { enumerable: true, get: function () { return enums_1.TRANSACTION_TYPES; } });
Object.defineProperty(exports, "DEFAULT_DEFINITIONS", { enumerable: true, get: function () { return enums_1.DEFAULT_DEFINITIONS; } });
const xrpl_definitions_1 = require("./enums/xrpl-definitions");
Object.defineProperty(exports, "XrplDefinitions", { enumerable: true, get: function () { return xrpl_definitions_1.XrplDefinitions; } });
const types_1 = require("./types");
Object.defineProperty(exports, "coreTypes", { enumerable: true, get: function () { return types_1.coreTypes; } });
const { signingData, signingClaimData, multiSigningData, binaryToJSON, serializeObject, } = coretypes_1.binary;
/**

@@ -11,8 +44,10 @@ * Decode a transaction

* @param binary hex-string of the encoded transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the JSON representation of the transaction
*/
function decode(binary) {
function decode(binary, definitions) {
assert.ok(typeof binary === 'string', 'binary must be a hex string');
return binaryToJSON(binary);
return binaryToJSON(binary, definitions);
}
exports.decode = decode;
/**

@@ -22,10 +57,13 @@ * Encode a transaction

* @param json The JSON representation of a transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
*
* @returns A hex-string of the encoded transaction
*/
function encode(json) {
function encode(json, definitions) {
assert.ok(typeof json === 'object');
return serializeObject(json)
return serializeObject(json, { definitions })
.toString('hex')
.toUpperCase();
}
exports.encode = encode;
/**

@@ -36,10 +74,14 @@ * Encode a transaction and prepare for signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
function encodeForSigning(json) {
function encodeForSigning(json, definitions) {
assert.ok(typeof json === 'object');
return signingData(json)
return signingData(json, coretypes_1.HashPrefix.transactionSig, {
definitions,
})
.toString('hex')
.toUpperCase();
}
exports.encodeForSigning = encodeForSigning;
/**

@@ -50,2 +92,3 @@ * Encode a transaction and prepare for signing with a claim

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction

@@ -59,2 +102,3 @@ */

}
exports.encodeForSigningClaim = encodeForSigningClaim;
/**

@@ -65,11 +109,14 @@ * Encode a transaction and prepare for multi-signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
function encodeForMultisigning(json, signer) {
function encodeForMultisigning(json, signer, definitions) {
assert.ok(typeof json === 'object');
assert.equal(json['SigningPubKey'], '');
return multiSigningData(json, signer)
const definitionsOpt = definitions ? { definitions } : undefined;
return multiSigningData(json, signer, definitionsOpt)
.toString('hex')
.toUpperCase();
}
exports.encodeForMultisigning = encodeForMultisigning;
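encodeForMultisigning still asserts that SigningPubKey is the empty string before delegating to multiSigningData, and now only builds an opts bag when definitions are actually supplied. Sketch (transaction JSON and signer address are placeholders):

```ts
import { encodeForMultisigning } from 'ripple-binary-codec'

declare const tx: Record<string, unknown>
const signerAddress = 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh' // placeholder

const hexToSign = encodeForMultisigning(
  { ...tx, SigningPubKey: '' }, // must be '' or the assert above throws
  signerAddress,
)
```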
/**

@@ -85,2 +132,3 @@ * Encode a quality value

}
exports.encodeQuality = encodeQuality;
/**

@@ -96,13 +144,3 @@ * Decode a quality value

}
module.exports = {
decode: decode,
encode: encode,
encodeForSigning: encodeForSigning,
encodeForSigningClaim: encodeForSigningClaim,
encodeForMultisigning: encodeForMultisigning,
encodeQuality: encodeQuality,
decodeQuality: decodeQuality,
decodeLedgerData: ledger_hashes_1.decodeLedgerData,
TRANSACTION_TYPES: enums_1.TRANSACTION_TYPES,
};
exports.decodeQuality = decodeQuality;
//# sourceMappingURL=index.js.map
import { Hash256 } from './types/hash-256';
import { JsonObject } from './types/serialized-type';
import * as bigInt from 'big-integer';
import bigInt = require('big-integer');
import { XrplDefinitionsBase } from './enums';
/**

@@ -43,5 +44,7 @@ * Function computing the hash of a transaction tree

* @param binary A serialized ledger header
* @param definitions Type definitions to parse the ledger objects.
* Used if there are non-default ledger objects to decode.
* @returns A JSON object describing a ledger header
*/
declare function decodeLedgerData(binary: string): object;
declare function decodeLedgerData(binary: string, definitions?: XrplDefinitionsBase): object;
export { accountStateHash, transactionTreeHash, ledgerHash, decodeLedgerData };
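decodeLedgerData gains the same optional definitions parameter, which it hands to the BinaryParser it constructs; the default only needs overriding when non-standard ledger objects are involved. Sketch (the serialized header is a placeholder, and the return type is narrowed by hand since the declaration is just `object`):

```ts
import { decodeLedgerData } from 'ripple-binary-codec'

declare const ledgerHeaderHex: string // hypothetical serialized ledger header

const header = decodeLedgerData(ledgerHeaderHex) as { ledger_index: number }
console.log(header.ledger_index) // first field read by the parser (see below)
```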
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.decodeLedgerData = exports.ledgerHash = exports.transactionTreeHash = exports.accountStateHash = void 0;
var assert = require("assert");
var shamap_1 = require("./shamap");
var hash_prefixes_1 = require("./hash-prefixes");
var hashes_1 = require("./hashes");
var binary_1 = require("./binary");
var hash_256_1 = require("./types/hash-256");
var st_object_1 = require("./types/st-object");
var uint_64_1 = require("./types/uint-64");
var uint_32_1 = require("./types/uint-32");
var uint_8_1 = require("./types/uint-8");
var binary_parser_1 = require("./serdes/binary-parser");
var bigInt = require("big-integer");
const assert = __importStar(require("assert"));
const shamap_1 = require("./shamap");
const hash_prefixes_1 = require("./hash-prefixes");
const hashes_1 = require("./hashes");
const binary_1 = require("./binary");
const hash_256_1 = require("./types/hash-256");
const st_object_1 = require("./types/st-object");
const uint_64_1 = require("./types/uint-64");
const uint_32_1 = require("./types/uint-32");
const uint_8_1 = require("./types/uint-8");
const binary_parser_1 = require("./serdes/binary-parser");
const bigInt = require("big-integer");
/**

@@ -24,4 +47,4 @@ * Computes the hash of a list of objects

function computeHash(itemizer, itemsJson) {
var map = new shamap_1.ShaMap();
itemsJson.forEach(function (item) { return map.addItem.apply(map, itemizer(item)); });
const map = new shamap_1.ShaMap();
itemsJson.forEach((item) => map.addItem(...itemizer(item)));
return map.hash();

@@ -37,9 +60,9 @@ }

assert.ok(json.hash);
var index = hash_256_1.Hash256.from(json.hash);
var item = {
hashPrefix: function () {
const index = hash_256_1.Hash256.from(json.hash);
const item = {
hashPrefix() {
return hash_prefixes_1.HashPrefix.transaction;
},
toBytesSink: function (sink) {
var serializer = new binary_1.BinarySerializer(sink);
toBytesSink(sink) {
const serializer = new binary_1.BinarySerializer(sink);
serializer.writeLengthEncoded(st_object_1.STObject.from(json));

@@ -58,9 +81,9 @@ serializer.writeLengthEncoded(st_object_1.STObject.from(json.metaData));

function entryItemizer(json) {
var index = hash_256_1.Hash256.from(json.index);
var bytes = (0, binary_1.serializeObject)(json);
var item = {
hashPrefix: function () {
const index = hash_256_1.Hash256.from(json.index);
const bytes = (0, binary_1.serializeObject)(json);
const item = {
hashPrefix() {
return hash_prefixes_1.HashPrefix.accountStateEntry;
},
toBytesSink: function (sink) {
toBytesSink(sink) {
sink.put(bytes);

@@ -78,3 +101,3 @@ },

function transactionTreeHash(param) {
var itemizer = transactionItemizer;
const itemizer = transactionItemizer;
return computeHash(itemizer, param);

@@ -90,3 +113,3 @@ }

function accountStateHash(param) {
var itemizer = entryItemizer;
const itemizer = entryItemizer;
return computeHash(itemizer, param);

@@ -102,3 +125,3 @@ }

function ledgerHash(header) {
var hash = new hashes_1.Sha512Half();
const hash = new hashes_1.Sha512Half();
hash.put(hash_prefixes_1.HashPrefix.ledgerHeader);

@@ -123,7 +146,9 @@ assert.ok(header.parent_close_time !== undefined);

* @param binary A serialized ledger header
* @param definitions Type definitions to parse the ledger objects.
* Used if there are non-default ledger objects to decode.
* @returns A JSON object describing a ledger header
*/
function decodeLedgerData(binary) {
function decodeLedgerData(binary, definitions) {
assert.ok(typeof binary === 'string', 'binary must be a hex string');
var parser = new binary_parser_1.BinaryParser(binary);
const parser = new binary_parser_1.BinaryParser(binary, definitions);
return {

@@ -130,0 +155,0 @@ ledger_index: parser.readUInt32(),

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.quality = void 0;
var types_1 = require("./types");
var decimal_js_1 = require("decimal.js");
var bigInt = require("big-integer");
var buffer_1 = require("buffer/");
const types_1 = require("./types");
const decimal_js_1 = require("decimal.js");
const bigInt = require("big-integer");
const buffer_1 = require("buffer/");
/**
* class for encoding and decoding quality
*/
var quality = /** @class */ (function () {
function quality() {
}
class quality {
/**

@@ -20,10 +18,10 @@ * Encode quality amount

*/
quality.encode = function (quality) {
var decimal = new decimal_js_1.Decimal(quality);
var exponent = decimal.e - 15;
var qualityString = decimal.times("1e".concat(-exponent)).abs().toString();
var bytes = types_1.coreTypes.UInt64.from(bigInt(qualityString)).toBytes();
static encode(quality) {
const decimal = new decimal_js_1.Decimal(quality);
const exponent = decimal.e - 15;
const qualityString = decimal.times(`1e${-exponent}`).abs().toString();
const bytes = types_1.coreTypes.UInt64.from(bigInt(qualityString)).toBytes();
bytes[0] = exponent + 100;
return bytes;
};
}
/**

@@ -35,11 +33,10 @@ * Decode quality amount

*/
quality.decode = function (quality) {
var bytes = buffer_1.Buffer.from(quality, 'hex').slice(-8);
var exponent = bytes[0] - 100;
var mantissa = new decimal_js_1.Decimal("0x".concat(bytes.slice(1).toString('hex')));
return mantissa.times("1e".concat(exponent));
};
return quality;
}());
static decode(quality) {
const bytes = buffer_1.Buffer.from(quality, 'hex').slice(-8);
const exponent = bytes[0] - 100;
const mantissa = new decimal_js_1.Decimal(`0x${bytes.slice(1).toString('hex')}`);
return mantissa.times(`1e${exponent}`);
}
}
exports.quality = quality;
//# sourceMappingURL=quality.js.map
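The quality codec above stores a value as 8 bytes: the first byte is the decimal exponent offset by 100, the remaining bytes the mantissa. A round-trip sketch from inside the package (the quality string is an arbitrary example; encode is assumed to return a Buffer, per coreTypes.UInt64.toBytes()):

```ts
import { quality } from './quality'

const bytes = quality.encode('195796912.5171664')     // 8-byte value, exponent in byte 0
const decoded = quality.decode(bytes.toString('hex')) // Decimal instance
console.log(decoded.toString())                       // '195796912.5171664'
```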

@@ -1,3 +0,3 @@

import { FieldInstance } from '../enums';
import { SerializedType } from '../types/serialized-type';
import { XrplDefinitionsBase, FieldInstance } from '../enums';
import { type SerializedType } from '../types/serialized-type';
import { Buffer } from 'buffer/';

@@ -9,2 +9,3 @@ /**

private bytes;
definitions: XrplDefinitionsBase;
/**

@@ -14,4 +15,6 @@ * Initialize bytes to a hex string

* @param hexBytes a hex string
* @param definitions Rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
*/
constructor(hexBytes: string);
constructor(hexBytes: string, definitions?: XrplDefinitionsBase);
/**

@@ -18,0 +21,0 @@ * Peek the first byte of the BinaryParser

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryParser = void 0;
var assert = require("assert");
var enums_1 = require("../enums");
var buffer_1 = require("buffer/");
const assert = __importStar(require("assert"));
const enums_1 = require("../enums");
const buffer_1 = require("buffer/");
/**
* BinaryParser is used to compute fields and values from a HexString
*/
var BinaryParser = /** @class */ (function () {
class BinaryParser {
/**

@@ -15,5 +38,8 @@ * Initialize bytes to a hex string

* @param hexBytes a hex string
* @param definitions Rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
*/
function BinaryParser(hexBytes) {
constructor(hexBytes, definitions = enums_1.DEFAULT_DEFINITIONS) {
this.bytes = buffer_1.Buffer.from(hexBytes, 'hex');
this.definitions = definitions;
}

@@ -25,6 +51,6 @@ /**

*/
BinaryParser.prototype.peek = function () {
peek() {
assert.ok(this.bytes.byteLength !== 0);
return this.bytes[0];
};
}
/**

@@ -35,6 +61,6 @@ * Consume the first n bytes of the BinaryParser

*/
BinaryParser.prototype.skip = function (n) {
skip(n) {
assert.ok(n <= this.bytes.byteLength);
this.bytes = this.bytes.slice(n);
};
}
/**

@@ -46,8 +72,8 @@ * read the first n bytes from the BinaryParser

*/
BinaryParser.prototype.read = function (n) {
read(n) {
assert.ok(n <= this.bytes.byteLength);
var slice = this.bytes.slice(0, n);
const slice = this.bytes.slice(0, n);
this.skip(n);
return slice;
};
}
/**

@@ -59,22 +85,22 @@ * Read an integer of given size

*/
BinaryParser.prototype.readUIntN = function (n) {
readUIntN(n) {
assert.ok(0 < n && n <= 4, 'invalid n');
return this.read(n).reduce(function (a, b) { return (a << 8) | b; }) >>> 0;
};
BinaryParser.prototype.readUInt8 = function () {
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0;
}
readUInt8() {
return this.readUIntN(1);
};
BinaryParser.prototype.readUInt16 = function () {
}
readUInt16() {
return this.readUIntN(2);
};
BinaryParser.prototype.readUInt32 = function () {
}
readUInt32() {
return this.readUIntN(4);
};
BinaryParser.prototype.size = function () {
}
size() {
return this.bytes.byteLength;
};
BinaryParser.prototype.end = function (customEnd) {
var length = this.bytes.byteLength;
}
end(customEnd) {
const length = this.bytes.byteLength;
return length === 0 || (customEnd !== undefined && length <= customEnd);
};
}
/**

@@ -85,5 +111,5 @@ * Reads variable length encoded bytes

*/
BinaryParser.prototype.readVariableLength = function () {
readVariableLength() {
return this.read(this.readVariableLengthLength());
};
}
/**

@@ -94,4 +120,4 @@ * Reads the length of the variable length encoded bytes

*/
BinaryParser.prototype.readVariableLengthLength = function () {
var b1 = this.readUInt8();
readVariableLengthLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {

@@ -101,12 +127,12 @@ return b1;

else if (b1 <= 240) {
var b2 = this.readUInt8();
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
}
else if (b1 <= 254) {
var b2 = this.readUInt8();
var b3 = this.readUInt8();
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error('Invalid variable length indicator');
};
}
/**

@@ -117,5 +143,5 @@ * Reads the field ordinal from the BinaryParser

*/
BinaryParser.prototype.readFieldOrdinal = function () {
var type = this.readUInt8();
var nth = type & 15;
readFieldOrdinal() {
let type = this.readUInt8();
let nth = type & 15;
type >>= 4;

@@ -135,3 +161,3 @@ if (type === 0) {

return (type << 16) | nth;
};
}
/**

@@ -142,5 +168,5 @@ * Read the field from the BinaryParser

*/
BinaryParser.prototype.readField = function () {
return enums_1.Field.fromString(this.readFieldOrdinal().toString());
};
readField() {
return this.definitions.field.fromString(this.readFieldOrdinal().toString());
}
/**

@@ -152,5 +178,5 @@ * Read a given type from the BinaryParser

*/
BinaryParser.prototype.readType = function (type) {
readType(type) {
return type.fromParser(this);
};
}
/**

@@ -162,5 +188,5 @@ * Get the type associated with a given field

*/
BinaryParser.prototype.typeForField = function (field) {
typeForField(field) {
return field.associatedType;
};
}
/**

@@ -172,16 +198,16 @@ * Read value of the type specified by field from the BinaryParser

*/
BinaryParser.prototype.readFieldValue = function (field) {
var type = this.typeForField(field);
readFieldValue(field) {
const type = this.typeForField(field);
if (!type) {
throw new Error("unsupported: (".concat(field.name, ", ").concat(field.type.name, ")"));
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
var sizeHint = field.isVariableLengthEncoded
const sizeHint = field.isVariableLengthEncoded
? this.readVariableLengthLength()
: undefined;
var value = type.fromParser(this, sizeHint);
const value = type.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error("fromParser for (".concat(field.name, ", ").concat(field.type.name, ") -> undefined "));
throw new Error(`fromParser for (${field.name}, ${field.type.name}) -> undefined `);
}
return value;
};
}
/**

@@ -192,9 +218,8 @@ * Get the next field and value from the BinaryParser

*/
BinaryParser.prototype.readFieldAndValue = function () {
var field = this.readField();
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
};
return BinaryParser;
}());
}
}
exports.BinaryParser = BinaryParser;
//# sourceMappingURL=binary-parser.js.map
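
Two details of the parser above are easy to miss in the diff: readField now resolves ordinals through the injected definitions.field (the new constructor parameter, defaulting to DEFAULT_DEFINITIONS), and the ordinal/length decoding is plain integer arithmetic. A standalone sketch of that arithmetic follows; the uncommon-code branches of readFieldOrdinal are elided in the diff and reconstructed here from the usual XRPL field-ID rules, so treat those two lines as an assumption.

type NextByte = () => number   // reads one byte, like BinaryParser.readUInt8

function readFieldOrdinal(next: NextByte): number {
  let type = next()
  let nth = type & 15
  type >>= 4
  if (type === 0) type = next()   // assumed: uncommon type code spills into the next byte
  if (nth === 0) nth = next()     // assumed: uncommon field code spills into the next byte
  return (type << 16) | nth
}

function readVariableLengthLength(next: NextByte): number {
  const b1 = next()
  if (b1 <= 192) return b1                             // 1-byte length: 0..192
  if (b1 <= 240) {
    const b2 = next()                                  // 2-byte length: 193..12480
    return 193 + (b1 - 193) * 256 + b2
  }
  if (b1 <= 254) {
    const b2 = next()                                  // 3-byte length: 12481..918744
    const b3 = next()
    return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3
  }
  throw new Error('Invalid variable length indicator')
}
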
import { FieldInstance } from '../enums';
import { SerializedType } from '../types/serialized-type';
import { type SerializedType } from '../types/serialized-type';
import { Buffer } from 'buffer/';

@@ -4,0 +4,0 @@ /**

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinarySerializer = exports.BytesList = void 0;
var assert = require("assert");
var buffer_1 = require("buffer/");
const assert = __importStar(require("assert"));
const buffer_1 = require("buffer/");
/**
* Bytes list is a collection of buffer objects
*/
var BytesList = /** @class */ (function () {
function BytesList() {
class BytesList {
constructor() {
this.bytesArray = [];

@@ -18,5 +41,5 @@ }

*/
BytesList.prototype.getLength = function () {
getLength() {
return buffer_1.Buffer.concat(this.bytesArray).byteLength;
};
}
/**

@@ -28,7 +51,7 @@ * Put bytes in the BytesList

*/
BytesList.prototype.put = function (bytesArg) {
var bytes = buffer_1.Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
put(bytesArg) {
const bytes = buffer_1.Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
this.bytesArray.push(bytes);
return this;
};
}
/**

@@ -39,13 +62,12 @@ * Write this BytesList to the back of another bytes list

*/
BytesList.prototype.toBytesSink = function (list) {
toBytesSink(list) {
list.put(this.toBytes());
};
BytesList.prototype.toBytes = function () {
}
toBytes() {
return buffer_1.Buffer.concat(this.bytesArray);
};
BytesList.prototype.toHex = function () {
}
toHex() {
return this.toBytes().toString('hex').toUpperCase();
};
return BytesList;
}());
}
}
exports.BytesList = BytesList;

@@ -55,4 +77,4 @@ /**

*/
var BinarySerializer = /** @class */ (function () {
function BinarySerializer(sink) {
class BinarySerializer {
constructor(sink) {
this.sink = new BytesList();

@@ -66,5 +88,5 @@ this.sink = sink;

*/
BinarySerializer.prototype.write = function (value) {
write(value) {
value.toBytesSink(this.sink);
};
}
/**

@@ -75,5 +97,5 @@ * Write bytes to this BinarySerializer

*/
BinarySerializer.prototype.put = function (bytes) {
put(bytes) {
this.sink.put(bytes);
};
}
/**

@@ -85,5 +107,5 @@ * Write a value of a given type to this BinarySerializer

*/
BinarySerializer.prototype.writeType = function (type, value) {
writeType(type, value) {
this.write(type.from(value));
};
}
/**

@@ -94,5 +116,5 @@ * Write BytesList to this BinarySerializer

*/
BinarySerializer.prototype.writeBytesList = function (bl) {
writeBytesList(bl) {
bl.toBytesSink(this.sink);
};
}
/**

@@ -103,4 +125,4 @@ * Calculate the header of Variable Length encoded bytes

*/
BinarySerializer.prototype.encodeVariableLength = function (length) {
var lenBytes = buffer_1.Buffer.alloc(3);
encodeVariableLength(length) {
const lenBytes = buffer_1.Buffer.alloc(3);
if (length <= 192) {

@@ -124,3 +146,3 @@ lenBytes[0] = length;

throw new Error('Overflow error');
};
}
/**

@@ -132,5 +154,4 @@ * Write field and value to BinarySerializer

*/
BinarySerializer.prototype.writeFieldAndValue = function (field, value, isUnlModifyWorkaround) {
if (isUnlModifyWorkaround === void 0) { isUnlModifyWorkaround = false; }
var associatedValue = field.associatedType.from(value);
writeFieldAndValue(field, value, isUnlModifyWorkaround = false) {
const associatedValue = field.associatedType.from(value);
assert.ok(associatedValue.toBytesSink !== undefined);

@@ -145,3 +166,3 @@ assert.ok(field.name !== undefined);

}
};
}
/**

@@ -152,5 +173,4 @@ * Write a variable length encoded value to the BinarySerializer

*/
BinarySerializer.prototype.writeLengthEncoded = function (value, isUnlModifyWorkaround) {
if (isUnlModifyWorkaround === void 0) { isUnlModifyWorkaround = false; }
var bytes = new BytesList();
writeLengthEncoded(value, isUnlModifyWorkaround = false) {
const bytes = new BytesList();
if (!isUnlModifyWorkaround) {

@@ -162,6 +182,5 @@ // this part doesn't happen for the Account field in a UNLModify transaction

this.writeBytesList(bytes);
};
return BinarySerializer;
}());
}
}
exports.BinarySerializer = BinarySerializer;
//# sourceMappingURL=binary-serializer.js.map
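
The diff only shows the 1-byte branch of encodeVariableLength; the remaining branches below are sketched as the inverse of readVariableLengthLength, under the assumption that 918744 is the format's maximum variable length.

import { Buffer } from 'buffer/'

function encodeVariableLength(length: number): Buffer {
  if (length <= 192) {
    return Buffer.from([length])                                        // 1 byte
  }
  if (length <= 12480) {
    const n = length - 193
    return Buffer.from([193 + (n >>> 8), n & 0xff])                     // 2 bytes
  }
  if (length <= 918744) {
    const n = length - 12481
    return Buffer.from([241 + (n >>> 16), (n >>> 8) & 0xff, n & 0xff])  // 3 bytes
  }
  throw new Error('Overflow error')
}

// Round-trips with the decoder sketched earlier, e.g. 300 -> [0xC1, 0x6B] -> 300
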
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.ShaMapLeaf = exports.ShaMapNode = exports.ShaMap = void 0;
var assert_1 = require("assert");
var types_1 = require("./types");
var hash_prefixes_1 = require("./hash-prefixes");
var hashes_1 = require("./hashes");
var buffer_1 = require("buffer/");
const assert_1 = require("assert");
const types_1 = require("./types");
const hash_prefixes_1 = require("./hash-prefixes");
const hashes_1 = require("./hashes");
const buffer_1 = require("buffer/");
/**
* Abstract class describing a SHAMapNode
*/
var ShaMapNode = /** @class */ (function () {
function ShaMapNode() {
}
return ShaMapNode;
}());
class ShaMapNode {
}
exports.ShaMapNode = ShaMapNode;

@@ -36,9 +18,7 @@ /**

*/
var ShaMapLeaf = /** @class */ (function (_super) {
__extends(ShaMapLeaf, _super);
function ShaMapLeaf(index, item) {
var _this = _super.call(this) || this;
_this.index = index;
_this.item = item;
return _this;
class ShaMapLeaf extends ShaMapNode {
constructor(index, item) {
super();
this.index = index;
this.item = item;
}

@@ -48,11 +28,11 @@ /**

*/
ShaMapLeaf.prototype.isLeaf = function () {
isLeaf() {
return true;
};
}
/**
* @returns false as ShaMapLeaf is not an inner node
*/
ShaMapLeaf.prototype.isInner = function () {
isInner() {
return false;
};
}
/**

@@ -63,5 +43,5 @@ * Get the prefix of the this.item

*/
ShaMapLeaf.prototype.hashPrefix = function () {
hashPrefix() {
return this.item === undefined ? buffer_1.Buffer.alloc(0) : this.item.hashPrefix();
};
}
/**

@@ -72,7 +52,7 @@ * Hash the bytes representation of this

*/
ShaMapLeaf.prototype.hash = function () {
var hash = hashes_1.Sha512Half.put(this.hashPrefix());
hash() {
const hash = hashes_1.Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
};
}
/**

@@ -82,3 +62,3 @@ * Write the bytes representation of this to a BytesList

*/
ShaMapLeaf.prototype.toBytesSink = function (list) {
toBytesSink(list) {
if (this.item !== undefined) {

@@ -88,5 +68,4 @@ this.item.toBytesSink(list);

this.index.toBytesSink(list);
};
return ShaMapLeaf;
}(ShaMapNode));
}
}
exports.ShaMapLeaf = ShaMapLeaf;

@@ -96,11 +75,8 @@ /**

*/
var ShaMapInner = /** @class */ (function (_super) {
__extends(ShaMapInner, _super);
function ShaMapInner(depth) {
if (depth === void 0) { depth = 0; }
var _this = _super.call(this) || this;
_this.depth = depth;
_this.slotBits = 0;
_this.branches = Array(16);
return _this;
class ShaMapInner extends ShaMapNode {
constructor(depth = 0) {
super();
this.depth = depth;
this.slotBits = 0;
this.branches = Array(16);
}

@@ -110,11 +86,11 @@ /**

*/
ShaMapInner.prototype.isInner = function () {
isInner() {
return true;
};
}
/**
* @returns false as ShaMapInner is not a leaf node
*/
ShaMapInner.prototype.isLeaf = function () {
isLeaf() {
return false;
};
}
/**

@@ -125,5 +101,5 @@ * Get the hash prefix for this node

*/
ShaMapInner.prototype.hashPrefix = function () {
hashPrefix() {
return hash_prefixes_1.HashPrefix.innerNode;
};
}
/**

@@ -135,12 +111,12 @@ * Set a branch of this node to be another node

*/
ShaMapInner.prototype.setBranch = function (slot, branch) {
setBranch(slot, branch) {
this.slotBits = this.slotBits | (1 << slot);
this.branches[slot] = branch;
};
}
/**
* @returns true if node is empty
*/
ShaMapInner.prototype.empty = function () {
empty() {
return this.slotBits === 0;
};
}
/**

@@ -151,10 +127,10 @@ * Compute the hash of this node

*/
ShaMapInner.prototype.hash = function () {
hash() {
if (this.empty()) {
return types_1.coreTypes.Hash256.ZERO_256;
}
var hash = hashes_1.Sha512Half.put(this.hashPrefix());
const hash = hashes_1.Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
};
}
/**

@@ -165,9 +141,11 @@ * Writes the bytes representation of this node to a BytesList

*/
ShaMapInner.prototype.toBytesSink = function (list) {
for (var i = 0; i < this.branches.length; i++) {
var branch = this.branches[i];
var hash = branch ? branch.hash() : types_1.coreTypes.Hash256.ZERO_256;
toBytesSink(list) {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch
? branch.hash()
: types_1.coreTypes.Hash256.ZERO_256;
hash.toBytesSink(list);
}
};
}
/**

@@ -180,32 +158,28 @@ * Add item to the SHAMap

*/
ShaMapInner.prototype.addItem = function (index, item, leaf) {
addItem(index, item, leaf) {
assert_1.strict.ok(index !== undefined);
var nibble = index.nibblet(this.depth);
var existing = this.branches[nibble];
if (existing === undefined) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
if (index !== undefined) {
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
if (existing === undefined) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
}
else if (existing instanceof ShaMapLeaf) {
const newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, undefined, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
}
else if (existing instanceof ShaMapInner) {
existing.addItem(index, item, leaf);
}
else {
throw new Error('invalid ShaMap.addItem call');
}
}
else if (existing instanceof ShaMapLeaf) {
var newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, undefined, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
}
else if (existing instanceof ShaMapInner) {
existing.addItem(index, item, leaf);
}
else {
throw new Error('invalid ShaMap.addItem call');
}
};
return ShaMapInner;
}(ShaMapNode));
var ShaMap = /** @class */ (function (_super) {
__extends(ShaMap, _super);
function ShaMap() {
return _super !== null && _super.apply(this, arguments) || this;
}
return ShaMap;
}(ShaMapInner));
}
class ShaMap extends ShaMapInner {
}
exports.ShaMap = ShaMap;
//# sourceMappingURL=shamap.js.map
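
A simplified view of how addItem above chooses where an item goes: at depth d the inner node looks at the d-th nibble of the item's 256-bit index, i.e. the d-th hex digit of the index string. When two items collide on a nibble, the existing leaf is replaced by a new ShaMapInner one level deeper and both items are re-inserted, which is the branch structure visible in the diff.

// Which slot (0-15) an index takes at a given depth; equivalent to Hash256.nibblet(depth)
function slotAtDepth(indexHex: string, depth: number): number {
  return parseInt(indexHex[depth], 16)
}

// Indexes 'AB12...' and 'AB7F...' share slots at depths 0 and 1 ('A', 'B') and first
// diverge at depth 2, so that is where addItem introduces the extra inner node.
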
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.AccountID = void 0;
var ripple_address_codec_1 = require("ripple-address-codec");
var hash_160_1 = require("./hash-160");
var buffer_1 = require("buffer/");
var HEX_REGEX = /^[A-F0-9]{40}$/;
const ripple_address_codec_1 = require("ripple-address-codec");
const hash_160_1 = require("./hash-160");
const buffer_1 = require("buffer/");
const HEX_REGEX = /^[A-F0-9]{40}$/;
/**
* Class defining how to encode and decode an AccountID
*/
var AccountID = /** @class */ (function (_super) {
__extends(AccountID, _super);
function AccountID(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : AccountID.defaultAccountID.bytes) || this;
class AccountID extends hash_160_1.Hash160 {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : AccountID.defaultAccountID.bytes);
}

@@ -37,3 +21,3 @@ /**

*/
AccountID.from = function (value) {
static from(value) {
if (value instanceof AccountID) {

@@ -51,3 +35,3 @@ return value;

throw new Error('Cannot construct AccountID from value given');
};
}
/**

@@ -59,5 +43,5 @@ * Defines how to build an AccountID from a base58 r-Address

*/
AccountID.fromBase58 = function (value) {
static fromBase58(value) {
if ((0, ripple_address_codec_1.isValidXAddress)(value)) {
var classic = (0, ripple_address_codec_1.xAddressToClassicAddress)(value);
const classic = (0, ripple_address_codec_1.xAddressToClassicAddress)(value);
if (classic.tag !== false)

@@ -68,3 +52,3 @@ throw new Error('Only allowed to have tag on Account or Destination');

return new AccountID(buffer_1.Buffer.from((0, ripple_address_codec_1.decodeAccountID)(value)));
};
}
/**

@@ -75,5 +59,5 @@ * Overload of toJSON

*/
AccountID.prototype.toJSON = function () {
toJSON() {
return this.toBase58();
};
}
/**

@@ -84,11 +68,10 @@ * Defines how to encode AccountID into a base58 address

*/
AccountID.prototype.toBase58 = function () {
toBase58() {
/* eslint-disable @typescript-eslint/no-explicit-any */
return (0, ripple_address_codec_1.encodeAccountID)(this.bytes);
/* eslint-enable @typescript-eslint/no-explicit-any */
};
AccountID.defaultAccountID = new AccountID(buffer_1.Buffer.alloc(20));
return AccountID;
}(hash_160_1.Hash160));
}
}
exports.AccountID = AccountID;
AccountID.defaultAccountID = new AccountID(buffer_1.Buffer.alloc(20));
//# sourceMappingURL=account-id.js.map
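
A small usage sketch for AccountID.from above: it accepts an existing AccountID, a 40-character hex account ID, or a base58 address (X-addresses only when they carry no tag). The deep dist import path simply follows the file layout in this diff; ACCOUNT_ZERO is used because its encodings are well known.

import { AccountID } from 'ripple-binary-codec/dist/types/account-id'

const fromBase58 = AccountID.from('rrrrrrrrrrrrrrrrrrrrrhoLvTp')             // ACCOUNT_ZERO
const fromHex = AccountID.from('0000000000000000000000000000000000000000')   // same 20 bytes
// fromBase58.toHex() === fromHex.toHex(), and toJSON() re-encodes both as the r-address
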
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Amount = void 0;
var decimal_js_1 = require("decimal.js");
var binary_parser_1 = require("../serdes/binary-parser");
var account_id_1 = require("./account-id");
var currency_1 = require("./currency");
var serialized_type_1 = require("./serialized-type");
var bigInt = require("big-integer");
var buffer_1 = require("buffer/");
const decimal_js_1 = require("decimal.js");
const binary_parser_1 = require("../serdes/binary-parser");
const account_id_1 = require("./account-id");
const currency_1 = require("./currency");
const serialized_type_1 = require("./serialized-type");
const bigInt = require("big-integer");
const buffer_1 = require("buffer/");
/**
* Constants for validating amounts
*/
var MIN_IOU_EXPONENT = -96;
var MAX_IOU_EXPONENT = 80;
var MAX_IOU_PRECISION = 16;
var MAX_DROPS = new decimal_js_1.Decimal('1e17');
var MIN_XRP = new decimal_js_1.Decimal('1e-6');
var mask = bigInt(0x00000000ffffffff);
const MIN_IOU_EXPONENT = -96;
const MAX_IOU_EXPONENT = 80;
const MAX_IOU_PRECISION = 16;
const MAX_DROPS = new decimal_js_1.Decimal('1e17');
const MIN_XRP = new decimal_js_1.Decimal('1e-6');
const mask = bigInt(0x00000000ffffffff);
/**

@@ -46,3 +31,3 @@ * decimal.js configuration for Amount IOUs

function isAmountObject(arg) {
var keys = Object.keys(arg).sort();
const keys = Object.keys(arg).sort();
return (keys.length === 3 &&

@@ -56,6 +41,5 @@ keys[0] === 'currency' &&

*/
var Amount = /** @class */ (function (_super) {
__extends(Amount, _super);
function Amount(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Amount.defaultAmount.bytes) || this;
class Amount extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : Amount.defaultAmount.bytes);
}

@@ -69,11 +53,11 @@ /**

*/
Amount.from = function (value) {
static from(value) {
if (value instanceof Amount) {
return value;
}
var amount = buffer_1.Buffer.alloc(8);
let amount = buffer_1.Buffer.alloc(8);
if (typeof value === 'string') {
Amount.assertXrpIsValid(value);
var number = bigInt(value);
var intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
const number = bigInt(value);
const intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0);

@@ -86,3 +70,3 @@ intBuf[1].writeUInt32BE(Number(number.and(mask)), 0);

if (isAmountObject(value)) {
var number = new decimal_js_1.Decimal(value.value);
const number = new decimal_js_1.Decimal(value.value);
Amount.assertIouIsValid(number);

@@ -93,8 +77,8 @@ if (number.isZero()) {

else {
var integerNumberString = number
.times("1e".concat(-(number.e - 15)))
const integerNumberString = number
.times(`1e${-(number.e - 15)}`)
.abs()
.toString();
var num = bigInt(integerNumberString);
var intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
const num = bigInt(integerNumberString);
const intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(num.shiftRight(32)), 0);

@@ -107,13 +91,13 @@ intBuf[1].writeUInt32BE(Number(num.and(mask)), 0);

}
var exponent = number.e - 15;
var exponentByte = 97 + exponent;
const exponent = number.e - 15;
const exponentByte = 97 + exponent;
amount[0] |= exponentByte >>> 2;
amount[1] |= (exponentByte & 0x03) << 6;
}
var currency = currency_1.Currency.from(value.currency).toBytes();
var issuer = account_id_1.AccountID.from(value.issuer).toBytes();
const currency = currency_1.Currency.from(value.currency).toBytes();
const issuer = account_id_1.AccountID.from(value.issuer).toBytes();
return new Amount(buffer_1.Buffer.concat([amount, currency, issuer]));
}
throw new Error('Invalid type to construct an Amount');
};
}
/**

@@ -125,7 +109,7 @@ * Read an amount from a BinaryParser

*/
Amount.fromParser = function (parser) {
var isXRP = parser.peek() & 0x80;
var numBytes = isXRP ? 48 : 8;
static fromParser(parser) {
const isXRP = parser.peek() & 0x80;
const numBytes = isXRP ? 48 : 8;
return new Amount(parser.read(numBytes));
};
}
/**

@@ -136,26 +120,26 @@ * Get the JSON representation of this Amount

*/
Amount.prototype.toJSON = function () {
toJSON() {
if (this.isNative()) {
var bytes = this.bytes;
var isPositive = bytes[0] & 0x40;
var sign = isPositive ? '' : '-';
const bytes = this.bytes;
const isPositive = bytes[0] & 0x40;
const sign = isPositive ? '' : '-';
bytes[0] &= 0x3f;
var msb = bigInt(bytes.slice(0, 4).readUInt32BE(0));
var lsb = bigInt(bytes.slice(4).readUInt32BE(0));
var num = msb.shiftLeft(32).or(lsb);
return "".concat(sign).concat(num.toString());
const msb = bigInt(bytes.slice(0, 4).readUInt32BE(0));
const lsb = bigInt(bytes.slice(4).readUInt32BE(0));
const num = msb.shiftLeft(32).or(lsb);
return `${sign}${num.toString()}`;
}
else {
var parser = new binary_parser_1.BinaryParser(this.toString());
var mantissa = parser.read(8);
var currency = currency_1.Currency.fromParser(parser);
var issuer = account_id_1.AccountID.fromParser(parser);
var b1 = mantissa[0];
var b2 = mantissa[1];
var isPositive = b1 & 0x40;
var sign = isPositive ? '' : '-';
var exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
const parser = new binary_parser_1.BinaryParser(this.toString());
const mantissa = parser.read(8);
const currency = currency_1.Currency.fromParser(parser);
const issuer = account_id_1.AccountID.fromParser(parser);
const b1 = mantissa[0];
const b2 = mantissa[1];
const isPositive = b1 & 0x40;
const sign = isPositive ? '' : '-';
const exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
mantissa[0] = 0;
mantissa[1] &= 0x3f;
var value = new decimal_js_1.Decimal("".concat(sign, "0x").concat(mantissa.toString('hex'))).times("1e".concat(exponent));
const value = new decimal_js_1.Decimal(`${sign}0x${mantissa.toString('hex')}`).times(`1e${exponent}`);
Amount.assertIouIsValid(value);

@@ -168,3 +152,3 @@ return {

}
};
}
/**

@@ -176,13 +160,13 @@ * Validate XRP amount

*/
Amount.assertXrpIsValid = function (amount) {
static assertXrpIsValid(amount) {
if (amount.indexOf('.') !== -1) {
throw new Error("".concat(amount.toString(), " is an illegal amount"));
throw new Error(`${amount.toString()} is an illegal amount`);
}
var decimal = new decimal_js_1.Decimal(amount);
const decimal = new decimal_js_1.Decimal(amount);
if (!decimal.isZero()) {
if (decimal.lt(MIN_XRP) || decimal.gt(MAX_DROPS)) {
throw new Error("".concat(amount.toString(), " is an illegal amount"));
throw new Error(`${amount.toString()} is an illegal amount`);
}
}
};
}
/**

@@ -194,6 +178,6 @@ * Validate IOU.value amount

*/
Amount.assertIouIsValid = function (decimal) {
static assertIouIsValid(decimal) {
if (!decimal.isZero()) {
var p = decimal.precision();
var e = decimal.e - 15;
const p = decimal.precision();
const e = decimal.e - 15;
if (p > MAX_IOU_PRECISION ||

@@ -206,3 +190,3 @@ e > MAX_IOU_EXPONENT ||

}
};
}
/**

@@ -215,5 +199,5 @@ * Ensure that the value after being multiplied by the exponent does not

*/
Amount.verifyNoDecimal = function (decimal) {
var integerNumberString = decimal
.times("1e".concat(-(decimal.e - 15)))
static verifyNoDecimal(decimal) {
const integerNumberString = decimal
.times(`1e${-(decimal.e - 15)}`)
.abs()

@@ -224,3 +208,3 @@ .toString();

}
};
}
/**

@@ -231,9 +215,8 @@ * Test if this amount is in units of Native Currency(XRP)

*/
Amount.prototype.isNative = function () {
isNative() {
return (this.bytes[0] & 0x80) === 0;
};
Amount.defaultAmount = new Amount(buffer_1.Buffer.from('4000000000000000', 'hex'));
return Amount;
}(serialized_type_1.SerializedType));
}
}
exports.Amount = Amount;
Amount.defaultAmount = new Amount(buffer_1.Buffer.from('4000000000000000', 'hex'));
//# sourceMappingURL=amount.js.map
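
A condensed sketch of the native branch of Amount.from above: the drop count is written as a 64-bit big-endian integer split into two 32-bit halves, then the 0x40 "positive" bit is set on byte 0 while the 0x80 bit stays clear, which is exactly what isNative tests.

import bigInt = require('big-integer')
import { Buffer } from 'buffer/'

function encodeDrops(drops: string): Buffer {
  const amount = Buffer.alloc(8)
  const num = bigInt(drops)
  amount.writeUInt32BE(Number(num.shiftRight(32)), 0)           // high 32 bits
  amount.writeUInt32BE(Number(num.and(bigInt(0xffffffff))), 4)  // low 32 bits
  amount[0] |= 0x40                                             // sign bit: positive
  return amount
}

// encodeDrops('1000000').toString('hex') === '40000000000f4240'  (1 XRP in drops)
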
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Blob = void 0;
var serialized_type_1 = require("./serialized-type");
var buffer_1 = require("buffer/");
const serialized_type_1 = require("./serialized-type");
const buffer_1 = require("buffer/");
/**
* Variable length encoded type
*/
var Blob = /** @class */ (function (_super) {
__extends(Blob, _super);
function Blob(bytes) {
return _super.call(this, bytes) || this;
class Blob extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes);
}

@@ -36,5 +20,5 @@ /**

*/
Blob.fromParser = function (parser, hint) {
static fromParser(parser, hint) {
return new Blob(parser.read(hint));
};
}
/**

@@ -46,3 +30,3 @@ * Create a Blob object from a hex-string

*/
Blob.from = function (value) {
static from(value) {
if (value instanceof Blob) {

@@ -55,6 +39,5 @@ return value;

throw new Error('Cannot construct Blob from value given');
};
return Blob;
}(serialized_type_1.SerializedType));
}
}
exports.Blob = Blob;
//# sourceMappingURL=blob.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Currency = void 0;
var hash_160_1 = require("./hash-160");
var buffer_1 = require("buffer/");
var XRP_HEX_REGEX = /^0{40}$/;
var ISO_REGEX = /^[A-Z0-9a-z?!@#$%^&*(){}[\]|]{3}$/;
var HEX_REGEX = /^[A-F0-9]{40}$/;
const hash_160_1 = require("./hash-160");
const buffer_1 = require("buffer/");
const XRP_HEX_REGEX = /^0{40}$/;
const ISO_REGEX = /^[A-Z0-9a-z?!@#$%^&*(){}[\]|]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/;
// eslint-disable-next-line no-control-regex
var STANDARD_FORMAT_HEX_REGEX = /^0{24}[\x00-\x7F]{6}0{10}$/;
const STANDARD_FORMAT_HEX_REGEX = /^0{24}[\x00-\x7F]{6}0{10}$/;
/**

@@ -30,5 +15,5 @@ * Convert an ISO code to a currency bytes representation

function isoToBytes(iso) {
var bytes = buffer_1.Buffer.alloc(20);
const bytes = buffer_1.Buffer.alloc(20);
if (iso !== 'XRP') {
var isoBytes = iso.split('').map(function (c) { return c.charCodeAt(0); });
const isoBytes = iso.split('').map((c) => c.charCodeAt(0));
bytes.set(isoBytes, 12);

@@ -45,3 +30,3 @@ }

function isoCodeFromHex(code) {
var iso = code.toString();
const iso = code.toString();
if (iso === 'XRP') {

@@ -86,3 +71,3 @@ return null;

if (!isValidRepresentation(input)) {
throw new Error("Unsupported Currency representation: ".concat(input));
throw new Error(`Unsupported Currency representation: ${input}`);
}

@@ -94,17 +79,15 @@ return input.length === 3 ? isoToBytes(input) : buffer_1.Buffer.from(input, 'hex');

*/
var Currency = /** @class */ (function (_super) {
__extends(Currency, _super);
function Currency(byteBuf) {
var _this = _super.call(this, byteBuf !== null && byteBuf !== void 0 ? byteBuf : Currency.XRP.bytes) || this;
var hex = _this.bytes.toString('hex');
class Currency extends hash_160_1.Hash160 {
constructor(byteBuf) {
super(byteBuf !== null && byteBuf !== void 0 ? byteBuf : Currency.XRP.bytes);
const hex = this.bytes.toString('hex');
if (XRP_HEX_REGEX.test(hex)) {
_this._iso = 'XRP';
this._iso = 'XRP';
}
else if (STANDARD_FORMAT_HEX_REGEX.test(hex)) {
_this._iso = isoCodeFromHex(_this.bytes.slice(12, 15));
this._iso = isoCodeFromHex(this.bytes.slice(12, 15));
}
else {
_this._iso = null;
this._iso = null;
}
return _this;
}

@@ -116,5 +99,5 @@ /**

*/
Currency.prototype.iso = function () {
iso() {
return this._iso;
};
}
/**

@@ -125,3 +108,3 @@ * Constructs a Currency object

*/
Currency.from = function (value) {
static from(value) {
if (value instanceof Currency) {

@@ -134,3 +117,3 @@ return value;

throw new Error('Cannot construct Currency from value given');
};
}
/**

@@ -141,4 +124,4 @@ * Gets the JSON representation of a currency

*/
Currency.prototype.toJSON = function () {
var iso = this.iso();
toJSON() {
const iso = this.iso();
if (iso !== null) {

@@ -148,7 +131,6 @@ return iso;

return this.bytes.toString('hex').toUpperCase();
};
Currency.XRP = new Currency(buffer_1.Buffer.alloc(20));
return Currency;
}(hash_160_1.Hash160));
}
}
exports.Currency = Currency;
Currency.XRP = new Currency(buffer_1.Buffer.alloc(20));
//# sourceMappingURL=currency.js.map
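
A sketch of the standard-currency layout used by isoToBytes above: a 3-character ISO code lands in bytes 12-14 of a 20-byte field, while 'XRP' is special-cased to all zeroes (which is why XRP_HEX_REGEX is forty zeros).

import { Buffer } from 'buffer/'

function isoToBytes(iso: string): Buffer {
  const bytes = Buffer.alloc(20)
  if (iso !== 'XRP') {
    bytes.set(iso.split('').map((c) => c.charCodeAt(0)), 12)   // ASCII codes at offset 12
  }
  return bytes
}

// isoToBytes('USD').toString('hex') === '0000000000000000000000005553440000000000'
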

@@ -10,3 +10,9 @@ import { Hash } from './hash';

constructor(bytes: Buffer);
/**
* Get the hex representation of a hash-128 bytes, allowing unset
*
* @returns hex String of this.bytes
*/
toHex(): string;
}
export { Hash128 };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash128 = void 0;
var hash_1 = require("./hash");
var buffer_1 = require("buffer/");
const hash_1 = require("./hash");
const buffer_1 = require("buffer/");
/**
* Hash with a width of 128 bits
*/
var Hash128 = /** @class */ (function (_super) {
__extends(Hash128, _super);
function Hash128(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash128.ZERO_128.bytes) || this;
class Hash128 extends hash_1.Hash {
constructor(bytes) {
if (bytes && bytes.byteLength === 0) {
bytes = Hash128.ZERO_128.bytes;
}
super(bytes !== null && bytes !== void 0 ? bytes : Hash128.ZERO_128.bytes);
}
Hash128.width = 16;
Hash128.ZERO_128 = new Hash128(buffer_1.Buffer.alloc(Hash128.width));
return Hash128;
}(hash_1.Hash));
/**
* Get the hex representation of a hash-128 bytes, allowing unset
*
* @returns hex String of this.bytes
*/
toHex() {
const hex = this.toBytes().toString('hex').toUpperCase();
if (/^0+$/.exec(hex)) {
return '';
}
return hex;
}
}
exports.Hash128 = Hash128;
Hash128.width = 16;
Hash128.ZERO_128 = new Hash128(buffer_1.Buffer.alloc(Hash128.width));
//# sourceMappingURL=hash-128.js.map
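
The behavioural change here is the new toHex override: an all-zero Hash128 now renders as the empty string instead of 32 zero characters, so an "unset" 128-bit field (EmailHash, for example) can round-trip as ''. A small check, assuming the dist paths shown in this diff:

import { Buffer } from 'buffer/'
import { Hash128 } from 'ripple-binary-codec/dist/types/hash-128'

new Hash128(Buffer.alloc(16)).toHex()                        // => ''
new Hash128(Buffer.from('11'.repeat(16), 'hex')).toHex()     // => '111...1' (32 hex chars)
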
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash160 = void 0;
var hash_1 = require("./hash");
var buffer_1 = require("buffer/");
const hash_1 = require("./hash");
const buffer_1 = require("buffer/");
/**
* Hash with a width of 160 bits
*/
var Hash160 = /** @class */ (function (_super) {
__extends(Hash160, _super);
function Hash160(bytes) {
class Hash160 extends hash_1.Hash {
constructor(bytes) {
if (bytes && bytes.byteLength === 0) {
bytes = Hash160.ZERO_160.bytes;
}
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash160.ZERO_160.bytes) || this;
super(bytes !== null && bytes !== void 0 ? bytes : Hash160.ZERO_160.bytes);
}
Hash160.width = 20;
Hash160.ZERO_160 = new Hash160(buffer_1.Buffer.alloc(Hash160.width));
return Hash160;
}(hash_1.Hash));
}
exports.Hash160 = Hash160;
Hash160.width = 20;
Hash160.ZERO_160 = new Hash160(buffer_1.Buffer.alloc(Hash160.width));
//# sourceMappingURL=hash-160.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash256 = void 0;
var hash_1 = require("./hash");
var buffer_1 = require("buffer/");
const hash_1 = require("./hash");
const buffer_1 = require("buffer/");
/**
* Hash with a width of 256 bits
*/
var Hash256 = /** @class */ (function (_super) {
__extends(Hash256, _super);
function Hash256(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash256.ZERO_256.bytes) || this;
class Hash256 extends hash_1.Hash {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : Hash256.ZERO_256.bytes);
}
Hash256.width = 32;
Hash256.ZERO_256 = new Hash256(buffer_1.Buffer.alloc(Hash256.width));
return Hash256;
}(hash_1.Hash));
}
exports.Hash256 = Hash256;
Hash256.width = 32;
Hash256.ZERO_256 = new Hash256(buffer_1.Buffer.alloc(Hash256.width));
//# sourceMappingURL=hash-256.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash = void 0;
var serialized_type_1 = require("./serialized-type");
var buffer_1 = require("buffer/");
const serialized_type_1 = require("./serialized-type");
const buffer_1 = require("buffer/");
/**
* Base class defining how to encode and decode hashes
*/
var Hash = /** @class */ (function (_super) {
__extends(Hash, _super);
function Hash(bytes) {
var _this = _super.call(this, bytes) || this;
if (_this.bytes.byteLength !== _this.constructor.width) {
throw new Error("Invalid Hash length ".concat(_this.bytes.byteLength));
class Hash extends serialized_type_1.Comparable {
constructor(bytes) {
super(bytes);
if (this.bytes.byteLength !== this.constructor.width) {
throw new Error(`Invalid Hash length ${this.bytes.byteLength}`);
}
return _this;
}

@@ -38,3 +21,3 @@ /**

*/
Hash.from = function (value) {
static from(value) {
if (value instanceof this) {

@@ -47,3 +30,3 @@ return value;

throw new Error('Cannot construct Hash from given value');
};
}
/**

@@ -55,5 +38,5 @@ * Read a Hash object from a BinaryParser

*/
Hash.fromParser = function (parser, hint) {
static fromParser(parser, hint) {
return new this(parser.read(hint !== null && hint !== void 0 ? hint : this.width));
};
}
/**

@@ -64,11 +47,11 @@ * Overloaded operator for comparing two hash objects

*/
Hash.prototype.compareTo = function (other) {
compareTo(other) {
return this.bytes.compare(this.constructor.from(other).bytes);
};
}
/**
* @returns the hex-string representation of this Hash
*/
Hash.prototype.toString = function () {
toString() {
return this.toHex();
};
}
/**

@@ -80,5 +63,5 @@ * Returns four bits at the specified depth within a hash

*/
Hash.prototype.nibblet = function (depth) {
var byteIx = depth > 0 ? (depth / 2) | 0 : 0;
var b = this.bytes[byteIx];
nibblet(depth) {
const byteIx = depth > 0 ? (depth / 2) | 0 : 0;
let b = this.bytes[byteIx];
if (depth % 2 === 0) {

@@ -91,6 +74,5 @@ b = (b & 0xf0) >>> 4;

return b;
};
return Hash;
}(serialized_type_1.Comparable));
}
}
exports.Hash = Hash;
//# sourceMappingURL=hash.js.map
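
Two behaviours of the base Hash class above that the rest of the codec leans on: compareTo orders hashes bytewise via Buffer.compare, and nibblet(depth) returns the depth-th hex digit of the hash (high nibble for even depths; the odd-depth branch is elided in the diff and assumed to mask the low nibble). Hash256.from is assumed to accept a hex string, as the other concrete hashes do.

import { Hash256 } from 'ripple-binary-codec/dist/types/hash-256'

const a = Hash256.from('00'.repeat(32))
const b = Hash256.from('0F' + '00'.repeat(31))
a.compareTo(b) < 0   // => true: bytewise ordering
b.nibblet(0)         // => 0x0 (high nibble of byte 0)
b.nibblet(1)         // => 0xF (assumed low-nibble branch)
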

@@ -8,3 +8,2 @@ import { AccountID } from './account-id';

import { Hash256 } from './hash-256';
import { Issue } from './issue';
import { PathSet } from './path-set';

@@ -18,24 +17,4 @@ import { STArray } from './st-array';

import { Vector256 } from './vector-256';
import { XChainAttestationBatch } from './xchain-attestation-batch';
import { XChainBridge } from './xchain-bridge';
declare const coreTypes: {
AccountID: typeof AccountID;
Amount: typeof Amount;
Blob: typeof Blob;
Currency: typeof Currency;
Hash128: typeof Hash128;
Hash160: typeof Hash160;
Hash256: typeof Hash256;
Issue: typeof Issue;
PathSet: typeof PathSet;
STArray: typeof STArray;
STObject: typeof STObject;
UInt8: typeof UInt8;
UInt16: typeof UInt16;
UInt32: typeof UInt32;
UInt64: typeof UInt64;
Vector256: typeof Vector256;
XChainAttestationBatch: typeof XChainAttestationBatch;
XChainBridge: typeof XChainBridge;
};
export { coreTypes };
import { type SerializedType } from './serialized-type';
declare const coreTypes: Record<string, typeof SerializedType>;
export { coreTypes, AccountID, Amount, Blob, Currency, Hash128, Hash160, Hash256, PathSet, STArray, STObject, UInt8, UInt16, UInt32, UInt64, Vector256, };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.coreTypes = void 0;
var enums_1 = require("../enums");
var account_id_1 = require("./account-id");
var amount_1 = require("./amount");
var blob_1 = require("./blob");
var currency_1 = require("./currency");
var hash_128_1 = require("./hash-128");
var hash_160_1 = require("./hash-160");
var hash_256_1 = require("./hash-256");
var issue_1 = require("./issue");
var path_set_1 = require("./path-set");
var st_array_1 = require("./st-array");
var st_object_1 = require("./st-object");
var uint_16_1 = require("./uint-16");
var uint_32_1 = require("./uint-32");
var uint_64_1 = require("./uint-64");
var uint_8_1 = require("./uint-8");
var vector_256_1 = require("./vector-256");
var xchain_attestation_batch_1 = require("./xchain-attestation-batch");
var xchain_bridge_1 = require("./xchain-bridge");
var coreTypes = {
exports.Vector256 = exports.UInt64 = exports.UInt32 = exports.UInt16 = exports.UInt8 = exports.STObject = exports.STArray = exports.PathSet = exports.Hash256 = exports.Hash160 = exports.Hash128 = exports.Currency = exports.Blob = exports.Amount = exports.AccountID = exports.coreTypes = void 0;
const account_id_1 = require("./account-id");
Object.defineProperty(exports, "AccountID", { enumerable: true, get: function () { return account_id_1.AccountID; } });
const amount_1 = require("./amount");
Object.defineProperty(exports, "Amount", { enumerable: true, get: function () { return amount_1.Amount; } });
const blob_1 = require("./blob");
Object.defineProperty(exports, "Blob", { enumerable: true, get: function () { return blob_1.Blob; } });
const currency_1 = require("./currency");
Object.defineProperty(exports, "Currency", { enumerable: true, get: function () { return currency_1.Currency; } });
const hash_128_1 = require("./hash-128");
Object.defineProperty(exports, "Hash128", { enumerable: true, get: function () { return hash_128_1.Hash128; } });
const hash_160_1 = require("./hash-160");
Object.defineProperty(exports, "Hash160", { enumerable: true, get: function () { return hash_160_1.Hash160; } });
const hash_256_1 = require("./hash-256");
Object.defineProperty(exports, "Hash256", { enumerable: true, get: function () { return hash_256_1.Hash256; } });
const path_set_1 = require("./path-set");
Object.defineProperty(exports, "PathSet", { enumerable: true, get: function () { return path_set_1.PathSet; } });
const st_array_1 = require("./st-array");
Object.defineProperty(exports, "STArray", { enumerable: true, get: function () { return st_array_1.STArray; } });
const st_object_1 = require("./st-object");
Object.defineProperty(exports, "STObject", { enumerable: true, get: function () { return st_object_1.STObject; } });
const uint_16_1 = require("./uint-16");
Object.defineProperty(exports, "UInt16", { enumerable: true, get: function () { return uint_16_1.UInt16; } });
const uint_32_1 = require("./uint-32");
Object.defineProperty(exports, "UInt32", { enumerable: true, get: function () { return uint_32_1.UInt32; } });
const uint_64_1 = require("./uint-64");
Object.defineProperty(exports, "UInt64", { enumerable: true, get: function () { return uint_64_1.UInt64; } });
const uint_8_1 = require("./uint-8");
Object.defineProperty(exports, "UInt8", { enumerable: true, get: function () { return uint_8_1.UInt8; } });
const vector_256_1 = require("./vector-256");
Object.defineProperty(exports, "Vector256", { enumerable: true, get: function () { return vector_256_1.Vector256; } });
const enums_1 = require("../enums");
const coreTypes = {
AccountID: account_id_1.AccountID,

@@ -31,3 +43,2 @@ Amount: amount_1.Amount,

Hash256: hash_256_1.Hash256,
Issue: issue_1.Issue,
PathSet: path_set_1.PathSet,

@@ -41,12 +52,8 @@ STArray: st_array_1.STArray,

Vector256: vector_256_1.Vector256,
XChainAttestationBatch: xchain_attestation_batch_1.XChainAttestationBatch,
XChainBridge: xchain_bridge_1.XChainBridge,
};
exports.coreTypes = coreTypes;
Object.values(enums_1.Field).forEach(function (field) {
field.associatedType = coreTypes[field.type.name];
});
enums_1.Field['TransactionType'].associatedType = enums_1.TransactionType;
enums_1.Field['TransactionResult'].associatedType = enums_1.TransactionResult;
enums_1.Field['LedgerEntryType'].associatedType = enums_1.LedgerEntryType;
// Ensures that the DEFAULT_DEFINITIONS object connects these types to fields for serializing/deserializing
// This is done here instead of in enums/index.ts to avoid a circular dependency
// because some of the above types depend on BinarySerializer which depends on enums/index.ts.
enums_1.DEFAULT_DEFINITIONS.associateTypes(coreTypes);
//# sourceMappingURL=index.js.map
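
The wiring step above is the visible half of the new definitions system: instead of patching each Field's associatedType in place, 1.6.0 hands the coreTypes map to DEFAULT_DEFINITIONS.associateTypes. Presumably a custom XrplDefinitionsBase for a sidechain is connected the same way; customDefinitions below is an illustrative name, and the import paths follow the dist layout in this diff.

import { coreTypes } from 'ripple-binary-codec/dist/types'
import { DEFAULT_DEFINITIONS } from 'ripple-binary-codec/dist/enums'

DEFAULT_DEFINITIONS.associateTypes(coreTypes)    // what the module itself does at load time
// customDefinitions.associateTypes(coreTypes)   // assumed pattern for a custom definitions object
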
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Issue = void 0;
var binary_parser_1 = require("../serdes/binary-parser");
var account_id_1 = require("./account-id");
var currency_1 = require("./currency");
var serialized_type_1 = require("./serialized-type");
var buffer_1 = require("buffer/");
const binary_parser_1 = require("../serdes/binary-parser");
const account_id_1 = require("./account-id");
const currency_1 = require("./currency");
const serialized_type_1 = require("./serialized-type");
const buffer_1 = require("buffer/");
/**

@@ -28,3 +13,3 @@ * Type guard for AmountObject

function isIssueObject(arg) {
var keys = Object.keys(arg).sort();
const keys = Object.keys(arg).sort();
if (keys.length === 1) {

@@ -38,6 +23,5 @@ return keys[0] === 'currency';

*/
var Issue = /** @class */ (function (_super) {
__extends(Issue, _super);
function Issue(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Issue.ZERO_ISSUED_CURRENCY.bytes) || this;
class Issue extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : Issue.ZERO_ISSUED_CURRENCY.bytes);
}

@@ -51,3 +35,3 @@ /**

*/
Issue.from = function (value) {
static from(value) {
if (value instanceof Issue) {

@@ -57,11 +41,11 @@ return value;

if (isIssueObject(value)) {
var currency = currency_1.Currency.from(value.currency).toBytes();
const currency = currency_1.Currency.from(value.currency).toBytes();
if (value.issuer == null) {
return new Issue(currency);
}
var issuer = account_id_1.AccountID.from(value.issuer).toBytes();
const issuer = account_id_1.AccountID.from(value.issuer).toBytes();
return new Issue(buffer_1.Buffer.concat([currency, issuer]));
}
throw new Error('Invalid type to construct an Amount');
};
}
/**

@@ -73,10 +57,10 @@ * Read an amount from a BinaryParser

*/
Issue.fromParser = function (parser) {
var currency = parser.read(20);
static fromParser(parser) {
const currency = parser.read(20);
if (new currency_1.Currency(currency).toJSON() === 'XRP') {
return new Issue(currency);
}
var currencyAndIssuer = [currency, parser.read(20)];
const currencyAndIssuer = [currency, parser.read(20)];
return new Issue(buffer_1.Buffer.concat(currencyAndIssuer));
};
}
/**

@@ -87,9 +71,9 @@ * Get the JSON representation of this Amount

*/
Issue.prototype.toJSON = function () {
var parser = new binary_parser_1.BinaryParser(this.toString());
var currency = currency_1.Currency.fromParser(parser);
toJSON() {
const parser = new binary_parser_1.BinaryParser(this.toString());
const currency = currency_1.Currency.fromParser(parser);
if (currency.toJSON() === 'XRP') {
return { currency: currency.toJSON() };
}
var issuer = account_id_1.AccountID.fromParser(parser);
const issuer = account_id_1.AccountID.fromParser(parser);
return {

@@ -99,7 +83,6 @@ currency: currency.toJSON(),

};
};
Issue.ZERO_ISSUED_CURRENCY = new Issue(buffer_1.Buffer.alloc(20));
return Issue;
}(serialized_type_1.SerializedType));
}
}
exports.Issue = Issue;
Issue.ZERO_ISSUED_CURRENCY = new Issue(buffer_1.Buffer.alloc(20));
//# sourceMappingURL=issue.js.map
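
For the Issue type above, the useful thing to read off the diff is the wire layout: fromParser reads a 20-byte currency and, unless it decodes as XRP, a further 20-byte issuer. A usage sketch under the dist layout in this diff; the issuer address is just ACCOUNT_ZERO for illustration, and note that elsewhere in this release Issue is no longer registered in the default coreTypes map.

import { Issue } from 'ripple-binary-codec/dist/types/issue'

Issue.from({ currency: 'XRP' }).toHex()
// => 40 hex zeros (the 20-byte currency field only)
Issue.from({ currency: 'USD', issuer: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp' }).toHex()
// => 80 hex chars: the USD currency field followed by the 20-byte account ID
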
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.PathSet = void 0;
var account_id_1 = require("./account-id");
var currency_1 = require("./currency");
var binary_parser_1 = require("../serdes/binary-parser");
var serialized_type_1 = require("./serialized-type");
var buffer_1 = require("buffer/");
const account_id_1 = require("./account-id");
const currency_1 = require("./currency");
const binary_parser_1 = require("../serdes/binary-parser");
const serialized_type_1 = require("./serialized-type");
const buffer_1 = require("buffer/");
/**
* Constants for separating Paths in a PathSet
*/
var PATHSET_END_BYTE = 0x00;
var PATH_SEPARATOR_BYTE = 0xff;
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xff;
/**
* Constant for masking types of a Hop
*/
var TYPE_ACCOUNT = 0x01;
var TYPE_CURRENCY = 0x10;
var TYPE_ISSUER = 0x20;
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
/**

@@ -54,7 +39,3 @@ * TypeGuard for HopObject

*/
var Hop = /** @class */ (function (_super) {
__extends(Hop, _super);
function Hop() {
return _super !== null && _super.apply(this, arguments) || this;
}
class Hop extends serialized_type_1.SerializedType {
/**

@@ -66,7 +47,7 @@ * Create a Hop from a HopObject

*/
Hop.from = function (value) {
static from(value) {
if (value instanceof Hop) {
return value;
}
var bytes = [buffer_1.Buffer.from([0])];
const bytes = [buffer_1.Buffer.from([0])];
if (value.account) {

@@ -85,3 +66,3 @@ bytes.push(account_id_1.AccountID.from(value.account).toBytes());

return new Hop(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -93,5 +74,5 @@ * Construct a Hop from a BinaryParser

*/
Hop.fromParser = function (parser) {
var type = parser.readUInt8();
var bytes = [buffer_1.Buffer.from([type])];
static fromParser(parser) {
const type = parser.readUInt8();
const bytes = [buffer_1.Buffer.from([type])];
if (type & TYPE_ACCOUNT) {

@@ -107,3 +88,3 @@ bytes.push(parser.read(account_id_1.AccountID.width));

return new Hop(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -114,6 +95,6 @@ * Get the JSON interpretation of this hop

*/
Hop.prototype.toJSON = function () {
var hopParser = new binary_parser_1.BinaryParser(this.bytes.toString('hex'));
var type = hopParser.readUInt8();
var account, currency, issuer;
toJSON() {
const hopParser = new binary_parser_1.BinaryParser(this.bytes.toString('hex'));
const type = hopParser.readUInt8();
let account, currency, issuer;
if (type & TYPE_ACCOUNT) {

@@ -128,3 +109,3 @@ account = account_id_1.AccountID.fromParser(hopParser).toJSON();

}
var result = {};
const result = {};
if (account) {

@@ -140,3 +121,3 @@ result.account = account;

return result;
};
}
/**

@@ -147,15 +128,10 @@ * get a number representing the type of this hop

*/
Hop.prototype.type = function () {
type() {
return this.bytes[0];
};
return Hop;
}(serialized_type_1.SerializedType));
}
}
/**
* Class for serializing/deserializing Paths
*/
var Path = /** @class */ (function (_super) {
__extends(Path, _super);
function Path() {
return _super !== null && _super.apply(this, arguments) || this;
}
class Path extends serialized_type_1.SerializedType {
/**

@@ -167,12 +143,12 @@ * construct a Path from an array of Hops

*/
Path.from = function (value) {
static from(value) {
if (value instanceof Path) {
return value;
}
var bytes = [];
value.forEach(function (hop) {
const bytes = [];
value.forEach((hop) => {
bytes.push(Hop.from(hop).toBytes());
});
return new Path(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -184,4 +160,4 @@ * Read a Path from a BinaryParser

*/
Path.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {

@@ -195,3 +171,3 @@ bytes.push(Hop.fromParser(parser).toBytes());

return new Path(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -202,5 +178,5 @@ * Get the JSON representation of this Path

*/
Path.prototype.toJSON = function () {
var json = [];
var pathParser = new binary_parser_1.BinaryParser(this.toString());
toJSON() {
const json = [];
const pathParser = new binary_parser_1.BinaryParser(this.toString());
while (!pathParser.end()) {

@@ -210,13 +186,8 @@ json.push(Hop.fromParser(pathParser).toJSON());

return json;
};
return Path;
}(serialized_type_1.SerializedType));
}
}
/**
* Deserialize and Serialize the PathSet type
*/
var PathSet = /** @class */ (function (_super) {
__extends(PathSet, _super);
function PathSet() {
return _super !== null && _super.apply(this, arguments) || this;
}
class PathSet extends serialized_type_1.SerializedType {
/**

@@ -228,3 +199,3 @@ * Construct a PathSet from an Array of Arrays representing paths

*/
PathSet.from = function (value) {
static from(value) {
if (value instanceof PathSet) {

@@ -234,12 +205,12 @@ return value;

if (isPathSet(value)) {
var bytes_1 = [];
value.forEach(function (path) {
bytes_1.push(Path.from(path).toBytes());
bytes_1.push(buffer_1.Buffer.from([PATH_SEPARATOR_BYTE]));
const bytes = [];
value.forEach((path) => {
bytes.push(Path.from(path).toBytes());
bytes.push(buffer_1.Buffer.from([PATH_SEPARATOR_BYTE]));
});
bytes_1[bytes_1.length - 1] = buffer_1.Buffer.from([PATHSET_END_BYTE]);
return new PathSet(buffer_1.Buffer.concat(bytes_1));
bytes[bytes.length - 1] = buffer_1.Buffer.from([PATHSET_END_BYTE]);
return new PathSet(buffer_1.Buffer.concat(bytes));
}
throw new Error('Cannot construct PathSet from given value');
};
}
/**

@@ -251,4 +222,4 @@ * Construct a PathSet from a BinaryParser

*/
PathSet.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {

@@ -262,3 +233,3 @@ bytes.push(Path.fromParser(parser).toBytes());

return new PathSet(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -269,5 +240,5 @@ * Get the JSON representation of this PathSet

*/
PathSet.prototype.toJSON = function () {
var json = [];
var pathParser = new binary_parser_1.BinaryParser(this.toString());
toJSON() {
const json = [];
const pathParser = new binary_parser_1.BinaryParser(this.toString());
while (!pathParser.end()) {

@@ -278,6 +249,5 @@ json.push(Path.fromParser(pathParser).toJSON());

return json;
};
return PathSet;
}(serialized_type_1.SerializedType));
}
}
exports.PathSet = PathSet;
//# sourceMappingURL=path-set.js.map
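
For reference, a minimal usage sketch of the Hop/Path/PathSet classes above, under the assumption that module paths follow this repo's test layout and that PathSet is exposed through coreTypes; the account address is borrowed from the signing tests later in this diff:

const { coreTypes } = require('../src/types')
const { PathSet } = coreTypes

// One path consisting of a single account hop; a hop may carry
// account, currency and issuer fields, as in Hop.toJSON() above.
const paths = [[{ account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ' }]]

const pathSet = PathSet.from(paths)
console.log(pathSet.toHex()) // hop bytes, path separator bytes, then PATHSET_END_BYTE
console.log(pathSet.toJSON()) // round-trips back to the array-of-arrays shape
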
import { BytesList } from '../serdes/binary-serializer';
import { BinaryParser } from '../serdes/binary-parser';
import * as bigInt from 'big-integer';
import bigInt = require('big-integer');
import { Buffer } from 'buffer/';
declare type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
declare type JsonObject = {
type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
type JsonObject = {
[key: string]: JSON;

@@ -8,0 +8,0 @@ };

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Comparable = exports.SerializedType = void 0;
var binary_serializer_1 = require("../serdes/binary-serializer");
var buffer_1 = require("buffer/");
const binary_serializer_1 = require("../serdes/binary-serializer");
const buffer_1 = require("buffer/");
/**
* The base class for all binary-codec types
*/
var SerializedType = /** @class */ (function () {
function SerializedType(bytes) {
class SerializedType {
constructor(bytes) {
this.bytes = buffer_1.Buffer.alloc(0);
this.bytes = bytes !== null && bytes !== void 0 ? bytes : buffer_1.Buffer.alloc(0);
}
SerializedType.fromParser = function (parser, hint) {
static fromParser(parser, hint) {
throw new Error('fromParser not implemented');
return this.fromParser(parser, hint);
};
SerializedType.from = function (value) {
}
static from(value) {
throw new Error('from not implemented');
return this.from(value);
};
}
/**

@@ -42,5 +27,5 @@ * Write the bytes representation of a SerializedType to a BytesList

*/
SerializedType.prototype.toBytesSink = function (list) {
toBytesSink(list) {
list.put(this.bytes);
};
}
/**

@@ -51,5 +36,5 @@ * Get the hex representation of a SerializedType's bytes

*/
SerializedType.prototype.toHex = function () {
toHex() {
return this.toBytes().toString('hex').toUpperCase();
};
}
/**

@@ -60,10 +45,10 @@ * Get the bytes representation of a SerializedType

*/
SerializedType.prototype.toBytes = function () {
toBytes() {
if (this.bytes) {
return this.bytes;
}
var bytes = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BytesList();
this.toBytesSink(bytes);
return bytes.toBytes();
};
}
/**

@@ -74,13 +59,12 @@ * Return the JSON representation of a SerializedType

*/
SerializedType.prototype.toJSON = function () {
toJSON() {
return this.toHex();
};
}
/**
* @returns hexString representation of this.bytes
*/
SerializedType.prototype.toString = function () {
toString() {
return this.toHex();
};
return SerializedType;
}());
}
}
exports.SerializedType = SerializedType;

@@ -90,22 +74,18 @@ /**

*/
var Comparable = /** @class */ (function (_super) {
__extends(Comparable, _super);
function Comparable() {
return _super !== null && _super.apply(this, arguments) || this;
class Comparable extends SerializedType {
lt(other) {
return this.compareTo(other) < 0;
}
Comparable.prototype.lt = function (other) {
return this.compareTo(other) < 0;
};
Comparable.prototype.eq = function (other) {
eq(other) {
return this.compareTo(other) === 0;
};
Comparable.prototype.gt = function (other) {
}
gt(other) {
return this.compareTo(other) > 0;
};
Comparable.prototype.gte = function (other) {
}
gte(other) {
return this.compareTo(other) > -1;
};
Comparable.prototype.lte = function (other) {
}
lte(other) {
return this.compareTo(other) < 1;
};
}
/**

@@ -117,8 +97,7 @@ * Overload this method to define how two Comparable SerializedTypes are compared

*/
Comparable.prototype.compareTo = function (other) {
throw new Error("cannot compare ".concat(this.toString(), " and ").concat(other.toString()));
};
return Comparable;
}(SerializedType));
compareTo(other) {
throw new Error(`cannot compare ${this.toString()} and ${other.toString()}`);
}
}
exports.Comparable = Comparable;
//# sourceMappingURL=serialized-type.js.map
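
To illustrate the Comparable contract above, a small sketch: only compareTo() needs to be overloaded, and lt/eq/gt/gte/lte all derive from it. ByteLength is a hypothetical subclass used purely for illustration; the module path follows this repo's test layout.

const { Comparable } = require('../src/types/serialized-type')
const { Buffer } = require('buffer/')

class ByteLength extends Comparable {
  // Overload compareTo(); every other comparison operator falls out of it.
  compareTo(other) {
    return this.bytes.length - other.bytes.length
  }
}

const shorter = new ByteLength(Buffer.alloc(2))
const longer = new ByteLength(Buffer.alloc(4))
console.log(shorter.lt(longer)) // true
console.log(longer.gte(shorter)) // true
console.log(shorter.eq(new ByteLength(Buffer.alloc(2)))) // true
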
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.STArray = void 0;
var serialized_type_1 = require("./serialized-type");
var st_object_1 = require("./st-object");
var binary_parser_1 = require("../serdes/binary-parser");
var buffer_1 = require("buffer/");
var ARRAY_END_MARKER = buffer_1.Buffer.from([0xf1]);
var ARRAY_END_MARKER_NAME = 'ArrayEndMarker';
var OBJECT_END_MARKER = buffer_1.Buffer.from([0xe1]);
const serialized_type_1 = require("./serialized-type");
const st_object_1 = require("./st-object");
const binary_parser_1 = require("../serdes/binary-parser");
const buffer_1 = require("buffer/");
const ARRAY_END_MARKER = buffer_1.Buffer.from([0xf1]);
const ARRAY_END_MARKER_NAME = 'ArrayEndMarker';
const OBJECT_END_MARKER = buffer_1.Buffer.from([0xe1]);
/**

@@ -35,7 +20,3 @@ * TypeGuard for Array<JsonObject>

*/
var STArray = /** @class */ (function (_super) {
__extends(STArray, _super);
function STArray() {
return _super !== null && _super.apply(this, arguments) || this;
}
class STArray extends serialized_type_1.SerializedType {
/**

@@ -47,6 +28,6 @@ * Construct an STArray from a BinaryParser

*/
STArray.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {
var field = parser.readField();
const field = parser.readField();
if (field.name === ARRAY_END_MARKER_NAME) {

@@ -59,3 +40,3 @@ break;

return new STArray(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -67,3 +48,3 @@ * Construct an STArray from an Array of JSON Objects

*/
STArray.from = function (value) {
static from(value) {
if (value instanceof STArray) {

@@ -73,11 +54,11 @@ return value;

if (isObjects(value)) {
var bytes_1 = [];
value.forEach(function (obj) {
bytes_1.push(st_object_1.STObject.from(obj).toBytes());
const bytes = [];
value.forEach((obj) => {
bytes.push(st_object_1.STObject.from(obj).toBytes());
});
bytes_1.push(ARRAY_END_MARKER);
return new STArray(buffer_1.Buffer.concat(bytes_1));
bytes.push(ARRAY_END_MARKER);
return new STArray(buffer_1.Buffer.concat(bytes));
}
throw new Error('Cannot construct STArray from value given');
};
}
/**

@@ -88,11 +69,11 @@ * Return the JSON representation of this.bytes

*/
STArray.prototype.toJSON = function () {
var result = [];
var arrayParser = new binary_parser_1.BinaryParser(this.toString());
toJSON() {
const result = [];
const arrayParser = new binary_parser_1.BinaryParser(this.toString());
while (!arrayParser.end()) {
var field = arrayParser.readField();
const field = arrayParser.readField();
if (field.name === ARRAY_END_MARKER_NAME) {
break;
}
var outer = {};
const outer = {};
outer[field.name] = st_object_1.STObject.fromParser(arrayParser).toJSON();

@@ -102,6 +83,5 @@ result.push(outer);

return result;
};
return STArray;
}(serialized_type_1.SerializedType));
}
}
exports.STArray = STArray;
//# sourceMappingURL=st-array.js.map
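
A minimal sketch of the STArray shape above, assuming module paths follow this repo's test layout: each entry is a single-field JSON object (here a Memo), and serialization appends the 0xF1 ArrayEndMarker that toJSON() later stops at.

const { coreTypes } = require('../src/types')
const { STArray } = coreTypes

const memos = STArray.from([{ Memo: { MemoData: 'DEADBEEF' } }])
console.log(memos.toHex()) // serialized Memo object followed by 'F1'
console.log(memos.toJSON()) // [ { Memo: { MemoData: 'DEADBEEF' } } ]
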

@@ -0,1 +1,2 @@

import { XrplDefinitionsBase } from '../enums';
import { SerializedType, JsonObject } from './serialized-type';

@@ -19,12 +20,14 @@ import { BinaryParser } from '../serdes/binary-parser';

* @param filter optional, denote which field to include in serialized object
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns a STObject object
*/
static from<T extends STObject | JsonObject>(value: T, filter?: (...any: any[]) => boolean): STObject;
static from<T extends STObject | JsonObject>(value: T, filter?: (...any: any[]) => boolean, definitions?: XrplDefinitionsBase): STObject;
/**
* Get the JSON interpretation of this.bytes
*
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns a JSON object
*/
toJSON(): JsonObject;
toJSON(definitions?: XrplDefinitionsBase): JsonObject;
}
export { STObject };
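
A minimal sketch of the new optional definitions parameter declared above, assuming module paths follow this repo's test layout; the custom TRANSACTION_TYPES tweak mirrors the signing tests later in this diff:

const { coreTypes } = require('../src/types')
const { STObject } = coreTypes
const { XrplDefinitions } = require('../src/enums/xrpl-definitions')
const normalDefinitions = require('../src/enums/definitions.json')

// Clone the bundled definitions and remap Payment to transaction type 31.
const customDefinitions = JSON.parse(JSON.stringify(normalDefinitions))
customDefinitions.TRANSACTION_TYPES.Payment = 31
const defs = new XrplDefinitions(customDefinitions)

const tx = { TransactionType: 'Payment', Flags: 2147483648, Sequence: 1 }

// Pass the custom definitions both when serializing and when decoding.
const serialized = STObject.from(tx, undefined, defs)
console.log(serialized.toJSON(defs)) // { TransactionType: 'Payment', Flags: 2147483648, Sequence: 1 }
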
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.STObject = void 0;
var enums_1 = require("../enums");
var serialized_type_1 = require("./serialized-type");
var ripple_address_codec_1 = require("ripple-address-codec");
var binary_parser_1 = require("../serdes/binary-parser");
var binary_serializer_1 = require("../serdes/binary-serializer");
var buffer_1 = require("buffer/");
var OBJECT_END_MARKER_BYTE = buffer_1.Buffer.from([0xe1]);
var OBJECT_END_MARKER = 'ObjectEndMarker';
var ST_OBJECT = 'STObject';
var DESTINATION = 'Destination';
var ACCOUNT = 'Account';
var SOURCE_TAG = 'SourceTag';
var DEST_TAG = 'DestinationTag';
const enums_1 = require("../enums");
const serialized_type_1 = require("./serialized-type");
const ripple_address_codec_1 = require("ripple-address-codec");
const binary_parser_1 = require("../serdes/binary-parser");
const binary_serializer_1 = require("../serdes/binary-serializer");
const buffer_1 = require("buffer/");
const OBJECT_END_MARKER_BYTE = buffer_1.Buffer.from([0xe1]);
const OBJECT_END_MARKER = 'ObjectEndMarker';
const ST_OBJECT = 'STObject';
const DESTINATION = 'Destination';
const ACCOUNT = 'Account';
const SOURCE_TAG = 'SourceTag';
const DEST_TAG = 'DestinationTag';
/**

@@ -39,5 +24,4 @@ * Break down an X-Address into an account and a tag

function handleXAddress(field, xAddress) {
var _a, _b;
var decoded = (0, ripple_address_codec_1.xAddressToClassicAddress)(xAddress);
var tagName;
const decoded = (0, ripple_address_codec_1.xAddressToClassicAddress)(xAddress);
let tagName;
if (field === DESTINATION)

@@ -48,5 +32,6 @@ tagName = DEST_TAG;

else if (decoded.tag !== false)
throw new Error("".concat(field, " cannot have an associated tag"));
throw new Error(`${field} cannot have an associated tag`);
return decoded.tag !== false
? (_a = {}, _a[field] = decoded.classicAddress, _a[tagName] = decoded.tag, _a) : (_b = {}, _b[field] = decoded.classicAddress, _b);
? { [field]: decoded.classicAddress, [tagName]: decoded.tag }
: { [field]: decoded.classicAddress };
}

@@ -69,7 +54,3 @@ /**

*/
var STObject = /** @class */ (function (_super) {
__extends(STObject, _super);
function STObject() {
return _super !== null && _super.apply(this, arguments) || this;
}
class STObject extends serialized_type_1.SerializedType {
/**

@@ -81,11 +62,11 @@ * Construct a STObject from a BinaryParser

*/
STObject.fromParser = function (parser) {
var list = new binary_serializer_1.BytesList();
var bytes = new binary_serializer_1.BinarySerializer(list);
static fromParser(parser) {
const list = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BinarySerializer(list);
while (!parser.end()) {
var field = parser.readField();
const field = parser.readField();
if (field.name === OBJECT_END_MARKER) {
break;
}
var associatedValue = parser.readFieldValue(field);
const associatedValue = parser.readFieldValue(field);
bytes.writeFieldAndValue(field, associatedValue);

@@ -97,3 +78,3 @@ if (field.type.name === ST_OBJECT) {

return new STObject(list.toBytes());
};
}
/**

@@ -104,15 +85,14 @@ * Construct a STObject from a JSON object

* @param filter optional, denote which field to include in serialized object
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns a STObject object
*/
STObject.from = function (value, filter) {
static from(value, filter, definitions = enums_1.DEFAULT_DEFINITIONS) {
if (value instanceof STObject) {
return value;
}
var list = new binary_serializer_1.BytesList();
var bytes = new binary_serializer_1.BinarySerializer(list);
var isUnlModify = false;
var xAddressDecoded = Object.entries(value).reduce(function (acc, _a) {
var _b;
var key = _a[0], val = _a[1];
var handled = undefined;
const list = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BinarySerializer(list);
let isUnlModify = false;
const xAddressDecoded = Object.entries(value).reduce((acc, [key, val]) => {
let handled = undefined;
if (val && (0, ripple_address_codec_1.isValidXAddress)(val.toString())) {

@@ -122,12 +102,10 @@ handled = handleXAddress(key, val.toString());

}
return Object.assign(acc, handled !== null && handled !== void 0 ? handled : (_b = {}, _b[key] = val, _b));
return Object.assign(acc, handled !== null && handled !== void 0 ? handled : { [key]: val });
}, {});
var sorted = Object.keys(xAddressDecoded)
.map(function (f) { return enums_1.Field[f]; })
.filter(function (f) {
return f !== undefined &&
xAddressDecoded[f.name] !== undefined &&
f.isSerialized;
})
.sort(function (a, b) {
let sorted = Object.keys(xAddressDecoded)
.map((f) => definitions.field[f])
.filter((f) => f !== undefined &&
xAddressDecoded[f.name] !== undefined &&
f.isSerialized)
.sort((a, b) => {
return a.ordinal - b.ordinal;

@@ -138,6 +116,6 @@ });

}
sorted.forEach(function (field) {
var associatedValue = field.associatedType.from(xAddressDecoded[field.name]);
sorted.forEach((field) => {
const associatedValue = field.associatedType.from(xAddressDecoded[field.name]);
if (associatedValue == undefined) {
throw new TypeError("Unable to interpret \"".concat(field.name, ": ").concat(xAddressDecoded[field.name], "\"."));
throw new TypeError(`Unable to interpret "${field.name}: ${xAddressDecoded[field.name]}".`);
}

@@ -151,3 +129,3 @@ if (associatedValue.name === 'UNLModify') {

// The Account field must not be a part of the UNLModify pseudotransaction encoding, due to a bug in rippled
var isUnlModifyWorkaround = field.name == 'Account' && isUnlModify;
const isUnlModifyWorkaround = field.name == 'Account' && isUnlModify;
bytes.writeFieldAndValue(field, associatedValue, isUnlModifyWorkaround);

@@ -159,13 +137,14 @@ if (field.type.name === ST_OBJECT) {

return new STObject(list.toBytes());
};
}
/**
* Get the JSON interpretation of this.bytes
*
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns a JSON object
*/
STObject.prototype.toJSON = function () {
var objectParser = new binary_parser_1.BinaryParser(this.toString());
var accumulator = {};
toJSON(definitions) {
const objectParser = new binary_parser_1.BinaryParser(this.toString(), definitions);
const accumulator = {};
while (!objectParser.end()) {
var field = objectParser.readField();
const field = objectParser.readField();
if (field.name === OBJECT_END_MARKER) {

@@ -177,6 +156,5 @@ break;

return accumulator;
};
return STObject;
}(serialized_type_1.SerializedType));
}
}
exports.STObject = STObject;
//# sourceMappingURL=st-object.js.map
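
A sketch of the X-Address handling in STObject.from() above: a field holding a valid X-Address is split into its classic address plus the matching tag field before serialization. The classic address is borrowed from the signing tests later in this diff; classicAddressToXAddress is part of ripple-address-codec, and module paths follow this repo's test layout.

const { coreTypes } = require('../src/types')
const { STObject } = coreTypes
const { classicAddressToXAddress } = require('ripple-address-codec')

// Build an X-Address that embeds a destination tag of 4660.
const xAddress = classicAddressToXAddress(
  'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
  4660,
  false, // main-net
)

const json = STObject.from({ Destination: xAddress }).toJSON()
console.log(json.Destination) // 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ'
console.log(json.DestinationTag) // 4660
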
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt16 = void 0;
var uint_1 = require("./uint");
var buffer_1 = require("buffer/");
const uint_1 = require("./uint");
const buffer_1 = require("buffer/");
/**
* Derived UInt class for serializing/deserializing 16 bit UInt
*/
var UInt16 = /** @class */ (function (_super) {
__extends(UInt16, _super);
function UInt16(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt16.defaultUInt16.bytes) || this;
class UInt16 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt16.defaultUInt16.bytes);
}
UInt16.fromParser = function (parser) {
static fromParser(parser) {
return new UInt16(parser.read(UInt16.width));
};
}
/**

@@ -37,3 +21,3 @@ * Construct a UInt16 object from a number

*/
UInt16.from = function (val) {
static from(val) {
if (val instanceof UInt16) {

@@ -43,3 +27,3 @@ return val;

if (typeof val === 'number') {
var buf = buffer_1.Buffer.alloc(UInt16.width);
const buf = buffer_1.Buffer.alloc(UInt16.width);
buf.writeUInt16BE(val, 0);

@@ -49,3 +33,3 @@ return new UInt16(buf);

throw new Error('Can not construct UInt16 with given value');
};
}
/**

@@ -56,10 +40,9 @@ * get the value of a UInt16 object

*/
UInt16.prototype.valueOf = function () {
valueOf() {
return this.bytes.readUInt16BE(0);
};
UInt16.width = 16 / 8; // 2
UInt16.defaultUInt16 = new UInt16(buffer_1.Buffer.alloc(UInt16.width));
return UInt16;
}(uint_1.UInt));
}
}
exports.UInt16 = UInt16;
UInt16.width = 16 / 8; // 2
UInt16.defaultUInt16 = new UInt16(buffer_1.Buffer.alloc(UInt16.width));
//# sourceMappingURL=uint-16.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt32 = void 0;
var uint_1 = require("./uint");
var buffer_1 = require("buffer/");
const uint_1 = require("./uint");
const buffer_1 = require("buffer/");
/**
* Derived UInt class for serializing/deserializing 32 bit UInt
*/
var UInt32 = /** @class */ (function (_super) {
__extends(UInt32, _super);
function UInt32(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt32.defaultUInt32.bytes) || this;
class UInt32 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt32.defaultUInt32.bytes);
}
UInt32.fromParser = function (parser) {
static fromParser(parser) {
return new UInt32(parser.read(UInt32.width));
};
}
/**

@@ -37,9 +21,9 @@ * Construct a UInt32 object from a number

*/
UInt32.from = function (val) {
static from(val) {
if (val instanceof UInt32) {
return val;
}
var buf = buffer_1.Buffer.alloc(UInt32.width);
const buf = buffer_1.Buffer.alloc(UInt32.width);
if (typeof val === 'string') {
var num = Number.parseInt(val);
const num = Number.parseInt(val);
buf.writeUInt32BE(num, 0);

@@ -53,3 +37,3 @@ return new UInt32(buf);

throw new Error('Cannot construct UInt32 from given value');
};
}
/**

@@ -60,10 +44,9 @@ * get the value of a UInt32 object

*/
UInt32.prototype.valueOf = function () {
valueOf() {
return this.bytes.readUInt32BE(0);
};
UInt32.width = 32 / 8; // 4
UInt32.defaultUInt32 = new UInt32(buffer_1.Buffer.alloc(UInt32.width));
return UInt32;
}(uint_1.UInt));
}
}
exports.UInt32 = UInt32;
UInt32.width = 32 / 8; // 4
UInt32.defaultUInt32 = new UInt32(buffer_1.Buffer.alloc(UInt32.width));
//# sourceMappingURL=uint-32.js.map
import { UInt } from './uint';
import { BinaryParser } from '../serdes/binary-parser';
import * as bigInt from 'big-integer';
import bigInt = require('big-integer');
import { Buffer } from 'buffer/';

@@ -5,0 +5,0 @@ /**

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt64 = void 0;
var uint_1 = require("./uint");
var bigInt = require("big-integer");
var big_integer_1 = require("big-integer");
var buffer_1 = require("buffer/");
var HEX_REGEX = /^[a-fA-F0-9]{1,16}$/;
var mask = bigInt(0x00000000ffffffff);
const uint_1 = require("./uint");
const bigInt = require("big-integer");
const big_integer_1 = require("big-integer");
const buffer_1 = require("buffer/");
const HEX_REGEX = /^[a-fA-F0-9]{1,16}$/;
const mask = bigInt(0x00000000ffffffff);
/**
* Derived UInt class for serializing/deserializing 64 bit UInt
*/
var UInt64 = /** @class */ (function (_super) {
__extends(UInt64, _super);
function UInt64(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt64.defaultUInt64.bytes) || this;
class UInt64 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt64.defaultUInt64.bytes);
}
UInt64.fromParser = function (parser) {
static fromParser(parser) {
return new UInt64(parser.read(UInt64.width));
};
}
/**

@@ -42,7 +26,7 @@ * Construct a UInt64 object

*/
UInt64.from = function (val) {
static from(val) {
if (val instanceof UInt64) {
return val;
}
var buf = buffer_1.Buffer.alloc(UInt64.width);
let buf = buffer_1.Buffer.alloc(UInt64.width);
if (typeof val === 'number') {

@@ -52,4 +36,4 @@ if (val < 0) {

}
var number = bigInt(val);
var intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
const number = bigInt(val);
const intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(number.shiftRight(32)), 0);

@@ -61,5 +45,5 @@ intBuf[1].writeUInt32BE(Number(number.and(mask)), 0);

if (!HEX_REGEX.test(val)) {
throw new Error("".concat(val, " is not a valid hex-string"));
throw new Error(`${val} is not a valid hex-string`);
}
var strBuf = val.padStart(16, '0');
const strBuf = val.padStart(16, '0');
buf = buffer_1.Buffer.from(strBuf, 'hex');

@@ -69,3 +53,3 @@ return new UInt64(buf);

if ((0, big_integer_1.isInstance)(val)) {
var intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
const intBuf = [buffer_1.Buffer.alloc(4), buffer_1.Buffer.alloc(4)];
intBuf[0].writeUInt32BE(Number(val.shiftRight(bigInt(32))), 0);

@@ -76,3 +60,3 @@ intBuf[1].writeUInt32BE(Number(val.and(mask)), 0);

throw new Error('Cannot construct UInt64 from given value');
};
}
/**

@@ -83,5 +67,5 @@ * The JSON representation of a UInt64 object

*/
UInt64.prototype.toJSON = function () {
toJSON() {
return this.bytes.toString('hex').toUpperCase();
};
}
/**

@@ -92,7 +76,7 @@ * Get the value of the UInt64

*/
UInt64.prototype.valueOf = function () {
var msb = bigInt(this.bytes.slice(0, 4).readUInt32BE(0));
var lsb = bigInt(this.bytes.slice(4).readUInt32BE(0));
valueOf() {
const msb = bigInt(this.bytes.slice(0, 4).readUInt32BE(0));
const lsb = bigInt(this.bytes.slice(4).readUInt32BE(0));
return msb.shiftLeft(bigInt(32)).or(lsb);
};
}
/**

@@ -103,10 +87,9 @@ * Get the bytes representation of the UInt64 object

*/
UInt64.prototype.toBytes = function () {
toBytes() {
return this.bytes;
};
UInt64.width = 64 / 8; // 8
UInt64.defaultUInt64 = new UInt64(buffer_1.Buffer.alloc(UInt64.width));
return UInt64;
}(uint_1.UInt));
}
}
exports.UInt64 = UInt64;
UInt64.width = 64 / 8; // 8
UInt64.defaultUInt64 = new UInt64(buffer_1.Buffer.alloc(UInt64.width));
//# sourceMappingURL=uint-64.js.map
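
A minimal sketch of the UInt64 conversions above, assuming module paths follow this repo's test layout: from() accepts a non-negative number, a 1-16 character hex string, or a big-integer value, and toJSON() always reports an upper-case, zero-padded hex string.

const { coreTypes } = require('../src/types')
const { UInt64 } = coreTypes
const bigInt = require('big-integer')

console.log(UInt64.from(1000).toJSON()) // '00000000000003E8'
console.log(UInt64.from('3e8').toJSON()) // '00000000000003E8'
console.log(UInt64.from(bigInt('1000')).toJSON()) // '00000000000003E8'
console.log(UInt64.from(1000).valueOf().toString()) // '1000' (a big-integer value)
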
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt8 = void 0;
var uint_1 = require("./uint");
var buffer_1 = require("buffer/");
const uint_1 = require("./uint");
const buffer_1 = require("buffer/");
/**
* Derived UInt class for serializing/deserializing 8 bit UInt
*/
var UInt8 = /** @class */ (function (_super) {
__extends(UInt8, _super);
function UInt8(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt8.defaultUInt8.bytes) || this;
class UInt8 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt8.defaultUInt8.bytes);
}
UInt8.fromParser = function (parser) {
static fromParser(parser) {
return new UInt8(parser.read(UInt8.width));
};
}
/**

@@ -37,3 +21,3 @@ * Construct a UInt8 object from a number

*/
UInt8.from = function (val) {
static from(val) {
if (val instanceof UInt8) {

@@ -43,3 +27,3 @@ return val;

if (typeof val === 'number') {
var buf = buffer_1.Buffer.alloc(UInt8.width);
const buf = buffer_1.Buffer.alloc(UInt8.width);
buf.writeUInt8(val, 0);

@@ -49,3 +33,3 @@ return new UInt8(buf);

throw new Error('Cannot construct UInt8 from given value');
};
}
/**

@@ -56,10 +40,9 @@ * get the value of a UInt8 object

*/
UInt8.prototype.valueOf = function () {
valueOf() {
return this.bytes.readUInt8(0);
};
UInt8.width = 8 / 8; // 1
UInt8.defaultUInt8 = new UInt8(buffer_1.Buffer.alloc(UInt8.width));
return UInt8;
}(uint_1.UInt));
}
}
exports.UInt8 = UInt8;
UInt8.width = 8 / 8; // 1
UInt8.defaultUInt8 = new UInt8(buffer_1.Buffer.alloc(UInt8.width));
//# sourceMappingURL=uint-8.js.map

@@ -1,2 +0,2 @@

import * as bigInt from 'big-integer';
import bigInt = require('big-integer');
import { Comparable } from './serialized-type';

@@ -3,0 +3,0 @@ import { Buffer } from 'buffer/';

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt = void 0;
var serialized_type_1 = require("./serialized-type");
const serialized_type_1 = require("./serialized-type");
/**

@@ -33,6 +18,5 @@ * Compare numbers and bigInts n1 and n2

*/
var UInt = /** @class */ (function (_super) {
__extends(UInt, _super);
function UInt(bytes) {
return _super.call(this, bytes) || this;
class UInt extends serialized_type_1.Comparable {
constructor(bytes) {
super(bytes);
}

@@ -45,5 +29,5 @@ /**

*/
UInt.prototype.compareTo = function (other) {
compareTo(other) {
return compare(this.valueOf(), other.valueOf());
};
}
/**

@@ -54,9 +38,8 @@ * Convert a UInt object to JSON

*/
UInt.prototype.toJSON = function () {
var val = this.valueOf();
toJSON() {
const val = this.valueOf();
return typeof val === 'number' ? val : val.toString();
};
return UInt;
}(serialized_type_1.Comparable));
}
}
exports.UInt = UInt;
//# sourceMappingURL=uint.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Vector256 = void 0;
var serialized_type_1 = require("./serialized-type");
var hash_256_1 = require("./hash-256");
var binary_serializer_1 = require("../serdes/binary-serializer");
const serialized_type_1 = require("./serialized-type");
const hash_256_1 = require("./hash-256");
const binary_serializer_1 = require("../serdes/binary-serializer");
/**

@@ -31,6 +16,5 @@ * TypeGuard for Array<string>

*/
var Vector256 = /** @class */ (function (_super) {
__extends(Vector256, _super);
function Vector256(bytes) {
return _super.call(this, bytes) || this;
class Vector256 extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes);
}

@@ -44,11 +28,11 @@ /**

*/
Vector256.fromParser = function (parser, hint) {
var bytesList = new binary_serializer_1.BytesList();
var bytes = hint !== null && hint !== void 0 ? hint : parser.size();
var hashes = bytes / 32;
for (var i = 0; i < hashes; i++) {
static fromParser(parser, hint) {
const bytesList = new binary_serializer_1.BytesList();
const bytes = hint !== null && hint !== void 0 ? hint : parser.size();
const hashes = bytes / 32;
for (let i = 0; i < hashes; i++) {
hash_256_1.Hash256.fromParser(parser).toBytesSink(bytesList);
}
return new Vector256(bytesList.toBytes());
};
}
/**

@@ -60,3 +44,3 @@ * Construct a Vector256 object from an array of hashes

*/
Vector256.from = function (value) {
static from(value) {
if (value instanceof Vector256) {

@@ -66,10 +50,10 @@ return value;

if (isStrings(value)) {
var bytesList_1 = new binary_serializer_1.BytesList();
value.forEach(function (hash) {
hash_256_1.Hash256.from(hash).toBytesSink(bytesList_1);
const bytesList = new binary_serializer_1.BytesList();
value.forEach((hash) => {
hash_256_1.Hash256.from(hash).toBytesSink(bytesList);
});
return new Vector256(bytesList_1.toBytes());
return new Vector256(bytesList.toBytes());
}
throw new Error('Cannot construct Vector256 from given value');
};
}
/**

@@ -80,8 +64,8 @@ * Return an Array of hex-strings represented by this.bytes

*/
Vector256.prototype.toJSON = function () {
toJSON() {
if (this.bytes.byteLength % 32 !== 0) {
throw new Error('Invalid bytes for Vector256');
}
var result = [];
for (var i = 0; i < this.bytes.byteLength; i += 32) {
const result = [];
for (let i = 0; i < this.bytes.byteLength; i += 32) {
result.push(this.bytes

@@ -93,6 +77,5 @@ .slice(i, i + 32)

return result;
};
return Vector256;
}(serialized_type_1.SerializedType));
}
}
exports.Vector256 = Vector256;
//# sourceMappingURL=vector-256.js.map
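
A minimal sketch of Vector256 above, assuming module paths follow this repo's test layout: the type is a plain concatenation of 32-byte hashes, so it round-trips an array of 64-character hex strings.

const { coreTypes } = require('../src/types')
const { Vector256 } = coreTypes

const hashes = [
  '42426C4D4F1009EE67080A9B7965B44656D7714D104A72F9B4369F97ABF044EE',
  '4C97EBA926031A7CF7D7B36FDE3ED66DDA5421192D63DE53FFB46E43B9DC8373',
]
const vector = Vector256.from(hashes)
console.log(vector.toJSON()) // the same two upper-case hex strings
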
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.XChainBridge = void 0;
var binary_parser_1 = require("../serdes/binary-parser");
var account_id_1 = require("./account-id");
var serialized_type_1 = require("./serialized-type");
var buffer_1 = require("buffer/");
var issue_1 = require("./issue");
const binary_parser_1 = require("../serdes/binary-parser");
const account_id_1 = require("./account-id");
const serialized_type_1 = require("./serialized-type");
const buffer_1 = require("buffer/");
const issue_1 = require("./issue");
/**

@@ -28,3 +13,3 @@ * Type guard for XChainBridgeObject

function isXChainBridgeObject(arg) {
var keys = Object.keys(arg).sort();
const keys = Object.keys(arg).sort();
return (keys.length === 4 &&

@@ -39,6 +24,5 @@ keys[0] === 'IssuingChainDoor' &&

*/
var XChainBridge = /** @class */ (function (_super) {
__extends(XChainBridge, _super);
function XChainBridge(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : XChainBridge.ZERO_XCHAIN_BRIDGE.bytes) || this;
class XChainBridge extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : XChainBridge.ZERO_XCHAIN_BRIDGE.bytes);
}

@@ -51,3 +35,3 @@ /**

*/
XChainBridge.from = function (value) {
static from(value) {
if (value instanceof XChainBridge) {

@@ -57,15 +41,15 @@ return value;

if (isXChainBridgeObject(value)) {
var bytes_1 = [];
this.TYPE_ORDER.forEach(function (item) {
var name = item.name, type = item.type;
const bytes = [];
this.TYPE_ORDER.forEach((item) => {
const { name, type } = item;
if (type === account_id_1.AccountID) {
bytes_1.push(buffer_1.Buffer.from([0x14]));
bytes.push(buffer_1.Buffer.from([0x14]));
}
var object = type.from(value[name]);
bytes_1.push(object.toBytes());
const object = type.from(value[name]);
bytes.push(object.toBytes());
});
return new XChainBridge(buffer_1.Buffer.concat(bytes_1));
return new XChainBridge(buffer_1.Buffer.concat(bytes));
}
throw new Error('Invalid type to construct a XChainBridge');
};
}
/**

@@ -77,6 +61,6 @@ * Read a XChainBridge from a BinaryParser

*/
XChainBridge.fromParser = function (parser) {
var bytes = [];
this.TYPE_ORDER.forEach(function (item) {
var type = item.type;
static fromParser(parser) {
const bytes = [];
this.TYPE_ORDER.forEach((item) => {
const { type } = item;
if (type === account_id_1.AccountID) {

@@ -86,7 +70,7 @@ parser.skip(1);

}
var object = type.fromParser(parser);
const object = type.fromParser(parser);
bytes.push(object.toBytes());
});
return new XChainBridge(buffer_1.Buffer.concat(bytes));
};
}
/**

@@ -97,30 +81,29 @@ * Get the JSON representation of this XChainBridge

*/
XChainBridge.prototype.toJSON = function () {
var parser = new binary_parser_1.BinaryParser(this.toString());
var json = {};
XChainBridge.TYPE_ORDER.forEach(function (item) {
var name = item.name, type = item.type;
toJSON() {
const parser = new binary_parser_1.BinaryParser(this.toString());
const json = {};
XChainBridge.TYPE_ORDER.forEach((item) => {
const { name, type } = item;
if (type === account_id_1.AccountID) {
parser.skip(1);
}
var object = type.fromParser(parser).toJSON();
const object = type.fromParser(parser).toJSON();
json[name] = object;
});
return json;
};
XChainBridge.ZERO_XCHAIN_BRIDGE = new XChainBridge(buffer_1.Buffer.concat([
buffer_1.Buffer.from([0x14]),
buffer_1.Buffer.alloc(40),
buffer_1.Buffer.from([0x14]),
buffer_1.Buffer.alloc(40),
]));
XChainBridge.TYPE_ORDER = [
{ name: 'LockingChainDoor', type: account_id_1.AccountID },
{ name: 'LockingChainIssue', type: issue_1.Issue },
{ name: 'IssuingChainDoor', type: account_id_1.AccountID },
{ name: 'IssuingChainIssue', type: issue_1.Issue },
];
return XChainBridge;
}(serialized_type_1.SerializedType));
}
}
exports.XChainBridge = XChainBridge;
XChainBridge.ZERO_XCHAIN_BRIDGE = new XChainBridge(buffer_1.Buffer.concat([
buffer_1.Buffer.from([0x14]),
buffer_1.Buffer.alloc(40),
buffer_1.Buffer.from([0x14]),
buffer_1.Buffer.alloc(40),
]));
XChainBridge.TYPE_ORDER = [
{ name: 'LockingChainDoor', type: account_id_1.AccountID },
{ name: 'LockingChainIssue', type: issue_1.Issue },
{ name: 'IssuingChainDoor', type: account_id_1.AccountID },
{ name: 'IssuingChainIssue', type: issue_1.Issue },
];
//# sourceMappingURL=xchain-bridge.js.map
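
A sketch of the XChainBridge type above, serialized in the fixed TYPE_ORDER. The door accounts are borrowed from the signing tests later in this diff; the { currency: 'XRP' } shape for the Issue fields and the module paths (this repo's test layout) are assumptions.

const { coreTypes } = require('../src/types')
const { XChainBridge } = coreTypes

const bridge = XChainBridge.from({
  LockingChainDoor: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',
  LockingChainIssue: { currency: 'XRP' },
  IssuingChainDoor: 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN',
  IssuingChainIssue: { currency: 'XRP' },
})
console.log(bridge.toJSON()) // the same four fields, emitted in TYPE_ORDER
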
{
"name": "ripple-binary-codec",
"version": "1.6.0-beta.0",
"version": "1.6.0",
"description": "XRP Ledger binary codec",

@@ -17,6 +17,6 @@ "files": [

"big-integer": "^1.6.48",
"buffer": "5.6.0",
"buffer": "6.0.3",
"create-hash": "^1.2.0",
"decimal.js": "^10.2.0",
"ripple-address-codec": "^4.2.4"
"ripple-address-codec": "^4.3.0"
},

@@ -26,4 +26,4 @@ "scripts": {

"clean": "rm -rf ./dist && rm -rf tsconfig.tsbuildinfo",
"prepare": "npm test",
"test": "npm run build && jest",
"prepublishOnly": "npm test",
"test": "npm run build && jest --verbose false --silent=false ./test/*.test.js",
"lint": "eslint . --ext .ts --ext .test.js"

@@ -43,5 +43,5 @@ },

"engines": {
"node": ">=10.22.0"
"node": ">= 10"
},
"gitHead": "409bafd29a26e20dc8195f2b723c788332aa4e48"
"gitHead": "3791c6292cee7e6d6ff46a17fcd26aae2d323439"
}
const { loadFixture } = require('./utils')
const { coreTypes } = require('../dist/types')
const { coreTypes } = require('../src/types')
const { Amount } = coreTypes

@@ -4,0 +4,0 @@ const fixtures = loadFixture('data-driven-tests.json')

const fixtures = require('./fixtures/codec-fixtures.json')
const { decode, encode, decodeLedgerData } = require('../dist')
const { decode, encode, decodeLedgerData } = require('../src')

@@ -4,0 +4,0 @@ function json(object) {

@@ -1,12 +0,12 @@

const { coreTypes } = require('../dist/types')
const { coreTypes } = require('../src/types')
const Decimal = require('decimal.js')
const { encodeAccountID } = require('ripple-address-codec')
const { binary } = require('../dist/coretypes')
const { binary } = require('../src/coretypes')
const { Amount, Hash160 } = coreTypes
const { makeParser, readJSON } = binary
const { Field, TransactionType } = require('./../dist/enums')
const { Field, TransactionType } = require('./../src/enums')
const { parseHexOnly, hexOnly, loadFixture } = require('./utils')
const fixtures = loadFixture('data-driven-tests.json')
const { BytesList } = require('../dist/serdes/binary-serializer')
const { BytesList } = require('../src/serdes/binary-serializer')
const { Buffer } = require('buffer/')

@@ -13,0 +13,0 @@

@@ -1,5 +0,5 @@

const { binary } = require('../dist/coretypes')
const { encode, decode } = require('../dist')
const { binary } = require('../src/coretypes')
const { encode, decode } = require('../src')
const { makeParser, BytesList, BinarySerializer } = binary
const { coreTypes } = require('../dist/types')
const { coreTypes } = require('../src/types')
const { UInt8, UInt16, UInt32, UInt64, STObject } = coreTypes

@@ -6,0 +6,0 @@ const bigInt = require('big-integer')

@@ -1,5 +0,31 @@

const { coreTypes } = require('../dist/types')
const { Hash160, Hash256, AccountID, Currency } = coreTypes
const { coreTypes } = require('../src/types')
const { Hash128, Hash160, Hash256, AccountID, Currency } = coreTypes
const { Buffer } = require('buffer/')
describe('Hash128', function () {
test('has a static width member', function () {
expect(Hash128.width).toBe(16)
})
test('can be unset', function () {
const h1 = Hash128.from('')
expect(h1.toJSON()).toBe('')
})
test('can be compared against another', function () {
const h1 = Hash128.from('100000000000000000000000000000000')
const h2 = Hash128.from('200000000000000000000000000000000')
const h3 = Hash128.from('000000000000000000000000000000003')
expect(h1.lt(h2)).toBe(true)
expect(h3.lt(h2)).toBe(true)
expect(h2.gt(h1)).toBe(true)
expect(h1.gt(h3)).toBe(true)
})
test('throws when constructed from invalid hash length', () => {
expect(() => Hash128.from('1000000000000000000000000000000')).toThrow(
'Invalid Hash length 15',
)
expect(() => Hash128.from('10000000000000000000000000000000000')).toThrow(
'Invalid Hash length 17',
)
})
})
describe('Hash160', function () {

@@ -6,0 +32,0 @@ test('has a static width member', function () {

@@ -6,3 +6,3 @@ const { loadFixture } = require('./utils')

accountStateHash,
} = require('../dist/ledger-hashes')
} = require('../src/ledger-hashes')

@@ -9,0 +9,0 @@ describe('Ledger Hashes', function () {

@@ -1,2 +0,2 @@

const { encode, decode } = require('../dist')
const { encode, decode } = require('../src')

@@ -3,0 +3,0 @@ let str =

@@ -1,2 +0,2 @@

const { encode, decode } = require('../dist')
const { encode, decode } = require('../src')

@@ -3,0 +3,0 @@ let json = {

@@ -1,2 +0,2 @@

const { quality } = require('../dist/coretypes')
const { quality } = require('../src/coretypes')

@@ -3,0 +3,0 @@ describe('Quality encode/decode', function () {

@@ -1,4 +0,4 @@

const { ShaMap } = require('../dist/shamap.js')
const { binary, HashPrefix } = require('../dist/coretypes')
const { coreTypes } = require('../dist/types')
const { ShaMap } = require('../src/shamap')
const { binary, HashPrefix } = require('../src/coretypes')
const { coreTypes } = require('../src/types')
const { loadFixture } = require('./utils')

@@ -5,0 +5,0 @@ const { Buffer } = require('buffer/')

@@ -6,4 +6,7 @@ const { throws } = require('assert')

encodeForMultisigning,
} = require('../dist')
} = require('../src')
const { XrplDefinitions } = require('../src/enums/xrpl-definitions')
const normalDefinitions = require('../src/enums/definitions.json')
const tx_json = {

@@ -71,2 +74,49 @@ Account: 'r9LqNeG6qHxjeUocjvVki2XR35weJ9mZgQ',

test('can create single signing blobs with modified type', function () {
const customPaymentDefinitions = JSON.parse(
JSON.stringify(normalDefinitions),
)
customPaymentDefinitions.TRANSACTION_TYPES.Payment = 31
const newDefs = new XrplDefinitions(customPaymentDefinitions)
const actual = encodeForSigning(tx_json, newDefs)
expect(actual).toBe(
[
'53545800', // signingPrefix
// TransactionType
'12',
'001F',
// Flags
'22',
'80000000',
// Sequence
'24',
'00000001',
// Amount
'61',
// native amount
'40000000000003E8',
// Fee
'68',
// native amount
'400000000000000A',
// SigningPubKey
'73',
// VLLength
'21',
'ED5F5AC8B98974A3CA843326D9B88CEBD0560177B973EE0B149F782CFAA06DC66A',
// Account
'81',
// VLLength
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
// Destination
'83',
// VLLength
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
].join(''),
)
})
test('can fail gracefully for invalid TransactionType', function () {

@@ -83,3 +133,3 @@ const invalidTransactionType = {

const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = Object.assign({}, tx_json, { SigningPubKey: '' })
const signingJson = { ...tx_json, SigningPubKey: '' }
const actual = encodeForMultisigning(signingJson, signingAccount)

@@ -126,2 +176,54 @@ expect(actual).toBe(

})
test('can create multi signing blobs with custom definitions', function () {
const customPaymentDefinitions = JSON.parse(
JSON.stringify(normalDefinitions),
)
customPaymentDefinitions.TRANSACTION_TYPES.Payment = 31
const newDefs = new XrplDefinitions(customPaymentDefinitions)
const signingAccount = 'rJZdUusLDtY9NEsGea7ijqhVrXv98rYBYN'
const signingJson = { ...tx_json, SigningPubKey: '' }
const actual = encodeForMultisigning(signingJson, signingAccount, newDefs)
expect(actual).toBe(
[
'534D5400', // signingPrefix
// TransactionType
'12',
'001F',
// Flags
'22',
'80000000',
// Sequence
'24',
'00000001',
// Amount
'61',
// native amount
'40000000000003E8',
// Fee
'68',
// native amount
'400000000000000A',
// SigningPubKey
'73',
// VLLength
'00',
// '',
// Account
'81',
// VLLength
'14',
'5B812C9D57731E27A2DA8B1830195F88EF32A3B6',
// Destination
'83',
// VLLength
'14',
'B5F762798A53D543A014CAF8B297CFF8F2F937E8',
// signingAccount suffix
'C0A5ABEF242802EFED4B041E8F2D4A8CC86AE3D1',
].join(''),
)
})
test('can create claim blob', function () {

@@ -128,0 +230,0 @@ const channel =

@@ -1,2 +0,2 @@

const { encode, decode } = require('../dist')
const { encode, decode } = require('../src')

@@ -3,0 +3,0 @@ // Notice: no Amount or Fee

@@ -1,3 +0,3 @@

const { coreTypes } = require('../dist/types')
const { SerializedType } = require('../dist/types/serialized-type')
const { coreTypes } = require('../src/types')
const { SerializedType } = require('../src/types/serialized-type')

@@ -4,0 +4,0 @@ describe('SerializedType interfaces', () => {

@@ -1,5 +0,5 @@

const { coreTypes } = require('../dist/types')
const { coreTypes } = require('../src/types')
const { UInt8, UInt64 } = coreTypes
const { encode } = require('../dist')
const { encode } = require('../src')

@@ -6,0 +6,0 @@ const binary =

@@ -1,2 +0,2 @@

const { encode, decode } = require('./../dist/index')
const { encode, decode } = require('./../src/index')
const fixtures = require('./fixtures/x-codec-fixtures.json')

@@ -3,0 +3,0 @@
