ripple-binary-codec - npm Package Compare versions

Comparing version 1.0.2 to 1.0.3-mpt-beta

dist/enums/bytes.d.ts


dist/binary.d.ts

@@ -1,14 +0,16 @@

/// <reference types="node" />
import { BinaryParser } from "./serdes/binary-parser";
import { AccountID } from "./types/account-id";
import { BinarySerializer, BytesList } from "./serdes/binary-serializer";
import { sha512Half, transactionID } from "./hashes";
import { JsonObject } from "./types/serialized-type";
import { BinaryParser } from './serdes/binary-parser';
import { AccountID } from './types/account-id';
import { BinarySerializer, BytesList } from './serdes/binary-serializer';
import { sha512Half, transactionID } from './hashes';
import { type XrplDefinitionsBase } from './enums';
import { JsonObject } from './types/serialized-type';
/**
* Construct a BinaryParser
*
* @param bytes hex-string to construct BinaryParser from
* @returns A BinaryParser
* @param bytes hex-string or Uint8Array to construct BinaryParser from
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns BinaryParser
*/
declare const makeParser: (bytes: string) => BinaryParser;
declare const makeParser: (bytes: string | Uint8Array, definitions?: XrplDefinitionsBase) => BinaryParser;
/**

@@ -18,5 +20,7 @@ * Parse BinaryParser into JSON

* @param parser BinaryParser object
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON for the bytes in the BinaryParser
*/
declare const readJSON: (parser: BinaryParser) => JsonObject;
declare const readJSON: (parser: BinaryParser, definitions?: XrplDefinitionsBase) => JsonObject;
/**

@@ -26,5 +30,7 @@ * Parse a hex-string into its JSON interpretation

* @param bytes hex-string to parse into JSON
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON
*/
declare const binaryToJSON: (bytes: string) => JsonObject;
declare const binaryToJSON: (bytes: string, definitions?: XrplDefinitionsBase) => JsonObject;
/**

@@ -36,5 +42,6 @@ * Interface for passing parameters to SerializeObject

interface OptionObject {
prefix?: Buffer;
suffix?: Buffer;
prefix?: Uint8Array;
suffix?: Uint8Array;
signingFieldsOnly?: boolean;
definitions?: XrplDefinitionsBase;
}

@@ -45,6 +52,6 @@ /**

* @param object JSON object to serialize
* @param opts options for serializing, including optional prefix, suffix, and signingFieldOnly
* @returns A Buffer containing the serialized object
* @param opts options for serializing, including optional prefix, suffix, signingFieldOnly, and definitions
* @returns A Uint8Array containing the serialized object
*/
declare function serializeObject(object: JsonObject, opts?: OptionObject): Buffer;
declare function serializeObject(object: JsonObject, opts?: OptionObject): Uint8Array;
/**

@@ -55,5 +62,8 @@ * Serialize an object for signing

* @param prefix Prefix bytes to put before the serialized object
* @returns A Buffer with the serialized object
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns A Uint8Array with the serialized object
*/
declare function signingData(transaction: JsonObject, prefix?: Buffer): Buffer;
declare function signingData(transaction: JsonObject, prefix?: Uint8Array, opts?: {
definitions?: XrplDefinitionsBase;
}): Uint8Array;
/**

@@ -70,5 +80,6 @@ * Interface describing fields required for a Claim

* @param claim A claim object to serialize
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the serialized object with appropriate prefix
*/
declare function signingClaimData(claim: ClaimObject): Buffer;
declare function signingClaimData(claim: ClaimObject): Uint8Array;
/**

@@ -79,5 +90,8 @@ * Serialize a transaction object for multiSigning

* @param signingAccount Account to sign the transaction with
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns serialized transaction with appropriate prefix and suffix
*/
declare function multiSigningData(transaction: JsonObject, signingAccount: string | AccountID): Buffer;
declare function multiSigningData(transaction: JsonObject, signingAccount: string | AccountID, opts?: {
definitions: XrplDefinitionsBase;
}): Uint8Array;
export { BinaryParser, BinarySerializer, BytesList, ClaimObject, makeParser, serializeObject, readJSON, multiSigningData, signingData, signingClaimData, binaryToJSON, sha512Half, transactionID, };

@@ -5,19 +5,23 @@ "use strict";

exports.transactionID = exports.sha512Half = exports.binaryToJSON = exports.signingClaimData = exports.signingData = exports.multiSigningData = exports.readJSON = exports.serializeObject = exports.makeParser = exports.BytesList = exports.BinarySerializer = exports.BinaryParser = void 0;
var types_1 = require("./types");
var binary_parser_1 = require("./serdes/binary-parser");
const utils_1 = require("@xrplf/isomorphic/utils");
const types_1 = require("./types");
const binary_parser_1 = require("./serdes/binary-parser");
Object.defineProperty(exports, "BinaryParser", { enumerable: true, get: function () { return binary_parser_1.BinaryParser; } });
var hash_prefixes_1 = require("./hash-prefixes");
var binary_serializer_1 = require("./serdes/binary-serializer");
const hash_prefixes_1 = require("./hash-prefixes");
const binary_serializer_1 = require("./serdes/binary-serializer");
Object.defineProperty(exports, "BinarySerializer", { enumerable: true, get: function () { return binary_serializer_1.BinarySerializer; } });
Object.defineProperty(exports, "BytesList", { enumerable: true, get: function () { return binary_serializer_1.BytesList; } });
var hashes_1 = require("./hashes");
const hashes_1 = require("./hashes");
Object.defineProperty(exports, "sha512Half", { enumerable: true, get: function () { return hashes_1.sha512Half; } });
Object.defineProperty(exports, "transactionID", { enumerable: true, get: function () { return hashes_1.transactionID; } });
const enums_1 = require("./enums");
/**
* Construct a BinaryParser
*
* @param bytes hex-string to construct BinaryParser from
* @returns A BinaryParser
* @param bytes hex-string or Uint8Array to construct BinaryParser from
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns BinaryParser
*/
var makeParser = function (bytes) { return new binary_parser_1.BinaryParser(bytes); };
const makeParser = (bytes, definitions) => new binary_parser_1.BinaryParser(bytes instanceof Uint8Array ? (0, utils_1.bytesToHex)(bytes) : bytes, definitions);
exports.makeParser = makeParser;

@@ -28,7 +32,7 @@ /**

* @param parser BinaryParser object
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON for the bytes in the BinaryParser
*/
var readJSON = function (parser) {
return parser.readType(types_1.coreTypes.STObject).toJSON();
};
const readJSON = (parser, definitions = enums_1.DEFAULT_DEFINITIONS) => parser.readType(types_1.coreTypes.STObject).toJSON(definitions);
exports.readJSON = readJSON;

@@ -39,5 +43,7 @@ /**

* @param bytes hex-string to parse into JSON
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns JSON
*/
var binaryToJSON = function (bytes) { return readJSON(makeParser(bytes)); };
const binaryToJSON = (bytes, definitions) => readJSON(makeParser(bytes, definitions), definitions);
exports.binaryToJSON = binaryToJSON;

@@ -48,16 +54,17 @@ /**

* @param object JSON object to serialize
* @param opts options for serializing, including optional prefix, suffix, and signingFieldOnly
* @returns A Buffer containing the serialized object
* @param opts options for serializing, including optional prefix, suffix, signingFieldOnly, and definitions
* @returns A Uint8Array containing the serialized object
*/
function serializeObject(object, opts) {
if (opts === void 0) { opts = {}; }
var prefix = opts.prefix, suffix = opts.suffix, _a = opts.signingFieldsOnly, signingFieldsOnly = _a === void 0 ? false : _a;
var bytesList = new binary_serializer_1.BytesList();
function serializeObject(object, opts = {}) {
const { prefix, suffix, signingFieldsOnly = false, definitions } = opts;
const bytesList = new binary_serializer_1.BytesList();
if (prefix) {
bytesList.put(prefix);
}
var filter = signingFieldsOnly
? function (f) { return f.isSigningField; }
const filter = signingFieldsOnly
? (f) => f.isSigningField
: undefined;
types_1.coreTypes.STObject.from(object, filter).toBytesSink(bytesList);
types_1.coreTypes.STObject
.from(object, filter, definitions)
.toBytesSink(bytesList);
if (suffix) {

@@ -74,7 +81,11 @@ bytesList.put(suffix);

* @param prefix Prefix bytes to put before the serialized object
* @returns A Buffer with the serialized object
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns A Uint8Array with the serialized object
*/
function signingData(transaction, prefix) {
if (prefix === void 0) { prefix = hash_prefixes_1.HashPrefix.transactionSig; }
return serializeObject(transaction, { prefix: prefix, signingFieldsOnly: true });
function signingData(transaction, prefix = hash_prefixes_1.HashPrefix.transactionSig, opts = {}) {
return serializeObject(transaction, {
prefix,
signingFieldsOnly: true,
definitions: opts.definitions,
});
}

@@ -86,9 +97,11 @@ exports.signingData = signingData;

* @param claim A claim object to serialize
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the serialized object with appropriate prefix
*/
function signingClaimData(claim) {
var prefix = hash_prefixes_1.HashPrefix.paymentChannelClaim;
var channel = types_1.coreTypes.Hash256.from(claim.channel).toBytes();
var amount = types_1.coreTypes.UInt64.from(BigInt(claim.amount)).toBytes();
var bytesList = new binary_serializer_1.BytesList();
const num = BigInt(String(claim.amount));
const prefix = hash_prefixes_1.HashPrefix.paymentChannelClaim;
const channel = types_1.coreTypes.Hash256.from(claim.channel).toBytes();
const amount = types_1.coreTypes.UInt64.from(num).toBytes();
const bytesList = new binary_serializer_1.BytesList();
bytesList.put(prefix);

@@ -105,11 +118,15 @@ bytesList.put(channel);

* @param signingAccount Account to sign the transaction with
* @param opts.definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns serialized transaction with appropriate prefix and suffix
*/
function multiSigningData(transaction, signingAccount) {
var prefix = hash_prefixes_1.HashPrefix.transactionMultiSig;
var suffix = types_1.coreTypes.AccountID.from(signingAccount).toBytes();
function multiSigningData(transaction, signingAccount, opts = {
definitions: enums_1.DEFAULT_DEFINITIONS,
}) {
const prefix = hash_prefixes_1.HashPrefix.transactionMultiSig;
const suffix = types_1.coreTypes.AccountID.from(signingAccount).toBytes();
return serializeObject(transaction, {
prefix: prefix,
suffix: suffix,
prefix,
suffix,
signingFieldsOnly: true,
definitions: opts.definitions,
});

@@ -116,0 +133,0 @@ }
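To illustrate the API change in dist/binary above: in 1.0.3-mpt-beta, serializeObject returns a Uint8Array instead of a Buffer, and makeParser accepts a Uint8Array as well as a hex string. A minimal TypeScript sketch, assuming the built dist/ files are importable by path; the transaction field values are illustrative only, not a complete signable transaction:

import { makeParser, readJSON, serializeObject } from 'ripple-binary-codec/dist/binary'

// Illustrative, partial transaction JSON
const tx = {
  TransactionType: 'Payment',
  Account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
  Fee: '10',
  Sequence: 1,
}

const bytes = serializeObject(tx) // Uint8Array in the beta (Buffer in 1.0.2)
const parser = makeParser(bytes)  // now accepts Uint8Array or hex string, plus an optional definitions argument
const json = readJSON(parser)     // JSON round-trip of the fields above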

@@ -1,9 +0,9 @@

import { Field, TransactionType, LedgerEntryType, Type, TransactionResult } from "./enums";
import * as types from "./types";
import * as binary from "./binary";
import { ShaMap } from "./shamap";
import * as ledgerHashes from "./ledger-hashes";
import * as hashes from "./hashes";
import { quality } from "./quality";
import { HashPrefix } from "./hash-prefixes";
export { hashes, binary, ledgerHashes, Field, TransactionType, LedgerEntryType, Type, TransactionResult, quality, HashPrefix, ShaMap, types, };
import { DEFAULT_DEFINITIONS, Field, TransactionType, LedgerEntryType, Type, TransactionResult } from './enums';
import * as types from './types';
import * as binary from './binary';
import { ShaMap } from './shamap';
import * as ledgerHashes from './ledger-hashes';
import * as hashes from './hashes';
import { quality } from './quality';
import { HashPrefix } from './hash-prefixes';
export { hashes, binary, ledgerHashes, DEFAULT_DEFINITIONS, Field, TransactionType, LedgerEntryType, Type, TransactionResult, quality, HashPrefix, ShaMap, types, };
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.types = exports.ShaMap = exports.HashPrefix = exports.quality = exports.TransactionResult = exports.Type = exports.LedgerEntryType = exports.TransactionType = exports.Field = exports.ledgerHashes = exports.binary = exports.hashes = void 0;
var enums_1 = require("./enums");
exports.types = exports.ShaMap = exports.HashPrefix = exports.quality = exports.TransactionResult = exports.Type = exports.LedgerEntryType = exports.TransactionType = exports.Field = exports.DEFAULT_DEFINITIONS = exports.ledgerHashes = exports.binary = exports.hashes = void 0;
const enums_1 = require("./enums");
Object.defineProperty(exports, "DEFAULT_DEFINITIONS", { enumerable: true, get: function () { return enums_1.DEFAULT_DEFINITIONS; } });
Object.defineProperty(exports, "Field", { enumerable: true, get: function () { return enums_1.Field; } });

@@ -10,16 +34,16 @@ Object.defineProperty(exports, "TransactionType", { enumerable: true, get: function () { return enums_1.TransactionType; } });

Object.defineProperty(exports, "TransactionResult", { enumerable: true, get: function () { return enums_1.TransactionResult; } });
var types = require("./types");
const types = __importStar(require("./types"));
exports.types = types;
var binary = require("./binary");
const binary = __importStar(require("./binary"));
exports.binary = binary;
var shamap_1 = require("./shamap");
const shamap_1 = require("./shamap");
Object.defineProperty(exports, "ShaMap", { enumerable: true, get: function () { return shamap_1.ShaMap; } });
var ledgerHashes = require("./ledger-hashes");
const ledgerHashes = __importStar(require("./ledger-hashes"));
exports.ledgerHashes = ledgerHashes;
var hashes = require("./hashes");
const hashes = __importStar(require("./hashes"));
exports.hashes = hashes;
var quality_1 = require("./quality");
const quality_1 = require("./quality");
Object.defineProperty(exports, "quality", { enumerable: true, get: function () { return quality_1.quality; } });
var hash_prefixes_1 = require("./hash-prefixes");
const hash_prefixes_1 = require("./hash-prefixes");
Object.defineProperty(exports, "HashPrefix", { enumerable: true, get: function () { return hash_prefixes_1.HashPrefix; } });
//# sourceMappingURL=coretypes.js.map

@@ -1,46 +0,12 @@

/// <reference types="node" />
import { SerializedType } from "../types/serialized-type";
declare class Bytes {
readonly name: string;
readonly ordinal: number;
readonly ordinalWidth: number;
readonly bytes: Uint8Array;
constructor(name: string, ordinal: number, ordinalWidth: number);
toJSON(): string;
toBytesSink(sink: any): void;
toBytes(): Uint8Array;
}
declare class BytesLookup {
readonly ordinalWidth: number;
constructor(types: Record<string, number>, ordinalWidth: number);
from(value: Bytes | string): Bytes;
fromParser(parser: any): Bytes;
}
interface FieldInfo {
nth: number;
isVLEncoded: boolean;
isSerialized: boolean;
isSigningField: boolean;
type: string;
}
interface FieldInstance {
readonly nth: number;
readonly isVariableLengthEncoded: boolean;
readonly isSerialized: boolean;
readonly isSigningField: boolean;
readonly type: Bytes;
readonly ordinal: number;
readonly name: string;
readonly header: Buffer;
readonly associatedType: typeof SerializedType;
}
declare class FieldLookup {
constructor(fields: Array<[string, FieldInfo]>);
fromString(value: string): FieldInstance;
}
declare const Type: BytesLookup;
declare const LedgerEntryType: BytesLookup;
declare const TransactionType: BytesLookup;
declare const TransactionResult: BytesLookup;
declare const Field: FieldLookup;
export { Field, FieldInstance, Type, LedgerEntryType, TransactionResult, TransactionType, };
import { XrplDefinitionsBase, FieldInstance, Bytes } from './xrpl-definitions-base';
/**
* By default, coreTypes from the `types` folder is where known type definitions are initialized to avoid import cycles.
*/
declare const DEFAULT_DEFINITIONS: XrplDefinitionsBase;
declare const Type: import("./bytes").BytesLookup;
declare const LedgerEntryType: import("./bytes").BytesLookup;
declare const TransactionType: import("./bytes").BytesLookup;
declare const TransactionResult: import("./bytes").BytesLookup;
declare const Field: import("./field").FieldLookup;
declare const TRANSACTION_TYPES: string[];
export { Bytes, XrplDefinitionsBase, DEFAULT_DEFINITIONS, Field, FieldInstance, Type, LedgerEntryType, TransactionResult, TransactionType, TRANSACTION_TYPES, };
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TransactionType = exports.TransactionResult = exports.LedgerEntryType = exports.Type = exports.Field = void 0;
var enums = require("./definitions.json");
var serialized_type_1 = require("../types/serialized-type");
var TYPE_WIDTH = 2;
var LEDGER_ENTRY_WIDTH = 2;
var TRANSACTION_TYPE_WIDTH = 2;
var TRANSACTION_RESULT_WIDTH = 1;
/*
* @brief: Serialize a field based on type_code and Field.nth
exports.TRANSACTION_TYPES = exports.TransactionType = exports.TransactionResult = exports.LedgerEntryType = exports.Type = exports.Field = exports.DEFAULT_DEFINITIONS = exports.XrplDefinitionsBase = exports.Bytes = void 0;
const definitions_json_1 = __importDefault(require("./definitions.json"));
const xrpl_definitions_base_1 = require("./xrpl-definitions-base");
Object.defineProperty(exports, "XrplDefinitionsBase", { enumerable: true, get: function () { return xrpl_definitions_base_1.XrplDefinitionsBase; } });
Object.defineProperty(exports, "Bytes", { enumerable: true, get: function () { return xrpl_definitions_base_1.Bytes; } });
/**
* By default, coreTypes from the `types` folder is where known type definitions are initialized to avoid import cycles.
*/
function fieldHeader(type, nth) {
var header = [];
if (type < 16) {
if (nth < 16) {
header.push((type << 4) | nth);
}
else {
header.push(type << 4, nth);
}
}
else if (nth < 16) {
header.push(nth, type);
}
else {
header.push(0, type, nth);
}
return Buffer.from(header);
}
/*
* @brief: Bytes, name, and ordinal representing one type, ledger_type, transaction type, or result
*/
var Bytes = /** @class */ (function () {
function Bytes(name, ordinal, ordinalWidth) {
this.name = name;
this.ordinal = ordinal;
this.ordinalWidth = ordinalWidth;
this.bytes = Buffer.alloc(ordinalWidth);
for (var i = 0; i < ordinalWidth; i++) {
this.bytes[ordinalWidth - i - 1] = (ordinal >>> (i * 8)) & 0xff;
}
}
Bytes.prototype.toJSON = function () {
return this.name;
};
Bytes.prototype.toBytesSink = function (sink) {
sink.put(this.bytes);
};
Bytes.prototype.toBytes = function () {
return this.bytes;
};
return Bytes;
}());
/*
* @brief: Collection of Bytes objects, mapping bidirectionally
*/
var BytesLookup = /** @class */ (function () {
function BytesLookup(types, ordinalWidth) {
var _this = this;
this.ordinalWidth = ordinalWidth;
Object.entries(types).forEach(function (_a) {
var k = _a[0], v = _a[1];
_this[k] = new Bytes(k, v, ordinalWidth);
_this[v.toString()] = _this[k];
});
}
BytesLookup.prototype.from = function (value) {
return value instanceof Bytes ? value : this[value];
};
BytesLookup.prototype.fromParser = function (parser) {
return this.from(parser.readUIntN(this.ordinalWidth).toString());
};
return BytesLookup;
}());
function buildField(_a) {
var name = _a[0], info = _a[1];
var typeOrdinal = enums.TYPES[info.type];
var field = fieldHeader(typeOrdinal, info.nth);
return {
name: name,
nth: info.nth,
isVariableLengthEncoded: info.isVLEncoded,
isSerialized: info.isSerialized,
isSigningField: info.isSigningField,
ordinal: (typeOrdinal << 16) | info.nth,
type: new Bytes(info.type, typeOrdinal, TYPE_WIDTH),
header: field,
associatedType: serialized_type_1.SerializedType,
};
}
/*
* @brief: The collection of all fields as defined in definitions.json
*/
var FieldLookup = /** @class */ (function () {
function FieldLookup(fields) {
var _this = this;
fields.forEach(function (_a) {
var k = _a[0], v = _a[1];
_this[k] = buildField([k, v]);
_this[_this[k].ordinal.toString()] = _this[k];
});
}
FieldLookup.prototype.fromString = function (value) {
return this[value];
};
return FieldLookup;
}());
var Type = new BytesLookup(enums.TYPES, TYPE_WIDTH);
const DEFAULT_DEFINITIONS = new xrpl_definitions_base_1.XrplDefinitionsBase(definitions_json_1.default, {});
exports.DEFAULT_DEFINITIONS = DEFAULT_DEFINITIONS;
const Type = DEFAULT_DEFINITIONS.type;
exports.Type = Type;
var LedgerEntryType = new BytesLookup(enums.LEDGER_ENTRY_TYPES, LEDGER_ENTRY_WIDTH);
const LedgerEntryType = DEFAULT_DEFINITIONS.ledgerEntryType;
exports.LedgerEntryType = LedgerEntryType;
var TransactionType = new BytesLookup(enums.TRANSACTION_TYPES, TRANSACTION_TYPE_WIDTH);
const TransactionType = DEFAULT_DEFINITIONS.transactionType;
exports.TransactionType = TransactionType;
var TransactionResult = new BytesLookup(enums.TRANSACTION_RESULTS, TRANSACTION_RESULT_WIDTH);
const TransactionResult = DEFAULT_DEFINITIONS.transactionResult;
exports.TransactionResult = TransactionResult;
var Field = new FieldLookup(enums.FIELDS);
const Field = DEFAULT_DEFINITIONS.field;
exports.Field = Field;
/*
* @brief: All valid transaction types
*/
const TRANSACTION_TYPES = DEFAULT_DEFINITIONS.transactionNames;
exports.TRANSACTION_TYPES = TRANSACTION_TYPES;
//# sourceMappingURL=index.js.map
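The effect of this refactor is that the Field, Type, TransactionType, LedgerEntryType and TransactionResult lookups are now derived from a single DEFAULT_DEFINITIONS object, which the beta also re-exports from the package root. A small illustrative sketch against those new exports:

import { DEFAULT_DEFINITIONS, TRANSACTION_TYPES } from 'ripple-binary-codec'

// The lookups that used to be standalone constants now hang off the definitions object
const payment = DEFAULT_DEFINITIONS.transactionType.from('Payment')
console.log(payment.toJSON())                      // 'Payment'
console.log(TRANSACTION_TYPES.includes('Payment')) // true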

@@ -5,3 +5,3 @@ "use strict";

*/
var input = {
const input = {
temBAD_SEND_XRP_PATHS: -283,

@@ -97,10 +97,10 @@ temBAD_SEQUENCE: -282,

};
var startingFromTemBADSENDXRPPATHS = -284;
var startingFromTefFAILURE = -199;
var startingFromTerRETRY = -99;
var tesSUCCESS = 0;
var startingFromTecCLAIM = 100;
var startingFromTecDIRFULL = 121;
var previousKey = "tem";
Object.keys(input).forEach(function (key) {
let startingFromTemBADSENDXRPPATHS = -284;
let startingFromTefFAILURE = -199;
let startingFromTerRETRY = -99;
const tesSUCCESS = 0;
let startingFromTecCLAIM = 100;
const startingFromTecDIRFULL = 121;
let previousKey = 'tem';
Object.keys(input).forEach((key) => {
if (key.substring(0, 3) !== previousKey.substring(0, 3)) {

@@ -110,21 +110,21 @@ console.log();

}
if (key.substring(0, 3) === "tem") {
console.log(" \"" + key + "\": " + startingFromTemBADSENDXRPPATHS++ + ",");
if (key.substring(0, 3) === 'tem') {
console.log(` "${key}": ${startingFromTemBADSENDXRPPATHS++},`);
}
else if (key.substring(0, 3) === "tef") {
console.log(" \"" + key + "\": " + startingFromTefFAILURE++ + ",");
else if (key.substring(0, 3) === 'tef') {
console.log(` "${key}": ${startingFromTefFAILURE++},`);
}
else if (key.substring(0, 3) === "ter") {
console.log(" \"" + key + "\": " + startingFromTerRETRY++ + ",");
else if (key.substring(0, 3) === 'ter') {
console.log(` "${key}": ${startingFromTerRETRY++},`);
}
else if (key.substring(0, 3) === "tes") {
console.log(" \"" + key + "\": " + tesSUCCESS + ",");
else if (key.substring(0, 3) === 'tes') {
console.log(` "${key}": ${tesSUCCESS},`);
}
else if (key.substring(0, 3) === "tec") {
if (key === "tecDIR_FULL") {
else if (key.substring(0, 3) === 'tec') {
if (key === 'tecDIR_FULL') {
startingFromTecCLAIM = startingFromTecDIRFULL;
}
console.log(" \"" + key + "\": " + startingFromTecCLAIM++ + ",");
console.log(` "${key}": ${startingFromTecCLAIM++},`);
}
});
//# sourceMappingURL=utils-renumber.js.map

@@ -1,6 +0,5 @@

/// <reference types="node" />
/**
* Maps HashPrefix names to their byte representation
*/
declare const HashPrefix: Record<string, Buffer>;
declare const HashPrefix: Record<string, Uint8Array>;
export { HashPrefix };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HashPrefix = void 0;
const utils_1 = require("./utils");
/**
* Write a 32 bit integer to a Buffer
* Write a 32 bit integer to a Uint8Array
*
* @param uint32 32 bit integer to write to buffer
* @returns a buffer with the bytes representation of uint32
* @param uint32 32 bit integer to write to Uint8Array
* @returns a Uint8Array with the bytes representation of uint32
*/
function bytes(uint32) {
var result = Buffer.alloc(4);
result.writeUInt32BE(uint32);
const result = new Uint8Array(4);
(0, utils_1.writeUInt32BE)(result, uint32, 0);
return result;

@@ -18,3 +19,3 @@ }

*/
var HashPrefix = {
const HashPrefix = {
transactionID: bytes(0x54584e00),

@@ -21,0 +22,0 @@ // transaction plus metadata
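The HashPrefix values are now plain Uint8Arrays built with an isomorphic writeUInt32BE helper rather than Buffer.writeUInt32BE. The same big-endian write can be expressed with a DataView; this is a sketch of the equivalent logic, not the library's own helper:

// Write a 32-bit integer big-endian into a fresh Uint8Array (equivalent logic, not the library helper)
function bytes(uint32: number): Uint8Array {
  const result = new Uint8Array(4)
  new DataView(result.buffer).setUint32(0, uint32, false) // false = big-endian
  return result
}

console.log(bytes(0x54584e00)) // HashPrefix.transactionID prefix: 0x54 0x58 0x4e 0x00 ('TXN\0')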

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { Hash256 } from "./types/hash-256";
import { BytesList } from "./serdes/binary-serializer";
import { Hash256 } from './types';
import { BytesList } from './serdes/binary-serializer';
/**

@@ -16,3 +15,3 @@ * Class for hashing with SHA512

*/
static put(bytes: Buffer): Sha512Half;
static put(bytes: Uint8Array): Sha512Half;
/**

@@ -24,3 +23,3 @@ * Write bytes to an existing Sha512Hash

*/
put(bytes: Buffer): Sha512Half;
put(bytes: Uint8Array): Sha512Half;
/**

@@ -31,3 +30,3 @@ * Compute SHA512 hash and slice in half

*/
finish256(): Buffer;
finish256(): Uint8Array;
/**

@@ -46,3 +45,3 @@ * Constructs a Hash256 from the Sha512Half object

*/
declare function sha512Half(...args: Buffer[]): Buffer;
declare function sha512Half(...args: Uint8Array[]): Uint8Array;
/**

@@ -54,3 +53,3 @@ * Construct a transactionID from a Serialized Transaction

*/
declare function transactionID(serialized: Buffer): Hash256;
declare function transactionID(serialized: Uint8Array): Hash256;
export { Sha512Half, sha512Half, transactionID };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.transactionID = exports.sha512Half = exports.Sha512Half = void 0;
var hash_prefixes_1 = require("./hash-prefixes");
var createHash = require("create-hash");
var hash_256_1 = require("./types/hash-256");
var binary_serializer_1 = require("./serdes/binary-serializer");
const hash_prefixes_1 = require("./hash-prefixes");
const types_1 = require("./types");
const binary_serializer_1 = require("./serdes/binary-serializer");
const sha512_1 = require("@xrplf/isomorphic/sha512");
/**

@@ -25,8 +12,6 @@ * Class for hashing with SHA512

*/
var Sha512Half = /** @class */ (function (_super) {
__extends(Sha512Half, _super);
function Sha512Half() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.hash = createHash("sha512");
return _this;
class Sha512Half extends binary_serializer_1.BytesList {
constructor() {
super(...arguments);
this.hash = sha512_1.sha512.create();
}

@@ -39,5 +24,5 @@ /**

*/
Sha512Half.put = function (bytes) {
static put(bytes) {
return new Sha512Half().put(bytes);
};
}
/**

@@ -49,6 +34,6 @@ * Write bytes to an existing Sha512Hash

*/
Sha512Half.prototype.put = function (bytes) {
put(bytes) {
this.hash.update(bytes);
return this;
};
}
/**

@@ -59,6 +44,5 @@ * Compute SHA512 hash and slice in half

*/
Sha512Half.prototype.finish256 = function () {
var bytes = this.hash.digest();
return bytes.slice(0, 32);
};
finish256() {
return Uint8Array.from(this.hash.digest().slice(0, 32));
}
/**

@@ -69,7 +53,6 @@ * Constructs a Hash256 from the Sha512Half object

*/
Sha512Half.prototype.finish = function () {
return new hash_256_1.Hash256(this.finish256());
};
return Sha512Half;
}(binary_serializer_1.BytesList));
finish() {
return new types_1.Hash256(this.finish256());
}
}
exports.Sha512Half = Sha512Half;

@@ -82,9 +65,5 @@ /**

*/
function sha512Half() {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
var hash = new Sha512Half();
args.forEach(function (a) { return hash.put(a); });
function sha512Half(...args) {
const hash = new Sha512Half();
args.forEach((a) => hash.put(a));
return hash.finish256();

@@ -100,5 +79,5 @@ }

function transactionID(serialized) {
return new hash_256_1.Hash256(sha512Half(hash_prefixes_1.HashPrefix.transactionID, serialized));
return new types_1.Hash256(sha512Half(hash_prefixes_1.HashPrefix.transactionID, serialized));
}
exports.transactionID = transactionID;
//# sourceMappingURL=hashes.js.map
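Sha512Half has moved from create-hash to @xrplf/isomorphic/sha512 and now returns Uint8Array. A condensed sketch of the same "hash, then keep the first 256 bits" pattern, mirroring the new implementation above and assuming @xrplf/isomorphic is installed:

import { sha512 } from '@xrplf/isomorphic/sha512'

// Hash all inputs with SHA-512 and keep only the first 32 bytes, as finish256 does above
function sha512Half(...args: Uint8Array[]): Uint8Array {
  const hash = sha512.create()
  args.forEach((a) => hash.update(a))
  return Uint8Array.from(hash.digest().slice(0, 32))
}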

@@ -1,3 +0,6 @@

import { decodeLedgerData } from "./ledger-hashes";
import { JsonObject } from "./types/serialized-type";
import { decodeLedgerData } from './ledger-hashes';
import { JsonObject } from './types/serialized-type';
import { XrplDefinitionsBase, TRANSACTION_TYPES, DEFAULT_DEFINITIONS } from './enums';
import { XrplDefinitions } from './enums/xrpl-definitions';
import { coreTypes } from './types';
/**

@@ -7,5 +10,6 @@ * Decode a transaction

* @param binary hex-string of the encoded transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the JSON representation of the transaction
*/
declare function decode(binary: string): JsonObject;
declare function decode(binary: string, definitions?: XrplDefinitionsBase): JsonObject;
/**

@@ -15,5 +19,7 @@ * Encode a transaction

* @param json The JSON representation of a transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
*
* @returns A hex-string of the encoded transaction
*/
declare function encode(json: object): string;
declare function encode(json: object, definitions?: XrplDefinitionsBase): string;
/**

@@ -24,5 +30,6 @@ * Encode a transaction and prepare for signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
declare function encodeForSigning(json: object): string;
declare function encodeForSigning(json: object, definitions?: XrplDefinitionsBase): string;
/**

@@ -33,2 +40,3 @@ * Encode a transaction and prepare for signing with a claim

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction

@@ -42,5 +50,6 @@ */

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
declare function encodeForMultisigning(json: object, signer: string): string;
declare function encodeForMultisigning(json: object, signer: string, definitions?: XrplDefinitionsBase): string;
/**

@@ -60,12 +69,2 @@ * Encode a quality value

declare function decodeQuality(value: string): string;
declare const _default: {
decode: typeof decode;
encode: typeof encode;
encodeForSigning: typeof encodeForSigning;
encodeForSigningClaim: typeof encodeForSigningClaim;
encodeForMultisigning: typeof encodeForMultisigning;
encodeQuality: typeof encodeQuality;
decodeQuality: typeof decodeQuality;
decodeLedgerData: typeof decodeLedgerData;
};
export = _default;
export { decode, encode, encodeForSigning, encodeForSigningClaim, encodeForMultisigning, encodeQuality, decodeQuality, decodeLedgerData, TRANSACTION_TYPES, XrplDefinitions, XrplDefinitionsBase, DEFAULT_DEFINITIONS, coreTypes, };
"use strict";
var assert = require("assert");
var coretypes_1 = require("./coretypes");
var ledger_hashes_1 = require("./ledger-hashes");
var signingData = coretypes_1.binary.signingData, signingClaimData = coretypes_1.binary.signingClaimData, multiSigningData = coretypes_1.binary.multiSigningData, binaryToJSON = coretypes_1.binary.binaryToJSON, serializeObject = coretypes_1.binary.serializeObject;
Object.defineProperty(exports, "__esModule", { value: true });
exports.coreTypes = exports.DEFAULT_DEFINITIONS = exports.XrplDefinitionsBase = exports.XrplDefinitions = exports.TRANSACTION_TYPES = exports.decodeLedgerData = exports.decodeQuality = exports.encodeQuality = exports.encodeForMultisigning = exports.encodeForSigningClaim = exports.encodeForSigning = exports.encode = exports.decode = void 0;
const coretypes_1 = require("./coretypes");
const ledger_hashes_1 = require("./ledger-hashes");
Object.defineProperty(exports, "decodeLedgerData", { enumerable: true, get: function () { return ledger_hashes_1.decodeLedgerData; } });
const enums_1 = require("./enums");
Object.defineProperty(exports, "XrplDefinitionsBase", { enumerable: true, get: function () { return enums_1.XrplDefinitionsBase; } });
Object.defineProperty(exports, "TRANSACTION_TYPES", { enumerable: true, get: function () { return enums_1.TRANSACTION_TYPES; } });
Object.defineProperty(exports, "DEFAULT_DEFINITIONS", { enumerable: true, get: function () { return enums_1.DEFAULT_DEFINITIONS; } });
const xrpl_definitions_1 = require("./enums/xrpl-definitions");
Object.defineProperty(exports, "XrplDefinitions", { enumerable: true, get: function () { return xrpl_definitions_1.XrplDefinitions; } });
const types_1 = require("./types");
Object.defineProperty(exports, "coreTypes", { enumerable: true, get: function () { return types_1.coreTypes; } });
const utils_1 = require("@xrplf/isomorphic/utils");
const { signingData, signingClaimData, multiSigningData, binaryToJSON, serializeObject, } = coretypes_1.binary;
/**

@@ -10,8 +21,12 @@ * Decode a transaction

* @param binary hex-string of the encoded transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns the JSON representation of the transaction
*/
function decode(binary) {
assert(typeof binary === "string", "binary must be a hex string");
return binaryToJSON(binary);
function decode(binary, definitions) {
if (typeof binary !== 'string') {
throw new Error('binary must be a hex string');
}
return binaryToJSON(binary, definitions);
}
exports.decode = decode;
/**

@@ -21,10 +36,13 @@ * Encode a transaction

* @param json The JSON representation of a transaction
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
*
* @returns A hex-string of the encoded transaction
*/
function encode(json) {
assert(typeof json === "object");
return serializeObject(json)
.toString("hex")
.toUpperCase();
function encode(json, definitions) {
if (typeof json !== 'object') {
throw new Error();
}
return (0, utils_1.bytesToHex)(serializeObject(json, { definitions }));
}
exports.encode = encode;
/**

@@ -35,10 +53,14 @@ * Encode a transaction and prepare for signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
function encodeForSigning(json) {
assert(typeof json === "object");
return signingData(json)
.toString("hex")
.toUpperCase();
function encodeForSigning(json, definitions) {
if (typeof json !== 'object') {
throw new Error();
}
return (0, utils_1.bytesToHex)(signingData(json, coretypes_1.HashPrefix.transactionSig, {
definitions,
}));
}
exports.encodeForSigning = encodeForSigning;
/**

@@ -49,10 +71,12 @@ * Encode a transaction and prepare for signing with a claim

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
function encodeForSigningClaim(json) {
assert(typeof json === "object");
return signingClaimData(json)
.toString("hex")
.toUpperCase();
if (typeof json !== 'object') {
throw new Error();
}
return (0, utils_1.bytesToHex)(signingClaimData(json));
}
exports.encodeForSigningClaim = encodeForSigningClaim;
/**

@@ -63,11 +87,16 @@ * Encode a transaction and prepare for multi-signing

* @param signer string representing the account to sign the transaction with
* @param definitions Custom rippled types to use instead of the default. Used for sidechains and amendments.
* @returns a hex string of the encoded transaction
*/
function encodeForMultisigning(json, signer) {
assert(typeof json === "object");
assert.equal(json["SigningPubKey"], "");
return multiSigningData(json, signer)
.toString("hex")
.toUpperCase();
function encodeForMultisigning(json, signer, definitions) {
if (typeof json !== 'object') {
throw new Error();
}
if (json['SigningPubKey'] !== '') {
throw new Error();
}
const definitionsOpt = definitions ? { definitions } : undefined;
return (0, utils_1.bytesToHex)(multiSigningData(json, signer, definitionsOpt));
}
exports.encodeForMultisigning = encodeForMultisigning;
/**

@@ -80,5 +109,8 @@ * Encode a quality value

function encodeQuality(value) {
assert(typeof value === "string");
return coretypes_1.quality.encode(value).toString("hex").toUpperCase();
if (typeof value !== 'string') {
throw new Error();
}
return (0, utils_1.bytesToHex)(coretypes_1.quality.encode(value));
}
exports.encodeQuality = encodeQuality;
/**

@@ -91,15 +123,8 @@ * Decode a quality value

function decodeQuality(value) {
assert(typeof value === "string");
if (typeof value !== 'string') {
throw new Error();
}
return coretypes_1.quality.decode(value).toString();
}
module.exports = {
decode: decode,
encode: encode,
encodeForSigning: encodeForSigning,
encodeForSigningClaim: encodeForSigningClaim,
encodeForMultisigning: encodeForMultisigning,
encodeQuality: encodeQuality,
decodeQuality: decodeQuality,
decodeLedgerData: ledger_hashes_1.decodeLedgerData,
};
exports.decodeQuality = decodeQuality;
//# sourceMappingURL=index.js.map
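At the package root, decode, encode, encodeForSigning and encodeForMultisigning all gain an optional definitions parameter, and hex conversion now goes through @xrplf/isomorphic instead of Buffer. A minimal round-trip sketch; the field values are illustrative, and any custom XrplDefinitions instance could stand in for DEFAULT_DEFINITIONS:

import { encode, decode, DEFAULT_DEFINITIONS } from 'ripple-binary-codec'

// Illustrative, partial transaction JSON
const tx = {
  TransactionType: 'Payment',
  Account: 'rHb9CJAWyB4rj91VRWn96DkukG4bwdtyTh',
  Destination: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp',
  Amount: '1000000',
  Fee: '10',
  Sequence: 1,
}

const blob = encode(tx)                        // upper-case hex string, as in 1.0.2
const json = decode(blob, DEFAULT_DEFINITIONS) // the definitions argument is new and optional
console.log(json.TransactionType)              // 'Payment'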

@@ -1,3 +0,4 @@

import { Hash256 } from "./types/hash-256";
import { JsonObject } from "./types/serialized-type";
import { Hash256 } from './types/hash-256';
import { JsonObject } from './types/serialized-type';
import { XrplDefinitionsBase } from './enums';
/**

@@ -42,5 +43,7 @@ * Function computing the hash of a transaction tree

* @param binary A serialized ledger header
* @param definitions Type definitions to parse the ledger objects.
* Used if there are non-default ledger objects to decode.
* @returns A JSON object describing a ledger header
*/
declare function decodeLedgerData(binary: string): object;
declare function decodeLedgerData(binary: string, definitions?: XrplDefinitionsBase): object;
export { accountStateHash, transactionTreeHash, ledgerHash, decodeLedgerData };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.decodeLedgerData = exports.ledgerHash = exports.transactionTreeHash = exports.accountStateHash = void 0;
var assert = require("assert");
var shamap_1 = require("./shamap");
var hash_prefixes_1 = require("./hash-prefixes");
var hashes_1 = require("./hashes");
var binary_1 = require("./binary");
var hash_256_1 = require("./types/hash-256");
var st_object_1 = require("./types/st-object");
var uint_64_1 = require("./types/uint-64");
var uint_32_1 = require("./types/uint-32");
var uint_8_1 = require("./types/uint-8");
var binary_parser_1 = require("./serdes/binary-parser");
const shamap_1 = require("./shamap");
const hash_prefixes_1 = require("./hash-prefixes");
const hashes_1 = require("./hashes");
const binary_1 = require("./binary");
const hash_256_1 = require("./types/hash-256");
const st_object_1 = require("./types/st-object");
const uint_64_1 = require("./types/uint-64");
const uint_32_1 = require("./types/uint-32");
const uint_8_1 = require("./types/uint-8");
const binary_parser_1 = require("./serdes/binary-parser");
/**

@@ -23,4 +22,4 @@ * Computes the hash of a list of objects

function computeHash(itemizer, itemsJson) {
var map = new shamap_1.ShaMap();
itemsJson.forEach(function (item) { return map.addItem.apply(map, itemizer(item)); });
const map = new shamap_1.ShaMap();
itemsJson.forEach((item) => map.addItem(...itemizer(item)));
return map.hash();

@@ -35,10 +34,12 @@ }

function transactionItemizer(json) {
assert(json.hash);
var index = hash_256_1.Hash256.from(json.hash);
var item = {
hashPrefix: function () {
if (!json.hash) {
throw new Error();
}
const index = hash_256_1.Hash256.from(json.hash);
const item = {
hashPrefix() {
return hash_prefixes_1.HashPrefix.transaction;
},
toBytesSink: function (sink) {
var serializer = new binary_1.BinarySerializer(sink);
toBytesSink(sink) {
const serializer = new binary_1.BinarySerializer(sink);
serializer.writeLengthEncoded(st_object_1.STObject.from(json));

@@ -57,9 +58,9 @@ serializer.writeLengthEncoded(st_object_1.STObject.from(json.metaData));

function entryItemizer(json) {
var index = hash_256_1.Hash256.from(json.index);
var bytes = binary_1.serializeObject(json);
var item = {
hashPrefix: function () {
const index = hash_256_1.Hash256.from(json.index);
const bytes = (0, binary_1.serializeObject)(json);
const item = {
hashPrefix() {
return hash_prefixes_1.HashPrefix.accountStateEntry;
},
toBytesSink: function (sink) {
toBytesSink(sink) {
sink.put(bytes);

@@ -77,3 +78,3 @@ },

function transactionTreeHash(param) {
var itemizer = transactionItemizer;
const itemizer = transactionItemizer;
return computeHash(itemizer, param);

@@ -89,3 +90,3 @@ }

function accountStateHash(param) {
var itemizer = entryItemizer;
const itemizer = entryItemizer;
return computeHash(itemizer, param);

@@ -101,8 +102,10 @@ }

function ledgerHash(header) {
var hash = new hashes_1.Sha512Half();
const hash = new hashes_1.Sha512Half();
hash.put(hash_prefixes_1.HashPrefix.ledgerHeader);
assert(header.parent_close_time !== undefined);
assert(header.close_flags !== undefined);
if (header.parent_close_time === undefined ||
header.close_flags === undefined) {
throw new Error();
}
uint_32_1.UInt32.from(header.ledger_index).toBytesSink(hash);
uint_64_1.UInt64.from(BigInt(header.total_coins)).toBytesSink(hash);
uint_64_1.UInt64.from(BigInt(String(header.total_coins))).toBytesSink(hash);
hash_256_1.Hash256.from(header.parent_hash).toBytesSink(hash);

@@ -122,7 +125,11 @@ hash_256_1.Hash256.from(header.transaction_hash).toBytesSink(hash);

* @param binary A serialized ledger header
* @param definitions Type definitions to parse the ledger objects.
* Used if there are non-default ledger objects to decode.
* @returns A JSON object describing a ledger header
*/
function decodeLedgerData(binary) {
assert(typeof binary === "string", "binary must be a hex string");
var parser = new binary_parser_1.BinaryParser(binary);
function decodeLedgerData(binary, definitions) {
if (typeof binary !== 'string') {
throw new Error('binary must be a hex string');
}
const parser = new binary_parser_1.BinaryParser(binary, definitions);
return {

@@ -129,0 +136,0 @@ ledger_index: parser.readUInt32(),

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Decimal } from "decimal.js";
import BigNumber from 'bignumber.js';
/**

@@ -13,3 +12,3 @@ * class for encoding and decoding quality

*/
static encode(quality: string): Buffer;
static encode(quality: string): Uint8Array;
/**

@@ -21,4 +20,4 @@ * Decode quality amount

*/
static decode(quality: string): Decimal;
static decode(quality: string): BigNumber;
}
export { quality };
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.quality = void 0;
var types_1 = require("./types");
var decimal_js_1 = require("decimal.js");
const types_1 = require("./types");
const bignumber_js_1 = __importDefault(require("bignumber.js"));
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* class for encoding and decoding quality
*/
var quality = /** @class */ (function () {
function quality() {
}
class quality {
/**

@@ -18,10 +20,10 @@ * Encode quality amount

*/
quality.encode = function (quality) {
var decimal = new decimal_js_1.Decimal(quality);
var exponent = decimal.e - 15;
var qualityString = decimal.times("1e" + -exponent).abs().toString();
var bytes = types_1.coreTypes.UInt64.from(BigInt(qualityString)).toBytes();
static encode(quality) {
const decimal = (0, bignumber_js_1.default)(quality);
const exponent = ((decimal === null || decimal === void 0 ? void 0 : decimal.e) || 0) - 15;
const qualityString = decimal.times(`1e${-exponent}`).abs().toString();
const bytes = types_1.coreTypes.UInt64.from(BigInt(qualityString)).toBytes();
bytes[0] = exponent + 100;
return bytes;
};
}
/**

@@ -33,11 +35,10 @@ * Decode quality amount

*/
quality.decode = function (quality) {
var bytes = Buffer.from(quality, "hex").slice(-8);
var exponent = bytes[0] - 100;
var mantissa = new decimal_js_1.Decimal("0x" + bytes.slice(1).toString("hex"));
return mantissa.times("1e" + exponent);
};
return quality;
}());
static decode(quality) {
const bytes = (0, utils_1.hexToBytes)(quality).slice(-8);
const exponent = bytes[0] - 100;
const mantissa = new bignumber_js_1.default(`0x${(0, utils_1.bytesToHex)(bytes.slice(1))}`);
return mantissa.times(`1e${exponent}`);
}
}
exports.quality = quality;
//# sourceMappingURL=quality.js.map
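quality now rides on bignumber.js and the isomorphic hex helpers, and its encode returns a Uint8Array. Through the package root the behaviour is unchanged apart from the types; a small round-trip sketch with an illustrative exchange rate:

import { encodeQuality, decodeQuality } from 'ripple-binary-codec'

// A quality is 8 bytes: one exponent byte offset by 100, then the mantissa
const hex = encodeQuality('1.5') // illustrative rate
console.log(decodeQuality(hex))  // '1.5'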

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { FieldInstance } from "../enums";
import { SerializedType } from "../types/serialized-type";
import { XrplDefinitionsBase, FieldInstance } from '../enums';
import { type SerializedType } from '../types/serialized-type';
/**

@@ -9,2 +8,3 @@ * BinaryParser is used to compute fields and values from a HexString

private bytes;
definitions: XrplDefinitionsBase;
/**

@@ -14,4 +14,6 @@ * Initialize bytes to a hex string

* @param hexBytes a hex string
* @param definitions Rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
*/
constructor(hexBytes: string);
constructor(hexBytes: string, definitions?: XrplDefinitionsBase);
/**

@@ -35,3 +37,3 @@ * Peek the first byte of the BinaryParser

*/
read(n: number): Buffer;
read(n: number): Uint8Array;
/**

@@ -54,3 +56,3 @@ * Read an integer of given size

*/
readVariableLength(): Buffer;
readVariableLength(): Uint8Array;
/**

@@ -57,0 +59,0 @@ * Reads the length of the variable length encoded bytes

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinaryParser = void 0;
var assert = require("assert");
var enums_1 = require("../enums");
const enums_1 = require("../enums");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* BinaryParser is used to compute fields and values from a HexString
*/
var BinaryParser = /** @class */ (function () {
class BinaryParser {
/**

@@ -14,5 +14,8 @@ * Initialize bytes to a hex string

* @param hexBytes a hex string
* @param definitions Rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
*/
function BinaryParser(hexBytes) {
this.bytes = Buffer.from(hexBytes, "hex");
constructor(hexBytes, definitions = enums_1.DEFAULT_DEFINITIONS) {
this.bytes = (0, utils_1.hexToBytes)(hexBytes);
this.definitions = definitions;
}

@@ -24,6 +27,8 @@ /**

*/
BinaryParser.prototype.peek = function () {
assert(this.bytes.byteLength !== 0);
peek() {
if (this.bytes.byteLength === 0) {
throw new Error();
}
return this.bytes[0];
};
}
/**

@@ -34,6 +39,8 @@ * Consume the first n bytes of the BinaryParser

*/
BinaryParser.prototype.skip = function (n) {
assert(n <= this.bytes.byteLength);
skip(n) {
if (n > this.bytes.byteLength) {
throw new Error();
}
this.bytes = this.bytes.slice(n);
};
}
/**

@@ -45,8 +52,10 @@ * read the first n bytes from the BinaryParser

*/
BinaryParser.prototype.read = function (n) {
assert(n <= this.bytes.byteLength);
var slice = this.bytes.slice(0, n);
read(n) {
if (n > this.bytes.byteLength) {
throw new Error();
}
const slice = this.bytes.slice(0, n);
this.skip(n);
return slice;
};
}
/**

@@ -58,22 +67,24 @@ * Read an integer of given size

*/
BinaryParser.prototype.readUIntN = function (n) {
assert(0 < n && n <= 4, "invalid n");
return this.read(n).reduce(function (a, b) { return (a << 8) | b; }) >>> 0;
};
BinaryParser.prototype.readUInt8 = function () {
readUIntN(n) {
if (0 >= n || n > 4) {
throw new Error('invalid n');
}
return this.read(n).reduce((a, b) => (a << 8) | b) >>> 0;
}
readUInt8() {
return this.readUIntN(1);
};
BinaryParser.prototype.readUInt16 = function () {
}
readUInt16() {
return this.readUIntN(2);
};
BinaryParser.prototype.readUInt32 = function () {
}
readUInt32() {
return this.readUIntN(4);
};
BinaryParser.prototype.size = function () {
}
size() {
return this.bytes.byteLength;
};
BinaryParser.prototype.end = function (customEnd) {
var length = this.bytes.byteLength;
}
end(customEnd) {
const length = this.bytes.byteLength;
return length === 0 || (customEnd !== undefined && length <= customEnd);
};
}
/**

@@ -84,5 +95,5 @@ * Reads variable length encoded bytes

*/
BinaryParser.prototype.readVariableLength = function () {
readVariableLength() {
return this.read(this.readVariableLengthLength());
};
}
/**

@@ -93,4 +104,4 @@ * Reads the length of the variable length encoded bytes

*/
BinaryParser.prototype.readVariableLengthLength = function () {
var b1 = this.readUInt8();
readVariableLengthLength() {
const b1 = this.readUInt8();
if (b1 <= 192) {

@@ -100,12 +111,12 @@ return b1;

else if (b1 <= 240) {
var b2 = this.readUInt8();
const b2 = this.readUInt8();
return 193 + (b1 - 193) * 256 + b2;
}
else if (b1 <= 254) {
var b2 = this.readUInt8();
var b3 = this.readUInt8();
const b2 = this.readUInt8();
const b3 = this.readUInt8();
return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3;
}
throw new Error("Invalid variable length indicator");
};
throw new Error('Invalid variable length indicator');
}
/**

@@ -116,5 +127,5 @@ * Reads the field ordinal from the BinaryParser

*/
BinaryParser.prototype.readFieldOrdinal = function () {
var type = this.readUInt8();
var nth = type & 15;
readFieldOrdinal() {
let type = this.readUInt8();
let nth = type & 15;
type >>= 4;

@@ -124,3 +135,3 @@ if (type === 0) {

if (type === 0 || type < 16) {
throw new Error("Cannot read FieldOrdinal, type_code out of range");
throw new Error(`Cannot read FieldOrdinal, type_code ${type} out of range`);
}

@@ -131,7 +142,7 @@ }

if (nth === 0 || nth < 16) {
throw new Error("Cannot read FieldOrdinal, field_code out of range");
throw new Error(`Cannot read FieldOrdinal, field_code ${nth} out of range`);
}
}
return (type << 16) | nth;
};
}
/**

@@ -142,5 +153,5 @@ * Read the field from the BinaryParser

*/
BinaryParser.prototype.readField = function () {
return enums_1.Field.fromString(this.readFieldOrdinal().toString());
};
readField() {
return this.definitions.field.fromString(this.readFieldOrdinal().toString());
}
/**

@@ -152,5 +163,5 @@ * Read a given type from the BinaryParser

*/
BinaryParser.prototype.readType = function (type) {
readType(type) {
return type.fromParser(this);
};
}
/**

@@ -162,5 +173,5 @@ * Get the type associated with a given field

*/
BinaryParser.prototype.typeForField = function (field) {
typeForField(field) {
return field.associatedType;
};
}
/**

@@ -172,16 +183,16 @@ * Read value of the type specified by field from the BinaryParser

*/
BinaryParser.prototype.readFieldValue = function (field) {
var type = this.typeForField(field);
readFieldValue(field) {
const type = this.typeForField(field);
if (!type) {
throw new Error("unsupported: (" + field.name + ", " + field.type.name + ")");
throw new Error(`unsupported: (${field.name}, ${field.type.name})`);
}
var sizeHint = field.isVariableLengthEncoded
const sizeHint = field.isVariableLengthEncoded
? this.readVariableLengthLength()
: undefined;
var value = type.fromParser(this, sizeHint);
const value = type.fromParser(this, sizeHint);
if (value === undefined) {
throw new Error("fromParser for (" + field.name + ", " + field.type.name + ") -> undefined ");
throw new Error(`fromParser for (${field.name}, ${field.type.name}) -> undefined `);
}
return value;
};
}
/**

@@ -192,9 +203,8 @@ * Get the next field and value from the BinaryParser

*/
BinaryParser.prototype.readFieldAndValue = function () {
var field = this.readField();
readFieldAndValue() {
const field = this.readField();
return [field, this.readFieldValue(field)];
};
return BinaryParser;
}());
}
}
exports.BinaryParser = BinaryParser;
//# sourceMappingURL=binary-parser.js.map
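For reference, the length-prefix arithmetic in readVariableLengthLength, restated as a standalone sketch with the range each branch covers (mirroring the code above, not part of the library):

// Decode a 1-3 byte variable-length prefix into the payload length
function decodeVariableLengthLength(b1: number, b2 = 0, b3 = 0): number {
  if (b1 <= 192) {
    return b1 // one prefix byte: lengths 0..192
  } else if (b1 <= 240) {
    return 193 + (b1 - 193) * 256 + b2 // two prefix bytes: lengths 193..12480
  } else if (b1 <= 254) {
    return 12481 + (b1 - 241) * 65536 + b2 * 256 + b3 // three prefix bytes: lengths 12481..918744
  }
  throw new Error('Invalid variable length indicator')
}

console.log(decodeVariableLengthLength(193, 0))    // 193
console.log(decodeVariableLengthLength(241, 0, 0)) // 12481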

@@ -1,6 +0,5 @@

/// <reference types="node" />
import { FieldInstance } from "../enums";
import { SerializedType } from "../types/serialized-type";
import { FieldInstance } from '../enums';
import { type SerializedType } from '../types/serialized-type';
/**
* Bytes list is a collection of buffer objects
* Bytes list is a collection of Uint8Array objects
*/

@@ -18,6 +17,6 @@ declare class BytesList {

*
* @param bytesArg A Buffer
* @param bytesArg A Uint8Array
* @return this BytesList
*/
put(bytesArg: Buffer): BytesList;
put(bytesArg: Uint8Array): BytesList;
/**

@@ -29,7 +28,7 @@ * Write this BytesList to the back of another bytes list

toBytesSink(list: BytesList): void;
toBytes(): Buffer;
toBytes(): Uint8Array;
toHex(): string;
}
/**
* BinarySerializer is used to write fields and values to buffers
* BinarySerializer is used to write fields and values to Uint8Arrays
*/

@@ -50,3 +49,3 @@ declare class BinarySerializer {

*/
put(bytes: Buffer): void;
put(bytes: Uint8Array): void;
/**

@@ -77,3 +76,3 @@ * Write a value of a given type to this BinarySerializer

*/
writeFieldAndValue(field: FieldInstance, value: SerializedType): void;
writeFieldAndValue(field: FieldInstance, value: SerializedType, isUnlModifyWorkaround?: boolean): void;
/**

@@ -84,4 +83,4 @@ * Write a variable length encoded value to the BinarySerializer

*/
writeLengthEncoded(value: SerializedType): void;
writeLengthEncoded(value: SerializedType, isUnlModifyWorkaround?: boolean): void;
}
export { BytesList, BinarySerializer };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.BinarySerializer = exports.BytesList = void 0;
var assert = require("assert");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* Bytes list is a collection of buffer objects
* Bytes list is a collection of Uint8Array objects
*/
var BytesList = /** @class */ (function () {
function BytesList() {
class BytesList {
constructor() {
this.bytesArray = [];

@@ -17,16 +17,16 @@ }

*/
BytesList.prototype.getLength = function () {
return Buffer.concat(this.bytesArray).byteLength;
};
getLength() {
return (0, utils_1.concat)(this.bytesArray).byteLength;
}
/**
* Put bytes in the BytesList
*
* @param bytesArg A Buffer
* @param bytesArg A Uint8Array
* @return this BytesList
*/
BytesList.prototype.put = function (bytesArg) {
var bytes = Buffer.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
put(bytesArg) {
const bytes = Uint8Array.from(bytesArg); // Temporary, to catch instances of Uint8Array being passed in
this.bytesArray.push(bytes);
return this;
};
}
/**

@@ -37,19 +37,18 @@ * Write this BytesList to the back of another bytes list

*/
BytesList.prototype.toBytesSink = function (list) {
toBytesSink(list) {
list.put(this.toBytes());
};
BytesList.prototype.toBytes = function () {
return Buffer.concat(this.bytesArray);
};
BytesList.prototype.toHex = function () {
return this.toBytes().toString("hex").toUpperCase();
};
return BytesList;
}());
}
toBytes() {
return (0, utils_1.concat)(this.bytesArray);
}
toHex() {
return (0, utils_1.bytesToHex)(this.toBytes());
}
}
exports.BytesList = BytesList;
/**
* BinarySerializer is used to write fields and values to buffers
* BinarySerializer is used to write fields and values to Uint8Arrays
*/
var BinarySerializer = /** @class */ (function () {
function BinarySerializer(sink) {
class BinarySerializer {
constructor(sink) {
this.sink = new BytesList();

@@ -63,5 +62,5 @@ this.sink = sink;

*/
BinarySerializer.prototype.write = function (value) {
write(value) {
value.toBytesSink(this.sink);
};
}
/**

@@ -72,5 +71,5 @@ * Write bytes to this BinarySerializer

*/
BinarySerializer.prototype.put = function (bytes) {
put(bytes) {
this.sink.put(bytes);
};
}
/**

@@ -82,5 +81,5 @@ * Write a value of a given type to this BinarySerializer

*/
BinarySerializer.prototype.writeType = function (type, value) {
writeType(type, value) {
this.write(type.from(value));
};
}
/**

@@ -91,5 +90,5 @@ * Write BytesList to this BinarySerializer

*/
BinarySerializer.prototype.writeBytesList = function (bl) {
writeBytesList(bl) {
bl.toBytesSink(this.sink);
};
}
/**

@@ -100,4 +99,4 @@ * Calculate the header of Variable Length encoded bytes

*/
BinarySerializer.prototype.encodeVariableLength = function (length) {
var lenBytes = Buffer.alloc(3);
encodeVariableLength(length) {
const lenBytes = new Uint8Array(3);
if (length <= 192) {

@@ -120,4 +119,4 @@ lenBytes[0] = length;

}
throw new Error("Overflow error");
};
throw new Error('Overflow error');
}
/**

@@ -129,9 +128,10 @@ * Write field and value to BinarySerializer

*/
BinarySerializer.prototype.writeFieldAndValue = function (field, value) {
var associatedValue = field.associatedType.from(value);
assert(associatedValue.toBytesSink !== undefined);
assert(field.name !== undefined);
writeFieldAndValue(field, value, isUnlModifyWorkaround = false) {
const associatedValue = field.associatedType.from(value);
if (associatedValue.toBytesSink === undefined || field.name === undefined) {
throw new Error();
}
this.sink.put(field.header);
if (field.isVariableLengthEncoded) {
this.writeLengthEncoded(associatedValue);
this.writeLengthEncoded(associatedValue, isUnlModifyWorkaround);
}

@@ -141,3 +141,3 @@ else {

}
};
}
/**

@@ -148,11 +148,13 @@ * Write a variable length encoded value to the BinarySerializer

*/
BinarySerializer.prototype.writeLengthEncoded = function (value) {
var bytes = new BytesList();
value.toBytesSink(bytes);
writeLengthEncoded(value, isUnlModifyWorkaround = false) {
const bytes = new BytesList();
if (!isUnlModifyWorkaround) {
// this part doesn't happen for the Account field in a UNLModify transaction
value.toBytesSink(bytes);
}
this.put(this.encodeVariableLength(bytes.getLength()));
this.writeBytesList(bytes);
};
return BinarySerializer;
}());
}
}
exports.BinarySerializer = BinarySerializer;
//# sourceMappingURL=binary-serializer.js.map
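A small sketch, not from the package, of the Uint8Array-based BytesList and BinarySerializer shown above (require path assumed):
const { BytesList, BinarySerializer } = require('./serdes/binary-serializer');

// Chain puts into a BytesList and read the result back as hex.
const list = new BytesList()
  .put(Uint8Array.from([0xde, 0xad]))
  .put(Uint8Array.from([0xbe, 0xef]));
console.log(list.getLength()); // 4
console.log(list.toHex());     // "DEADBEEF"

// A BinarySerializer just forwards bytes into the sink it was given.
const sink = new BytesList();
new BinarySerializer(sink).put(list.toBytes());
console.log(sink.toHex());     // same hex as above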

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { Hash256 } from "./types/hash-256";
import { BytesList } from "./serdes/binary-serializer";
import { Hash256 } from './types/hash-256';
import { BytesList } from './serdes/binary-serializer';
/**

@@ -8,3 +7,3 @@ * Abstract class describing a SHAMapNode

declare abstract class ShaMapNode {
abstract hashPrefix(): Buffer;
abstract hashPrefix(): Uint8Array;
abstract isLeaf(): boolean;

@@ -33,5 +32,5 @@ abstract isInner(): boolean;

*
* @returns The hash prefix, unless this.item is undefined, then it returns an empty Buffer
* @returns The hash prefix, unless this.item is undefined, then it returns an empty Uint8Array
*/
hashPrefix(): Buffer;
hashPrefix(): Uint8Array;
/**

@@ -70,3 +69,3 @@ * Hash the bytes representation of this

*/
hashPrefix(): Buffer;
hashPrefix(): Uint8Array;
/**

@@ -73,0 +72,0 @@ * Set a branch of this node to be another node

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.ShaMapLeaf = exports.ShaMapNode = exports.ShaMap = void 0;
var assert_1 = require("assert");
var types_1 = require("./types");
var hash_prefixes_1 = require("./hash-prefixes");
var hashes_1 = require("./hashes");
const types_1 = require("./types");
const hash_prefixes_1 = require("./hash-prefixes");
const hashes_1 = require("./hashes");
/**
* Abstract class describing a SHAMapNode
*/
var ShaMapNode = /** @class */ (function () {
function ShaMapNode() {
}
return ShaMapNode;
}());
class ShaMapNode {
}
exports.ShaMapNode = ShaMapNode;

@@ -33,9 +16,7 @@ /**

*/
var ShaMapLeaf = /** @class */ (function (_super) {
__extends(ShaMapLeaf, _super);
function ShaMapLeaf(index, item) {
var _this = _super.call(this) || this;
_this.index = index;
_this.item = item;
return _this;
class ShaMapLeaf extends ShaMapNode {
constructor(index, item) {
super();
this.index = index;
this.item = item;
}

@@ -45,19 +26,19 @@ /**

*/
ShaMapLeaf.prototype.isLeaf = function () {
isLeaf() {
return true;
};
}
/**
* @returns false as ShaMapLeaf is not an inner node
*/
ShaMapLeaf.prototype.isInner = function () {
isInner() {
return false;
};
}
/**
* Get the prefix of this.item

*
* @returns The hash prefix, unless this.item is undefined, then it returns an empty Buffer
* @returns The hash prefix, unless this.item is undefined, then it returns an empty Uint8Array
*/
ShaMapLeaf.prototype.hashPrefix = function () {
return this.item === undefined ? Buffer.alloc(0) : this.item.hashPrefix();
};
hashPrefix() {
return this.item === undefined ? new Uint8Array(0) : this.item.hashPrefix();
}
/**

@@ -68,7 +49,7 @@ * Hash the bytes representation of this

*/
ShaMapLeaf.prototype.hash = function () {
var hash = hashes_1.Sha512Half.put(this.hashPrefix());
hash() {
const hash = hashes_1.Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
};
}
/**

@@ -78,3 +59,3 @@ * Write the bytes representation of this to a BytesList

*/
ShaMapLeaf.prototype.toBytesSink = function (list) {
toBytesSink(list) {
if (this.item !== undefined) {

@@ -84,5 +65,4 @@ this.item.toBytesSink(list);

this.index.toBytesSink(list);
};
return ShaMapLeaf;
}(ShaMapNode));
}
}
exports.ShaMapLeaf = ShaMapLeaf;

@@ -92,11 +72,8 @@ /**

*/
var ShaMapInner = /** @class */ (function (_super) {
__extends(ShaMapInner, _super);
function ShaMapInner(depth) {
if (depth === void 0) { depth = 0; }
var _this = _super.call(this) || this;
_this.depth = depth;
_this.slotBits = 0;
_this.branches = Array(16);
return _this;
class ShaMapInner extends ShaMapNode {
constructor(depth = 0) {
super();
this.depth = depth;
this.slotBits = 0;
this.branches = Array(16);
}

@@ -106,11 +83,11 @@ /**

*/
ShaMapInner.prototype.isInner = function () {
isInner() {
return true;
};
}
/**
* @returns false as ShaMapInner is not a leaf node
*/
ShaMapInner.prototype.isLeaf = function () {
isLeaf() {
return false;
};
}
/**

@@ -121,5 +98,5 @@ * Get the hash prefix for this node

*/
ShaMapInner.prototype.hashPrefix = function () {
hashPrefix() {
return hash_prefixes_1.HashPrefix.innerNode;
};
}
/**

@@ -131,12 +108,12 @@ * Set a branch of this node to be another node

*/
ShaMapInner.prototype.setBranch = function (slot, branch) {
setBranch(slot, branch) {
this.slotBits = this.slotBits | (1 << slot);
this.branches[slot] = branch;
};
}
/**
* @returns true if node is empty
*/
ShaMapInner.prototype.empty = function () {
empty() {
return this.slotBits === 0;
};
}
/**

@@ -147,10 +124,10 @@ * Compute the hash of this node

*/
ShaMapInner.prototype.hash = function () {
hash() {
if (this.empty()) {
return types_1.coreTypes.Hash256.ZERO_256;
}
var hash = hashes_1.Sha512Half.put(this.hashPrefix());
const hash = hashes_1.Sha512Half.put(this.hashPrefix());
this.toBytesSink(hash);
return hash.finish();
};
}
/**

@@ -161,9 +138,11 @@ * Writes the bytes representation of this node to a BytesList

*/
ShaMapInner.prototype.toBytesSink = function (list) {
for (var i = 0; i < this.branches.length; i++) {
var branch = this.branches[i];
var hash = branch ? branch.hash() : types_1.coreTypes.Hash256.ZERO_256;
toBytesSink(list) {
for (let i = 0; i < this.branches.length; i++) {
const branch = this.branches[i];
const hash = branch
? branch.hash()
: types_1.coreTypes.Hash256.ZERO_256;
hash.toBytesSink(list);
}
};
}
/**

@@ -176,32 +155,30 @@ * Add item to the SHAMap

*/
ShaMapInner.prototype.addItem = function (index, item, leaf) {
assert_1.strict(index !== undefined);
var nibble = index.nibblet(this.depth);
var existing = this.branches[nibble];
if (existing === undefined) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
addItem(index, item, leaf) {
if (index === undefined) {
throw new Error();
}
else if (existing instanceof ShaMapLeaf) {
var newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, undefined, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
if (index !== undefined) {
const nibble = index.nibblet(this.depth);
const existing = this.branches[nibble];
if (existing === undefined) {
this.setBranch(nibble, leaf || new ShaMapLeaf(index, item));
}
else if (existing instanceof ShaMapLeaf) {
const newInner = new ShaMapInner(this.depth + 1);
newInner.addItem(existing.index, undefined, existing);
newInner.addItem(index, item, leaf);
this.setBranch(nibble, newInner);
}
else if (existing instanceof ShaMapInner) {
existing.addItem(index, item, leaf);
}
else {
throw new Error('invalid ShaMap.addItem call');
}
}
else if (existing instanceof ShaMapInner) {
existing.addItem(index, item, leaf);
}
else {
throw new Error("invalid ShaMap.addItem call");
}
};
return ShaMapInner;
}(ShaMapNode));
var ShaMap = /** @class */ (function (_super) {
__extends(ShaMap, _super);
function ShaMap() {
return _super !== null && _super.apply(this, arguments) || this;
}
return ShaMap;
}(ShaMapInner));
}
class ShaMap extends ShaMapInner {
}
exports.ShaMap = ShaMap;
//# sourceMappingURL=shamap.js.map
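For orientation (not in the diff): the empty-map behaviour of the rewritten ShaMap, assuming the in-package require paths.
const { ShaMap } = require('./shamap');
const { coreTypes } = require('./types');

// An inner node with no set branches hashes to the zero hash.
const map = new ShaMap();
console.log(map.hash().toHex() === coreTypes.Hash256.ZERO_256.toHex()); // true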

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Hash160 } from "./hash-160";
import { Hash160 } from './hash-160';
/**

@@ -8,3 +7,3 @@ * Class defining how to encode and decode an AccountID

static readonly defaultAccountID: AccountID;
constructor(bytes?: Buffer);
constructor(bytes?: Uint8Array);
/**

@@ -11,0 +10,0 @@ * Defines how to construct an AccountID

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.AccountID = void 0;
var ripple_address_codec_1 = require("ripple-address-codec");
var hash_160_1 = require("./hash-160");
var HEX_REGEX = /^[A-F0-9]{40}$/;
const ripple_address_codec_1 = require("ripple-address-codec");
const hash_160_1 = require("./hash-160");
const utils_1 = require("@xrplf/isomorphic/utils");
const HEX_REGEX = /^[A-F0-9]{40}$/;
/**
* Class defining how to encode and decode an AccountID
*/
var AccountID = /** @class */ (function (_super) {
__extends(AccountID, _super);
function AccountID(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : AccountID.defaultAccountID.bytes) || this;
class AccountID extends hash_160_1.Hash160 {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : AccountID.defaultAccountID.bytes);
}

@@ -34,16 +21,16 @@ /**

*/
AccountID.from = function (value) {
static from(value) {
if (value instanceof AccountID) {
return value;
}
if (typeof value === "string") {
if (value === "") {
if (typeof value === 'string') {
if (value === '') {
return new AccountID();
}
return HEX_REGEX.test(value)
? new AccountID(Buffer.from(value, "hex"))
? new AccountID((0, utils_1.hexToBytes)(value))
: this.fromBase58(value);
}
throw new Error("Cannot construct AccountID from value given");
};
throw new Error('Cannot construct AccountID from value given');
}
/**

@@ -55,5 +42,11 @@ * Defines how to build an AccountID from a base58 r-Address

*/
AccountID.fromBase58 = function (value) {
return new AccountID(ripple_address_codec_1.decodeAccountID(value));
};
static fromBase58(value) {
if ((0, ripple_address_codec_1.isValidXAddress)(value)) {
const classic = (0, ripple_address_codec_1.xAddressToClassicAddress)(value);
if (classic.tag !== false)
throw new Error('Only allowed to have tag on Account or Destination');
value = classic.classicAddress;
}
return new AccountID(Uint8Array.from((0, ripple_address_codec_1.decodeAccountID)(value)));
}
/**

@@ -64,5 +57,5 @@ * Overload of toJSON

*/
AccountID.prototype.toJSON = function () {
toJSON() {
return this.toBase58();
};
}
/**

@@ -73,9 +66,8 @@ * Defines how to encode AccountID into a base58 address

*/
AccountID.prototype.toBase58 = function () {
return ripple_address_codec_1.encodeAccountID(this.bytes);
};
AccountID.defaultAccountID = new AccountID(Buffer.alloc(20));
return AccountID;
}(hash_160_1.Hash160));
toBase58() {
return (0, ripple_address_codec_1.encodeAccountID)(this.bytes);
}
}
exports.AccountID = AccountID;
AccountID.defaultAccountID = new AccountID(new Uint8Array(20));
//# sourceMappingURL=account-id.js.map
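Usage sketch (illustrative, not part of the diff) for the Uint8Array-based AccountID; both inputs below describe the all-zero account ID.
const { AccountID } = require('./types/account-id');

const fromHex = AccountID.from('0'.repeat(40));     // 20 zero bytes as hex
const fromEmpty = AccountID.from('');               // falls back to defaultAccountID
console.log(fromHex.toJSON());                      // base58 r-address of the zero account
console.log(fromHex.toHex() === fromEmpty.toHex()); // true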

@@ -1,8 +0,4 @@

/// <reference types="node" />
import { BinaryParser } from "../serdes/binary-parser";
import { JsonObject, SerializedType } from "./serialized-type";
/**
* Interface for JSON objects that represent amounts
*/
interface AmountObject extends JsonObject {
import { BinaryParser } from '../serdes/binary-parser';
import { JsonObject, SerializedType } from './serialized-type';
interface AmountObjectIOU extends JsonObject {
value: string;

@@ -12,3 +8,11 @@ currency: string;

}
interface AmountObjectMPT extends JsonObject {
value: string;
mpt_issuance_id: string;
}
/**
* Interface for JSON objects that represent amounts
*/
type AmountObject = AmountObjectIOU | AmountObjectMPT;
/**
* Class for serializing/deserializing Amounts

@@ -18,5 +22,5 @@ */

static defaultAmount: Amount;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**
* Construct an amount from an IOU or string amount
* Construct an amount from an IOU, MPT or string amount
*

@@ -51,3 +55,3 @@ * @param value An Amount, object representing an IOU, or a string

*
* @param decimal Decimal.js object representing IOU.value
* @param decimal BigNumber object representing IOU.value
* @returns void, but will throw if invalid amount

@@ -57,2 +61,9 @@ */

/**
* Validate MPT.value amount
*
* @param decimal BigNumber object representing MPT.value
* @returns void, but will throw if invalid amount
*/
private static assertMptIsValid;
/**
* Ensure that the value after being multiplied by the exponent does not

@@ -71,3 +82,15 @@ * contain a decimal.

private isNative;
/**
* Test if this amount is in units of MPT
*
* @returns true if MPT
*/
private isMPT;
/**
* Test if this amount is in units of IOU
*
* @returns true if IOU
*/
private isIOU;
}
export { Amount, AmountObject };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Amount = void 0;
var decimal_js_1 = require("decimal.js");
var binary_parser_1 = require("../serdes/binary-parser");
var account_id_1 = require("./account-id");
var currency_1 = require("./currency");
var serialized_type_1 = require("./serialized-type");
const binary_parser_1 = require("../serdes/binary-parser");
const account_id_1 = require("./account-id");
const currency_1 = require("./currency");
const serialized_type_1 = require("./serialized-type");
const bignumber_js_1 = __importDefault(require("bignumber.js"));
const utils_1 = require("@xrplf/isomorphic/utils");
const utils_2 = require("../utils");
const hash_192_1 = require("./hash-192");
/**
* Constants for validating amounts
*/
var MIN_IOU_EXPONENT = -96;
var MAX_IOU_EXPONENT = 80;
var MAX_IOU_PRECISION = 16;
var MAX_DROPS = new decimal_js_1.Decimal("1e17");
var MIN_XRP = new decimal_js_1.Decimal("1e-6");
const MIN_IOU_EXPONENT = -96;
const MAX_IOU_EXPONENT = 80;
const MAX_IOU_PRECISION = 16;
const MAX_DROPS = new bignumber_js_1.default('1e17');
const MIN_XRP = new bignumber_js_1.default('1e-6');
const mask = BigInt(0x00000000ffffffff);
const mptMask = BigInt(0x8000000000000000);
/**
* decimal.js configuration for Amount IOUs
* BigNumber configuration for Amount IOUs
*/
decimal_js_1.Decimal.config({
toExpPos: MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
toExpNeg: MIN_IOU_EXPONENT - MAX_IOU_PRECISION,
bignumber_js_1.default.config({
EXPONENTIAL_AT: [
MIN_IOU_EXPONENT - MAX_IOU_PRECISION,
MAX_IOU_EXPONENT + MAX_IOU_PRECISION,
],
});
/**
* Type guard for AmountObject
* Type guard for AmountObjectIOU
*/
function isAmountObject(arg) {
var keys = Object.keys(arg).sort();
function isAmountObjectIOU(arg) {
const keys = Object.keys(arg).sort();
return (keys.length === 3 &&
keys[0] === "currency" &&
keys[1] === "issuer" &&
keys[2] === "value");
keys[0] === 'currency' &&
keys[1] === 'issuer' &&
keys[2] === 'value');
}
/**
* Type guard for AmountObjectMPT
*/
function isAmountObjectMPT(arg) {
const keys = Object.keys(arg).sort();
return (keys.length === 2 && keys[0] === 'mpt_issuance_id' && keys[1] === 'value');
}
/**
* Class for serializing/deserializing Amounts
*/
var Amount = /** @class */ (function (_super) {
__extends(Amount, _super);
function Amount(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Amount.defaultAmount.bytes) || this;
class Amount extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : Amount.defaultAmount.bytes);
}
/**
* Construct an amount from an IOU or string amount
* Construct an amount from an IOU, MPT or string amount
*

@@ -62,16 +65,19 @@ * @param value An Amount, object representing an IOU, or a string

*/
Amount.from = function (value) {
static from(value) {
if (value instanceof Amount) {
return value;
}
var amount = Buffer.alloc(8);
if (typeof value === "string") {
let amount = new Uint8Array(8);
if (typeof value === 'string') {
Amount.assertXrpIsValid(value);
var number = BigInt(value);
amount.writeBigUInt64BE(number);
const number = BigInt(value);
const intBuf = [new Uint8Array(4), new Uint8Array(4)];
(0, utils_2.writeUInt32BE)(intBuf[0], Number(number >> BigInt(32)), 0);
(0, utils_2.writeUInt32BE)(intBuf[1], Number(number & BigInt(mask)), 0);
amount = (0, utils_1.concat)(intBuf);
amount[0] |= 0x40;
return new Amount(amount);
}
if (isAmountObject(value)) {
var number = new decimal_js_1.Decimal(value.value);
if (isAmountObjectIOU(value)) {
const number = new bignumber_js_1.default(value.value);
Amount.assertIouIsValid(number);

@@ -82,22 +88,38 @@ if (number.isZero()) {

else {
var integerNumberString = number
.times("1e" + -(number.e - 15))
const integerNumberString = number
.times(`1e${-((number.e || 0) - 15)}`)
.abs()
.toString();
amount.writeBigUInt64BE(BigInt(integerNumberString));
const num = BigInt(integerNumberString);
const intBuf = [new Uint8Array(4), new Uint8Array(4)];
(0, utils_2.writeUInt32BE)(intBuf[0], Number(num >> BigInt(32)), 0);
(0, utils_2.writeUInt32BE)(intBuf[1], Number(num & BigInt(mask)), 0);
amount = (0, utils_1.concat)(intBuf);
amount[0] |= 0x80;
if (number.gt(new decimal_js_1.Decimal(0))) {
if (number.gt(new bignumber_js_1.default(0))) {
amount[0] |= 0x40;
}
var exponent = number.e - 15;
var exponentByte = 97 + exponent;
const exponent = (number.e || 0) - 15;
const exponentByte = 97 + exponent;
amount[0] |= exponentByte >>> 2;
amount[1] |= (exponentByte & 0x03) << 6;
}
var currency = currency_1.Currency.from(value.currency).toBytes();
var issuer = account_id_1.AccountID.from(value.issuer).toBytes();
return new Amount(Buffer.concat([amount, currency, issuer]));
const currency = currency_1.Currency.from(value.currency).toBytes();
const issuer = account_id_1.AccountID.from(value.issuer).toBytes();
return new Amount((0, utils_1.concat)([amount, currency, issuer]));
}
throw new Error("Invalid type to construct an Amount");
};
if (isAmountObjectMPT(value)) {
Amount.assertMptIsValid(value.value);
let leadingByte = new Uint8Array(1);
leadingByte[0] |= 0x60;
const num = BigInt(value.value);
const intBuf = [new Uint8Array(4), new Uint8Array(4)];
(0, utils_2.writeUInt32BE)(intBuf[0], Number(num >> BigInt(32)), 0);
(0, utils_2.writeUInt32BE)(intBuf[1], Number(num & BigInt(mask)), 0);
amount = (0, utils_1.concat)(intBuf);
const mptIssuanceID = hash_192_1.Hash192.from(value.mpt_issuance_id).toBytes();
return new Amount((0, utils_1.concat)([leadingByte, amount, mptIssuanceID]));
}
throw new Error('Invalid type to construct an Amount');
}
/**

@@ -109,7 +131,11 @@ * Read an amount from a BinaryParser

*/
Amount.fromParser = function (parser) {
var isXRP = parser.peek() & 0x80;
var numBytes = isXRP ? 48 : 8;
static fromParser(parser) {
const isIOU = parser.peek() & 0x80;
if (isIOU)
return new Amount(parser.read(48));
// the amount can be either MPT or XRP at this point
const isMPT = parser.peek() & 0x20;
const numBytes = isMPT ? 33 : 8;
return new Amount(parser.read(numBytes));
};
}
/**

@@ -120,23 +146,26 @@ * Get the JSON representation of this Amount

*/
Amount.prototype.toJSON = function () {
toJSON() {
if (this.isNative()) {
var bytes = this.bytes;
var isPositive = bytes[0] & 0x40;
var sign = isPositive ? "" : "-";
const bytes = this.bytes;
const isPositive = bytes[0] & 0x40;
const sign = isPositive ? '' : '-';
bytes[0] &= 0x3f;
return "" + sign + bytes.readBigUInt64BE().toString();
const msb = BigInt((0, utils_2.readUInt32BE)(bytes.slice(0, 4), 0));
const lsb = BigInt((0, utils_2.readUInt32BE)(bytes.slice(4), 0));
const num = (msb << BigInt(32)) | lsb;
return `${sign}${num.toString()}`;
}
else {
var parser = new binary_parser_1.BinaryParser(this.toString());
var mantissa = parser.read(8);
var currency = currency_1.Currency.fromParser(parser);
var issuer = account_id_1.AccountID.fromParser(parser);
var b1 = mantissa[0];
var b2 = mantissa[1];
var isPositive = b1 & 0x40;
var sign = isPositive ? "" : "-";
var exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
if (this.isIOU()) {
const parser = new binary_parser_1.BinaryParser(this.toString());
const mantissa = parser.read(8);
const currency = currency_1.Currency.fromParser(parser);
const issuer = account_id_1.AccountID.fromParser(parser);
const b1 = mantissa[0];
const b2 = mantissa[1];
const isPositive = b1 & 0x40;
const sign = isPositive ? '' : '-';
const exponent = ((b1 & 0x3f) << 2) + ((b2 & 0xff) >> 6) - 97;
mantissa[0] = 0;
mantissa[1] &= 0x3f;
var value = new decimal_js_1.Decimal(sign + "0x" + mantissa.toString("hex")).times("1e" + exponent);
const value = new bignumber_js_1.default(`${sign}0x${(0, utils_1.bytesToHex)(mantissa)}`).times(`1e${exponent}`);
Amount.assertIouIsValid(value);

@@ -149,3 +178,19 @@ return {

}
};
if (this.isMPT()) {
const parser = new binary_parser_1.BinaryParser(this.toString());
const leadingByte = parser.read(1);
const amount = parser.read(8);
const mptID = hash_192_1.Hash192.fromParser(parser);
const isPositive = leadingByte[0] & 0x40;
const sign = isPositive ? '' : '-';
const msb = BigInt((0, utils_2.readUInt32BE)(amount.slice(0, 4), 0));
const lsb = BigInt((0, utils_2.readUInt32BE)(amount.slice(4), 0));
const num = (msb << BigInt(32)) | lsb;
return {
value: `${sign}${num.toString()}`,
mpt_issuance_id: mptID.toString(),
};
}
throw new Error('Invalid amount to construct JSON');
}
/**

@@ -157,32 +202,52 @@ * Validate XRP amount

*/
Amount.assertXrpIsValid = function (amount) {
if (amount.indexOf(".") !== -1) {
throw new Error(amount.toString() + " is an illegal amount");
static assertXrpIsValid(amount) {
if (amount.indexOf('.') !== -1) {
throw new Error(`${amount.toString()} is an illegal amount`);
}
var decimal = new decimal_js_1.Decimal(amount);
const decimal = new bignumber_js_1.default(amount);
if (!decimal.isZero()) {
if (decimal.lt(MIN_XRP) || decimal.gt(MAX_DROPS)) {
throw new Error(amount.toString() + " is an illegal amount");
throw new Error(`${amount.toString()} is an illegal amount`);
}
}
};
}
/**
* Validate IOU.value amount
*
* @param decimal Decimal.js object representing IOU.value
* @param decimal BigNumber object representing IOU.value
* @returns void, but will throw if invalid amount
*/
Amount.assertIouIsValid = function (decimal) {
static assertIouIsValid(decimal) {
if (!decimal.isZero()) {
var p = decimal.precision();
var e = decimal.e - 15;
const p = decimal.precision();
const e = (decimal.e || 0) - 15;
if (p > MAX_IOU_PRECISION ||
e > MAX_IOU_EXPONENT ||
e < MIN_IOU_EXPONENT) {
throw new Error("Decimal precision out of range");
throw new Error('Decimal precision out of range');
}
this.verifyNoDecimal(decimal);
}
};
}
/**
* Validate MPT.value amount
*
* @param decimal BigNumber object representing MPT.value
* @returns void, but will throw if invalid amount
*/
static assertMptIsValid(amount) {
if (amount.indexOf('.') !== -1) {
throw new Error(`${amount.toString()} is an illegal amount`);
}
const decimal = new bignumber_js_1.default(amount);
if (!decimal.isZero()) {
if (decimal < (0, bignumber_js_1.default)(0)) {
throw new Error(`${amount.toString()} is an illegal amount`);
}
if (Number(BigInt(amount) & BigInt(mptMask)) != 0) {
throw new Error(`${amount.toString()} is an illegal amount`);
}
}
}
/**
* Ensure that the value after being multiplied by the exponent does not

@@ -194,11 +259,11 @@ * contain a decimal.

*/
Amount.verifyNoDecimal = function (decimal) {
var integerNumberString = decimal
.times("1e" + -(decimal.e - 15))
static verifyNoDecimal(decimal) {
const integerNumberString = decimal
.times(`1e${-((decimal.e || 0) - 15)}`)
.abs()
.toString();
if (integerNumberString.indexOf(".") !== -1) {
throw new Error("Decimal place found in integerNumberString");
if (integerNumberString.indexOf('.') !== -1) {
throw new Error('Decimal place found in integerNumberString');
}
};
}
/**

@@ -209,9 +274,24 @@ * Test if this amount is in units of Native Currency(XRP)

*/
Amount.prototype.isNative = function () {
return (this.bytes[0] & 0x80) === 0;
};
Amount.defaultAmount = new Amount(Buffer.from("4000000000000000", "hex"));
return Amount;
}(serialized_type_1.SerializedType));
isNative() {
return (this.bytes[0] & 0x80) === 0 && (this.bytes[0] & 0x20) === 0;
}
/**
* Test if this amount is in units of MPT
*
* @returns true if MPT
*/
isMPT() {
return (this.bytes[0] & 0x80) === 0 && (this.bytes[0] & 0x20) !== 0;
}
/**
* Test if this amount is in units of IOU
*
* @returns true if IOU
*/
isIOU() {
return (this.bytes[0] & 0x80) !== 0;
}
}
exports.Amount = Amount;
Amount.defaultAmount = new Amount((0, utils_1.hexToBytes)('4000000000000000'));
//# sourceMappingURL=amount.js.map
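A sketch (not from the package) of the three input shapes Amount.from now accepts: XRP drops, an IOU object, and the new MPT object. All values below are placeholders.
const { Amount } = require('./types/amount');

const drops = Amount.from('1000000'); // 1 XRP, expressed in drops
const iou = Amount.from({
  value: '3.141',
  currency: 'USD',
  issuer: '0'.repeat(40),             // placeholder hex AccountID
});
const mpt = Amount.from({
  value: '100',
  mpt_issuance_id: '0'.repeat(48),    // placeholder 192-bit issuance ID as hex
});

console.log(drops.toJSON()); // "1000000"
console.log(iou.toJSON());   // object with value, currency, issuer
console.log(mpt.toJSON());   // object with value, mpt_issuance_id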

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { SerializedType } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { SerializedType } from './serialized-type';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -8,3 +7,3 @@ * Variable length encoded type

declare class Blob extends SerializedType {
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**

@@ -11,0 +10,0 @@ * Defines how to read a Blob from a BinaryParser

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Blob = void 0;
var serialized_type_1 = require("./serialized-type");
const serialized_type_1 = require("./serialized-type");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* Variable length encoded type
*/
var Blob = /** @class */ (function (_super) {
__extends(Blob, _super);
function Blob(bytes) {
return _super.call(this, bytes) || this;
class Blob extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes);
}

@@ -33,5 +20,5 @@ /**

*/
Blob.fromParser = function (parser, hint) {
static fromParser(parser, hint) {
return new Blob(parser.read(hint));
};
}
/**

@@ -43,14 +30,16 @@ * Create a Blob object from a hex-string

*/
Blob.from = function (value) {
static from(value) {
if (value instanceof Blob) {
return value;
}
if (typeof value === "string") {
return new Blob(Buffer.from(value, "hex"));
if (typeof value === 'string') {
if (!/^[A-F0-9]*$/iu.test(value)) {
throw new Error('Cannot construct Blob from a non-hex string');
}
return new Blob((0, utils_1.hexToBytes)(value));
}
throw new Error("Cannot construct Blob from value given");
};
return Blob;
}(serialized_type_1.SerializedType));
throw new Error('Cannot construct Blob from value given');
}
}
exports.Blob = Blob;
//# sourceMappingURL=blob.js.map
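Sketch of the stricter hex handling added to Blob.from (illustrative, require path assumed):
const { Blob } = require('./types/blob');

Blob.from('CAFE');        // ok: two bytes
try {
  Blob.from('not hex');
} catch (e) {
  console.log(e.message); // "Cannot construct Blob from a non-hex string"
}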

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Hash160 } from "./hash-160";
import { Hash160 } from './hash-160';
/**

@@ -8,17 +7,10 @@ * Class defining how to encode and decode Currencies

static readonly XRP: Currency;
private readonly _iso?;
private readonly _isNative;
constructor(byteBuf: Buffer);
private readonly _iso;
constructor(byteBuf: Uint8Array);
/**
* Tells if this currency is native
*
* @returns true if native, false if not
*/
isNative(): boolean;
/**
* Return the ISO code of this currency
*
* @returns ISO code if it exists, else undefined
* @returns ISO code if it exists, else null
*/
iso(): string | undefined;
iso(): string | null;
/**

@@ -25,0 +17,0 @@ * Constructs a Currency object

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Currency = void 0;
var hash_160_1 = require("./hash-160");
var ISO_REGEX = /^[A-Z0-9]{3}$/;
var HEX_REGEX = /^[A-F0-9]{40}$/;
const hash_160_1 = require("./hash-160");
const utils_1 = require("@xrplf/isomorphic/utils");
const XRP_HEX_REGEX = /^0{40}$/;
const ISO_REGEX = /^[A-Z0-9a-z?!@#$%^&*(){}[\]|]{3}$/;
const HEX_REGEX = /^[A-F0-9]{40}$/;
// eslint-disable-next-line no-control-regex
const STANDARD_FORMAT_HEX_REGEX = /^0{24}[\x00-\x7F]{6}0{10}$/;
/**

@@ -24,5 +15,5 @@ * Convert an ISO code to a currency bytes representation

function isoToBytes(iso) {
var bytes = Buffer.alloc(20);
if (iso !== "XRP") {
var isoBytes = iso.split("").map(function (c) { return c.charCodeAt(0); });
const bytes = new Uint8Array(20);
if (iso !== 'XRP') {
const isoBytes = iso.split('').map((c) => c.charCodeAt(0));
bytes.set(isoBytes, 12);

@@ -36,4 +27,14 @@ }

function isIsoCode(iso) {
return iso.length === 3;
return ISO_REGEX.test(iso);
}
function isoCodeFromHex(code) {
const iso = (0, utils_1.hexToString)((0, utils_1.bytesToHex)(code));
if (iso === 'XRP') {
return null;
}
if (isIsoCode(iso)) {
return iso;
}
return null;
}
/**

@@ -49,6 +50,6 @@ * Tests if hex is a valid hex-string

function isStringRepresentation(input) {
return isIsoCode(input) || isHex(input);
return input.length === 3 || isHex(input);
}
/**
* Tests if a Buffer is a valid representation of a currency
* Tests if a Uint8Array is a valid representation of a currency
*/

@@ -62,3 +63,3 @@ function isBytesArray(bytes) {

function isValidRepresentation(input) {
return input instanceof Buffer
return input instanceof Uint8Array
? isBytesArray(input)

@@ -68,9 +69,9 @@ : isStringRepresentation(input);

/**
* Generate bytes from a string or buffer representation of a currency
* Generate bytes from a string or Uint8Array representation of a currency
*/
function bytesFromRepresentation(input) {
if (!isValidRepresentation(input)) {
throw new Error("Unsupported Currency representation: " + input);
throw new Error(`Unsupported Currency representation: ${input}`);
}
return input.length === 3 ? isoToBytes(input) : Buffer.from(input, "hex");
return input.length === 3 ? isoToBytes(input) : (0, utils_1.hexToBytes)(input);
}

@@ -80,37 +81,24 @@ /**

*/
var Currency = /** @class */ (function (_super) {
__extends(Currency, _super);
function Currency(byteBuf) {
var _this = _super.call(this, byteBuf !== null && byteBuf !== void 0 ? byteBuf : Currency.XRP.bytes) || this;
var onlyISO = true;
var bytes = _this.bytes;
var code = _this.bytes.slice(12, 15);
var iso = code.toString();
for (var i = bytes.length - 1; i >= 0; i--) {
if (bytes[i] !== 0 && !(i === 12 || i === 13 || i === 14)) {
onlyISO = false;
break;
}
class Currency extends hash_160_1.Hash160 {
constructor(byteBuf) {
super(byteBuf !== null && byteBuf !== void 0 ? byteBuf : Currency.XRP.bytes);
const hex = (0, utils_1.bytesToHex)(this.bytes);
if (XRP_HEX_REGEX.test(hex)) {
this._iso = 'XRP';
}
var lossLessISO = onlyISO && iso !== "XRP" && ISO_REGEX.test(iso);
_this._isNative = onlyISO && code.toString("hex") === "000000";
_this._iso = _this._isNative ? "XRP" : lossLessISO ? iso : undefined;
return _this;
else if (STANDARD_FORMAT_HEX_REGEX.test(hex)) {
this._iso = isoCodeFromHex(this.bytes.slice(12, 15));
}
else {
this._iso = null;
}
}
/**
* Tells if this currency is native
*
* @returns true if native, false if not
*/
Currency.prototype.isNative = function () {
return this._isNative;
};
/**
* Return the ISO code of this currency
*
* @returns ISO code if it exists, else undefined
* @returns ISO code if it exists, else null
*/
Currency.prototype.iso = function () {
iso() {
return this._iso;
};
}
/**

@@ -121,11 +109,11 @@ * Constructs a Currency object

*/
Currency.from = function (value) {
static from(value) {
if (value instanceof Currency) {
return value;
}
if (typeof value === "string") {
if (typeof value === 'string') {
return new Currency(bytesFromRepresentation(value));
}
throw new Error("Cannot construct Currency from value given");
};
throw new Error('Cannot construct Currency from value given');
}
/**

@@ -136,13 +124,12 @@ * Gets the JSON representation of a currency

*/
Currency.prototype.toJSON = function () {
var iso = this.iso();
if (iso !== undefined) {
toJSON() {
const iso = this.iso();
if (iso !== null) {
return iso;
}
return this.bytes.toString("hex").toUpperCase();
};
Currency.XRP = new Currency(Buffer.alloc(20));
return Currency;
}(hash_160_1.Hash160));
return (0, utils_1.bytesToHex)(this.bytes);
}
}
exports.Currency = Currency;
Currency.XRP = new Currency(new Uint8Array(20));
//# sourceMappingURL=currency.js.map
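Sketch (not part of the diff) of how the rewritten iso()/toJSON() behave for the recognized currency shapes:
const { Currency } = require('./types/currency');

console.log(Currency.from('XRP').toJSON()); // "XRP" (all-zero bytes)
console.log(Currency.from('USD').toJSON()); // "USD" (standard-format bytes)

// Anything outside the standard format round-trips as 40-character hex.
const nonstandard = 'DEADBEEFDEADBEEFDEADBEEFDEADBEEFDEADBEEF';
console.log(Currency.from(nonstandard).toJSON() === nonstandard); // true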

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Hash } from "./hash";
import { Hash } from './hash';
/**

@@ -9,4 +8,10 @@ * Hash with a width of 128 bits

static readonly ZERO_128: Hash128;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**
* Get the hex representation of this Hash128's bytes, allowing unset
*
* @returns hex String of this.bytes
*/
toHex(): string;
}
export { Hash128 };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash128 = void 0;
var hash_1 = require("./hash");
const hash_1 = require("./hash");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* Hash with a width of 128 bits
*/
var Hash128 = /** @class */ (function (_super) {
__extends(Hash128, _super);
function Hash128(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash128.ZERO_128.bytes) || this;
class Hash128 extends hash_1.Hash {
constructor(bytes) {
if (bytes && bytes.byteLength === 0) {
bytes = Hash128.ZERO_128.bytes;
}
super(bytes !== null && bytes !== void 0 ? bytes : Hash128.ZERO_128.bytes);
}
Hash128.width = 16;
Hash128.ZERO_128 = new Hash128(Buffer.alloc(Hash128.width));
return Hash128;
}(hash_1.Hash));
/**
* Get the hex representation of this Hash128's bytes, allowing unset
*
* @returns hex String of this.bytes
*/
toHex() {
const hex = (0, utils_1.bytesToHex)(this.toBytes());
if (/^0+$/.exec(hex)) {
return '';
}
return hex;
}
}
exports.Hash128 = Hash128;
Hash128.width = 16;
Hash128.ZERO_128 = new Hash128(new Uint8Array(Hash128.width));
//# sourceMappingURL=hash-128.js.map
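Sketch of the new "allowing unset" behaviour of Hash128.toHex() (illustrative; imports assumed):
const { Hash128 } = require('./types/hash-128');
const { hexToBytes } = require('@xrplf/isomorphic/utils');

console.log(Hash128.ZERO_128.toHex() === ''); // true: an all-zero hash renders as empty
console.log(new Hash128(hexToBytes('000102030405060708090A0B0C0D0E0F')).toHex()); // 32 hex chars back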

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Hash } from "./hash";
import { Hash } from './hash';
/**

@@ -9,4 +8,4 @@ * Hash with a width of 160 bits

static readonly ZERO_160: Hash160;
constructor(bytes?: Buffer);
constructor(bytes?: Uint8Array);
}
export { Hash160 };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash160 = void 0;
var hash_1 = require("./hash");
const hash_1 = require("./hash");
/**
* Hash with a width of 160 bits
*/
var Hash160 = /** @class */ (function (_super) {
__extends(Hash160, _super);
function Hash160(bytes) {
var _this = this;
class Hash160 extends hash_1.Hash {
constructor(bytes) {
if (bytes && bytes.byteLength === 0) {
bytes = Hash160.ZERO_160.bytes;
}
_this = _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash160.ZERO_160.bytes) || this;
return _this;
super(bytes !== null && bytes !== void 0 ? bytes : Hash160.ZERO_160.bytes);
}
Hash160.width = 20;
Hash160.ZERO_160 = new Hash160(Buffer.alloc(Hash160.width));
return Hash160;
}(hash_1.Hash));
}
exports.Hash160 = Hash160;
Hash160.width = 20;
Hash160.ZERO_160 = new Hash160(new Uint8Array(Hash160.width));
//# sourceMappingURL=hash-160.js.map

@@ -1,3 +0,2 @@

/// <reference types="node" />
import { Hash } from "./hash";
import { Hash } from './hash';
/**

@@ -9,4 +8,4 @@ * Hash with a width of 256 bits

static readonly ZERO_256: Hash256;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
}
export { Hash256 };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash256 = void 0;
var hash_1 = require("./hash");
const hash_1 = require("./hash");
/**
* Hash with a width of 256 bits
*/
var Hash256 = /** @class */ (function (_super) {
__extends(Hash256, _super);
function Hash256(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : Hash256.ZERO_256.bytes) || this;
class Hash256 extends hash_1.Hash {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : Hash256.ZERO_256.bytes);
}
Hash256.width = 32;
Hash256.ZERO_256 = new Hash256(Buffer.alloc(Hash256.width));
return Hash256;
}(hash_1.Hash));
}
exports.Hash256 = Hash256;
Hash256.width = 32;
Hash256.ZERO_256 = new Hash256(new Uint8Array(Hash256.width));
//# sourceMappingURL=hash-256.js.map

@@ -1,10 +0,9 @@

/// <reference types="node" />
import { Comparable } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { Comparable } from './serialized-type';
import { BinaryParser } from '../serdes/binary-parser';
/**
* Base class defining how to encode and decode hashes
*/
declare class Hash extends Comparable {
declare class Hash extends Comparable<Hash | string> {
static readonly width: number;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**

@@ -11,0 +10,0 @@ * Construct a Hash object from an existing Hash object or a hex-string

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Hash = void 0;
var serialized_type_1 = require("./serialized-type");
const serialized_type_1 = require("./serialized-type");
const utils_1 = require("@xrplf/isomorphic/utils");
const utils_2 = require("../utils");
/**
* Base class defining how to encode and decode hashes
*/
var Hash = /** @class */ (function (_super) {
__extends(Hash, _super);
function Hash(bytes) {
var _this = _super.call(this, bytes) || this;
if (_this.bytes.byteLength !== _this.constructor.width) {
throw new Error("Invalid Hash length " + _this.bytes.byteLength);
class Hash extends serialized_type_1.Comparable {
constructor(bytes) {
super(bytes);
if (this.bytes.length !== this.constructor.width) {
throw new Error(`Invalid Hash length ${this.bytes.byteLength}`);
}
return _this;
}

@@ -35,11 +22,11 @@ /**

*/
Hash.from = function (value) {
static from(value) {
if (value instanceof this) {
return value;
}
if (typeof value === "string") {
return new this(Buffer.from(value, "hex"));
if (typeof value === 'string') {
return new this((0, utils_1.hexToBytes)(value));
}
throw new Error("Cannot construct Hash from given value");
};
throw new Error('Cannot construct Hash from given value');
}
/**

@@ -51,5 +38,5 @@ * Read a Hash object from a BinaryParser

*/
Hash.fromParser = function (parser, hint) {
static fromParser(parser, hint) {
return new this(parser.read(hint !== null && hint !== void 0 ? hint : this.width));
};
}
/**

@@ -60,11 +47,11 @@ * Overloaded operator for comparing two hash objects

*/
Hash.prototype.compareTo = function (other) {
return Buffer.compare(this.bytes, this.constructor.from(other).bytes);
};
compareTo(other) {
return (0, utils_2.compare)(this.bytes, this.constructor.from(other).bytes);
}
/**
* @returns the hex-string representation of this Hash
*/
Hash.prototype.toString = function () {
toString() {
return this.toHex();
};
}
/**

@@ -76,5 +63,5 @@ * Returns four bits at the specified depth within a hash

*/
Hash.prototype.nibblet = function (depth) {
var byteIx = depth > 0 ? (depth / 2) | 0 : 0;
var b = this.bytes[byteIx];
nibblet(depth) {
const byteIx = depth > 0 ? (depth / 2) | 0 : 0;
let b = this.bytes[byteIx];
if (depth % 2 === 0) {

@@ -87,6 +74,5 @@ b = (b & 0xf0) >>> 4;

return b;
};
return Hash;
}(serialized_type_1.Comparable));
}
}
exports.Hash = Hash;
//# sourceMappingURL=hash.js.map
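Sketch (not from the diff) of nibblet(), which ShaMap uses to pick a branch at each depth; Hash256 stands in for the abstract Hash base class.
const { Hash256 } = require('./types/hash-256');

const h = Hash256.from('A1' + '00'.repeat(31)); // 32 bytes, first byte 0xA1
console.log(h.nibblet(0)); // 10 -> high nibble of byte 0 (0xA)
console.log(h.nibblet(1)); // 1  -> low nibble of byte 0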

@@ -1,33 +0,19 @@

import { AccountID } from "./account-id";
import { Amount } from "./amount";
import { Blob } from "./blob";
import { Currency } from "./currency";
import { Hash128 } from "./hash-128";
import { Hash160 } from "./hash-160";
import { Hash256 } from "./hash-256";
import { PathSet } from "./path-set";
import { STArray } from "./st-array";
import { STObject } from "./st-object";
import { UInt16 } from "./uint-16";
import { UInt32 } from "./uint-32";
import { UInt64 } from "./uint-64";
import { UInt8 } from "./uint-8";
import { Vector256 } from "./vector-256";
declare const coreTypes: {
AccountID: typeof AccountID;
Amount: typeof Amount;
Blob: typeof Blob;
Currency: typeof Currency;
Hash128: typeof Hash128;
Hash160: typeof Hash160;
Hash256: typeof Hash256;
PathSet: typeof PathSet;
STArray: typeof STArray;
STObject: typeof STObject;
UInt8: typeof UInt8;
UInt16: typeof UInt16;
UInt32: typeof UInt32;
UInt64: typeof UInt64;
Vector256: typeof Vector256;
};
export { coreTypes };
import { AccountID } from './account-id';
import { Amount } from './amount';
import { Blob } from './blob';
import { Currency } from './currency';
import { Hash128 } from './hash-128';
import { Hash160 } from './hash-160';
import { Hash192 } from './hash-192';
import { Hash256 } from './hash-256';
import { PathSet } from './path-set';
import { STArray } from './st-array';
import { STObject } from './st-object';
import { UInt16 } from './uint-16';
import { UInt32 } from './uint-32';
import { UInt64 } from './uint-64';
import { UInt8 } from './uint-8';
import { Vector256 } from './vector-256';
import { type SerializedType } from './serialized-type';
declare const coreTypes: Record<string, typeof SerializedType>;
export { coreTypes, AccountID, Amount, Blob, Currency, Hash128, Hash160, Hash192, Hash256, PathSet, STArray, STObject, UInt8, UInt16, UInt32, UInt64, Vector256, };
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.coreTypes = void 0;
var enums_1 = require("../enums");
var account_id_1 = require("./account-id");
var amount_1 = require("./amount");
var blob_1 = require("./blob");
var currency_1 = require("./currency");
var hash_128_1 = require("./hash-128");
var hash_160_1 = require("./hash-160");
var hash_256_1 = require("./hash-256");
var path_set_1 = require("./path-set");
var st_array_1 = require("./st-array");
var st_object_1 = require("./st-object");
var uint_16_1 = require("./uint-16");
var uint_32_1 = require("./uint-32");
var uint_64_1 = require("./uint-64");
var uint_8_1 = require("./uint-8");
var vector_256_1 = require("./vector-256");
var coreTypes = {
exports.Vector256 = exports.UInt64 = exports.UInt32 = exports.UInt16 = exports.UInt8 = exports.STObject = exports.STArray = exports.PathSet = exports.Hash256 = exports.Hash192 = exports.Hash160 = exports.Hash128 = exports.Currency = exports.Blob = exports.Amount = exports.AccountID = exports.coreTypes = void 0;
const account_id_1 = require("./account-id");
Object.defineProperty(exports, "AccountID", { enumerable: true, get: function () { return account_id_1.AccountID; } });
const amount_1 = require("./amount");
Object.defineProperty(exports, "Amount", { enumerable: true, get: function () { return amount_1.Amount; } });
const blob_1 = require("./blob");
Object.defineProperty(exports, "Blob", { enumerable: true, get: function () { return blob_1.Blob; } });
const currency_1 = require("./currency");
Object.defineProperty(exports, "Currency", { enumerable: true, get: function () { return currency_1.Currency; } });
const hash_128_1 = require("./hash-128");
Object.defineProperty(exports, "Hash128", { enumerable: true, get: function () { return hash_128_1.Hash128; } });
const hash_160_1 = require("./hash-160");
Object.defineProperty(exports, "Hash160", { enumerable: true, get: function () { return hash_160_1.Hash160; } });
const hash_192_1 = require("./hash-192");
Object.defineProperty(exports, "Hash192", { enumerable: true, get: function () { return hash_192_1.Hash192; } });
const hash_256_1 = require("./hash-256");
Object.defineProperty(exports, "Hash256", { enumerable: true, get: function () { return hash_256_1.Hash256; } });
const issue_1 = require("./issue");
const path_set_1 = require("./path-set");
Object.defineProperty(exports, "PathSet", { enumerable: true, get: function () { return path_set_1.PathSet; } });
const st_array_1 = require("./st-array");
Object.defineProperty(exports, "STArray", { enumerable: true, get: function () { return st_array_1.STArray; } });
const st_object_1 = require("./st-object");
Object.defineProperty(exports, "STObject", { enumerable: true, get: function () { return st_object_1.STObject; } });
const uint_16_1 = require("./uint-16");
Object.defineProperty(exports, "UInt16", { enumerable: true, get: function () { return uint_16_1.UInt16; } });
const uint_32_1 = require("./uint-32");
Object.defineProperty(exports, "UInt32", { enumerable: true, get: function () { return uint_32_1.UInt32; } });
const uint_64_1 = require("./uint-64");
Object.defineProperty(exports, "UInt64", { enumerable: true, get: function () { return uint_64_1.UInt64; } });
const uint_8_1 = require("./uint-8");
Object.defineProperty(exports, "UInt8", { enumerable: true, get: function () { return uint_8_1.UInt8; } });
const vector_256_1 = require("./vector-256");
Object.defineProperty(exports, "Vector256", { enumerable: true, get: function () { return vector_256_1.Vector256; } });
const xchain_bridge_1 = require("./xchain-bridge");
const enums_1 = require("../enums");
const coreTypes = {
AccountID: account_id_1.AccountID,

@@ -27,3 +46,5 @@ Amount: amount_1.Amount,

Hash160: hash_160_1.Hash160,
Hash192: hash_192_1.Hash192,
Hash256: hash_256_1.Hash256,
Issue: issue_1.Issue,
PathSet: path_set_1.PathSet,

@@ -37,10 +58,9 @@ STArray: st_array_1.STArray,

Vector256: vector_256_1.Vector256,
XChainBridge: xchain_bridge_1.XChainBridge,
};
exports.coreTypes = coreTypes;
Object.values(enums_1.Field).forEach(function (field) {
field.associatedType = coreTypes[field.type.name];
});
enums_1.Field["TransactionType"].associatedType = enums_1.TransactionType;
enums_1.Field["TransactionResult"].associatedType = enums_1.TransactionResult;
enums_1.Field["LedgerEntryType"].associatedType = enums_1.LedgerEntryType;
// Ensures that the DEFAULT_DEFINITIONS object connects these types to fields for serializing/deserializing
// This is done here instead of in enums/index.ts to avoid a circular dependency
// because some of the above types depend on BinarySerializer which depends on enums/index.ts.
enums_1.DEFAULT_DEFINITIONS.associateTypes(coreTypes);
//# sourceMappingURL=index.js.map
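For orientation (illustrative only): coreTypes is keyed by type name, and associateTypes is what wires these classes onto the field definitions so the parser and serializer can resolve them.
const { coreTypes } = require('./types');

// The same class is reachable by name or by direct import.
const amount = coreTypes.Amount.from('10');
console.log(amount.toJSON()); // "10"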

@@ -1,3 +0,3 @@

import { BinaryParser } from "../serdes/binary-parser";
import { SerializedType, JsonObject } from "./serialized-type";
import { BinaryParser } from '../serdes/binary-parser';
import { SerializedType, JsonObject } from './serialized-type';
/**

@@ -4,0 +4,0 @@ * The object representation of a Hop, an issuer AccountID, an account AccountID, and a Currency

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.PathSet = void 0;
var account_id_1 = require("./account-id");
var currency_1 = require("./currency");
var binary_parser_1 = require("../serdes/binary-parser");
var serialized_type_1 = require("./serialized-type");
const account_id_1 = require("./account-id");
const currency_1 = require("./currency");
const binary_parser_1 = require("../serdes/binary-parser");
const serialized_type_1 = require("./serialized-type");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* Constants for separating Paths in a PathSet
*/
var PATHSET_END_BYTE = 0x00;
var PATH_SEPARATOR_BYTE = 0xff;
const PATHSET_END_BYTE = 0x00;
const PATH_SEPARATOR_BYTE = 0xff;
/**
* Constant for masking types of a Hop
*/
var TYPE_ACCOUNT = 0x01;
var TYPE_CURRENCY = 0x10;
var TYPE_ISSUER = 0x20;
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;
/**

@@ -51,7 +39,3 @@ * TypeGuard for HopObject

*/
var Hop = /** @class */ (function (_super) {
__extends(Hop, _super);
function Hop() {
return _super !== null && _super.apply(this, arguments) || this;
}
class Hop extends serialized_type_1.SerializedType {
/**

@@ -63,7 +47,7 @@ * Create a Hop from a HopObject

*/
Hop.from = function (value) {
static from(value) {
if (value instanceof Hop) {
return value;
}
var bytes = [Buffer.from([0])];
const bytes = [Uint8Array.from([0])];
if (value.account) {

@@ -81,4 +65,4 @@ bytes.push(account_id_1.AccountID.from(value.account).toBytes());

}
return new Hop(Buffer.concat(bytes));
};
return new Hop((0, utils_1.concat)(bytes));
}
/**

@@ -90,5 +74,5 @@ * Construct a Hop from a BinaryParser

*/
Hop.fromParser = function (parser) {
var type = parser.readUInt8();
var bytes = [Buffer.from([type])];
static fromParser(parser) {
const type = parser.readUInt8();
const bytes = [Uint8Array.from([type])];
if (type & TYPE_ACCOUNT) {

@@ -103,4 +87,4 @@ bytes.push(parser.read(account_id_1.AccountID.width));

}
return new Hop(Buffer.concat(bytes));
};
return new Hop((0, utils_1.concat)(bytes));
}
/**

@@ -111,6 +95,6 @@ * Get the JSON interpretation of this hop

*/
Hop.prototype.toJSON = function () {
var hopParser = new binary_parser_1.BinaryParser(this.bytes.toString("hex"));
var type = hopParser.readUInt8();
var account, currency, issuer = undefined;
toJSON() {
const hopParser = new binary_parser_1.BinaryParser((0, utils_1.bytesToHex)(this.bytes));
const type = hopParser.readUInt8();
let account, currency, issuer;
if (type & TYPE_ACCOUNT) {

@@ -125,8 +109,14 @@ account = account_id_1.AccountID.fromParser(hopParser).toJSON();

}
return {
account: account,
issuer: issuer,
currency: currency,
};
};
const result = {};
if (account) {
result.account = account;
}
if (issuer) {
result.issuer = issuer;
}
if (currency) {
result.currency = currency;
}
return result;
}
/**

@@ -137,15 +127,10 @@ * get a number representing the type of this hop

*/
Hop.prototype.type = function () {
type() {
return this.bytes[0];
};
return Hop;
}(serialized_type_1.SerializedType));
}
}
/**
* Class for serializing/deserializing Paths
*/
var Path = /** @class */ (function (_super) {
__extends(Path, _super);
function Path() {
return _super !== null && _super.apply(this, arguments) || this;
}
class Path extends serialized_type_1.SerializedType {
/**

@@ -157,12 +142,12 @@ * construct a Path from an array of Hops

*/
Path.from = function (value) {
static from(value) {
if (value instanceof Path) {
return value;
}
var bytes = [];
value.forEach(function (hop) {
const bytes = [];
value.forEach((hop) => {
bytes.push(Hop.from(hop).toBytes());
});
return new Path(Buffer.concat(bytes));
};
return new Path((0, utils_1.concat)(bytes));
}
/**

@@ -174,4 +159,4 @@ * Read a Path from a BinaryParser

*/
Path.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {

@@ -184,4 +169,4 @@ bytes.push(Hop.fromParser(parser).toBytes());

}
return new Path(Buffer.concat(bytes));
};
return new Path((0, utils_1.concat)(bytes));
}
/**

@@ -192,5 +177,5 @@ * Get the JSON representation of this Path

*/
Path.prototype.toJSON = function () {
var json = [];
var pathParser = new binary_parser_1.BinaryParser(this.toString());
toJSON() {
const json = [];
const pathParser = new binary_parser_1.BinaryParser(this.toString());
while (!pathParser.end()) {

@@ -200,13 +185,8 @@ json.push(Hop.fromParser(pathParser).toJSON());

return json;
};
return Path;
}(serialized_type_1.SerializedType));
}
}
/**
* Deserialize and Serialize the PathSet type
*/
var PathSet = /** @class */ (function (_super) {
__extends(PathSet, _super);
function PathSet() {
return _super !== null && _super.apply(this, arguments) || this;
}
class PathSet extends serialized_type_1.SerializedType {
/**

@@ -218,3 +198,3 @@ * Construct a PathSet from an Array of Arrays representing paths

*/
PathSet.from = function (value) {
static from(value) {
if (value instanceof PathSet) {

@@ -224,12 +204,12 @@ return value;

if (isPathSet(value)) {
var bytes_1 = [];
value.forEach(function (path) {
bytes_1.push(Path.from(path).toBytes());
bytes_1.push(Buffer.from([PATH_SEPARATOR_BYTE]));
const bytes = [];
value.forEach((path) => {
bytes.push(Path.from(path).toBytes());
bytes.push(Uint8Array.from([PATH_SEPARATOR_BYTE]));
});
bytes_1[bytes_1.length - 1] = Buffer.from([PATHSET_END_BYTE]);
return new PathSet(Buffer.concat(bytes_1));
bytes[bytes.length - 1] = Uint8Array.from([PATHSET_END_BYTE]);
return new PathSet((0, utils_1.concat)(bytes));
}
throw new Error("Cannot construct PathSet from given value");
};
throw new Error('Cannot construct PathSet from given value');
}
/**

@@ -241,4 +221,4 @@ * Construct a PathSet from a BinaryParser

*/
PathSet.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {

@@ -251,4 +231,4 @@ bytes.push(Path.fromParser(parser).toBytes());

}
return new PathSet(Buffer.concat(bytes));
};
return new PathSet((0, utils_1.concat)(bytes));
}
/**

@@ -259,5 +239,5 @@ * Get the JSON representation of this PathSet

*/
PathSet.prototype.toJSON = function () {
var json = [];
var pathParser = new binary_parser_1.BinaryParser(this.toString());
toJSON() {
const json = [];
const pathParser = new binary_parser_1.BinaryParser(this.toString());
while (!pathParser.end()) {

@@ -268,6 +248,5 @@ json.push(Path.fromParser(pathParser).toJSON());

return json;
};
return PathSet;
}(serialized_type_1.SerializedType));
}
}
exports.PathSet = PathSet;
//# sourceMappingURL=path-set.js.map
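
A standalone sketch of the hop-type bitmask used in the Hop class above: the first byte of each serialized hop records which optional fields follow (the helper name here is illustrative, not part of the package):

// Same flag values as TYPE_ACCOUNT / TYPE_CURRENCY / TYPE_ISSUER above.
const TYPE_ACCOUNT = 0x01;
const TYPE_CURRENCY = 0x10;
const TYPE_ISSUER = 0x20;

// Compute the leading type byte for a hop object.
function hopType(hop) {
  let type = 0;
  if (hop.account) type |= TYPE_ACCOUNT;
  if (hop.currency) type |= TYPE_CURRENCY;
  if (hop.issuer) type |= TYPE_ISSUER;
  return type;
}

hopType({ account: 'rExampleAccount', currency: 'USD' }); // 0x11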

@@ -1,6 +0,6 @@

/// <reference types="node" />
import { BytesList } from "../serdes/binary-serializer";
import { BinaryParser } from "../serdes/binary-parser";
declare type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
declare type JsonObject = {
import { BytesList } from '../serdes/binary-serializer';
import { BinaryParser } from '../serdes/binary-parser';
import { XrplDefinitionsBase } from '../enums';
type JSON = string | number | boolean | null | undefined | JSON[] | JsonObject;
type JsonObject = {
[key: string]: JSON;

@@ -12,4 +12,4 @@ };

declare class SerializedType {
protected readonly bytes: Buffer;
constructor(bytes: Buffer);
protected readonly bytes: Uint8Array;
constructor(bytes?: Uint8Array);
static fromParser(parser: BinaryParser, hint?: number): SerializedType;

@@ -32,11 +32,14 @@ static from(value: SerializedType | JSON | bigint): SerializedType;

*
* @returns A buffer of the bytes
* @returns A Uint8Array of the bytes
*/
toBytes(): Buffer;
toBytes(): Uint8Array;
/**
* Return the JSON representation of a SerializedType
*
* @param _definitions rippled definitions used to parse the values of transaction types and such.
* Unused in default, but used in STObject, STArray
* Can be customized for sidechains and amendments.
* @returns any type, if not overloaded returns hexString representation of bytes
*/
toJSON(): JSON;
toJSON(_definitions?: XrplDefinitionsBase): JSON;
/**

@@ -48,10 +51,15 @@ * @returns hexString representation of this.bytes

/**
* Base class for SerializedTypes that are comparable
* Base class for SerializedTypes that are comparable.
*
* @template T - What types you want to allow comparisons between. You must specify all types. Primarily used to allow
* comparisons between built-in types (like `string`) and SerializedType subclasses (like `Hash`).
*
* Ex. `class Hash extends Comparable<Hash | string>`
*/
declare class Comparable extends SerializedType {
lt(other: Comparable): boolean;
eq(other: Comparable): boolean;
gt(other: Comparable): boolean;
gte(other: Comparable): boolean;
lte(other: Comparable): boolean;
declare class Comparable<T extends Object> extends SerializedType {
lt(other: T): boolean;
eq(other: T): boolean;
gt(other: T): boolean;
gte(other: T): boolean;
lte(other: T): boolean;
/**

@@ -63,4 +71,4 @@ * Overload this method to define how two Comparable SerializedTypes are compared

*/
compareTo(other: Comparable): number;
compareTo(other: T): number;
}
export { SerializedType, Comparable, JSON, JsonObject };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Comparable = exports.SerializedType = void 0;
var binary_serializer_1 = require("../serdes/binary-serializer");
const binary_serializer_1 = require("../serdes/binary-serializer");
const utils_1 = require("@xrplf/isomorphic/utils");
/**
* The base class for all binary-codec types
*/
var SerializedType = /** @class */ (function () {
function SerializedType(bytes) {
this.bytes = Buffer.alloc(0);
this.bytes = bytes !== null && bytes !== void 0 ? bytes : Buffer.alloc(0);
class SerializedType {
constructor(bytes) {
this.bytes = new Uint8Array(0);
this.bytes = bytes !== null && bytes !== void 0 ? bytes : new Uint8Array(0);
}
SerializedType.fromParser = function (parser, hint) {
throw new Error("fromParser not implemented");
static fromParser(parser, hint) {
throw new Error('fromParser not implemented');
return this.fromParser(parser, hint);
};
SerializedType.from = function (value) {
throw new Error("from not implemented");
}
static from(value) {
throw new Error('from not implemented');
return this.from(value);
};
}
/**

@@ -39,5 +27,5 @@ * Write the bytes representation of a SerializedType to a BytesList

*/
SerializedType.prototype.toBytesSink = function (list) {
toBytesSink(list) {
list.put(this.bytes);
};
}
/**

@@ -48,58 +36,61 @@ * Get the hex representation of a SerializedType's bytes

*/
SerializedType.prototype.toHex = function () {
return this.toBytes().toString("hex").toUpperCase();
};
toHex() {
return (0, utils_1.bytesToHex)(this.toBytes());
}
/**
* Get the bytes representation of a SerializedType
*
* @returns A buffer of the bytes
* @returns A Uint8Array of the bytes
*/
SerializedType.prototype.toBytes = function () {
toBytes() {
if (this.bytes) {
return this.bytes;
}
var bytes = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BytesList();
this.toBytesSink(bytes);
return bytes.toBytes();
};
}
/**
* Return the JSON representation of a SerializedType
*
* @param _definitions rippled definitions used to parse the values of transaction types and such.
* Unused in default, but used in STObject, STArray
* Can be customized for sidechains and amendments.
* @returns any type, if not overloaded returns hexString representation of bytes
*/
SerializedType.prototype.toJSON = function () {
toJSON(_definitions) {
return this.toHex();
};
}
/**
* @returns hexString representation of this.bytes
*/
SerializedType.prototype.toString = function () {
toString() {
return this.toHex();
};
return SerializedType;
}());
}
}
exports.SerializedType = SerializedType;
/**
* Base class for SerializedTypes that are comparable
* Base class for SerializedTypes that are comparable.
*
* @template T - What types you want to allow comparisons between. You must specify all types. Primarily used to allow
* comparisons between built-in types (like `string`) and SerializedType subclasses (like `Hash`).
*
* Ex. `class Hash extends Comparable<Hash | string>`
*/
var Comparable = /** @class */ (function (_super) {
__extends(Comparable, _super);
function Comparable() {
return _super !== null && _super.apply(this, arguments) || this;
class Comparable extends SerializedType {
lt(other) {
return this.compareTo(other) < 0;
}
Comparable.prototype.lt = function (other) {
return this.compareTo(other) < 0;
};
Comparable.prototype.eq = function (other) {
eq(other) {
return this.compareTo(other) === 0;
};
Comparable.prototype.gt = function (other) {
}
gt(other) {
return this.compareTo(other) > 0;
};
Comparable.prototype.gte = function (other) {
}
gte(other) {
return this.compareTo(other) > -1;
};
Comparable.prototype.lte = function (other) {
}
lte(other) {
return this.compareTo(other) < 1;
};
}
/**

@@ -111,8 +102,7 @@ * Overload this method to define how two Comparable SerializedTypes are compared

*/
Comparable.prototype.compareTo = function (other) {
throw new Error("cannot compare " + this.toString() + " and " + other.toString());
};
return Comparable;
}(SerializedType));
compareTo(other) {
throw new Error(`cannot compare ${this.toString()} and ${other.toString()}`);
}
}
exports.Comparable = Comparable;
//# sourceMappingURL=serialized-type.js.map
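
A sketch of the new Comparable<T> generic in use, mirroring the `class Hash extends Comparable<Hash | string>` example from the doc comment above (the HashLike subclass and import path are hypothetical):

const { Comparable } = require('./dist/types/serialized-type');

// Hypothetical subclass: instances can be compared against either another
// instance or a plain hex string.
class HashLike extends Comparable {
  compareTo(other) {
    const a = this.toHex().toUpperCase();
    const b = typeof other === 'string' ? other.toUpperCase() : other.toHex().toUpperCase();
    return a < b ? -1 : a > b ? 1 : 0;
  }
}

new HashLike(Uint8Array.from([0xab])).eq('AB'); // true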

@@ -1,3 +0,4 @@

import { SerializedType, JsonObject } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { XrplDefinitionsBase } from '../enums';
import { SerializedType, JsonObject } from './serialized-type';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -18,12 +19,14 @@ * Class for serializing and deserializing Arrays of Objects

* @param value STArray or Array of Objects to parse into an STArray
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns An STArray object
*/
static from<T extends STArray | Array<JsonObject>>(value: T): STArray;
static from<T extends STArray | Array<JsonObject>>(value: T, definitions?: XrplDefinitionsBase): STArray;
/**
* Return the JSON representation of this.bytes
*
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns An Array of JSON objects
*/
toJSON(): Array<JsonObject>;
toJSON(definitions?: XrplDefinitionsBase): Array<JsonObject>;
}
export { STArray };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.STArray = void 0;
var serialized_type_1 = require("./serialized-type");
var st_object_1 = require("./st-object");
var binary_parser_1 = require("../serdes/binary-parser");
var ARRAY_END_MARKER = Buffer.from([0xf1]);
var ARRAY_END_MARKER_NAME = "ArrayEndMarker";
var OBJECT_END_MARKER = Buffer.from([0xe1]);
const enums_1 = require("../enums");
const serialized_type_1 = require("./serialized-type");
const st_object_1 = require("./st-object");
const binary_parser_1 = require("../serdes/binary-parser");
const utils_1 = require("@xrplf/isomorphic/utils");
const ARRAY_END_MARKER = Uint8Array.from([0xf1]);
const ARRAY_END_MARKER_NAME = 'ArrayEndMarker';
const OBJECT_END_MARKER = Uint8Array.from([0xe1]);
/**

@@ -27,3 +16,6 @@ * TypeGuard for Array<JsonObject>

function isObjects(args) {
return (Array.isArray(args) && (args.length === 0 || typeof args[0] === "object"));
return (Array.isArray(args) &&
args.every((arg) => typeof arg === 'object' &&
Object.keys(arg).length === 1 &&
typeof Object.values(arg)[0] === 'object'));
}

@@ -33,7 +25,3 @@ /**

*/
var STArray = /** @class */ (function (_super) {
__extends(STArray, _super);
function STArray() {
return _super !== null && _super.apply(this, arguments) || this;
}
class STArray extends serialized_type_1.SerializedType {
/**

@@ -45,6 +33,6 @@ * Construct an STArray from a BinaryParser

*/
STArray.fromParser = function (parser) {
var bytes = [];
static fromParser(parser) {
const bytes = [];
while (!parser.end()) {
var field = parser.readField();
const field = parser.readField();
if (field.name === ARRAY_END_MARKER_NAME) {

@@ -56,4 +44,4 @@ break;

bytes.push(ARRAY_END_MARKER);
return new STArray(Buffer.concat(bytes));
};
return new STArray((0, utils_1.concat)(bytes));
}
/**

@@ -63,5 +51,6 @@ * Construct an STArray from an Array of JSON Objects

* @param value STArray or Array of Objects to parse into an STArray
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns An STArray object
*/
STArray.from = function (value) {
static from(value, definitions = enums_1.DEFAULT_DEFINITIONS) {
if (value instanceof STArray) {

@@ -71,33 +60,33 @@ return value;

if (isObjects(value)) {
var bytes_1 = [];
value.forEach(function (obj) {
bytes_1.push(st_object_1.STObject.from(obj).toBytes());
const bytes = [];
value.forEach((obj) => {
bytes.push(st_object_1.STObject.from(obj, undefined, definitions).toBytes());
});
bytes_1.push(ARRAY_END_MARKER);
return new STArray(Buffer.concat(bytes_1));
bytes.push(ARRAY_END_MARKER);
return new STArray((0, utils_1.concat)(bytes));
}
throw new Error("Cannot construct Currency from value given");
};
throw new Error('Cannot construct STArray from value given');
}
/**
* Return the JSON representation of this.bytes
*
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns An Array of JSON objects
*/
STArray.prototype.toJSON = function () {
var result = [];
var arrayParser = new binary_parser_1.BinaryParser(this.toString());
toJSON(definitions = enums_1.DEFAULT_DEFINITIONS) {
const result = [];
const arrayParser = new binary_parser_1.BinaryParser(this.toString(), definitions);
while (!arrayParser.end()) {
var field = arrayParser.readField();
const field = arrayParser.readField();
if (field.name === ARRAY_END_MARKER_NAME) {
break;
}
var outer = {};
outer[field.name] = st_object_1.STObject.fromParser(arrayParser).toJSON();
const outer = {};
outer[field.name] = st_object_1.STObject.fromParser(arrayParser).toJSON(definitions);
result.push(outer);
}
return result;
};
return STArray;
}(serialized_type_1.SerializedType));
}
}
exports.STArray = STArray;
//# sourceMappingURL=st-array.js.map
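
A usage sketch for the stricter isObjects() guard above: every entry passed to STArray.from must now be a single-key wrapper object whose value is itself an object (the field name, hex payload, and import path below are assumptions for illustration):

const { STArray } = require('./dist/types/st-array');

// One wrapper object per entry, e.g. a Memos-style array.
const value = [
  { Memo: { MemoData: 'ABCDEF' } },
];

const arr = STArray.from(value); // non-conforming input now throws 'Cannot construct STArray from value given'
arr.toJSON();                    // back to an array of single-key objects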

@@ -1,3 +0,4 @@

import { SerializedType, JsonObject } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { XrplDefinitionsBase } from '../enums';
import { SerializedType, JsonObject } from './serialized-type';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -19,12 +20,14 @@ * Class for Serializing/Deserializing objects

* @param filter optional, denote which field to include in serialized object
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns a STObject object
*/
static from<T extends STObject | JsonObject>(value: T, filter?: (...any: any[]) => boolean): STObject;
static from<T extends STObject | JsonObject>(value: T, filter?: (...any: any[]) => boolean, definitions?: XrplDefinitionsBase): STObject;
/**
* Get the JSON interpretation of this.bytes
*
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns a JSON object
*/
toJSON(): JsonObject;
toJSON(definitions?: XrplDefinitionsBase): JsonObject;
}
export { STObject };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.STObject = void 0;
var enums_1 = require("../enums");
var serialized_type_1 = require("./serialized-type");
var ripple_address_codec_1 = require("ripple-address-codec");
var binary_parser_1 = require("../serdes/binary-parser");
var binary_serializer_1 = require("../serdes/binary-serializer");
var OBJECT_END_MARKER_BYTE = Buffer.from([0xe1]);
var OBJECT_END_MARKER = "ObjectEndMarker";
var ST_OBJECT = "STObject";
var DESTINATION = "Destination";
var ACCOUNT = "Account";
var SOURCE_TAG = "SourceTag";
var DEST_TAG = "DestinationTag";
const enums_1 = require("../enums");
const serialized_type_1 = require("./serialized-type");
const ripple_address_codec_1 = require("ripple-address-codec");
const binary_parser_1 = require("../serdes/binary-parser");
const binary_serializer_1 = require("../serdes/binary-serializer");
const st_array_1 = require("./st-array");
const OBJECT_END_MARKER_BYTE = Uint8Array.from([0xe1]);
const OBJECT_END_MARKER = 'ObjectEndMarker';
const ST_OBJECT = 'STObject';
const DESTINATION = 'Destination';
const ACCOUNT = 'Account';
const SOURCE_TAG = 'SourceTag';
const DEST_TAG = 'DestinationTag';
/**

@@ -36,5 +24,4 @@ * Break down an X-Address into an account and a tag

function handleXAddress(field, xAddress) {
var _a, _b;
var decoded = ripple_address_codec_1.xAddressToClassicAddress(xAddress);
var tagName;
const decoded = (0, ripple_address_codec_1.xAddressToClassicAddress)(xAddress);
let tagName;
if (field === DESTINATION)

@@ -45,5 +32,6 @@ tagName = DEST_TAG;

else if (decoded.tag !== false)
throw new Error(field + " cannot have an associated tag");
throw new Error(`${field} cannot have an associated tag`);
return decoded.tag !== false
? (_a = {}, _a[field] = decoded.classicAddress, _a[tagName] = decoded.tag, _a) : (_b = {}, _b[field] = decoded.classicAddress, _b);
? { [field]: decoded.classicAddress, [tagName]: decoded.tag }
: { [field]: decoded.classicAddress };
}

@@ -59,5 +47,5 @@ /**

if (!(obj1[SOURCE_TAG] === undefined || obj2[SOURCE_TAG] === undefined))
throw new Error("Cannot have Account X-Address and SourceTag");
throw new Error('Cannot have Account X-Address and SourceTag');
if (!(obj1[DEST_TAG] === undefined || obj2[DEST_TAG] === undefined))
throw new Error("Cannot have Destination X-Address and DestinationTag");
throw new Error('Cannot have Destination X-Address and DestinationTag');
}

@@ -67,7 +55,3 @@ /**

*/
var STObject = /** @class */ (function (_super) {
__extends(STObject, _super);
function STObject() {
return _super !== null && _super.apply(this, arguments) || this;
}
class STObject extends serialized_type_1.SerializedType {
/**

@@ -79,11 +63,11 @@ * Construct a STObject from a BinaryParser

*/
STObject.fromParser = function (parser) {
var list = new binary_serializer_1.BytesList();
var bytes = new binary_serializer_1.BinarySerializer(list);
static fromParser(parser) {
const list = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BinarySerializer(list);
while (!parser.end()) {
var field = parser.readField();
const field = parser.readField();
if (field.name === OBJECT_END_MARKER) {
break;
}
var associatedValue = parser.readFieldValue(field);
const associatedValue = parser.readFieldValue(field);
bytes.writeFieldAndValue(field, associatedValue);

@@ -95,3 +79,3 @@ if (field.type.name === ST_OBJECT) {

return new STObject(list.toBytes());
};
}
/**

@@ -102,28 +86,26 @@ * Construct a STObject from a JSON object

* @param filter optional, denote which field to include in serialized object
* @param definitions optional, types and values to use to encode/decode a transaction
* @returns a STObject object
*/
STObject.from = function (value, filter) {
static from(value, filter, definitions = enums_1.DEFAULT_DEFINITIONS) {
if (value instanceof STObject) {
return value;
}
var list = new binary_serializer_1.BytesList();
var bytes = new binary_serializer_1.BinarySerializer(list);
var xAddressDecoded = Object.entries(value).reduce(function (acc, _a) {
var _b;
var key = _a[0], val = _a[1];
var handled = undefined;
if (ripple_address_codec_1.isValidXAddress(val)) {
handled = handleXAddress(key, val);
const list = new binary_serializer_1.BytesList();
const bytes = new binary_serializer_1.BinarySerializer(list);
let isUnlModify = false;
const xAddressDecoded = Object.entries(value).reduce((acc, [key, val]) => {
let handled = undefined;
if (val && (0, ripple_address_codec_1.isValidXAddress)(val.toString())) {
handled = handleXAddress(key, val.toString());
checkForDuplicateTags(handled, value);
}
return Object.assign(acc, handled !== null && handled !== void 0 ? handled : (_b = {}, _b[key] = val, _b));
return Object.assign(acc, handled !== null && handled !== void 0 ? handled : { [key]: val });
}, {});
var sorted = Object.keys(xAddressDecoded)
.map(function (f) { return enums_1.Field[f]; })
.filter(function (f) {
return f !== undefined &&
xAddressDecoded[f.name] !== undefined &&
f.isSerialized;
})
.sort(function (a, b) {
let sorted = Object.keys(xAddressDecoded)
.map((f) => definitions.field[f])
.filter((f) => f !== undefined &&
xAddressDecoded[f.name] !== undefined &&
f.isSerialized)
.sort((a, b) => {
return a.ordinal - b.ordinal;

@@ -134,5 +116,20 @@ });

}
sorted.forEach(function (field) {
var associatedValue = field.associatedType.from(xAddressDecoded[field.name]);
bytes.writeFieldAndValue(field, associatedValue);
sorted.forEach((field) => {
const associatedValue = field.type.name === ST_OBJECT
? this.from(xAddressDecoded[field.name], undefined, definitions)
: field.type.name === 'STArray'
? st_array_1.STArray.from(xAddressDecoded[field.name], definitions)
: field.associatedType.from(xAddressDecoded[field.name]);
if (associatedValue == undefined) {
throw new TypeError(`Unable to interpret "${field.name}: ${xAddressDecoded[field.name]}".`);
}
if (associatedValue.name === 'UNLModify') {
// triggered when the TransactionType field has a value of 'UNLModify'
isUnlModify = true;
}
// true when in the UNLModify pseudotransaction (after the transaction type has been processed) and working with the
// Account field
// The Account field must not be a part of the UNLModify pseudotransaction encoding, due to a bug in rippled
const isUnlModifyWorkaround = field.name == 'Account' && isUnlModify;
bytes.writeFieldAndValue(field, associatedValue, isUnlModifyWorkaround);
if (field.type.name === ST_OBJECT) {

@@ -143,23 +140,25 @@ bytes.put(OBJECT_END_MARKER_BYTE);

return new STObject(list.toBytes());
};
}
/**
* Get the JSON interpretation of this.bytes
*
* @param definitions rippled definitions used to parse the values of transaction types and such.
* Can be customized for sidechains and amendments.
* @returns a JSON object
*/
STObject.prototype.toJSON = function () {
var objectParser = new binary_parser_1.BinaryParser(this.toString());
var accumulator = {};
toJSON(definitions) {
const objectParser = new binary_parser_1.BinaryParser(this.toString(), definitions);
const accumulator = {};
while (!objectParser.end()) {
var field = objectParser.readField();
const field = objectParser.readField();
if (field.name === OBJECT_END_MARKER) {
break;
}
accumulator[field.name] = objectParser.readFieldValue(field).toJSON();
accumulator[field.name] = objectParser
.readFieldValue(field)
.toJSON(definitions);
}
return accumulator;
};
return STObject;
}(serialized_type_1.SerializedType));
}
}
exports.STObject = STObject;
//# sourceMappingURL=st-object.js.map
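
A standalone sketch of the X-Address expansion STObject.from performs before sorting fields, using the same ripple-address-codec call as handleXAddress above (the expandDestination helper is illustrative, not part of the package):

const { xAddressToClassicAddress } = require('ripple-address-codec');

// Split a Destination X-Address into a classic address plus an optional
// DestinationTag, as handleXAddress does for the Destination field.
function expandDestination(xAddress) {
  const decoded = xAddressToClassicAddress(xAddress);
  return decoded.tag !== false
    ? { Destination: decoded.classicAddress, DestinationTag: decoded.tag }
    : { Destination: decoded.classicAddress };
}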

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { UInt } from './uint';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -10,3 +9,3 @@ * Derived UInt class for serializing/deserializing 16 bit UInt

static readonly defaultUInt16: UInt16;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
static fromParser(parser: BinaryParser): UInt;

@@ -13,0 +12,0 @@ /**

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt16 = void 0;
var uint_1 = require("./uint");
const uint_1 = require("./uint");
const utils_1 = require("../utils");
/**
* Derived UInt class for serializing/deserializing 16 bit UInt
*/
var UInt16 = /** @class */ (function (_super) {
__extends(UInt16, _super);
function UInt16(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt16.defaultUInt16.bytes) || this;
class UInt16 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt16.defaultUInt16.bytes);
}
UInt16.fromParser = function (parser) {
static fromParser(parser) {
return new UInt16(parser.read(UInt16.width));
};
}
/**

@@ -34,13 +21,14 @@ * Construct a UInt16 object from a number

*/
UInt16.from = function (val) {
static from(val) {
if (val instanceof UInt16) {
return val;
}
if (typeof val === "number") {
var buf = Buffer.alloc(UInt16.width);
buf.writeUInt16BE(val);
if (typeof val === 'number') {
UInt16.checkUintRange(val, 0, 0xffff);
const buf = new Uint8Array(UInt16.width);
(0, utils_1.writeUInt16BE)(buf, val, 0);
return new UInt16(buf);
}
throw new Error("Can not construct UInt16 with given value");
};
throw new Error('Can not construct UInt16 with given value');
}
/**

@@ -51,10 +39,9 @@ * get the value of a UInt16 object

*/
UInt16.prototype.valueOf = function () {
return this.bytes.readUInt16BE();
};
UInt16.width = 16 / 8; // 2
UInt16.defaultUInt16 = new UInt16(Buffer.alloc(UInt16.width));
return UInt16;
}(uint_1.UInt));
valueOf() {
return parseInt((0, utils_1.readUInt16BE)(this.bytes, 0));
}
}
exports.UInt16 = UInt16;
UInt16.width = 16 / 8; // 2
UInt16.defaultUInt16 = new UInt16(new Uint8Array(UInt16.width));
//# sourceMappingURL=uint-16.js.map

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { UInt } from './uint';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -10,3 +9,3 @@ * Derived UInt class for serializing/deserializing 32 bit UInt

static readonly defaultUInt32: UInt32;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
static fromParser(parser: BinaryParser): UInt;

@@ -13,0 +12,0 @@ /**

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt32 = void 0;
var uint_1 = require("./uint");
const uint_1 = require("./uint");
const utils_1 = require("../utils");
/**
* Derived UInt class for serializing/deserializing 32 bit UInt
*/
var UInt32 = /** @class */ (function (_super) {
__extends(UInt32, _super);
function UInt32(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt32.defaultUInt32.bytes) || this;
class UInt32 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt32.defaultUInt32.bytes);
}
UInt32.fromParser = function (parser) {
static fromParser(parser) {
return new UInt32(parser.read(UInt32.width));
};
}
/**

@@ -34,18 +21,19 @@ * Construct a UInt32 object from a number

*/
UInt32.from = function (val) {
static from(val) {
if (val instanceof UInt32) {
return val;
}
var buf = Buffer.alloc(UInt32.width);
if (typeof val === "string") {
var num = Number.parseInt(val);
buf.writeUInt32BE(num);
const buf = new Uint8Array(UInt32.width);
if (typeof val === 'string') {
const num = Number.parseInt(val);
(0, utils_1.writeUInt32BE)(buf, num, 0);
return new UInt32(buf);
}
if (typeof val === "number") {
buf.writeUInt32BE(val);
if (typeof val === 'number') {
UInt32.checkUintRange(val, 0, 0xffffffff);
(0, utils_1.writeUInt32BE)(buf, val, 0);
return new UInt32(buf);
}
throw new Error("Cannot construct UInt32 from given value");
};
throw new Error('Cannot construct UInt32 from given value');
}
/**

@@ -56,10 +44,9 @@ * get the value of a UInt32 object

*/
UInt32.prototype.valueOf = function () {
return this.bytes.readUInt32BE();
};
UInt32.width = 32 / 8; // 4
UInt32.defaultUInt32 = new UInt32(Buffer.alloc(UInt32.width));
return UInt32;
}(uint_1.UInt));
valueOf() {
return parseInt((0, utils_1.readUInt32BE)(this.bytes, 0), 10);
}
}
exports.UInt32 = UInt32;
UInt32.width = 32 / 8; // 4
UInt32.defaultUInt32 = new UInt32(new Uint8Array(UInt32.width));
//# sourceMappingURL=uint-32.js.map

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { UInt } from './uint';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -10,3 +9,3 @@ * Derived UInt class for serializing/deserializing 64 bit UInt

static readonly defaultUInt64: UInt64;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
static fromParser(parser: BinaryParser): UInt;

@@ -16,3 +15,3 @@ /**

*
* @param val A UInt64, hex-string, bigint, or number
* @param val A UInt64, hex-string, bigInt, or number
* @returns A UInt64 object

@@ -38,4 +37,4 @@ */

*/
toBytes(): Buffer;
toBytes(): Uint8Array;
}
export { UInt64 };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt64 = void 0;
var uint_1 = require("./uint");
var HEX_REGEX = /^[A-F0-9]{16}$/;
const uint_1 = require("./uint");
const utils_1 = require("@xrplf/isomorphic/utils");
const utils_2 = require("../utils");
const HEX_REGEX = /^[a-fA-F0-9]{1,16}$/;
const mask = BigInt(0x00000000ffffffff);
/**
* Derived UInt class for serializing/deserializing 64 bit UInt
*/
var UInt64 = /** @class */ (function (_super) {
__extends(UInt64, _super);
function UInt64(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt64.defaultUInt64.bytes) || this;
class UInt64 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt64.defaultUInt64.bytes);
}
UInt64.fromParser = function (parser) {
static fromParser(parser) {
return new UInt64(parser.read(UInt64.width));
};
}
/**
* Construct a UInt64 object
*
* @param val A UInt64, hex-string, bigint, or number
* @param val A UInt64, hex-string, bigInt, or number
* @returns A UInt64 object
*/
UInt64.from = function (val) {
static from(val) {
if (val instanceof UInt64) {
return val;
}
var buf = Buffer.alloc(UInt64.width);
if (typeof val === "number") {
let buf = new Uint8Array(UInt64.width);
if (typeof val === 'number') {
if (val < 0) {
throw new Error("value must be an unsigned integer");
throw new Error('value must be an unsigned integer');
}
buf.writeBigUInt64BE(BigInt(val));
return new UInt64(buf);
const number = BigInt(val);
const intBuf = [new Uint8Array(4), new Uint8Array(4)];
(0, utils_2.writeUInt32BE)(intBuf[0], Number(number >> BigInt(32)), 0);
(0, utils_2.writeUInt32BE)(intBuf[1], Number(number & BigInt(mask)), 0);
return new UInt64((0, utils_1.concat)(intBuf));
}
if (typeof val === "string") {
if (typeof val === 'string') {
if (!HEX_REGEX.test(val)) {
throw new Error(val + " is not a valid hex-string");
throw new Error(`${val} is not a valid hex-string`);
}
buf = Buffer.from(val, "hex");
const strBuf = val.padStart(16, '0');
buf = (0, utils_1.hexToBytes)(strBuf);
return new UInt64(buf);
}
if (typeof val === "bigint") {
buf.writeBigUInt64BE(val);
return new UInt64(buf);
if (typeof val === 'bigint') {
const intBuf = [new Uint8Array(4), new Uint8Array(4)];
(0, utils_2.writeUInt32BE)(intBuf[0], Number(Number(val >> BigInt(32))), 0);
(0, utils_2.writeUInt32BE)(intBuf[1], Number(val & BigInt(mask)), 0);
return new UInt64((0, utils_1.concat)(intBuf));
}
throw new Error("Cannot construct UInt64 from given value");
};
throw new Error('Cannot construct UInt64 from given value');
}
/**

@@ -66,5 +61,5 @@ * The JSON representation of a UInt64 object

*/
UInt64.prototype.toJSON = function () {
return this.bytes.toString("hex").toUpperCase();
};
toJSON() {
return (0, utils_1.bytesToHex)(this.bytes);
}
/**

@@ -75,5 +70,7 @@ * Get the value of the UInt64

*/
UInt64.prototype.valueOf = function () {
return this.bytes.readBigUInt64BE();
};
valueOf() {
const msb = BigInt((0, utils_2.readUInt32BE)(this.bytes.slice(0, 4), 0));
const lsb = BigInt((0, utils_2.readUInt32BE)(this.bytes.slice(4), 0));
return (msb << BigInt(32)) | lsb;
}
/**

@@ -84,10 +81,9 @@ * Get the bytes representation of the UInt64 object

*/
UInt64.prototype.toBytes = function () {
toBytes() {
return this.bytes;
};
UInt64.width = 64 / 8; // 8
UInt64.defaultUInt64 = new UInt64(Buffer.alloc(UInt64.width));
return UInt64;
}(uint_1.UInt));
}
}
exports.UInt64 = UInt64;
UInt64.width = 64 / 8; // 8
UInt64.defaultUInt64 = new UInt64(new Uint8Array(UInt64.width));
//# sourceMappingURL=uint-64.js.map
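
A minimal standalone illustration of the high/low 32-bit split UInt64.from now uses to encode a bigint big-endian without Buffer (DataView stands in here for the package's writeUInt32BE helper):

// Encode an unsigned 64-bit bigint as 8 big-endian bytes.
function bigintToBytes(value) {
  const mask = BigInt(0xffffffff);
  const out = new Uint8Array(8);
  const view = new DataView(out.buffer);
  view.setUint32(0, Number(value >> BigInt(32))); // high word
  view.setUint32(4, Number(value & mask));        // low word
  return out;
}

bigintToBytes(BigInt('18446744073709551615')); // ff ff ff ff ff ff ff ff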

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { UInt } from "./uint";
import { BinaryParser } from "../serdes/binary-parser";
import { UInt } from './uint';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -10,3 +9,3 @@ * Derived UInt class for serializing/deserializing 8 bit UInt

static readonly defaultUInt8: UInt8;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
static fromParser(parser: BinaryParser): UInt;

@@ -13,0 +12,0 @@ /**

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt8 = void 0;
var uint_1 = require("./uint");
const uint_1 = require("./uint");
const utils_1 = require("@xrplf/isomorphic/utils");
const utils_2 = require("../utils");
/**
* Derived UInt class for serializing/deserializing 8 bit UInt
*/
var UInt8 = /** @class */ (function (_super) {
__extends(UInt8, _super);
function UInt8(bytes) {
return _super.call(this, bytes !== null && bytes !== void 0 ? bytes : UInt8.defaultUInt8.bytes) || this;
class UInt8 extends uint_1.UInt {
constructor(bytes) {
super(bytes !== null && bytes !== void 0 ? bytes : UInt8.defaultUInt8.bytes);
}
UInt8.fromParser = function (parser) {
static fromParser(parser) {
return new UInt8(parser.read(UInt8.width));
};
}
/**

@@ -34,13 +22,14 @@ * Construct a UInt8 object from a number

*/
UInt8.from = function (val) {
static from(val) {
if (val instanceof UInt8) {
return val;
}
if (typeof val === "number") {
var buf = Buffer.alloc(UInt8.width);
buf.writeUInt8(val);
if (typeof val === 'number') {
UInt8.checkUintRange(val, 0, 0xff);
const buf = new Uint8Array(UInt8.width);
(0, utils_2.writeUInt8)(buf, val, 0);
return new UInt8(buf);
}
throw new Error("Cannot construct UInt8 from given value");
};
throw new Error('Cannot construct UInt8 from given value');
}
/**

@@ -51,10 +40,9 @@ * get the value of a UInt8 object

*/
UInt8.prototype.valueOf = function () {
return this.bytes.readUInt8();
};
UInt8.width = 8 / 8; // 1
UInt8.defaultUInt8 = new UInt8(Buffer.alloc(UInt8.width));
return UInt8;
}(uint_1.UInt));
valueOf() {
return parseInt((0, utils_1.bytesToHex)(this.bytes), 16);
}
}
exports.UInt8 = UInt8;
UInt8.width = 8 / 8; // 1
UInt8.defaultUInt8 = new UInt8(new Uint8Array(UInt8.width));
//# sourceMappingURL=uint-8.js.map

@@ -1,9 +0,8 @@

/// <reference types="node" />
import { Comparable } from "./serialized-type";
import { Comparable } from './serialized-type';
/**
* Base class for serializing and deserializing unsigned integers.
*/
declare abstract class UInt extends Comparable {
declare abstract class UInt extends Comparable<UInt | number> {
protected static width: number;
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**

@@ -15,3 +14,3 @@ * Overload of compareTo for Comparable

*/
compareTo(other: UInt): number;
compareTo(other: UInt | number): number;
/**

@@ -29,3 +28,4 @@ * Convert a UInt object to JSON

abstract valueOf(): number | bigint;
static checkUintRange(val: number, min: number, max: number): void;
}
export { UInt };
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.UInt = void 0;
var serialized_type_1 = require("./serialized-type");
const serialized_type_1 = require("./serialized-type");
/**
* Compare numbers and bigints n1 and n2
* Compare numbers and bigInts n1 and n2
*

@@ -31,6 +18,5 @@ * @param n1 First object to compare

*/
var UInt = /** @class */ (function (_super) {
__extends(UInt, _super);
function UInt(bytes) {
return _super.call(this, bytes) || this;
class UInt extends serialized_type_1.Comparable {
constructor(bytes) {
super(bytes);
}

@@ -43,5 +29,5 @@ /**

*/
UInt.prototype.compareTo = function (other) {
compareTo(other) {
return compare(this.valueOf(), other.valueOf());
};
}
/**

@@ -52,9 +38,13 @@ * Convert a UInt object to JSON

*/
UInt.prototype.toJSON = function () {
var val = this.valueOf();
return typeof val === "number" ? val : val.toString();
};
return UInt;
}(serialized_type_1.Comparable));
toJSON() {
const val = this.valueOf();
return typeof val === 'number' ? val : val.toString();
}
static checkUintRange(val, min, max) {
if (val < min || val > max) {
throw new Error(`Invalid ${this.constructor.name}: ${val} must be >= ${min} and <= ${max}`);
}
}
}
exports.UInt = UInt;
//# sourceMappingURL=uint.js.map
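
With UInt now extending Comparable<UInt | number>, the comparison helpers accept plain numbers as well as UInt instances; a small usage sketch (import path assumed):

const { UInt32 } = require('./dist/types/uint-32');

UInt32.from(7).gt(5);              // true, compared against a plain number
UInt32.from(7).eq(UInt32.from(7)); // true, or against another UInt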

@@ -1,4 +0,3 @@

/// <reference types="node" />
import { SerializedType } from "./serialized-type";
import { BinaryParser } from "../serdes/binary-parser";
import { SerializedType } from './serialized-type';
import { BinaryParser } from '../serdes/binary-parser';
/**

@@ -8,3 +7,3 @@ * Class for serializing and deserializing vectors of Hash256

declare class Vector256 extends SerializedType {
constructor(bytes: Buffer);
constructor(bytes: Uint8Array);
/**

@@ -11,0 +10,0 @@ * Construct a Vector256 from a BinaryParser

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Vector256 = void 0;
var serialized_type_1 = require("./serialized-type");
var hash_256_1 = require("./hash-256");
var binary_serializer_1 = require("../serdes/binary-serializer");
const serialized_type_1 = require("./serialized-type");
const hash_256_1 = require("./hash-256");
const binary_serializer_1 = require("../serdes/binary-serializer");
const utils_1 = require("@xrplf/isomorphic/utils");
/**

@@ -24,3 +12,3 @@ * TypeGuard for Array<string>

function isStrings(arg) {
return Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === "string");
return Array.isArray(arg) && (arg.length === 0 || typeof arg[0] === 'string');
}

@@ -30,6 +18,5 @@ /**

*/
var Vector256 = /** @class */ (function (_super) {
__extends(Vector256, _super);
function Vector256(bytes) {
return _super.call(this, bytes) || this;
class Vector256 extends serialized_type_1.SerializedType {
constructor(bytes) {
super(bytes);
}

@@ -43,11 +30,11 @@ /**

*/
Vector256.fromParser = function (parser, hint) {
var bytesList = new binary_serializer_1.BytesList();
var bytes = hint !== null && hint !== void 0 ? hint : parser.size();
var hashes = bytes / 32;
for (var i = 0; i < hashes; i++) {
static fromParser(parser, hint) {
const bytesList = new binary_serializer_1.BytesList();
const bytes = hint !== null && hint !== void 0 ? hint : parser.size();
const hashes = bytes / 32;
for (let i = 0; i < hashes; i++) {
hash_256_1.Hash256.fromParser(parser).toBytesSink(bytesList);
}
return new Vector256(bytesList.toBytes());
};
}
/**

@@ -59,3 +46,3 @@ * Construct a Vector256 object from an array of hashes

*/
Vector256.from = function (value) {
static from(value) {
if (value instanceof Vector256) {

@@ -65,10 +52,10 @@ return value;

if (isStrings(value)) {
var bytesList_1 = new binary_serializer_1.BytesList();
value.forEach(function (hash) {
hash_256_1.Hash256.from(hash).toBytesSink(bytesList_1);
const bytesList = new binary_serializer_1.BytesList();
value.forEach((hash) => {
hash_256_1.Hash256.from(hash).toBytesSink(bytesList);
});
return new Vector256(bytesList_1.toBytes());
return new Vector256(bytesList.toBytes());
}
throw new Error("Cannot construct Vector256 from given value");
};
throw new Error('Cannot construct Vector256 from given value');
}
/**

@@ -79,18 +66,14 @@ * Return an Array of hex-strings represented by this.bytes

*/
Vector256.prototype.toJSON = function () {
toJSON() {
if (this.bytes.byteLength % 32 !== 0) {
throw new Error("Invalid bytes for Vector256");
throw new Error('Invalid bytes for Vector256');
}
var result = [];
for (var i = 0; i < this.bytes.byteLength; i += 32) {
result.push(this.bytes
.slice(i, i + 32)
.toString("hex")
.toUpperCase());
const result = [];
for (let i = 0; i < this.bytes.byteLength; i += 32) {
result.push((0, utils_1.bytesToHex)(this.bytes.slice(i, i + 32)));
}
return result;
};
return Vector256;
}(serialized_type_1.SerializedType));
}
}
exports.Vector256 = Vector256;
//# sourceMappingURL=vector-256.js.map
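
A usage sketch for Vector256.from with an array of 256-bit hex hashes (the 64-character strings below are placeholder values, and the import path is assumed):

const { Vector256 } = require('./dist/types/vector-256');

const hashes = [
  '0'.repeat(64), // placeholder 256-bit hash
  'F'.repeat(64), // placeholder 256-bit hash
];

const vec = Vector256.from(hashes);
vec.toJSON(); // the same hashes back as uppercase hex strings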
{
"name": "ripple-binary-codec",
"version": "1.0.2",
"version": "1.0.3-mpt-beta",
"description": "XRP Ledger binary codec",
"files": [
"dist/*",
"bin/*",
"test/*"
"src/*"
],

@@ -15,44 +14,35 @@ "main": "dist/",

"dependencies": {
"create-hash": "^1.2.0",
"decimal.js": "^10.2.0",
"ripple-address-codec": "^4.1.1"
"@xrplf/isomorphic": "^1.0.1",
"bignumber.js": "^9.0.0",
"ripple-address-codec": "^5.0.0"
},
"devDependencies": {
"@types/jest": "^26.0.7",
"@types/node": "^14.0.10",
"@typescript-eslint/eslint-plugin": "^3.2.0",
"@typescript-eslint/parser": "^3.2.0",
"eslint": "^7.7.0",
"eslint-config-prettier": "^6.11.0",
"eslint-plugin-import": "^2.21.1",
"eslint-plugin-jest": "^23.20.0",
"eslint-plugin-mocha": "^7.0.1",
"eslint-plugin-prettier": "^3.1.3",
"jest": "^26.0.1",
"prettier": "^2.0.4",
"typescript": "^3.9.5"
},
"scripts": {
"compile": "tsc && cp ./src/enums/definitions.json ./dist/enums",
"prepare": "npm run compile && npm test",
"test": "jest",
"build": "tsc --build tsconfig.build.json && copyfiles ./src/enums/definitions.json ./dist/enums/",
"clean": "rm -rf ./dist ./coverage ./test/testCompiledForWeb tsconfig.build.tsbuildinfo",
"prepublishOnly": "npm test",
"test": "npm run build && jest --verbose false --silent=false ./test/*.test.ts",
"test:browser": "npm run build && karma start ./karma.config.js",
"lint": "eslint . --ext .ts --ext .test.js"
},
"keywords": [
"ripple",
"xrp",
"xrp ledger",
"xrpl"
],
"repository": {
"type": "git",
"url": "git://github.com/ripple/ripple-binary-codec.git"
"url": "git@github.com:XRPLF/xrpl.js.git"
},
"bugs": {
"url": "https://github.com/ripple/ripple-binary-codec/issues"
"url": "https://github.com/XRPLF/xrpl.js/issues"
},
"homepage": "https://github.com/ripple/ripple-binary-codec#readme",
"homepage": "https://github.com/XRPLF/xrpl.js/tree/main/packages/ripple-binary-codec#readme",
"license": "ISC",
"readmeFilename": "README.md",
"prettier": {
"semi": true
"prettier": "@xrplf/prettier-config",
"engines": {
"node": ">= 16"
},
"engines": {
"node": ">=10.22.0",
"yarn": "^1.22.4"
}
"gitHead": "0ff8b47616b659385f07b43d508e81233809ca62"
}

@@ -101,8 +101,8 @@ # ripple-binary-codec [![NPM](https://img.shields.io/npm/v/ripple-binary-codec.svg)](https://npmjs.org/package/ripple-binary-codec)

yarn test
npm test
Use `--coverage` to generate and display code coverage information:
yarn test --coverage
npm test --coverage
This tells jest to output code coverage info in the `./coverage` directory, in addition to showing it on the command line.
This tells jest to output code coverage info in the `./coverage` directory, in addition to showing it on the command line.
