tigerbeetle-node - npm Package Compare versions

Comparing version 0.14.157 to 0.14.158

dist/benchmark.js (122 changed lines)

@@ -11,6 +11,3 @@ "use strict";

const IS_TWO_PHASE_TRANSFER = false;
- const IS_RAW_REQUEST = false;
- const PREVIOUS_RAW_REQUEST_RESULT = IS_TWO_PHASE_TRANSFER ? 300000 : 620000;
- const PREVIOUS_RESULT = IS_TWO_PHASE_TRANSFER ? 150000 : 310000;
- const PREVIOUS_BENCHMARK = IS_RAW_REQUEST ? PREVIOUS_RAW_REQUEST_RESULT : PREVIOUS_RESULT;
+ const PREVIOUS_BENCHMARK = IS_TWO_PHASE_TRANSFER ? 150000 : 310000;
const TOLERANCE = 10;
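
Editor's note: TOLERANCE is the percentage the run may deviate from PREVIOUS_BENCHMARK (per the comment in the TypeScript source further down). The comparison itself falls outside this diff; a minimal sketch of such a gate, assuming `result` is the measured transfers per second computed at the end of the file:

// Hypothetical regression gate; the actual check is not shown in this diff.
const minimumAcceptable = PREVIOUS_BENCHMARK * (100 - TOLERANCE) / 100;
if (result < minimumAcceptable) {
    throw new Error(`benchmark regressed: ${result}/s < ${minimumAcceptable}/s`);
}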

@@ -52,117 +49,2 @@ const client = (0, _1.createClient)({

};
- const rawCreateTransfers = async (batch) => {
- return new Promise((resolve, reject) => {
- const callback = (error, results) => {
- if (error) {
- reject(error);
- }
- resolve(results);
- };
- try {
- client.rawRequest(_1.Operation.create_transfers, batch, callback);
- }
- catch (error) {
- reject(error);
- }
- });
- };
- const encodeTransfer = (transfer, offset, output) => {
- (0, assert_1.default)(BigInt((offset + TRANSFER_SIZE)) <= BigInt(output.length), `Transfer ${transfer} exceeds buffer of ${output}!`);
- output.writeBigUInt64LE(transfer.id, offset);
- output.writeBigUInt64LE(transfer.debit_account_id, offset + 16);
- output.writeBigUInt64LE(transfer.credit_account_id, offset + 32);
- output.writeBigUInt64LE(transfer.amount, offset + 48);
- output.writeBigUInt64LE(transfer.pending_id, offset + 64);
- output.writeBigUInt64LE(transfer.user_data_128, offset + 80);
- output.writeBigUInt64LE(transfer.user_data_64, offset + 96);
- output.writeInt32LE(transfer.user_data_32, offset + 104);
- output.writeInt32LE(transfer.timeout, offset + 108);
- output.writeUInt32LE(transfer.ledger, offset + 112);
- output.writeUInt32LE(transfer.code, offset + 116);
- output.writeUInt32LE(transfer.flags, offset + 118);
- output.writeBigUInt64LE(transfer.timestamp, offset + 120);
- };
- const runBenchmarkRawRequest = async () => {
- (0, assert_1.default)(MAX_TRANSFERS % MAX_REQUEST_BATCH_SIZE === 0, "The raw request benchmark requires MAX_TRANSFERS to be a multiple of MAX_REQUEST_BATCH_SIZE");
- console.log(`pre-allocating ${MAX_TRANSFERS} transfers and posts...`);
- const transfers = [];
- const posts = [];
- let count = 0;
- while (count < MAX_TRANSFERS) {
- const transferBatch = Buffer.alloc(MAX_REQUEST_BATCH_SIZE * TRANSFER_SIZE, 0);
- const postTransferBatch = Buffer.alloc(MAX_REQUEST_BATCH_SIZE * TRANSFER_SIZE, 0);
- for (let i = 0; i < MAX_REQUEST_BATCH_SIZE; i++) {
- if (count === MAX_TRANSFERS)
- break;
- count += 1;
- encodeTransfer({
- id: BigInt(count),
- debit_account_id: accountA.id,
- credit_account_id: accountB.id,
- amount: 1n,
- pending_id: 0n,
- user_data_128: 0n,
- user_data_64: 0n,
- user_data_32: 0,
- timeout: IS_TWO_PHASE_TRANSFER ? 2 : 0,
- ledger: 1,
- code: 1,
- flags: IS_TWO_PHASE_TRANSFER ? _1.TransferFlags.pending : 0,
- timestamp: 0n,
- }, i * TRANSFER_SIZE, transferBatch);
- if (IS_TWO_PHASE_TRANSFER) {
- encodeTransfer({
- id: BigInt((MAX_TRANSFERS + count)),
- debit_account_id: accountA.id,
- credit_account_id: accountB.id,
- amount: 1n,
- pending_id: BigInt(count),
- user_data_128: 0n,
- user_data_64: 0n,
- user_data_32: 0,
- timeout: 0,
- ledger: 1,
- code: 1,
- flags: _1.TransferFlags.post_pending_transfer,
- timestamp: 0n,
- }, i * TRANSFER_SIZE, postTransferBatch);
- }
- }
- transfers.push(transferBatch);
- if (IS_TWO_PHASE_TRANSFER)
- posts.push(postTransferBatch);
- if (count % 100)
- console.log(`${Number((count / MAX_TRANSFERS) * 100).toFixed(1)}%`);
- }
- (0, assert_1.default)(count === MAX_TRANSFERS);
- console.log(`starting benchmark. MAX_TRANSFERS=${MAX_TRANSFERS} REQUEST_BATCH_SIZE=${MAX_REQUEST_BATCH_SIZE} NUMBER_OF_BATCHES=${transfers.length}`);
- let maxCreateTransfersLatency = 0;
- let maxCommitTransfersLatency = 0;
- const start = Date.now();
- for (let i = 0; i < transfers.length; i++) {
- const ms1 = Date.now();
- const transferErrors = await rawCreateTransfers(transfers[i]);
- (0, assert_1.default)(transferErrors.length === 0);
- const ms2 = Date.now();
- const createTransferLatency = ms2 - ms1;
- if (createTransferLatency > maxCreateTransfersLatency) {
- maxCreateTransfersLatency = createTransferLatency;
- }
- if (IS_TWO_PHASE_TRANSFER) {
- const commitErrors = await rawCreateTransfers(posts[i]);
- (0, assert_1.default)(commitErrors.length === 0);
- const ms3 = Date.now();
- const commitTransferLatency = ms3 - ms2;
- if (commitTransferLatency > maxCommitTransfersLatency) {
- maxCommitTransfersLatency = commitTransferLatency;
- }
- }
- }
- const ms = Date.now() - start;
- return {
- ms,
- maxCommitTransfersLatency,
- maxCreateTransfersLatency
- };
- };
const runBenchmark = async () => {

@@ -255,3 +137,3 @@ console.log(`pre-allocating ${MAX_TRANSFERS} transfers and posts...`);

(0, assert_1.default)(accountResults[1].debits_posted === 0n);
- const benchmark = IS_RAW_REQUEST ? await runBenchmarkRawRequest() : await runBenchmark();
+ const benchmark = await runBenchmark();
const accounts = await client.lookupAccounts([accountA.id, accountB.id]);

@@ -258,0 +140,0 @@ const result = Math.floor((1000 * MAX_TRANSFERS) / benchmark.ms);
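
Editor's note: this final hunk shows the score calculation, which converts elapsed milliseconds into transfers per second. As a worked example, if MAX_TRANSFERS were 1,000,000 (its value lies outside this diff), a 3,226 ms run would yield Math.floor((1000 * 1000000) / 3226) = 309,981, right at the 310,000/s PREVIOUS_BENCHMARK for single-phase transfers.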

dist/index.d.ts (18 changed lines)

@@ -1,8 +0,3 @@

/// <reference types="node" />
export * from './bindings';
- import { Account, Transfer, CreateAccountsError, CreateTransfersError, Operation } from './bindings';
- export interface InitArgs {
- cluster_id: number;
- replica_addresses: Array<string | number>;
- }
+ import { Account, Transfer, CreateAccountsError, CreateTransfersError } from './bindings';
export declare type Context = object;

@@ -13,3 +8,8 @@ export declare type AccountID = bigint;

export declare type Result = CreateAccountsError | CreateTransfersError | Account | Transfer;
- export declare type ResultCallback = (error: undefined | Error, results: Result[]) => void;
+ export declare type ResultCallback = (error: Error | null, results: Result[] | null) => void;
+ export interface ClientInitArgs {
+ cluster_id: number;
+ concurrency_max?: number;
+ replica_addresses: Array<string | number>;
+ }
export interface Client {

@@ -20,6 +20,4 @@ createAccounts: (batch: Account[]) => Promise<CreateAccountsError[]>;

lookupTransfers: (batch: TransferID[]) => Promise<Transfer[]>;
- request: (operation: Operation, batch: Event[], callback: ResultCallback) => void;
- rawRequest: (operation: Operation, rawBatch: Buffer, callback: ResultCallback) => void;
destroy: () => void;
}
- export declare function createClient(args: InitArgs): Client;
+ export declare function createClient(args: ClientInitArgs): Client;
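
Editor's note: a usage sketch of the new declaration (the cluster id and address below are placeholder values, not defaults). As the dist/index.js diff below shows, createClient falls back with `concurrency_max || 32`, so an explicit 0 is treated as unset:

import { createClient } from 'tigerbeetle-node';

const client = createClient({
    cluster_id: 0,
    concurrency_max: 32, // optional; the implementation defaults to 32
    replica_addresses: ['3000'],
});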

dist/index.js

@@ -16,3 +16,3 @@ "use strict";

const bindings_1 = require("./bindings");
- function getBinding() {
+ const binding = (() => {
const { arch, platform } = process;

@@ -49,148 +49,39 @@ const archMap = {

return require(filename);
- }
- const binding = getBinding();
- let _args = undefined;
- const isSameArgs = (args) => {
- if (typeof _args === 'undefined') {
- return false;
- }
- if (_args.replica_addresses.length !== args.replica_addresses.length) {
- return false;
- }
- let isSameReplicas = true;
- args.replica_addresses.forEach((entry, index) => {
- if (_args?.replica_addresses[index] !== entry) {
- isSameReplicas = false;
- }
- });
- return args.cluster_id === _args.cluster_id && isSameReplicas;
- };
- let _client = undefined;
- let _interval = undefined;
- let _pinged = false;
+ })();
function createClient(args) {
- const duplicateArgs = isSameArgs(args);
- if (!duplicateArgs && typeof _client !== 'undefined') {
- throw new Error('Client has already been initialized with different arguments.');
- }
- if (duplicateArgs && typeof _client !== 'undefined') {
- throw new Error('Client has already been initialized with the same arguments.');
- }
- _args = Object.assign({}, { ...args });
+ const concurrency_max_default = 32;
const context = binding.init({
- ...args,
- replica_addresses: Buffer.from(args.replica_addresses.join(','))
+ cluster_id: args.cluster_id,
+ concurrency: args.concurrency_max || concurrency_max_default,
+ replica_addresses: Buffer.from(args.replica_addresses.join(',')),
});
- const request = (operation, batch, callback) => {
- binding.request(context, operation, batch, callback);
- };
- const rawRequest = (operation, rawBatch, callback) => {
- binding.raw_request(context, operation, rawBatch, callback);
- };
- const createAccounts = async (batch) => {
- if (!_pinged) {
- await new Promise(resolve => {
- setTimeout(() => {
- _pinged = true;
- resolve();
- }, 600);
- });
- }
+ const request = (operation, batch) => {
return new Promise((resolve, reject) => {
- const callback = (error, results) => {
- if (error) {
- reject(error);
- return;
- }
- resolve(results);
- };
try {
- binding.request(context, bindings_1.Operation.create_accounts, batch, callback);
+ binding.submit(context, operation, batch, (error, result) => {
+ if (error) {
+ reject(error);
+ }
+ else if (result) {
+ resolve(result);
+ }
+ else {
+ throw new Error("UB: Binding invoked callback without error or result");
+ }
+ });
}
- catch (error) {
- reject(error);
+ catch (err) {
+ reject(err);
}
});
};
- const createTransfers = async (batch) => {
- if (!_pinged) {
- await new Promise(resolve => {
- setTimeout(() => {
- _pinged = true;
- resolve();
- }, 600);
- });
- }
- return new Promise((resolve, reject) => {
- const callback = (error, results) => {
- if (error) {
- reject(error);
- return;
- }
- resolve(results);
- };
- try {
- binding.request(context, bindings_1.Operation.create_transfers, batch, callback);
- }
- catch (error) {
- reject(error);
- }
- });
+ return {
+ createAccounts(batch) { return request(bindings_1.Operation.create_accounts, batch); },
+ createTransfers(batch) { return request(bindings_1.Operation.create_transfers, batch); },
+ lookupAccounts(batch) { return request(bindings_1.Operation.lookup_accounts, batch); },
+ lookupTransfers(batch) { return request(bindings_1.Operation.lookup_transfers, batch); },
+ destroy() { binding.deinit(context); },
+ };
- const lookupAccounts = async (batch) => {
- return new Promise((resolve, reject) => {
- const callback = (error, results) => {
- if (error) {
- reject(error);
- return;
- }
- resolve(results);
- };
- try {
- binding.request(context, bindings_1.Operation.lookup_accounts, batch, callback);
- }
- catch (error) {
- reject(error);
- }
- });
- };
- const lookupTransfers = async (batch) => {
- return new Promise((resolve, reject) => {
- const callback = (error, results) => {
- if (error) {
- reject(error);
- return;
- }
- resolve(results);
- };
- try {
- binding.request(context, bindings_1.Operation.lookup_transfers, batch, callback);
- }
- catch (error) {
- reject(error);
- }
- });
- };
- const destroy = () => {
- binding.deinit(context);
- if (_interval) {
- clearInterval(_interval);
- }
- _client = undefined;
- };
- _client = {
- createAccounts,
- createTransfers,
- lookupAccounts,
- lookupTransfers,
- request,
- rawRequest,
- destroy
- };
- _interval = setInterval(() => {
- binding.tick(context);
- }, binding.tick_ms);
- return _client;
}
exports.createClient = createClient;
//# sourceMappingURL=index.js.map
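
Editor's note: with the singleton bookkeeping, tick interval, and 600 ms ping delay gone, every public method is now a thin promise wrapper over binding.submit. A minimal usage sketch (assuming a reachable replica; the cluster id, address, and account ids are illustrative):

import { createClient } from 'tigerbeetle-node';

const client = createClient({ cluster_id: 0, replica_addresses: ['3000'] });

// lookupAccounts takes AccountID bigints and resolves with the Account rows
// that exist; a transport or binding failure rejects the promise instead.
client.lookupAccounts([1n, 2n])
    .then((accounts) => console.log(accounts))
    .catch((error) => console.error('lookup failed:', error))
    .finally(() => client.destroy());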
package-lock.json

{
"name": "tigerbeetle-node",
- "version": "0.14.157",
+ "version": "0.14.158",
"lockfileVersion": 1,

@@ -5,0 +5,0 @@ "requires": true,

package.json

{
"name": "tigerbeetle-node",
- "version": "0.14.157",
+ "version": "0.14.158",
"description": "TigerBeetle Node.js client",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

src/benchmark.ts

@@ -14,6 +14,3 @@ import assert from 'assert'

const IS_TWO_PHASE_TRANSFER = false
- const IS_RAW_REQUEST = false
- const PREVIOUS_RAW_REQUEST_RESULT = IS_TWO_PHASE_TRANSFER ? 300000 : 620000
- const PREVIOUS_RESULT = IS_TWO_PHASE_TRANSFER ? 150000 : 310000
- const PREVIOUS_BENCHMARK = IS_RAW_REQUEST ? PREVIOUS_RAW_REQUEST_RESULT : PREVIOUS_RESULT
+ const PREVIOUS_BENCHMARK = IS_TWO_PHASE_TRANSFER ? 150000 : 310000
const TOLERANCE = 10 // percent that the benchmark is allowed to deviate from the previous benchmark

@@ -59,159 +56,2 @@

- // Helper function to promisify the raw_request:
- const rawCreateTransfers = async (batch: Buffer): Promise<CreateTransfersError[]> => {
- return new Promise((resolve, reject) => {
- const callback = (error: undefined | Error, results: CreateTransfersError[]) => {
- if (error) {
- reject(error)
- }
- resolve(results)
- }
- try {
- client.rawRequest(Operation.create_transfers, batch, callback)
- } catch (error) {
- reject(error)
- }
- })
- }
- /**
- * This encoding function is only for this benchmark script.
- *
- * ID_OFFSET = 0 (0 -> 16)
- * DEBIT_ACCOUNT_ID_OFFSET = 0 + 16 = 16 (16 -> 32)
- * CREDIT_ACCOUNT_ID_OFFSET = 16 + 16 = 32 (32 -> 48)
- * AMOUNT_OFFSET = 48 + 16 = 64 (48 -> 64)
- * PENDING_ID_OFFSET = 64 + 16 = 80 (64 -> 80)
- * USER_DATA_128_OFFSET = 80 + 16 = 96 (80 -> 96)
- * USER_DATA_64_OFFSET = 96 + 8 = 104 (96 -> 104)
- * USER_DATA_32_OFFSET = 104 + 4 = 108 (104 -> 108)
- * TIMEOUT_OFFSET = 108 + 4 = 112 (108 -> 112)
- * LEDGER_OFFSET = 112 + 4 = 116 (112 -> 116)
- * CODE_OFFSET = 116 + 2 = 118 (116 -> 118)
- * FLAGS_OFFSET = 118 + 2 = 120 (118 -> 120)
- * TIMESTAMP = 120 + 8 = 128 (120 -> 128)
- */
- const encodeTransfer = (transfer: Transfer, offset: number, output: Buffer): void => {
- assert(BigInt((offset + TRANSFER_SIZE)) <= BigInt(output.length), `Transfer ${transfer} exceeds buffer of ${output}!`)
- output.writeBigUInt64LE(transfer.id, offset)
- output.writeBigUInt64LE(transfer.debit_account_id, offset + 16)
- output.writeBigUInt64LE(transfer.credit_account_id, offset + 32)
- output.writeBigUInt64LE(transfer.amount, offset + 48)
- output.writeBigUInt64LE(transfer.pending_id, offset + 64)
- output.writeBigUInt64LE(transfer.user_data_128, offset + 80)
- output.writeBigUInt64LE(transfer.user_data_64, offset + 96)
- output.writeInt32LE(transfer.user_data_32, offset + 104)
- output.writeInt32LE(transfer.timeout, offset + 108)
- output.writeUInt32LE(transfer.ledger, offset + 112)
- output.writeUInt32LE(transfer.code, offset + 116)
- output.writeUInt32LE(transfer.flags, offset + 118)
- output.writeBigUInt64LE(transfer.timestamp, offset + 120)
- }
- const runBenchmarkRawRequest = async () => {
- assert(
- MAX_TRANSFERS % MAX_REQUEST_BATCH_SIZE === 0,
- "The raw request benchmark requires MAX_TRANSFERS to be a multiple of MAX_REQUEST_BATCH_SIZE"
- )
- console.log(`pre-allocating ${MAX_TRANSFERS} transfers and posts...`)
- const transfers: Buffer[] = []
- const posts: Buffer[] = []
- let count = 0
- while (count < MAX_TRANSFERS) {
- const transferBatch = Buffer.alloc(MAX_REQUEST_BATCH_SIZE * TRANSFER_SIZE, 0)
- const postTransferBatch = Buffer.alloc(MAX_REQUEST_BATCH_SIZE * TRANSFER_SIZE, 0)
- for (let i = 0; i < MAX_REQUEST_BATCH_SIZE; i++) {
- if (count === MAX_TRANSFERS) break
- count += 1
- encodeTransfer(
- {
- id: BigInt(count),
- debit_account_id: accountA.id,
- credit_account_id: accountB.id,
- amount: 1n,
- pending_id: 0n,
- user_data_128: 0n,
- user_data_64: 0n,
- user_data_32: 0,
- timeout: IS_TWO_PHASE_TRANSFER ? 2 : 0,
- ledger: 1,
- code: 1,
- flags: IS_TWO_PHASE_TRANSFER ? TransferFlags.pending : 0,
- timestamp: 0n,
- },
- i * TRANSFER_SIZE,
- transferBatch
- )
- if (IS_TWO_PHASE_TRANSFER) {
- encodeTransfer(
- {
- id: BigInt((MAX_TRANSFERS + count)),
- debit_account_id: accountA.id,
- credit_account_id: accountB.id,
- amount: 1n,
- pending_id: BigInt(count),
- user_data_128: 0n,
- user_data_64: 0n,
- user_data_32: 0,
- timeout: 0,
- ledger: 1,
- code: 1,
- flags: TransferFlags.post_pending_transfer,
- timestamp: 0n,
- },
- i * TRANSFER_SIZE,
- postTransferBatch
- )
- }
- }
- transfers.push(transferBatch)
- if (IS_TWO_PHASE_TRANSFER) posts.push(postTransferBatch)
- if (count % 100) console.log(`${Number((count / MAX_TRANSFERS) * 100).toFixed(1)}%`)
- }
- assert(count === MAX_TRANSFERS)
- console.log(`starting benchmark. MAX_TRANSFERS=${MAX_TRANSFERS} REQUEST_BATCH_SIZE=${MAX_REQUEST_BATCH_SIZE} NUMBER_OF_BATCHES=${transfers.length}`)
- let maxCreateTransfersLatency = 0
- let maxCommitTransfersLatency = 0
- const start = Date.now()
- for (let i = 0; i < transfers.length; i++) {
- const ms1 = Date.now()
- const transferErrors = await rawCreateTransfers(transfers[i])
- assert(transferErrors.length === 0)
- const ms2 = Date.now()
- const createTransferLatency = ms2 - ms1
- if (createTransferLatency > maxCreateTransfersLatency) {
- maxCreateTransfersLatency = createTransferLatency
- }
- if (IS_TWO_PHASE_TRANSFER) {
- const commitErrors = await rawCreateTransfers(posts[i])
- assert(commitErrors.length === 0)
- const ms3 = Date.now()
- const commitTransferLatency = ms3 - ms2
- if (commitTransferLatency > maxCommitTransfersLatency) {
- maxCommitTransfersLatency = commitTransferLatency
- }
- }
- }
- const ms = Date.now() - start
- return {
- ms,
- maxCommitTransfersLatency,
- maxCreateTransfersLatency
- }
- }
const runBenchmark = async () => {

@@ -316,3 +156,3 @@ console.log(`pre-allocating ${MAX_TRANSFERS} transfers and posts...`)

- const benchmark = IS_RAW_REQUEST ? await runBenchmarkRawRequest() : await runBenchmark()
+ const benchmark = await runBenchmark()

@@ -319,0 +159,0 @@ const accounts = await client.lookupAccounts([accountA.id, accountB.id])
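
Editor's note: the removed encodeTransfer serialized each Transfer into a fixed 128-byte little-endian record. The byte ranges in the parentheses of its doc comment match the actual write offsets, and each u128 field (id, amount, and so on) only has its low 64 bits written, which suffices for the benchmark's small values. A sketch of reading fields back, assuming TRANSFER_SIZE is 128 as the 120 -> 128 timestamp range implies:

// Sketch only: decode the i-th transfer's id and amount (low 64 bits of each
// u128) from a raw batch, mirroring encodeTransfer's offsets 0 and 48.
const TRANSFER_SIZE = 128
const transferIdLow = (batch: Buffer, i: number): bigint =>
  batch.readBigUInt64LE(i * TRANSFER_SIZE)
const transferAmountLow = (batch: Buffer, i: number): bigint =>
  batch.readBigUInt64LE(i * TRANSFER_SIZE + 48)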

src/index.ts

@@ -10,3 +10,3 @@ export * from './bindings'

- function getBinding (): Binding {
+ const binding: Binding = (() => {
const { arch, platform } = process

@@ -64,35 +64,29 @@

return require(filename)
+ })()
+ export type Context = object // tb_client
+ export type AccountID = bigint // u128
+ export type TransferID = bigint // u128
+ export type Event = Account | Transfer | AccountID | TransferID
+ export type Result = CreateAccountsError | CreateTransfersError | Account | Transfer
+ export type ResultCallback = (error: Error | null, results: Result[] | null) => void
+ interface BindingInitArgs {
+ cluster_id: number, // u32
+ concurrency: number, // u32
+ replica_addresses: Buffer,
+ }
- const binding = getBinding()
interface Binding {
init: (args: BindingInitArgs) => Context
- request: (context: Context, operation: Operation, batch: Event[], result: ResultCallback) => void
- raw_request: (context: Context, operation: Operation, raw_batch: Buffer, result: ResultCallback) => void
- tick: (context: Context) => void,
+ submit: (context: Context, operation: Operation, batch: Event[], callback: ResultCallback) => void
deinit: (context: Context) => void,
- tick_ms: number
}
- interface BindingInitArgs {
+ export interface ClientInitArgs {
cluster_id: number, // u32
- replica_addresses: Buffer,
- }
- export interface InitArgs {
- cluster_id: number, // u32
+ concurrency_max?: number, // u32
replica_addresses: Array<string | number>,
}
- export type Context = object
- export type AccountID = bigint // u128
- export type TransferID = bigint // u128
- export type Event = Account | Transfer | AccountID | TransferID
- export type Result = CreateAccountsError | CreateTransfersError | Account | Transfer
- // Note: as of #990, the error is always `undefined` here.
- export type ResultCallback = (error: undefined | Error, results: Result[]) => void
export interface Client {

@@ -103,79 +97,27 @@ createAccounts: (batch: Account[]) => Promise<CreateAccountsError[]>

lookupTransfers: (batch: TransferID[]) => Promise<Transfer[]>
- request: (operation: Operation, batch: Event[], callback: ResultCallback) => void
- rawRequest: (operation: Operation, rawBatch: Buffer, callback: ResultCallback) => void
destroy: () => void
}
- let _args: InitArgs | undefined = undefined
- const isSameArgs = (args: InitArgs): boolean => {
- if (typeof _args === 'undefined') {
- return false
- }
- if (_args.replica_addresses.length !== args.replica_addresses.length) {
- return false
- }
- let isSameReplicas = true
- args.replica_addresses.forEach((entry, index) => {
- if (_args?.replica_addresses[index] !== entry) {
- isSameReplicas = false
- }
- })
- return args.cluster_id === _args.cluster_id && isSameReplicas
- }
- let _client: Client | undefined = undefined
- let _interval: NodeJS.Timeout | undefined = undefined
- // Here to wait until `ping` is sent to server so that connection is registered - temporary till client table and sessions are implemented.
- let _pinged = false
- // TODO: allow creation of clients if the arguments are different. Will require changes in node.zig as well.
- export function createClient (args: InitArgs): Client {
- const duplicateArgs = isSameArgs(args)
- if (!duplicateArgs && typeof _client !== 'undefined'){
- throw new Error('Client has already been initialized with different arguments.')
- }
- if (duplicateArgs && typeof _client !== 'undefined'){
- throw new Error('Client has already been initialized with the same arguments.')
- }
- _args = Object.assign({}, { ...args })
+ export function createClient (args: ClientInitArgs): Client {
+ const concurrency_max_default = 32 // arbitrary
const context = binding.init({
- ...args,
- replica_addresses: Buffer.from(args.replica_addresses.join(','))
+ cluster_id: args.cluster_id,
+ concurrency: args.concurrency_max || concurrency_max_default,
+ replica_addresses: Buffer.from(args.replica_addresses.join(',')),
})
- const request = (operation: Operation, batch: Event[], callback: ResultCallback) => {
- binding.request(context, operation, batch, callback)
- }
- const rawRequest = (operation: Operation, rawBatch: Buffer, callback: ResultCallback) => {
- binding.raw_request(context, operation, rawBatch, callback)
- }
- const createAccounts = async (batch: Account[]): Promise<CreateAccountsError[]> => {
- // Here to wait until `ping` is sent to server so that connection is registered - temporary till client table and sessions are implemented.
- if (!_pinged) {
- await new Promise<void>(resolve => {
- setTimeout(() => {
- _pinged = true
- resolve()
- }, 600)
- })
- }
+ const request = <T extends Result>(operation: Operation, batch: Event[]): Promise<T[]> => {
return new Promise((resolve, reject) => {
- const callback = (error: undefined | Error, results: CreateAccountsError[]) => {
- if (error) {
- reject(error)
- return
- }
- resolve(results)
- }
try {
- binding.request(context, Operation.create_accounts, batch, callback)
- } catch (error) {
- reject(error)
+ binding.submit(context, operation, batch, (error, result) => {
+ if (error) {
+ reject(error)
+ } else if (result) {
+ resolve(result as T[])
+ } else {
+ throw new Error("UB: Binding invoked callback without error or result")
+ }
+ })
+ } catch (err) {
+ reject(err)
}

@@ -185,88 +127,9 @@ })

- const createTransfers = async (batch: Transfer[]): Promise<CreateTransfersError[]> => {
- // Here to wait until `ping` is sent to server so that connection is registered - temporary till client table and sessions are implemented.
- if (!_pinged) {
- await new Promise<void>(resolve => {
- setTimeout(() => {
- _pinged = true
- resolve()
- }, 600)
- })
- }
- return new Promise((resolve, reject) => {
- const callback = (error: undefined | Error, results: CreateTransfersError[]) => {
- if (error) {
- reject(error)
- return
- }
- resolve(results)
- }
- try {
- binding.request(context, Operation.create_transfers, batch, callback)
- } catch (error) {
- reject(error)
- }
- })
+ return {
+ createAccounts(batch) { return request(Operation.create_accounts, batch) },
+ createTransfers(batch) { return request(Operation.create_transfers, batch) },
+ lookupAccounts(batch) { return request(Operation.lookup_accounts, batch) },
+ lookupTransfers(batch) { return request(Operation.lookup_transfers, batch) },
+ destroy() { binding.deinit(context) },
+ }
- const lookupAccounts = async (batch: AccountID[]): Promise<Account[]> => {
- return new Promise((resolve, reject) => {
- const callback = (error: undefined | Error, results: Account[]) => {
- if (error) {
- reject(error)
- return
- }
- resolve(results)
- }
- try {
- binding.request(context, Operation.lookup_accounts, batch, callback)
- } catch (error) {
- reject(error)
- }
- })
- }
- const lookupTransfers = async (batch: TransferID[]): Promise<Transfer[]> => {
- return new Promise((resolve, reject) => {
- const callback = (error: undefined | Error, results: Transfer[]) => {
- if (error) {
- reject(error)
- return
- }
- resolve(results)
- }
- try {
- binding.request(context, Operation.lookup_transfers, batch, callback)
- } catch (error) {
- reject(error)
- }
- })
- }
- const destroy = (): void => {
- binding.deinit(context)
- if (_interval){
- clearInterval(_interval)
- }
- _client = undefined
- }
- _client = {
- createAccounts,
- createTransfers,
- lookupAccounts,
- lookupTransfers,
- request,
- rawRequest,
- destroy
- }
- _interval = setInterval(() => {
- binding.tick(context)
- }, binding.tick_ms)
- return _client
}
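
Editor's note: the two-phase flow that the removed raw-request path benchmarked remains available through the regular API. A sketch using the same field values as the benchmark (the ids, account ids, and address are illustrative):

import { createClient, Transfer, TransferFlags } from 'tigerbeetle-node'

async function main (): Promise<void> {
  const client = createClient({ cluster_id: 0, replica_addresses: ['3000'] })
  // Phase one: a pending transfer that reserves the amount for up to 2 seconds.
  const pending: Transfer = {
    id: 1n,
    debit_account_id: 1000n,
    credit_account_id: 2000n,
    amount: 1n,
    pending_id: 0n,
    user_data_128: 0n,
    user_data_64: 0n,
    user_data_32: 0,
    timeout: 2,
    ledger: 1,
    code: 1,
    flags: TransferFlags.pending,
    timestamp: 0n,
  }
  // Phase two: post the pending transfer by referencing its id.
  const post: Transfer = {
    ...pending,
    id: 2n,
    pending_id: pending.id,
    timeout: 0,
    flags: TransferFlags.post_pending_transfer,
  }
  const errors = [
    ...await client.createTransfers([pending]),
    ...await client.createTransfers([post]),
  ]
  console.log(errors) // empty on success
  client.destroy()
}

main().catch(console.error)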

Sorry, the diffs of the remaining 11 files are not supported yet.
