@helium/spl-utils
Advanced tools
Comparing version 0.0.26 to 0.0.27
@@ -11,5 +11,21 @@ "use strict"; | ||
}; | ||
// tsc emit helper: downleveled object-rest destructuring (`const { a, ...rest } = obj`).
// Copies every own enumerable property of `s` whose key is NOT listed in `e`
// onto a fresh object, including symbol-keyed properties when supported.
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        // Also copy enumerable symbol keys not excluded by `e`.
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
// tsc emit helper: wraps a plain CommonJS module so `import x from "mod"`
// resolves to the module itself under a `default` key when it lacks __esModule.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.bufferToTransaction = exports.stringToTransaction = exports.sendAndConfirmWithRetry = exports.awaitTransactionSignatureConfirmation = exports.executeBig = exports.execute = exports.sendMultipleInstructions = exports.sendInstructions = void 0; | ||
exports.bulkSendRawTransactions = exports.bulkSendTransactions = exports.bufferToTransaction = exports.stringToTransaction = exports.sendAndConfirmWithRetry = exports.awaitTransactionSignatureConfirmation = exports.executeBig = exports.execute = exports.sendMultipleInstructions = exports.sendInstructions = void 0; | ||
const web3_js_1 = require("@solana/web3.js"); | ||
const accountFetchCache_1 = require("./accountFetchCache"); | ||
const bs58_1 = __importDefault(require("bs58")); | ||
const anchorError_1 = require("./anchorError"); | ||
@@ -36,3 +52,3 @@ function sleep(ms) { | ||
let tx = new web3_js_1.Transaction(); | ||
tx.recentBlockhash = (yield provider.connection.getRecentBlockhash()).blockhash; | ||
tx.recentBlockhash = (yield provider.connection.getLatestBlockhash()).blockhash; | ||
tx.feePayer = payer || provider.wallet.publicKey; | ||
@@ -66,3 +82,3 @@ tx.add(...instructions); | ||
return __awaiter(this, void 0, void 0, function* () { | ||
const recentBlockhash = (yield provider.connection.getRecentBlockhash("confirmed")).blockhash; | ||
const recentBlockhash = (yield provider.connection.getLatestBlockhash("confirmed")).blockhash; | ||
const ixAndSigners = instructionGroups | ||
@@ -149,2 +165,3 @@ .map((instructions, i) => { | ||
} | ||
const SEND_TRANSACTION_INTERVAL = 10; | ||
const awaitTransactionSignatureConfirmation = (txid, timeout, connection, commitment = "recent", queryStatus = false) => __awaiter(void 0, void 0, void 0, function* () { | ||
@@ -326,2 +343,132 @@ let done = false; | ||
exports.bufferToTransaction = bufferToTransaction; | ||
// Run `input`, retrying on failure up to `tries` total attempts.
// Resolves with the first successful result; rejects if every attempt fails.
function withRetries(tries, input) {
    return __awaiter(this, void 0, void 0, function* () {
        let lastError;
        for (let i = 0; i < tries; i++) {
            try {
                return yield input();
            }
            catch (e) {
                lastError = e;
                // Only log "Retrying" when another attempt will actually happen.
                if (i < tries - 1) {
                    console.log(`Retrying ${i}...`, e);
                }
            }
        }
        // Surface the underlying failure instead of swallowing it.
        throw new Error(`Failed after ${tries} retries: ${lastError}`);
    });
}
const TX_BATCH_SIZE = 200;
// Sign and send `txs` in batches of TX_BATCH_SIZE, re-signing with a fresh
// blockhash whenever some transactions fail to confirm before it expires.
// Returns the confirmed signatures (base58). `onProgress` (optional) receives
// cumulative progress updates; `triesRemaining` bounds how many blockhashes
// are tried before giving up.
function bulkSendTransactions(provider, txs, onProgress, triesRemaining = 5 // Number of blockhashes to try resending txs with before giving up
) {
    return __awaiter(this, void 0, void 0, function* () {
        // Remember the configured budget so the failure message is accurate.
        const totalTries = triesRemaining;
        let ret = [];
        // attempt to chunk by blockhash bounds (so signing doesn't take too long)
        for (let chunk of (0, accountFetchCache_1.chunks)(txs, TX_BATCH_SIZE)) {
            const thisRet = [];
            // Continually send in bulk while resetting blockhash until we send them all
            while (true) {
                const recentBlockhash = yield withRetries(5, () => provider.connection.getLatestBlockhash("confirmed"));
                const signedTxs = yield Promise.all(chunk.map((tx) => __awaiter(this, void 0, void 0, function* () {
                    tx.recentBlockhash = recentBlockhash.blockhash;
                    // @ts-ignore
                    return yield provider.wallet.signTransaction(tx);
                })));
                // Pair each original transaction with its base58 signature so
                // unconfirmed ones can be matched up and resent below.
                const txsWithSigs = signedTxs.map((tx, index) => {
                    return {
                        transaction: chunk[index],
                        sig: bs58_1.default.encode(tx.signatures[0].signature),
                    };
                });
                const confirmedTxs = yield bulkSendRawTransactions(provider.connection, signedTxs.map((s) => s.serialize()), (_a) => {
                    var { totalProgress } = _a, rest = __rest(_a, ["totalProgress"]);
                    return onProgress &&
                        onProgress(Object.assign(Object.assign({}, rest), { totalProgress: totalProgress + ret.length }));
                }, recentBlockhash.lastValidBlockHeight);
                thisRet.push(...confirmedTxs);
                if (confirmedTxs.length === signedTxs.length) {
                    break;
                }
                // Resend only the transactions whose signatures never confirmed.
                const retSet = new Set(thisRet);
                chunk = txsWithSigs
                    .filter(({ sig }) => !retSet.has(sig))
                    .map(({ transaction }) => transaction);
                triesRemaining--;
                if (triesRemaining <= 0) {
                    // Report the configured attempt budget, not the exhausted counter
                    // (which is always <= 0 here).
                    throw new Error(`Failed to submit all txs after ${totalTries} blockhashes expired`);
                }
            }
            ret.push(...thisRet);
        }
        return ret;
    });
}
exports.bulkSendTransactions = bulkSendTransactions;
// Returns the list of successfully sent txids.
// NOTE: The returned signatures are ordered by confirmation, not by the order
// they are passed in. Some txns may never land at all if
// `lastValidBlockHeight` passes first (blockhash expiration).
function bulkSendRawTransactions(connection, txs, onProgress, lastValidBlockHeight) {
    return __awaiter(this, void 0, void 0, function* () {
        const txBatchSize = TX_BATCH_SIZE;
        let totalProgress = 0;
        const ret = [];
        if (!lastValidBlockHeight) {
            const blockhash = yield withRetries(5, () => connection.getLatestBlockhash("confirmed"));
            lastValidBlockHeight = blockhash.lastValidBlockHeight;
        }
        for (let chunk of (0, accountFetchCache_1.chunks)(txs, txBatchSize)) {
            let currentBatchProgress = 0;
            // Track only this chunk's transactions (was `txs.length`, which never
            // reached zero when there was more than one chunk).
            let pendingCount = chunk.length;
            let txids = [];
            let lastRetry = 0;
            while (pendingCount > 0) {
                // Give up once the blockhash can no longer be accepted on chain.
                if ((yield withRetries(5, () => connection.getBlockHeight())) >
                    lastValidBlockHeight) {
                    return ret;
                }
                // only resend txs every 4s
                if (lastRetry < new Date().valueOf() - 4 * 1000) {
                    lastRetry = new Date().valueOf();
                    txids = [];
                    for (const tx of chunk) {
                        const txid = yield connection.sendRawTransaction(tx, {
                            skipPreflight: true,
                        });
                        txids.push(txid);
                    }
                }
                const statuses = yield getAllTxns(connection, txids);
                const completed = statuses.filter((status) => status !== null);
                totalProgress += completed.length;
                currentBatchProgress += completed.length;
                onProgress &&
                    onProgress({
                        totalProgress: totalProgress,
                        currentBatchProgress: currentBatchProgress,
                        currentBatchSize: txBatchSize,
                    });
                // `completed` holds only non-null statuses; surface any on-chain errors.
                const failures = completed
                    .map((status) => { var _a; return (_a = status.meta) === null || _a === void 0 ? void 0 : _a.err; })
                    .filter(truthy);
                if (failures.length > 0) {
                    console.error(failures);
                    throw new Error("Failed to run txs");
                }
                pendingCount -= completed.length;
                // `statuses` is aligned with `txids`, so collect confirmed ids by that
                // index — indexing by position in the filtered `completed` array (as
                // before) returned the wrong txids whenever a null preceded a hit.
                ret.push(...txids.filter((_, index) => statuses[index] !== null));
                // Keep `chunk` and `txids` aligned so the next status poll matches up.
                chunk = chunk.filter((_, index) => statuses[index] === null);
                txids = txids.filter((_, index) => statuses[index] === null);
                if (pendingCount > 0) {
                    yield sleep(1000); // Wait one second before querying again
                }
            }
        }
        return ret;
    });
}
exports.bulkSendRawTransactions = bulkSendRawTransactions;
const MAX_GET_SIGNATURE_STATUSES_QUERY_ITEMS = 200;
// Fetch confirmed transactions for every signature, batching the requests to
// stay under the RPC query-size limit. Result order matches the input order.
function getAllTxns(connection, txids) {
    return __awaiter(this, void 0, void 0, function* () {
        const batches = (0, accountFetchCache_1.chunks)(txids, MAX_GET_SIGNATURE_STATUSES_QUERY_ITEMS);
        const responses = yield Promise.all(batches.map((batch) => connection.getTransactions(batch, "confirmed")));
        return responses.flat();
    });
}
//# sourceMappingURL=transaction.js.map |
import { Transaction, } from "@solana/web3.js"; | ||
import { chunks } from "./accountFetchCache"; | ||
import bs58 from "bs58"; | ||
import { ProgramError } from "./anchorError"; | ||
@@ -18,3 +20,3 @@ async function sleep(ms) { | ||
let tx = new Transaction(); | ||
tx.recentBlockhash = (await provider.connection.getRecentBlockhash()).blockhash; | ||
tx.recentBlockhash = (await provider.connection.getLatestBlockhash()).blockhash; | ||
tx.feePayer = payer || provider.wallet.publicKey; | ||
@@ -45,3 +47,3 @@ tx.add(...instructions); | ||
export async function sendMultipleInstructions(provider, instructionGroups, signerGroups, payer, finality = "confirmed", idlErrors = new Map()) { | ||
const recentBlockhash = (await provider.connection.getRecentBlockhash("confirmed")).blockhash; | ||
const recentBlockhash = (await provider.connection.getLatestBlockhash("confirmed")).blockhash; | ||
const ixAndSigners = instructionGroups | ||
@@ -121,2 +123,3 @@ .map((instructions, i) => { | ||
} | ||
const SEND_TRANSACTION_INTERVAL = 10; | ||
export const awaitTransactionSignatureConfirmation = async (txid, timeout, connection, commitment = "recent", queryStatus = false) => { | ||
@@ -289,2 +292,119 @@ let done = false; | ||
} | ||
// Run `input`, retrying on failure up to `tries` total attempts.
// Resolves with the first successful result; rejects if every attempt fails.
async function withRetries(tries, input) {
    let lastError;
    for (let i = 0; i < tries; i++) {
        try {
            return await input();
        }
        catch (e) {
            lastError = e;
            // Only log "Retrying" when another attempt will actually happen.
            if (i < tries - 1) {
                console.log(`Retrying ${i}...`, e);
            }
        }
    }
    // Surface the underlying failure instead of swallowing it.
    throw new Error(`Failed after ${tries} retries: ${lastError}`);
}
const TX_BATCH_SIZE = 200;
// Sign and send `txs` in batches of TX_BATCH_SIZE, re-signing with a fresh
// blockhash whenever some transactions fail to confirm before it expires.
// Returns the confirmed signatures (base58). `onProgress` (optional) receives
// cumulative progress updates; `triesRemaining` bounds how many blockhashes
// are tried before giving up.
export async function bulkSendTransactions(provider, txs, onProgress, triesRemaining = 5 // Number of blockhashes to try resending txs with before giving up
) {
    // Remember the configured budget so the failure message is accurate.
    const totalTries = triesRemaining;
    let ret = [];
    // attempt to chunk by blockhash bounds (so signing doesn't take too long)
    for (let chunk of chunks(txs, TX_BATCH_SIZE)) {
        const thisRet = [];
        // Continually send in bulk while resetting blockhash until we send them all
        while (true) {
            const recentBlockhash = await withRetries(5, () => provider.connection.getLatestBlockhash("confirmed"));
            const signedTxs = await Promise.all(chunk.map(async (tx) => {
                tx.recentBlockhash = recentBlockhash.blockhash;
                // @ts-ignore
                return await provider.wallet.signTransaction(tx);
            }));
            // Pair each original transaction with its base58 signature so
            // unconfirmed ones can be matched up and resent below.
            const txsWithSigs = signedTxs.map((tx, index) => {
                return {
                    transaction: chunk[index],
                    sig: bs58.encode(tx.signatures[0].signature),
                };
            });
            const confirmedTxs = await bulkSendRawTransactions(provider.connection, signedTxs.map((s) => s.serialize()), ({ totalProgress, ...rest }) => onProgress &&
                onProgress({ ...rest, totalProgress: totalProgress + ret.length }), recentBlockhash.lastValidBlockHeight);
            thisRet.push(...confirmedTxs);
            if (confirmedTxs.length === signedTxs.length) {
                break;
            }
            // Resend only the transactions whose signatures never confirmed.
            const retSet = new Set(thisRet);
            chunk = txsWithSigs
                .filter(({ sig }) => !retSet.has(sig))
                .map(({ transaction }) => transaction);
            triesRemaining--;
            if (triesRemaining <= 0) {
                // Report the configured attempt budget, not the exhausted counter
                // (which is always <= 0 here).
                throw new Error(`Failed to submit all txs after ${totalTries} blockhashes expired`);
            }
        }
        ret.push(...thisRet);
    }
    return ret;
}
// Returns the list of successfully sent txids.
// NOTE: The returned signatures are ordered by confirmation, not by the order
// they are passed in. Some txns may never land at all if
// `lastValidBlockHeight` passes first (blockhash expiration).
export async function bulkSendRawTransactions(connection, txs, onProgress, lastValidBlockHeight) {
    const txBatchSize = TX_BATCH_SIZE;
    let totalProgress = 0;
    const ret = [];
    if (!lastValidBlockHeight) {
        const blockhash = await withRetries(5, () => connection.getLatestBlockhash("confirmed"));
        lastValidBlockHeight = blockhash.lastValidBlockHeight;
    }
    for (let chunk of chunks(txs, txBatchSize)) {
        let currentBatchProgress = 0;
        // Track only this chunk's transactions (was `txs.length`, which never
        // reached zero when there was more than one chunk).
        let pendingCount = chunk.length;
        let txids = [];
        let lastRetry = 0;
        while (pendingCount > 0) {
            // Give up once the blockhash can no longer be accepted on chain.
            if ((await withRetries(5, () => connection.getBlockHeight())) >
                lastValidBlockHeight) {
                return ret;
            }
            // only resend txs every 4s
            if (lastRetry < new Date().valueOf() - 4 * 1000) {
                lastRetry = new Date().valueOf();
                txids = [];
                for (const tx of chunk) {
                    const txid = await connection.sendRawTransaction(tx, {
                        skipPreflight: true,
                    });
                    txids.push(txid);
                }
            }
            const statuses = await getAllTxns(connection, txids);
            const completed = statuses.filter((status) => status !== null);
            totalProgress += completed.length;
            currentBatchProgress += completed.length;
            onProgress &&
                onProgress({
                    totalProgress: totalProgress,
                    currentBatchProgress: currentBatchProgress,
                    currentBatchSize: txBatchSize,
                });
            // `completed` holds only non-null statuses; surface any on-chain errors.
            const failures = completed
                .map((status) => status.meta?.err)
                .filter(truthy);
            if (failures.length > 0) {
                console.error(failures);
                throw new Error("Failed to run txs");
            }
            pendingCount -= completed.length;
            // `statuses` is aligned with `txids`, so collect confirmed ids by that
            // index — indexing by position in the filtered `completed` array (as
            // before) returned the wrong txids whenever a null preceded a hit.
            ret.push(...txids.filter((_, index) => statuses[index] !== null));
            // Keep `chunk` and `txids` aligned so the next status poll matches up.
            chunk = chunk.filter((_, index) => statuses[index] === null);
            txids = txids.filter((_, index) => statuses[index] === null);
            if (pendingCount > 0) {
                await sleep(1000); // Wait one second before querying again
            }
        }
    }
    return ret;
}
const MAX_GET_SIGNATURE_STATUSES_QUERY_ITEMS = 200;
// Fetch confirmed transactions for every signature, batching the requests to
// stay under the RPC query-size limit. Result order matches the input order.
async function getAllTxns(connection, txids) {
    const batches = chunks(txids, MAX_GET_SIGNATURE_STATUSES_QUERY_ITEMS);
    const responses = await Promise.all(batches.map((batch) => connection.getTransactions(batch, "confirmed")));
    return responses.flat();
}
//# sourceMappingURL=transaction.js.map |
/// <reference types="node" /> | ||
import { AnchorProvider, Program } from "@coral-xyz/anchor"; | ||
import { AnchorProvider, Program, Provider } from "@coral-xyz/anchor"; | ||
import { Commitment, Connection, Finality, PublicKey, SendOptions, SignatureStatus, Signer, Transaction, TransactionInstruction, TransactionSignature } from "@solana/web3.js"; | ||
@@ -28,2 +28,10 @@ export interface InstructionResult<A> { | ||
export declare function bufferToTransaction(solanaTransaction: Buffer): Transaction; | ||
type Status = { | ||
totalProgress: number; | ||
currentBatchProgress: number; | ||
currentBatchSize: number; | ||
}; | ||
export declare function bulkSendTransactions(provider: Provider, txs: Transaction[], onProgress?: (status: Status) => void, triesRemaining?: number): Promise<string[]>; | ||
export declare function bulkSendRawTransactions(connection: Connection, txs: Buffer[], onProgress?: (status: Status) => void, lastValidBlockHeight?: number): Promise<string[]>; | ||
export {}; | ||
//# sourceMappingURL=transaction.d.ts.map |
{ | ||
"name": "@helium/spl-utils", | ||
"version": "0.0.26", | ||
"version": "0.0.27", | ||
"description": "Utils shared across spl suite", | ||
@@ -40,3 +40,3 @@ "publishConfig": { | ||
"borsh": "^0.7.0", | ||
"bs58": "^4.0.1" | ||
"bs58": "^5.0.0" | ||
}, | ||
@@ -49,3 +49,3 @@ "devDependencies": { | ||
}, | ||
"gitHead": "b8adb8d496838387169c5a429ecd8399d143e082" | ||
"gitHead": "821b0614352a829783816ad38bb7f308e19efb7f" | ||
} |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
505090
118
5211
2
Updatedbs58@^5.0.0