arweave - npm Package Compare versions

Comparing version 1.7.1 to 1.8.0

node/chunks.d.ts

node/common.d.ts

@@ -11,2 +11,3 @@ import Ar from "./ar";

import Silo from "./silo";
import Chunks from "./chunks";
export interface Config {

@@ -23,5 +24,5 @@ api: ApiConfig;

quantity: string;
data: string | Uint8Array;
data: string | Uint8Array | ArrayBuffer;
data_size: string;
data_root?: string;
data_root: string;
reward: string;

@@ -36,2 +37,3 @@ }

silo: Silo;
chunks: Chunks;
static init: (apiConfig: ApiConfig) => Arweave;

@@ -38,0 +40,0 @@ static crypto: CryptoInterface;

node/common.js

@@ -9,5 +9,5 @@ "use strict";

const transaction_1 = require("./lib/transaction");
const Merkle = require("./lib/merkle");
const ArweaveUtils = require("./lib/utils");
const silo_1 = require("./silo");
const chunks_1 = require("./chunks");
class Arweave {

@@ -21,2 +21,3 @@ constructor(apiConfig) {

this.ar = new ar_1.default();
this.chunks = new chunks_1.default(this.api);
}

@@ -53,4 +54,7 @@ /** @deprecated */

}
if (attributes.data instanceof ArrayBuffer) {
attributes.data = new Uint8Array(attributes.data);
}
if (attributes.data && !(attributes.data instanceof Uint8Array)) {
throw new Error("Expected data to be a string or Uint8Array");
throw new Error("Expected data to be a string, Uint8Array or ArrayBuffer");
}

@@ -61,8 +65,5 @@ if (attributes.reward == undefined) {

}
if (attributes.data) {
const rootHash = await Merkle.computeRootHash(attributes.data);
transaction.data_size = attributes.data.byteLength.toString();
transaction.data_root = ArweaveUtils.bufferTob64Url(rootHash);
transaction.data = ArweaveUtils.bufferTob64Url(attributes.data);
}
transaction.data_root = '';
transaction.data_size = attributes.data ? attributes.data.byteLength.toString() : '0';
transaction.data = attributes.data || new Uint8Array(0);
return new transaction_1.default(transaction);

@@ -69,0 +70,0 @@ }
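Taken together, these `common.js` changes mean `createTransaction` now accepts an `ArrayBuffer` (normalized to a `Uint8Array`) and no longer computes `data_root` eagerly; that work is deferred until the transaction is chunked. A minimal sketch of the new call shape, assuming the usual `Arweave.init` setup and a `key` from `wallets.generate()`:

```js
const Arweave = require('arweave/node');

const arweave = Arweave.init({ host: 'arweave.net', port: 443, protocol: 'https' });

async function create(key) {
  // An ArrayBuffer (e.g. from fetch() or a FileReader) is now accepted directly.
  const data = new ArrayBuffer(1024);
  const tx = await arweave.createTransaction({ data }, key);
  // tx.data is a Uint8Array; data_root stays '' until the
  // transaction is chunked during signing.
  return tx;
}
```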

node/lib/error.d.ts

@@ -17,1 +17,10 @@ import { AxiosResponse } from "axios";

}
declare type AxiosResponseLite = {
status: number;
statusText?: string;
data: {
error: string;
} | any;
};
export declare const getError: (resp: AxiosResponseLite) => any;
export {};

node/lib/error.js

@@ -19,2 +19,9 @@ "use strict";

exports.default = ArweaveError;
// Safely get error string
// from an axios response, falling back to
// resp.data, statusText or 'unknown'.
exports.getError = (resp) => resp.data ?
(resp.data.error || resp.data)
:
(resp.statusText || 'unknown');
//# sourceMappingURL=error.js.map
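The new `getError` helper is a small total function over partial response shapes. Roughly (hypothetical inputs, and the module path is assumed from the surrounding diff):

```js
const { getError } = require('arweave/node/lib/error');

getError({ status: 400, data: { error: 'tx_fields_invalid' } }); // -> 'tx_fields_invalid'
getError({ status: 400, data: 'Bad Request' });                  // -> 'Bad Request'
getError({ status: 502, statusText: 'Bad Gateway' });            // -> 'Bad Gateway'
getError({ status: 0 });                                         // -> 'unknown'
```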

node/lib/merkle.d.ts

@@ -0,1 +1,84 @@

export interface Chunk {
dataHash: Uint8Array;
minByteRange: number;
maxByteRange: number;
}
interface BranchNode {
readonly id: Uint8Array;
readonly type: "branch";
readonly byteRange: number;
readonly maxByteRange: number;
readonly leftChild?: MerkelNode;
readonly rightChild?: MerkelNode;
}
interface LeafNode {
readonly id: Uint8Array;
readonly dataHash: Uint8Array;
readonly type: "leaf";
readonly minByteRange: number;
readonly maxByteRange: number;
}
export declare type MerkelNode = BranchNode | LeafNode;
export declare const MAX_CHUNK_SIZE: number;
export declare const MIN_CHUNK_SIZE: number;
/**
* Takes the input data and chunks it into (mostly) equal sized chunks.
* The last chunk will be a bit smaller as it contains the remainder
* from the chunking process.
*/
export declare function chunkData(data: Uint8Array): Promise<Chunk[]>;
export declare function generateLeaves(chunks: Chunk[]): Promise<LeafNode[]>;
/**
* Builds an arweave merkle tree and gets the root hash for the given input.
*/
export declare function computeRootHash(data: Uint8Array): Promise<Uint8Array>;
export declare function generateTree(data: Uint8Array): Promise<MerkelNode>;
/**
* Generates the data_root, chunks & proofs
* needed for a transaction.
*
* This also checks if the last chunk is a zero-length
* chunk and discards that chunk and proof if so.
* (we do not need to upload this zero length chunk)
*
* @param data
*/
export declare function generateTransactionChunks(data: Uint8Array): Promise<{
data_root: Uint8Array;
chunks: Chunk[];
proofs: Proof[];
}>;
/**
* Starting with the bottom layer of leaf nodes, hash every second pair
* into a new branch node, push those branch nodes onto a new layer,
* and then recurse, building up the tree to its root, where the
* layer only consists of two items.
*/
export declare function buildLayers(nodes: MerkelNode[], level?: number): Promise<MerkelNode>;
/**
* Recursively search through all branches of the tree,
* and generate a proof for each leaf node.
*/
export declare function generateProofs(root: MerkelNode): Proof[];
export interface Proof {
offset: number;
proof: Uint8Array;
}
export declare function arrayFlatten<T = any>(input: T[]): T[];
export declare function intToBuffer(note: number): Uint8Array;
export declare function bufferToInt(buffer: Uint8Array): number;
export declare const arrayCompare: (a: any[] | Uint8Array, b: any[] | Uint8Array) => boolean;
export declare function validatePath(id: Uint8Array, dest: number, leftBound: number, rightBound: number, path: Uint8Array): Promise<false | {
offset: number;
leftBound: number;
rightBound: number;
chunkSize: number;
}>;
/**
* Inspect an arweave chunk proof.
* Takes proof, parses, reads and displays the values for console logging.
* One proof section per line
* Format: left,right,offset => hash
*/
export declare function debug(proof: Uint8Array, output?: string): Promise<string>;
export {};
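The chunking rule declared above (256 KiB max, 32 KiB min, with the final two chunks rebalanced when the remainder would be too small) is easier to see with concrete numbers. This sketch mirrors `chunkData`'s byte-range logic without the hashing, purely for illustration:

```js
const MAX_CHUNK_SIZE = 256 * 1024;
const MIN_CHUNK_SIZE = 32 * 1024;

// Mirrors chunkData()'s byte-range arithmetic (no hashing).
function chunkBoundaries(totalBytes) {
  const ranges = [];
  let remaining = totalBytes;
  let cursor = 0;
  while (remaining >= MAX_CHUNK_SIZE) {
    let chunkSize = MAX_CHUNK_SIZE;
    // A remainder below MIN_CHUNK_SIZE triggers rebalancing of the last two chunks.
    const nextChunkSize = remaining - MAX_CHUNK_SIZE;
    if (nextChunkSize > 0 && nextChunkSize < MIN_CHUNK_SIZE) {
      chunkSize = Math.ceil(remaining / 2);
    }
    ranges.push({ minByteRange: cursor, maxByteRange: cursor + chunkSize });
    cursor += chunkSize;
    remaining -= chunkSize;
  }
  ranges.push({ minByteRange: cursor, maxByteRange: cursor + remaining });
  return ranges;
}

chunkBoundaries(272 * 1024);
// -> [ { minByteRange: 0, maxByteRange: 139264 },
//      { minByteRange: 139264, maxByteRange: 278528 } ]
// The 16 KiB remainder is below MIN_CHUNK_SIZE, so the last two
// chunks are rebalanced to 136 KiB each.
```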
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* @see {@link https://github.com/ArweaveTeam/arweave/blob/fbc381e0e36efffa45d13f2faa6199d3766edaa2/apps/arweave/src/ar_merkle.erl}
*/
const common_1 = require("../common");
const CHUNK_SIZE = 256 * 1024;
const utils_1 = require("./utils");
const util_1 = require("util");
exports.MAX_CHUNK_SIZE = 256 * 1024;
exports.MIN_CHUNK_SIZE = 32 * 1024;
const NOTE_SIZE = 32;
async function computeRootHash(data) {
let taggedChunks = [];
{
let rest = data;
let pos = 0;
while (rest.byteLength >= CHUNK_SIZE) {
let chunk = rest.slice(0, CHUNK_SIZE);
let id = await common_1.default.crypto.hash(chunk);
pos += chunk.byteLength;
taggedChunks.push({ id, end: pos });
rest = rest.slice(CHUNK_SIZE);
}
taggedChunks.push({
id: await common_1.default.crypto.hash(rest),
end: pos + rest.byteLength
});
}
let nodes = await Promise.all(taggedChunks.map(({ id, end }) => hashLeaf(id, end)));
while (nodes.length > 1) {
let nextNodes = [];
for (let i = 0; i < nodes.length; i += 2) {
nextNodes.push(await hashBranch(nodes[i], nodes[i + 1]));
}
nodes = nextNodes;
}
const [{ id: rootHash }] = nodes;
return rootHash;
}
exports.computeRootHash = computeRootHash;
const HASH_SIZE = 32;
/**
* Takes the input data and chunks it into (mostly) equal sized chunks.
* The last chunk will be a bit smaller as it contains the remainder
* from the chunking process.
*/
async function chunkData(data) {
let chunks = [];
let rest = data;
let cursor = 0;
while (rest.byteLength >= exports.MAX_CHUNK_SIZE) {
let chunkSize = exports.MAX_CHUNK_SIZE;
// If the total bytes left will produce a chunk < MIN_CHUNK_SIZE,
// then adjust the amount we put in this 2nd last chunk.
let nextChunkSize = rest.byteLength - exports.MAX_CHUNK_SIZE;
if (nextChunkSize > 0 && nextChunkSize < exports.MIN_CHUNK_SIZE) {
chunkSize = Math.ceil(rest.byteLength / 2);
// console.log(`Last chunk will be: ${nextChunkSize} which is below ${MIN_CHUNK_SIZE}, adjusting current to ${chunkSize} with ${rest.byteLength} left.`)
}
const chunk = rest.slice(0, chunkSize);
const dataHash = await common_1.default.crypto.hash(chunk);
cursor += chunk.byteLength;
chunks.push({
dataHash,
minByteRange: cursor - chunk.byteLength,
maxByteRange: cursor,
});
rest = rest.slice(chunkSize);
}
chunks.push({
dataHash: await common_1.default.crypto.hash(rest),
minByteRange: cursor,
maxByteRange: cursor + rest.byteLength,
});
return chunks;
}
exports.chunkData = chunkData;
async function generateLeaves(chunks) {
return Promise.all(chunks.map(async ({ dataHash, minByteRange, maxByteRange }) => {
return {
type: "leaf",
id: await hash(await Promise.all([hash(dataHash), hash(intToBuffer(maxByteRange))])),
dataHash: dataHash,
minByteRange,
maxByteRange,
};
}));
}
exports.generateLeaves = generateLeaves;
/**
* Builds an arweave merkle tree and gets the root hash for the given input.
*/
async function computeRootHash(data) {
const rootNode = await generateTree(data);
return rootNode.id;
}
exports.computeRootHash = computeRootHash;
async function generateTree(data) {
const rootNode = await buildLayers(await generateLeaves(await chunkData(data)));
return rootNode;
}
exports.generateTree = generateTree;
/**
* Generates the data_root, chunks & proofs
* needed for a transaction.
*
* This also checks if the last chunk is a zero-length
* chunk and discards that chunk and proof if so.
* (we do not need to upload this zero length chunk)
*
* @param data
*/
async function generateTransactionChunks(data) {
const chunks = await chunkData(data);
const leaves = await generateLeaves(chunks);
const root = await buildLayers(leaves);
const proofs = await generateProofs(root);
// Discard the last chunk & proof if it's zero length.
const lastChunk = chunks.slice(-1)[0];
if (lastChunk.maxByteRange - lastChunk.minByteRange === 0) {
chunks.splice(chunks.length - 1, 1);
proofs.splice(proofs.length - 1, 1);
}
return {
data_root: root.id,
chunks,
proofs
};
}
exports.generateTransactionChunks = generateTransactionChunks;
/**
* Starting with the bottom layer of leaf nodes, hash every second pair
* into a new branch node, push those branch nodes onto a new layer,
* and then recurse, building up the tree to its root, where the
* layer only consists of two items.
*/
async function buildLayers(nodes, level = 0) {
// If there are only 2 nodes left, this is going to be the root node
if (nodes.length < 2) {
const root = await hashBranch(nodes[0], nodes[1]);
// console.log("Root layer", root);
return root;
}
const nextLayer = [];
for (let i = 0; i < nodes.length; i += 2) {
nextLayer.push(await hashBranch(nodes[i], nodes[i + 1]));
}
// console.log("Layer", nextLayer);
return buildLayers(nextLayer, level + 1);
}
exports.buildLayers = buildLayers;
/**
* Recursively search through all branches of the tree,
* and generate a proof for each leaf node.
*/
function generateProofs(root) {
const proofs = resolveBranchProofs(root);
if (!Array.isArray(proofs)) {
return [proofs];
}
return arrayFlatten(proofs);
}
exports.generateProofs = generateProofs;
function resolveBranchProofs(node, proof = new Uint8Array(), depth = 0) {
if (node.type == "leaf") {
return {
offset: node.maxByteRange - 1,
proof: utils_1.concatBuffers([
proof,
node.dataHash,
intToBuffer(node.maxByteRange),
]),
};
}
if (node.type == "branch") {
const partialProof = utils_1.concatBuffers([
proof,
node.leftChild.id,
node.rightChild.id,
intToBuffer(node.byteRange),
]);
return [
resolveBranchProofs(node.leftChild, partialProof, depth + 1),
resolveBranchProofs(node.rightChild, partialProof, depth + 1),
];
}
throw new Error(`Unexpected node type`);
}
function arrayFlatten(input) {
const flat = [];
input.forEach((item) => {
if (Array.isArray(item)) {
flat.push(...arrayFlatten(item));
}
else {
flat.push(item);
}
});
return flat;
}
exports.arrayFlatten = arrayFlatten;
async function hashBranch(left, right) {

@@ -39,17 +177,16 @@ if (!right) {

}
return {
let branch = {
type: "branch",
id: await hash([
await hash(left.id),
await hash(right.id),
await hash(noteToBuffer(left.max))
await hash(intToBuffer(left.maxByteRange)),
]),
max: right.max
byteRange: left.maxByteRange,
maxByteRange: right.maxByteRange,
leftChild: left,
rightChild: right,
};
return branch;
}
async function hashLeaf(data, note) {
return {
id: await hash([await hash(data), await hash(noteToBuffer(note))]),
max: note
};
}
async function hash(data) {

@@ -59,11 +196,90 @@ if (Array.isArray(data)) {

}
return await common_1.default.crypto.hash(data);
return new Uint8Array(await common_1.default.crypto.hash(data));
}
function noteToBuffer(note) {
function intToBuffer(note) {
const buffer = new Uint8Array(NOTE_SIZE);
for (let i = NOTE_SIZE - 1; i > 0 && note > 0; i--, note = note >> 8) {
buffer[i] = note;
for (var i = buffer.length - 1; i >= 0; i--) {
var byte = note % 256;
buffer[i] = byte;
note = (note - byte) / 256;
}
return buffer;
}
exports.intToBuffer = intToBuffer;
function bufferToInt(buffer) {
let value = 0;
for (var i = 0; i < buffer.length; i++) {
value *= 256;
value += buffer[i];
}
return value;
}
exports.bufferToInt = bufferToInt;
exports.arrayCompare = (a, b) => a.every((value, index) => b[index] === value);
async function validatePath(id, dest, leftBound, rightBound, path) {
if (rightBound <= 0) {
return false;
}
if (dest >= rightBound) {
return validatePath(id, 0, rightBound - 1, rightBound, path);
}
if (dest < 0) {
return validatePath(id, 0, 0, rightBound, path);
}
if (path.length == HASH_SIZE + NOTE_SIZE) {
const pathData = path.slice(0, HASH_SIZE);
const endOffsetBuffer = path.slice(pathData.length, pathData.length + NOTE_SIZE);
const pathDataHash = await hash([
await hash(pathData),
await hash(endOffsetBuffer),
]);
let result = exports.arrayCompare(id, pathDataHash);
if (result) {
return { offset: rightBound - 1, leftBound: leftBound, rightBound: rightBound, chunkSize: rightBound - leftBound };
}
return false;
}
const left = path.slice(0, HASH_SIZE);
const right = path.slice(left.length, left.length + HASH_SIZE);
const offsetBuffer = path.slice(left.length + right.length, left.length + right.length + NOTE_SIZE);
const offset = bufferToInt(offsetBuffer);
const remainder = path.slice(left.length + right.length + offsetBuffer.length);
const pathHash = await hash([
await hash(left),
await hash(right),
await hash(offsetBuffer),
]);
if (exports.arrayCompare(id, pathHash)) {
if (dest < offset) {
return await validatePath(left, dest, leftBound, Math.min(rightBound, offset), remainder);
}
return await validatePath(right, dest, Math.max(leftBound, offset), rightBound, remainder);
}
return false;
}
exports.validatePath = validatePath;
/**
* Inspect an arweave chunk proof.
* Takes proof, parses, reads and displays the values for console logging.
* One proof section per line
* Format: left,right,offset => hash
*/
async function debug(proof, output = "") {
if (proof.byteLength < 1) {
return output;
}
const left = proof.slice(0, HASH_SIZE);
const right = proof.slice(left.length, left.length + HASH_SIZE);
const offsetBuffer = proof.slice(left.length + right.length, left.length + right.length + NOTE_SIZE);
const offset = bufferToInt(offsetBuffer);
const remainder = proof.slice(left.length + right.length + offsetBuffer.length);
const pathHash = await hash([
await hash(left),
await hash(right),
await hash(offsetBuffer),
]);
const updatedOutput = `${output}\n${util_1.inspect(Buffer.from(left))},${util_1.inspect(Buffer.from(right))},${offset} => ${util_1.inspect(pathHash)}`;
return debug(remainder, updatedOutput);
}
exports.debug = debug;
//# sourceMappingURL=merkle.js.map
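Putting the new exports together, proof generation and validation round-trip as below. A hedged sketch: the module path is assumed from the diff, and the package entry is loaded first on the assumption that it registers the crypto driver that `common` relies on:

```js
require('arweave/node'); // assumed to register the node crypto driver
const merkle = require('arweave/node/lib/merkle');

async function verifyFirstChunk(data /* Uint8Array */) {
  const root = await merkle.generateTree(data);
  const proofs = merkle.generateProofs(root);
  // validatePath() walks a proof from the root id down to a leaf and
  // returns false, or the byte bounds of the chunk containing `offset`.
  return merkle.validatePath(root.id, proofs[0].offset, 0, data.byteLength, proofs[0].proof);
  // e.g. { offset, leftBound, rightBound, chunkSize }
}
```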

node/lib/transaction.d.ts

@@ -0,1 +1,2 @@

import { Chunk, Proof } from './merkle';
declare class BaseObject {

@@ -26,3 +27,3 @@ [key: string]: any;

quantity: string;
data: string;
data: Uint8Array;
reward: string;

@@ -32,3 +33,2 @@ signature: string;

data_root: string;
data_tree: string[];
}

@@ -43,8 +43,12 @@ export default class Transaction extends BaseObject implements TransactionInterface {

readonly quantity: string;
readonly data: string;
readonly data_size: string;
readonly data_root: string;
readonly data_tree: string[];
readonly reward: string;
data: Uint8Array;
data_root: string;
reward: string;
signature: string;
chunks?: {
data_root: Uint8Array;
chunks: Chunk[];
proofs: Proof[];
};
constructor(attributes?: Partial<TransactionInterface>);

@@ -63,3 +67,3 @@ addTag(name: string, value: string): void;

data_root: string;
data_tree: string[];
data_tree: any;
reward: string;

@@ -72,4 +76,12 @@ signature: string;

}): void;
prepareChunks(data: Uint8Array): Promise<void>;
getChunk(idx: number, data: Uint8Array): {
data_root: string;
data_size: string;
data_path: string;
offset: string;
chunk: string;
};
getSignatureData(): Promise<Uint8Array>;
}
export {};

node/lib/transaction.js

@@ -5,2 +5,3 @@ "use strict";

const deepHash_1 = require("./deepHash");
const merkle_1 = require("./merkle");
class BaseObject {

@@ -11,2 +12,14 @@ get(field, options) {

}
// Handle fields that are Uint8Arrays.
// To maintain compat we encode them to b64url
// if the decode option is not specified.
if (this[field] instanceof Uint8Array) {
if (options && options.decode && options.string) {
return ArweaveUtils.bufferToString(this[field]);
}
if (options && options.decode && !options.string) {
return this[field];
}
return ArweaveUtils.bufferTob64Url(this[field]);
}
if (options && options.decode == true) {

@@ -39,9 +52,14 @@ if (options && options.string) {

this.quantity = "0";
this.data = "";
this.data_size = "0";
this.data = new Uint8Array();
this.data_root = "";
this.data_tree = [];
this.reward = "0";
this.signature = "";
Object.assign(this, attributes);
// If someone passes in a Tx that has been toJSON'ed and back,
// or where the data was filled in from the /tx/data endpoint,
// the data will be b64url encoded, so decode it.
if (typeof this.data === 'string') {
this.data = ArweaveUtils.b64UrlToBuffer(this.data);
}
if (attributes.tags) {

@@ -65,3 +83,3 @@ this.tags = attributes.tags.map((tag) => {

quantity: this.quantity,
data: this.data,
data: ArweaveUtils.bufferTob64Url(this.data),
data_size: this.data_size,

@@ -78,2 +96,38 @@ data_root: this.data_root,

}
async prepareChunks(data) {
// Note: we *do not* use `this.data`, the caller may be
// operating on a transaction with a zero-length data field.
// This function computes the chunks for the data passed in and
// assigns the result to this transaction. It should not read the
// data *from* this transaction.
if (!this.chunks && data.byteLength > 0) {
this.chunks = await merkle_1.generateTransactionChunks(data);
this.data_root = ArweaveUtils.bufferTob64Url(this.chunks.data_root);
}
if (!this.chunks && data.byteLength === 0) {
this.chunks = {
chunks: [],
data_root: new Uint8Array(),
proofs: [],
};
this.data_root = '';
}
}
// Returns a chunk in a format suitable for posting to /chunk.
// Similar to `prepareChunks()`, this does not operate on `this.data`,
// instead using the data passed in.
getChunk(idx, data) {
if (!this.chunks) {
throw new Error(`Chunks have not been prepared`);
}
const proof = this.chunks.proofs[idx];
const chunk = this.chunks.chunks[idx];
return {
data_root: this.data_root,
data_size: this.data_size,
data_path: ArweaveUtils.bufferTob64Url(proof.proof),
offset: proof.offset.toString(),
chunk: ArweaveUtils.bufferTob64Url(data.slice(chunk.minByteRange, chunk.maxByteRange))
};
}
async getSignatureData() {

@@ -97,2 +151,3 @@ switch (this.format) {

case 2:
await this.prepareChunks(this.data);
const tagList = this.tags.map((tag) => [

@@ -99,0 +154,0 @@ tag.get("name", { decode: true, string: false }),
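For format 2 transactions, `getSignatureData()` now calls `prepareChunks(this.data)` before hashing, so signing implicitly computes the data root. The chunk accessors can also be driven directly; a sketch, assuming the usual `arweave`, `key` and `myData` setup:

```js
const tx = await arweave.createTransaction({ data: myData }, key);
await tx.prepareChunks(tx.data); // computes data_root, chunks and proofs once
// Each chunk comes back b64url-encoded, in the shape POSTed to /chunk:
const firstChunk = tx.getChunk(0, tx.data);
// -> { data_root, data_size, data_path, offset, chunk }
```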

node/transactions.d.ts

@@ -6,3 +6,3 @@ /// <reference types="node" />

import { JWKInterface } from "./lib/wallet";
import { AxiosResponse } from "axios";
import { TransactionUploader, SerializedUploader } from "./lib/transaction-uploader";
export interface TransactionConfirmedData {

@@ -33,3 +33,40 @@ block_indep_hash: string;

verify(transaction: Transaction): Promise<boolean>;
post(transaction: Transaction | Buffer | string | object): Promise<AxiosResponse>;
post(transaction: Transaction | Buffer | string | object): Promise<{
status: number;
statusText: string;
data: any;
}>;
/**
* Gets an uploader that can be used to upload a transaction chunk by chunk, giving progress
* and the ability to resume.
*
* Usage example:
*
* ```
* const uploader = arweave.transactions.getUploader(transaction);
* while (!uploader.isComplete) {
* await uploader.uploadChunk();
* console.log(`${uploader.pctComplete}%`);
* }
* ```
*
* @param upload a Transaction object, a previously saved progress object, or a transaction id.
* @param data the data of the transaction. Required when resuming an upload.
*/
getUploader(upload: Transaction | SerializedUploader | string, data?: Uint8Array | ArrayBuffer): Promise<TransactionUploader>;
/**
* Async generator version of uploader
*
* Usage example:
*
* ```
* for await (const uploader of arweave.transactions.upload(tx)) {
* console.log(`${uploader.pctComplete}%`);
* }
* ```
*
* @param upload a Transaction object, a previously saved uploader, or a transaction id.
* @param data the data of the transaction. Required when resuming an upload.
*/
upload(upload: Transaction | SerializedUploader | string, data?: Uint8Array): AsyncIterableIterator<TransactionUploader>;
}
"use strict";
var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var g = generator.apply(thisArg, _arguments || []), i, q = [];
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
function fulfill(value) { resume("next", value); }
function reject(value) { resume("throw", value); }
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -6,2 +18,3 @@ const error_1 = require("./lib/error");

const ArweaveUtils = require("./lib/utils");
const transaction_uploader_1 = require("./lib/transaction-uploader");
class Transactions {

@@ -103,3 +116,3 @@ constructor(api, crypto) {

}
return null;
return (options && options.decode) ? new Uint8Array(0) : '';
});

@@ -135,5 +148,102 @@ }

}
post(transaction) {
return this.api.post(`tx`, transaction).then(response => {
return response;
async post(transaction) {
if (typeof transaction === 'string') {
transaction = new transaction_1.default(JSON.parse(transaction));
}
else if (transaction instanceof Buffer) {
transaction = new transaction_1.default(JSON.parse(transaction.toString()));
}
else if (typeof transaction === 'object' && !(transaction instanceof transaction_1.default)) {
transaction = new transaction_1.default(transaction);
}
if (!(transaction instanceof transaction_1.default)) {
throw new Error(`Must be Transaction object`);
}
if (transaction.data.byteLength > 1024 * 1024 * 10) {
console.warn(`transactions.getUploader() or transactions.upload() is recommended for large data transactions`);
}
const uploader = await this.getUploader(transaction);
// Emulate existing error & return value behaviour.
try {
while (!uploader.isComplete) {
await uploader.uploadChunk();
}
}
catch (e) {
if (uploader.lastResponseStatus > 0) {
return {
status: uploader.lastResponseStatus,
statusText: uploader.lastResponseError,
data: {
error: uploader.lastResponseError,
}
};
}
throw (e);
}
return {
status: 200,
statusText: 'OK',
data: {}
};
}
/**
* Gets an uploader that can be used to upload a transaction chunk by chunk, giving progress
* and the ability to resume.
*
* Usage example:
*
* ```
* const uploader = arweave.transactions.getUploader(transaction);
* while (!uploader.isComplete) {
* await uploader.uploadChunk();
* console.log(`${uploader.pctComplete}%`);
* }
* ```
*
* @param upload a Transaction object, a previously saved progress object, or a transaction id.
* @param data the data of the transaction. Required when resuming an upload.
*/
async getUploader(upload, data) {
let uploader;
if (upload instanceof transaction_1.default) {
uploader = new transaction_uploader_1.TransactionUploader(this.api, upload);
}
else {
if (data instanceof ArrayBuffer) {
data = new Uint8Array(data);
}
if (!data || !(data instanceof Uint8Array)) {
throw new Error(`Must provide data when resuming upload`);
}
if (typeof upload === 'string') {
upload = await transaction_uploader_1.TransactionUploader.fromTransactionId(this.api, upload);
}
// upload should be a serialized upload.
uploader = await transaction_uploader_1.TransactionUploader.fromSerialized(this.api, upload, data);
}
return uploader;
}
/**
* Async generator version of uploader
*
* Usage example:
*
* ```
* for await (const uploader of arweave.transactions.upload(tx)) {
* console.log(`${uploader.pctComplete}%`);
* }
* ```
*
* @param upload a Transaction object, a previously saved uploader, or a transaction id.
* @param data the data of the transaction. Required when resuming an upload.
*/
upload(upload, data) {
return __asyncGenerator(this, arguments, function* upload_1() {
const uploader = yield __await(this.getUploader(upload, data));
while (!uploader.isComplete) {
yield __await(uploader.uploadChunk());
yield yield __await(uploader);
}
return yield __await(uploader);
});

@@ -140,0 +250,0 @@ }
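`post()` keeps its old call shape but now uploads through the chunk uploader internally and emulates the old axios-style return value, so existing callers can keep checking `status`. A sketch:

```js
const response = await arweave.transactions.post(tx);
if (response.status >= 200 && response.status < 300) {
  console.log('submitted');
} else {
  // On failure the last uploader error is surfaced in the response body.
  console.error(response.status, response.data.error);
}
```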

package.json

{
"name": "arweave",
"version": "1.7.1",
"version": "1.8.0",
"description": "Arweave JS client library",

@@ -5,0 +5,0 @@ "main": "index.js",

README.md

@@ -26,2 +26,3 @@ # Arweave JS

- [Submit a transaction](#submit-a-transaction)
- [Chunked uploading advanced options](#chunked-uploading-advanced-options)
- [Get a transaction status](#get-a-transaction-status)

@@ -99,3 +100,3 @@ - [Get a transaction](#get-a-transaction)

<body>
</body>

@@ -188,10 +189,10 @@ </html>

// Plain text
let transactionA = arweave.createTransaction({
let transactionA = await arweave.createTransaction({
data: '<html><head><meta charset="UTF-8"><title>Hello world!</title></head><body></body></html>'
}, jwk);
}, key);
// Buffer
let transactionB = arweave.createTransaction({
let transactionB = await arweave.createTransaction({
data: Buffer.from('Some data', 'utf8')
}, jwk);
}, key);

@@ -223,6 +224,6 @@

// Send 10.5 AR to 1seRanklLU_1VTGkEk7P0xAwMJfA7owA1JHW5KyZKlY
let transaction = arweave.createTransaction({
let transaction = await arweave.createTransaction({
target: '1seRanklLU_1VTGkEk7P0xAwMJfA7owA1JHW5KyZKlY',
quantity: arweave.ar.arToWinston('10.5')
}, jwk);
}, key);

@@ -319,5 +320,26 @@ console.log(transaction);

Once a transaction is submitted to the network it'll be broadcast around all nodes and mined into a block.
The preferred method of submitting a data transaction is to use chunk uploading. This method allows larger transaction sizes, resumes an upload if it's interrupted, and gives progress updates while uploading.
Simple example:
```js
let data = fs.readFileSync('path/to/file.pdf');
let transaction = await arweave.createTransaction({ data: data }, key);
transaction.addTag('Content-Type', 'application/pdf');
await arweave.transactions.sign(transaction, key);
let uploader = await arweave.transactions.getUploader(transaction);
while (!uploader.isComplete) {
await uploader.uploadChunk();
console.log(`${uploader.pctComplete}% complete, ${uploader.uploadedChunks}/${uploader.totalChunks}`);
}
```
You can also submit transactions using `transactions.post()`, which is suitable for small transactions or token transfers:
```js
let key = await arweave.wallets.generate();

@@ -340,2 +362,43 @@

##### Chunked uploading advanced options
You can resume an upload from a saved uploader object that you have persisted in storage using `JSON.stringify(uploader)` at any stage of the upload. To resume, parse it back into an object and pass it to `getUploader()` along with the transaction's data:
```js
let data = fs.readFileSync('path/to/file.pdf'); // get the same data
let resumeObject = JSON.parse(savedUploader); // get uploader object from where you stored it.
let uploader = await arweave.transactions.getUploader(resumeObject, data);
while (!uploader.isComplete) {
await uploader.uploadChunk();
}
```
When resuming the upload, you *must provide the same data* as the original upload. When you serialize the uploader object with `JSON.stringify()` to save it somewhere, it will not include the data.
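For example, persisting and restoring the uploader might look like this (`localStorage` is just one illustrative storage choice):

```js
// At any point during the upload loop:
localStorage.setItem('tx-upload', JSON.stringify(uploader));

// Later, in a fresh session:
let saved = JSON.parse(localStorage.getItem('tx-upload'));
let uploader = await arweave.transactions.getUploader(saved, data);
```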
You can also resume an upload from just the transaction ID and data, once it has been mined into a block. This can be useful if you didn't save the uploader somewhere but the upload got interrupted. This will re-upload all of the data from the beginning, since we don't know which parts have been uploaded:
```js
let data = fs.readFileSync('path/to/file.pdf'); // get the same data
let resumeTxId = 'mytxid' // a transaction id for a mined transaction that didn't complete the upload.
let uploader = await arweave.transactions.getUploader(resumeTxId, data);
while (!uploader.isComplete) {
await uploader.uploadChunk();
console.log(`${uploader.pctComplete}% complete`);
}
```
There is also an async iterator interface to chunk uploading. Note that using this method means you'll need a transpiler and a polyfill for the asyncIterator symbol in some environments (Safari on iOS in particular). It takes the same arguments for uploading/resuming a transaction as `getUploader()` and has a slightly shorter syntax:
```js
for await (const uploader of arweave.transactions.upload(tx)) {
console.log(`${uploader.pctComplete}% Complete`);
}
// done.
```
#### Get a transaction status

@@ -482,2 +545,2 @@

- https://www.npmjs.com/package/arlang
- https://www.npmjs.com/package/arql-ops
- https://www.npmjs.com/package/arql-ops

Sorry, the diffs of the remaining files are either too big to display or not supported yet.
