Comparing version 1.9.0 to 1.9.1
@@ -62,4 +62,6 @@ "use strict";
 }
-transaction.data_root = '';
-transaction.data_size = attributes.data ? attributes.data.byteLength.toString() : '0';
+transaction.data_root = "";
+transaction.data_size = attributes.data
+? attributes.data.byteLength.toString()
+: "0";
 transaction.data = attributes.data || new Uint8Array(0);
@@ -66,0 +68,0 @@ return new transaction_1.default(transaction);
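The hunk above is only a Prettier reflow, but the logic it touches is worth isolating: a transaction created without data gets empty-string defaults, and `data_size` mirrors the byte length as a string. A minimal sketch, with the helper name `applyDataDefaults` invented here for illustration:

```js
// Hypothetical helper showing the defaulting from the hunk above:
// data_root starts empty, data_size is the byte length as a string,
// and data falls back to an empty Uint8Array.
function applyDataDefaults(transaction, attributes) {
  transaction.data_root = "";
  transaction.data_size = attributes.data
    ? attributes.data.byteLength.toString()
    : "0";
  transaction.data = attributes.data || new Uint8Array(0);
  return transaction;
}

// applyDataDefaults({}, { data: new Uint8Array(3) })
// -> { data_root: "", data_size: "3", data: Uint8Array(3) }
```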
@@ -5,3 +5,3 @@ "use strict";
 // @ts-ignore
-const asn = require("arweave-asn1");
+const asn = require("asn1.js");
 function urlize(base64) {
@@ -8,0 +8,0 @@ return base64
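This hunk swaps the `arweave-asn1` GitHub fork for the published `asn1.js` package (see the package.json hunk further down). The surrounding `urlize()` body is elided in the diff; assuming it performs the standard base64-to-base64url transform, it would look like this sketch:

```js
// Standard base64 -> base64url transform (an assumption about the elided
// urlize() body): swap URL-unsafe characters and drop '=' padding.
function urlize(base64) {
  return base64
    .replace(/\+/g, "-")
    .replace(/\//g, "_")
    .replace(/=/g, "");
}
```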
@@ -20,4 +20,4 @@ "use strict";
 exports.default = ArweaveError;
-// Safely get error string
-// from an axios response, falling back to
+// Safely get error string
+// from an axios response, falling back to
 // resp.data, statusText or 'unknown'.
@@ -29,8 +29,7 @@ // Note: a wrongly set content-type can
 let data = resp.data;
-if (typeof resp.data === 'string') {
+if (typeof resp.data === "string") {
 try {
 data = JSON.parse(resp.data);
 }
-catch (e) {
-}
+catch (e) { }
 }
@@ -41,11 +40,7 @@ if (resp.data instanceof ArrayBuffer || resp.data instanceof Uint8Array) {
 }
-catch (e) {
-}
+catch (e) { }
 }
-return data ?
-(data.error || data)
-:
-(resp.statusText || 'unknown');
+return data ? data.error || data : resp.statusText || "unknown";
 }
 exports.getError = getError;
 //# sourceMappingURL=error.js.map
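For reference, the `getError()` logic these hunks reformat fits together as below. The binary branch is partially elided in the diff, so the `TextDecoder` call here is an assumption about how the `ArrayBuffer`/`Uint8Array` case gets decoded before parsing:

```js
// Sketch of getError(): recover a JSON error body from string or binary
// responses, then fall through data.error -> data -> statusText -> "unknown".
function getError(resp) {
  let data = resp.data;
  if (typeof resp.data === "string") {
    try { data = JSON.parse(resp.data); } catch (e) { }
  }
  if (resp.data instanceof ArrayBuffer || resp.data instanceof Uint8Array) {
    // Assumption: the elided lines decode the bytes before parsing.
    try { data = JSON.parse(new TextDecoder().decode(resp.data)); } catch (e) { }
  }
  return data ? data.error || data : resp.statusText || "unknown";
}
```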
@@ -25,3 +25,3 @@ "use strict";
 let chunkSize = exports.MAX_CHUNK_SIZE;
-// If the total bytes left will produce a chunk < MIN_CHUNK_SIZE,
+// If the total bytes left will produce a chunk < MIN_CHUNK_SIZE,
 // then adjust the amount we put in this 2nd last chunk.
@@ -39,3 +39,3 @@ let nextChunkSize = rest.byteLength - exports.MAX_CHUNK_SIZE;
 minByteRange: cursor - chunk.byteLength,
-maxByteRange: cursor,
+maxByteRange: cursor
 });
@@ -47,3 +47,3 @@ rest = rest.slice(chunkSize);
 minByteRange: cursor,
-maxByteRange: cursor + rest.byteLength,
+maxByteRange: cursor + rest.byteLength
 });
@@ -60,3 +60,3 @@ return chunks;
 minByteRange,
-maxByteRange,
+maxByteRange
 };
@@ -147,4 +147,4 @@ }));
 node.dataHash,
-intToBuffer(node.maxByteRange),
-]),
+intToBuffer(node.maxByteRange)
+])
 };
@@ -157,7 +157,7 @@ }
 node.rightChild.id,
-intToBuffer(node.byteRange),
+intToBuffer(node.byteRange)
 ]);
 return [
 resolveBranchProofs(node.leftChild, partialProof, depth + 1),
-resolveBranchProofs(node.rightChild, partialProof, depth + 1),
+resolveBranchProofs(node.rightChild, partialProof, depth + 1)
 ];
@@ -169,3 +169,3 @@ }
 const flat = [];
-input.forEach((item) => {
+input.forEach(item => {
 if (Array.isArray(item)) {
@@ -190,3 +190,3 @@ flat.push(...arrayFlatten(item));
 await hash(right.id),
-await hash(intToBuffer(left.maxByteRange)),
+await hash(intToBuffer(left.maxByteRange))
 ]),
@@ -196,3 +196,3 @@ byteRange: left.maxByteRange,
 leftChild: left,
-rightChild: right,
+rightChild: right
 };
@@ -242,7 +242,12 @@ return branch;
 await hash(pathData),
-await hash(endOffsetBuffer),
+await hash(endOffsetBuffer)
 ]);
 let result = exports.arrayCompare(id, pathDataHash);
 if (result) {
-return { offset: rightBound - 1, leftBound: leftBound, rightBound: rightBound, chunkSize: rightBound - leftBound };
+return {
+offset: rightBound - 1,
+leftBound: leftBound,
+rightBound: rightBound,
+chunkSize: rightBound - leftBound
+};
 }
@@ -259,3 +264,3 @@ return false;
 await hash(right),
-await hash(offsetBuffer),
+await hash(offsetBuffer)
 ]);
@@ -289,3 +294,3 @@ if (exports.arrayCompare(id, pathHash)) {
 await hash(right),
-await hash(offsetBuffer),
+await hash(offsetBuffer)
 ]);
@@ -292,0 +297,0 @@ const updatedOutput = `${output}\n${util_1.inspect(Buffer.from(left))},${util_1.inspect(Buffer.from(right))},${offset} => ${util_1.inspect(pathHash)}`;
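The merkle hunks are formatting-only, but the comments describe one non-obvious chunking rule: if the trailing remainder would fall below `MIN_CHUNK_SIZE`, the second-to-last chunk is shrunk so the last two chunks even out. A standalone sketch of that rule, under the assumption that the constants are 256 KiB and 32 KiB (the real `chunkData` also hashes each chunk, omitted here):

```js
// Sketch of the byte-range chunking rule described in the comments above.
const MAX_CHUNK_SIZE = 256 * 1024; // assumed value
const MIN_CHUNK_SIZE = 32 * 1024;  // assumed value

function chunkRanges(totalBytes) {
  const chunks = [];
  let cursor = 0;
  let rest = totalBytes;
  while (rest >= MAX_CHUNK_SIZE) {
    let chunkSize = MAX_CHUNK_SIZE;
    // If the bytes left after this chunk would form a chunk < MIN_CHUNK_SIZE,
    // split what remains roughly in half instead.
    const nextChunkSize = rest - MAX_CHUNK_SIZE;
    if (nextChunkSize > 0 && nextChunkSize < MIN_CHUNK_SIZE) {
      chunkSize = Math.ceil(rest / 2);
    }
    chunks.push({ minByteRange: cursor, maxByteRange: cursor + chunkSize });
    cursor += chunkSize;
    rest -= chunkSize;
  }
  chunks.push({ minByteRange: cursor, maxByteRange: cursor + rest });
  return chunks;
}
```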
@@ -1,3 +0,3 @@
-import Transaction from './transaction';
-import Api from './api';
+import Transaction from "./transaction";
+import Api from "./api";
 export interface SerializedUploader {
@@ -4,0 +4,0 @@ chunkIndex: number;
@@ -10,5 +10,5 @@ "use strict";
 const MAX_CHUNKS_IN_BODY = 1;
-// We assume these errors are intermitment and we can try again after a delay:
-// - not_joined
-// - timeout
+// We assume these errors are intermitment and we can try again after a delay:
+// - not_joined
+// - timeout
 // - data_root_not_found (we may have hit a node that just hasn't seen it yet)
@@ -18,3 +18,11 @@ // - exceeds_disk_pool_size_limit
 // Errors from /chunk we should never try and continue on.
-const FATAL_CHUNK_UPLOAD_ERRORS = ['invalid_json', 'chunk_too_big', 'data_path_too_big', 'offset_too_big', 'data_size_too_big', 'chunk_proof_ratio_not_attractive', 'invalid_proof'];
+const FATAL_CHUNK_UPLOAD_ERRORS = [
+"invalid_json",
+"chunk_too_big",
+"data_path_too_big",
+"offset_too_big",
+"data_size_too_big",
+"chunk_proof_ratio_not_attractive",
+"invalid_proof"
+];
 // Amount we will delay on receiving an error response but do want to continue.
@@ -30,3 +38,3 @@ const ERROR_DELAY = 1000 * 40;
 this.lastResponseStatus = 0;
-this.lastResponseError = '';
+this.lastResponseError = "";
 if (!transaction.id) {
@@ -43,3 +51,4 @@ throw new Error(`Transaction is not signed`);
 get isComplete() {
-return this.txPosted && this.chunkIndex === this.transaction.chunks.chunks.length;
+return (this.txPosted &&
+this.chunkIndex === this.transaction.chunks.chunks.length);
 }
@@ -65,3 +74,3 @@ get totalChunks() {
 }
-if (this.lastResponseError !== '') {
+if (this.lastResponseError !== "") {
 this.totalErrors++;
@@ -72,3 +81,3 @@ }
 }
-// We have been trying for about an hour receiving an
+// We have been trying for about an hour receiving an
 // error every time, so eventually bail.
@@ -78,10 +87,11 @@ if (this.totalErrors === 100) {
 }
-let delay = this.lastResponseError === '' ? 0 :
-Math.max((this.lastRequestTimeEnd + ERROR_DELAY) - Date.now(), ERROR_DELAY);
+let delay = this.lastResponseError === ""
+? 0
+: Math.max(this.lastRequestTimeEnd + ERROR_DELAY - Date.now(), ERROR_DELAY);
 if (delay > 0) {
 // Jitter delay bcoz networks, subtract up to 30% from 40 seconds
-delay = delay - (delay * Math.random() * 0.30);
+delay = delay - delay * Math.random() * 0.3;
 await new Promise(res => setTimeout(res, delay));
 }
-this.lastResponseError = '';
+this.lastResponseError = "";
 if (!this.txPosted) {
@@ -123,9 +133,11 @@ await this.postTransaction();
 static async fromSerialized(api, serialized, data) {
-if (!serialized || typeof serialized.chunkIndex !== 'number' || typeof serialized.transaction !== 'object') {
+if (!serialized ||
+typeof serialized.chunkIndex !== "number" ||
+typeof serialized.transaction !== "object") {
 throw new Error(`Serialized object does not match expected format.`);
 }
-// Everything looks ok, reconstruct the TransactionUpload,
+// Everything looks ok, reconstruct the TransactionUpload,
 // prepare the chunks again and verify the data_root matches
 const upload = new TransactionUploader(api, new transaction_1.default(serialized.transaction));
-// Copy the serialized upload information, and data passed in.
+// Copy the serialized upload information, and data passed in.
 upload.chunkIndex = serialized.chunkIndex;
@@ -160,6 +172,6 @@ upload.lastRequestTimeEnd = serialized.lastRequestTimeEnd;
 chunkIndex: 0,
-lastResponseError: '',
+lastResponseError: "",
 lastRequestTimeEnd: 0,
 lastResponseStatus: 0,
-transaction,
+transaction
 };
@@ -184,5 +196,3 @@ return serialized;
 this.transaction.data = this.data;
-const resp = await this.api
-.post(`tx`, this.transaction)
-.catch(e => {
+const resp = await this.api.post(`tx`, this.transaction).catch(e => {
 console.error(e);
@@ -189,0 +199,0 @@ return { status: -1, data: { error: e.message } };
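Two behaviors in the uploader hunks are worth pulling out: errors in `FATAL_CHUNK_UPLOAD_ERRORS` abort the upload immediately, while other errors back off by `ERROR_DELAY` (40 s) with up to 30% subtractive jitter. A sketch of the delay computation, extracted from the hunk above into a standalone function:

```js
const ERROR_DELAY = 1000 * 40; // 40 seconds, from the diff

// How long to sleep before the next chunk attempt: nothing after a clean
// response, otherwise at least ERROR_DELAY minus up to 30% random jitter.
async function waitBeforeRetry(lastResponseError, lastRequestTimeEnd) {
  let delay = lastResponseError === ""
    ? 0
    : Math.max(lastRequestTimeEnd + ERROR_DELAY - Date.now(), ERROR_DELAY);
  if (delay > 0) {
    delay = delay - delay * Math.random() * 0.3;
    await new Promise(res => setTimeout(res, delay));
  }
}
```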
@@ -1,2 +0,2 @@
-import { Chunk, Proof } from './merkle';
+import { Chunk, Proof } from "./merkle";
 declare class BaseObject {
@@ -3,0 +3,0 @@ [key: string]: any;
@@ -13,3 +13,3 @@ "use strict";
 // Handle fields that are Uint8Arrays.
-// To maintain compat we encode them to b64url
+// To maintain compat we encode them to b64url
 // if decode option is not specificed.
@@ -58,6 +58,6 @@ if (this[field] instanceof Uint8Array) {
 Object.assign(this, attributes);
-// If something passes in a Tx that has been toJSON'ed and back,
-// or where the data was filled in from /tx/data endpoint.
+// If something passes in a Tx that has been toJSON'ed and back,
+// or where the data was filled in from /tx/data endpoint.
 // data will be b64url encoded, so decode it.
-if (typeof this.data === 'string') {
+if (typeof this.data === "string") {
 this.data = ArweaveUtils.b64UrlToBuffer(this.data);
@@ -88,3 +88,3 @@ }
 reward: this.reward,
-signature: this.signature,
+signature: this.signature
 };
@@ -110,9 +110,9 @@ }
 data_root: new Uint8Array(),
-proofs: [],
+proofs: []
 };
-this.data_root = '';
+this.data_root = "";
 }
 }
 // Returns a chunk in a format suitable for posting to /chunk.
-// Similar to `prepareChunks()` this does not operate `this.data`,
+// Similar to `prepareChunks()` this does not operate `this.data`,
 // instead using the data passed in.
@@ -148,9 +148,9 @@ getChunk(idx, data) {
 this.get("last_tx", { decode: true, string: false }),
-ArweaveUtils.stringToBuffer(tagString),
+ArweaveUtils.stringToBuffer(tagString)
 ]);
 case 2:
 await this.prepareChunks(this.data);
-const tagList = this.tags.map((tag) => [
+const tagList = this.tags.map(tag => [
 tag.get("name", { decode: true, string: false }),
-tag.get("value", { decode: true, string: false }),
+tag.get("value", { decode: true, string: false })
 ]);
@@ -166,3 +166,3 @@ return await deepHash_1.default([
 ArweaveUtils.stringToBuffer(this.data_size),
-this.get("data_root", { decode: true, string: false }),
+this.get("data_root", { decode: true, string: false })
 ]);
@@ -169,0 +169,0 @@ default:
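One behavior noted in the transaction hunks is easy to miss: the constructor decodes `data` back to bytes when it arrives as a b64url string, e.g. after a JSON round trip. A hypothetical usage sketch, where `Transaction` stands in for the compiled `transaction_1.default` and `signedTx` is an assumed existing transaction instance:

```js
// After a JSON round trip the data field is a b64url string; the
// constructor (per the comments above) decodes it back to a Uint8Array.
const roundTripped = JSON.parse(JSON.stringify(signedTx)); // data: string
const rebuilt = new Transaction(roundTripped);
console.log(rebuilt.data instanceof Uint8Array); // true
```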
@@ -57,3 +57,5 @@ "use strict";
 const data_size = parseInt(response.data.data_size);
-if (response.data.format >= 2 && data_size > 0 && data_size <= 1024 * 1024 * 12) {
+if (response.data.format >= 2 &&
+data_size > 0 &&
+data_size <= 1024 * 1024 * 12) {
 const data = await this.getData(id);
@@ -108,3 +110,3 @@ return new transaction_1.default(Object.assign(Object.assign({}, response.data), { data }));
 // Attempt to download from /txid, fall back to downloading chunks.
-const resp = await this.api.get(`${id}`, { responseType: 'arraybuffer' });
+const resp = await this.api.get(`${id}`, { responseType: "arraybuffer" });
 let data = undefined;
@@ -114,7 +116,7 @@ if (resp.status === 200) {
 }
-if (resp.status === 400 && error_1.getError(resp) === 'tx_data_too_big') {
+if (resp.status === 400 && error_1.getError(resp) === "tx_data_too_big") {
 data = await this.chunks.downloadChunkedData(id);
 }
-// If we don't have data, throw an exception. Previously we
-// just returned an empty data object.
+// If we don't have data, throw an exception. Previously we
+// just returned an empty data object.
 if (!data) {
@@ -170,9 +172,10 @@ if (resp.status == 202) {
 async post(transaction) {
-if (typeof transaction === 'string') {
+if (typeof transaction === "string") {
 transaction = new transaction_1.default(JSON.parse(transaction));
 }
-else if (typeof transaction.readInt32BE === 'function') {
+else if (typeof transaction.readInt32BE === "function") {
 transaction = new transaction_1.default(JSON.parse(transaction.toString()));
 }
-else if (typeof transaction === 'object' && !(transaction instanceof transaction_1.default)) {
+else if (typeof transaction === "object" &&
+!(transaction instanceof transaction_1.default)) {
 transaction = new transaction_1.default(transaction);
@@ -199,11 +202,11 @@ }
 data: {
-error: uploader.lastResponseError,
+error: uploader.lastResponseError
 }
 };
 }
-throw (e);
+throw e;
 }
 return {
 status: 200,
-statusText: 'OK',
+statusText: "OK",
 data: {}
@@ -241,3 +244,3 @@ };
 }
-if (typeof upload === 'string') {
+if (typeof upload === "string") {
 upload = await transaction_uploader_1.TransactionUploader.fromTransactionId(this.api, upload);
@@ -244,0 +247,0 @@ }
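The `getData()` flow these hunks touch works as follows: request `/{txid}` directly, and if the gateway answers 400 `tx_data_too_big`, reassemble the payload from chunks; since 1.9.x, having no data at all throws instead of returning an empty object. A condensed sketch of that flow (the thrown message is assumed, and `getError` is the helper sketched earlier):

```js
// Sketch of the download-with-chunked-fallback flow from the hunks above.
async function getTxData(api, chunks, id) {
  const resp = await api.get(`${id}`, { responseType: "arraybuffer" });
  let data = undefined;
  if (resp.status === 200) {
    data = new Uint8Array(resp.data);
  }
  if (resp.status === 400 && getError(resp) === "tx_data_too_big") {
    data = await chunks.downloadChunkedData(id);
  }
  if (!data) {
    throw new Error(`Unable to get data for ${id}`); // message assumed
  }
  return data;
}
```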
 {
 "name": "arweave",
-"version": "1.9.0",
+"version": "1.9.1",
 "description": "Arweave JS client library",
@@ -78,3 +78,3 @@ "main": "./node/index.js",
 "dependencies": {
-"arweave-asn1": "github:arweave-kyle/asn1.js",
+"asn1.js": "^5.4.1",
 "axios": "^0.19.2",
@@ -81,0 +81,0 @@ "base64-js": "^1.3.1",
@@ -30,3 +30,3 @@ # Arweave JS
 - [Get transaction data](#get-transaction-data)
-- [Decode data and tags from transactions](#decode-data-and-tags-from-transactions)
+- [Decode tags from transactions](#decode-tags-from-transactions)
 - [ArQL](#arql)
@@ -63,3 +63,3 @@ - [License](#license)
 ```js
-const Arweave = require('arweave/node');
+const Arweave = require('arweave');
@@ -75,3 +75,3 @@ const arweave = Arweave.init({
 ```js
-import Arweave from 'arweave/web';
+import Arweave from 'arweave';
@@ -415,2 +415,5 @@ // Since v1.5.1 you're now able to call the init function for the web version without options. The current path will be used by default, recommended.
+**Update since v1.9.0**
+Due to how the API has evolved over time and with larger transaction support, the `data` field is no longer _guaranteed_ to be returned from the network as part of the transaction json, therefore, it is not recommended that you use this function for fetching data anymore. You should update your applications to use [`arweave.transactions.getData()`](#get-transaction-data) instead, this will handle small transactions, as well as the reassembling of chunks for larger ones, it can also benefit from gateway optimisations.
 ```js
@@ -454,3 +457,3 @@ const transaction = arweave.transactions.get('hKMMPNh_emBf8v_at1tFzNYACisyMQNcKzeeE1QE9p8').then(transaction => {
 // Get the data decoded to a Uint8Array for binary data
-getData('bNbA3TEQVL60xlgCcqdz4ZPHFZ711cZ3hmkpGttDt_U', {decode: true}).then(data => {
+arweave.transactions.getData('bNbA3TEQVL60xlgCcqdz4ZPHFZ711cZ3hmkpGttDt_U', {decode: true}).then(data => {
 console.log(data);
@@ -467,3 +470,3 @@ // Uint8Array [10, 60, 33, 68, ...]
-#### Decode data and tags from transactions
+#### Decode tags from transactions
@@ -473,22 +476,2 @@ ```js
-// Use the get method to get a specific transaction field.
-console.log(transaction.get('signature'));
-// NLiRQSci56KVNk-x86eLT1TyF1ST8pzE-s7jdCJbW-V...
-console.log(transaction.get('data'));
-//CjwhRE9DVFlQRSBodG1sPgo8aHRtbCBsYW5nPSJlbiI-C...
-// Get the data base64 decoded as a Uint8Array byte array.
-console.log(transaction.get('data', {decode: true}));
-//Uint8Array[10,60,33,68,79,67,84,89,80,69...
-// Get the data base64 decoded as a string.
-console.log(transaction.get('data', {decode: true, string: true}));
-//<!DOCTYPE html>
-//<html lang="en">
-//<head>
-// <meta charset="UTF-8">
-// <meta name="viewport" content="width=device-width, initial-scale=1.0">
-// <title>ARWEAVE / PEER EXPLORER</title>
 transaction.get('tags').forEach(tag => {
@@ -495,0 +478,0 @@ let key = tag.get('name', {decode: true, string: true});
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
GitHub dependency
Supply chain risk: Contains a dependency which resolves to a GitHub URL. Dependencies fetched from GitHub specifiers are not immutable and can be used to inject untrusted code, or reduce the likelihood of a reproducible install.
Found 1 instance in 1 package
+ Added asn1.js@^5.4.1
+ Added asn1.js@5.4.1 (transitive)
+ Added bn.js@4.12.0 (transitive)
+ Added inherits@2.0.4 (transitive)
+ Added minimalistic-assert@1.0.1 (transitive)
+ Added safer-buffer@2.1.2 (transitive)