Comparing version 0.0.63 to 0.0.64

The diff is almost entirely a mechanical let → const conversion across the compiled modules, plus one prettier re-wrap in seed.ts, a new SkynetPortal type exported from types.js, and a reworked set of npm scripts in package.json.

@@ -12,3 +12,3 @@ import { verifyDownloadResponse } from "./apidownloadverifyresponse.js";
 // Get the Uint8Array of the input skylink.
-let [u8Link, errBTB] = b64ToBuf(skylink);
+const [u8Link, errBTB] = b64ToBuf(skylink);
 if (errBTB !== null) {
@@ -23,5 +23,5 @@ resolve([new Uint8Array(0), addContextToErr(errBTB, "unable to decode skylink")]);
 // Prepare the download call.
-let endpoint = "/skynet/trustless/basesector/" + skylink;
-let fileDataPtr = { fileData: new Uint8Array(0), err: null };
-let verifyFunction = function (response) {
+const endpoint = "/skynet/trustless/basesector/" + skylink;
+const fileDataPtr = { fileData: new Uint8Array(0), err: null };
+const verifyFunction = function (response) {
 return verifyDownloadResponse(response, u8Link, fileDataPtr);
@@ -41,3 +41,3 @@ };
 // Error is not a 404, return the logs as the error.
-let err = objAsString(result.logs);
+const err = objAsString(result.logs);
 resolve([new Uint8Array(0), addContextToErr(err, "unable to complete download")]);

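One detail worth noting in this hunk: fileDataPtr is mutated by the verify callback even though its binding is now const, which is safe because const freezes the binding, not the object. A minimal TypeScript sketch of the pattern (the verifier body here is illustrative, not the library's):

    type Err = string | null;
    interface FileDataPtr {
      fileData: Uint8Array;
      err: Err;
    }

    // `const` prevents reassignment of the binding; the object's fields can
    // still be written by the callback that closes over it.
    const fileDataPtr: FileDataPtr = { fileData: new Uint8Array(0), err: null };

    async function verifyFunction(response: Response): Promise<Err> {
      // A real verifier would check a Merkle proof before accepting the bytes.
      fileDataPtr.fileData = new Uint8Array(await response.arrayBuffer());
      return null;
    }
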
@@ -32,3 +32,3 @@ import { decodeU64 } from "./encoding.js";
 function verifyDownload(root, offset, fetchSize, buf) {
-let u8 = new Uint8Array(buf);
+const u8 = new Uint8Array(buf);
 // Input checking. If any of this is incorrect, its safe to blame the
@@ -45,5 +45,5 @@ // server because the skylink format fundamentally should enable these
 // verify the Merkle proof.
-let skylinkData = u8.slice(0, Number(fetchSize));
-let merkleProof = u8.slice(Number(fetchSize), u8.length);
-let errVBSRP = blake2bVerifySectorRangeProof(root, skylinkData, offset, fetchSize, merkleProof);
+const skylinkData = u8.slice(0, Number(fetchSize));
+const merkleProof = u8.slice(Number(fetchSize), u8.length);
+const errVBSRP = blake2bVerifySectorRangeProof(root, skylinkData, offset, fetchSize, merkleProof);
 if (errVBSRP !== null) {
@@ -66,14 +66,14 @@ return [nu8, true, addContextToErr(errVBSRP, "provided Merkle proof is not valid")];
 // fanout, and then add those values to 99 to get the fileData offset.
-let fileSizeBytes = skylinkData.slice(1, 9);
-let mdSizeBytes = skylinkData.slice(9, 17);
-let fanoutSizeBytes = skylinkData.slice(17, 25);
-let [fileSize, errFSDN] = decodeU64(fileSizeBytes);
+const fileSizeBytes = skylinkData.slice(1, 9);
+const mdSizeBytes = skylinkData.slice(9, 17);
+const fanoutSizeBytes = skylinkData.slice(17, 25);
+const [fileSize, errFSDN] = decodeU64(fileSizeBytes);
 if (errFSDN !== null) {
 return [nu8, false, addContextToErr(errFSDN, "unable to decode filesize")];
 }
-let [mdSize, errMDDN] = decodeU64(mdSizeBytes);
+const [mdSize, errMDDN] = decodeU64(mdSizeBytes);
 if (errMDDN !== null) {
 return [nu8, false, addContextToErr(errMDDN, "unable to decode metadata size")];
 }
-let [fanoutSize, errFODN] = decodeU64(fanoutSizeBytes);
+const [fanoutSize, errFODN] = decodeU64(fanoutSizeBytes);
 if (errFODN !== null) {
@@ -85,5 +85,5 @@ return [nu8, false, addContextToErr(errFODN, "unable to decode fanout size")];
 }
-let fileData = skylinkData.slice(Number(99n + mdSize + fanoutSize), Number(99n + mdSize + fanoutSize + fileSize));
+const fileData = skylinkData.slice(Number(99n + mdSize + fanoutSize), Number(99n + mdSize + fanoutSize + fileSize));
 return [fileData, false, null];
 }
 export { verifyDownload };

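The slices in this file imply a base-sector header layout: byte 0 is skipped, bytes [1, 9) hold the file size, [9, 17) the metadata size, [17, 25) the fanout size, and the file data begins after a 99-byte header plus the metadata and fanout. A sketch of that read path, assuming the library's decodeU64 is a little-endian u64 decode:

    // Little-endian u64 decode, standing in for the library's decodeU64.
    function decodeU64(b: Uint8Array): bigint {
      let v = 0n;
      for (let i = b.length - 1; i >= 0; i--) {
        v = (v << 8n) | BigInt(b[i]);
      }
      return v;
    }

    function parseBaseSector(skylinkData: Uint8Array) {
      const fileSize = decodeU64(skylinkData.slice(1, 9));
      const mdSize = decodeU64(skylinkData.slice(9, 17));
      const fanoutSize = decodeU64(skylinkData.slice(17, 25));
      // File data starts after the 99-byte header, the metadata, and the fanout.
      const start = Number(99n + mdSize + fanoutSize);
      const fileData = skylinkData.slice(start, start + Number(fileSize));
      return { fileSize, mdSize, fanoutSize, fileData };
    }
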
@@ -35,3 +35,3 @@ import { verifyDownload } from "./apidownloadverify.js";
 // link.
-let proofJSON = response.headers.get("skynet-proof");
+const proofJSON = response.headers.get("skynet-proof");
 if (proofJSON === null || proofJSON === undefined) {
@@ -41,3 +41,3 @@ resolve("response did not include resolver proofs");
 }
-let [proof, errPJ] = parseJSON(proofJSON);
+const [proof, errPJ] = parseJSON(proofJSON);
 if (errPJ !== null) {
@@ -70,3 +70,3 @@ resolve(addContextToErr(errPJ, "unable to parse resolver link proofs"));
 .then((buf) => {
-let [fileData, portalAtFault, errVD] = verifyDownload(u8Link.slice(2, 34), offset, fetchSize, buf);
+const [fileData, portalAtFault, errVD] = verifyDownload(u8Link.slice(2, 34), offset, fetchSize, buf);
 if (errVD !== null && portalAtFault) {

@@ -8,3 +8,3 @@ import { objAsString } from "./objAsString.js";
 if (pfm.remainingPortals.length === 0) {
-let newLog = "query failed because all portals have been tried";
+const newLog = "query failed because all portals have been tried";
 pfm.logs.push(newLog);
@@ -24,6 +24,6 @@ resolve({
 // Grab the portal and query.
-let portal = pfm.remainingPortals.shift();
-let query = portal + pfm.endpoint;
+const portal = pfm.remainingPortals.shift();
+const query = portal + pfm.endpoint;
 // Create a helper function for trying the next portal.
-let nextPortal = function (response, log) {
+const nextPortal = function (response, log) {
 if (response !== null) {
@@ -114,5 +114,5 @@ response
 function progressiveFetch(endpoint, fetchOpts, portals, verifyFunction) {
-let portalsCopy = [...portals];
+const portalsCopy = [...portals];
 return new Promise((resolve) => {
-let pfm = {
+const pfm = {
 endpoint,

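Condensed, the control flow in this file is: copy the caller's portal list, pop portals one at a time, let a caller-supplied verify function reject bad responses, and accumulate logs until a portal succeeds or the list is exhausted. A sketch under those assumptions (the library's actual pfm object carries more state):

    type VerifyFn = (response: Response) => Promise<string | null>;

    async function progressiveFetchSketch(
      endpoint: string,
      fetchOpts: RequestInit,
      portals: string[],
      verify: VerifyFn
    ): Promise<{ response: Response | null; logs: string[] }> {
      const remaining = [...portals]; // copy so the caller's array is untouched
      const logs: string[] = [];
      while (remaining.length > 0) {
        const portal = remaining.shift()!;
        try {
          const response = await fetch(portal + endpoint, fetchOpts);
          const err = await verify(response);
          if (err === null) {
            return { response, logs };
          }
          logs.push(`${portal}: ${err}`);
        } catch (err) {
          logs.push(`${portal}: ${String(err)}`);
        }
      }
      logs.push("query failed because all portals have been tried");
      return { response: null, logs };
    }
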
@@ -33,7 +33,7 @@ import { hexToBuf } from "./encoding.js";
 // Parse out the fields we need.
-let [entryData, errHTB] = hexToBuf(data.data);
+const [entryData, errHTB] = hexToBuf(data.data);
 if (errHTB !== null) {
 return "could not decode registry data from response";
 }
-let [sig, errHTB2] = hexToBuf(data.signature);
+const [sig, errHTB2] = hexToBuf(data.signature);
 if (errHTB2 !== null) {
@@ -57,3 +57,3 @@ return "could not decode signature from response";
 .then((str) => {
-let [obj, errPJ] = parseJSON(str);
+const [obj, errPJ] = parseJSON(str);
 if (errPJ !== null) {
@@ -63,3 +63,3 @@ resolve(addContextToErr(errPJ, "unable to parse registry response"));
 }
-let errVDR = verifyDecodedResp(resp, obj, pubkey, datakey);
+const errVDR = verifyDecodedResp(resp, obj, pubkey, datakey);
 if (errVDR !== null) {

@@ -34,3 +34,3 @@ import { encodeU64 } from "./encoding.js";
 // just going to be altering the final 8 bytes as we encrypt the file.
-let preimageHolder = new Uint8Array(key.length + 8);
+const preimageHolder = new Uint8Array(key.length + 8);
 preimageHolder.set(key, 0);
@@ -40,5 +40,5 @@ // Iterate over the data and encrypt each section.
 // Set the nonce for this shard and then create the pad data.
-let [iBytes] = encodeU64(BigInt(i));
+const [iBytes] = encodeU64(BigInt(i));
 preimageHolder.set(iBytes, key.length);
-let keyData = sha512(preimageHolder);
+const keyData = sha512(preimageHolder);
 // XOR the keyData with the data. Watch for out-of-bounds on the

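The pad construction here is: the preimage is the key followed by an 8-byte shard counter, and each sha512 output pads one 64-byte shard. A self-contained sketch, using node's crypto in place of the library's sha512 and a little-endian stand-in for its encodeU64:

    import { createHash } from "node:crypto";

    function sha512(data: Uint8Array): Uint8Array {
      return new Uint8Array(createHash("sha512").update(data).digest());
    }

    function encodeU64LE(n: bigint): Uint8Array {
      const out = new Uint8Array(8);
      for (let i = 0; i < 8; i++) {
        out[i] = Number((n >> BigInt(8 * i)) & 0xffn);
      }
      return out;
    }

    // In-place XOR stream cipher: identical code encrypts and decrypts.
    function otpEncryptSketch(key: Uint8Array, data: Uint8Array): void {
      const preimage = new Uint8Array(key.length + 8);
      preimage.set(key, 0);
      for (let i = 0; i * 64 < data.length; i++) {
        preimage.set(encodeU64LE(BigInt(i)), key.length); // only the nonce changes
        const pad = sha512(preimage);
        // XOR the pad with this shard, watching for the final partial shard.
        for (let j = 0; j < 64 && i * 64 + j < data.length; j++) {
          data[i * 64 + j] ^= pad[j];
        }
      }
    }
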
@@ -15,11 +15,11 @@ import { decodeU64, encodeU64 } from "./encoding.js";
 // Make a copy of the fullData so that we don't modify our inputs.
-let fullData = new Uint8Array(fullDataOrig);
+const fullData = new Uint8Array(fullDataOrig);
 // Create the encryption key.
-let truncHash = fullData.slice(0, 16);
-let encryptionTag = new TextEncoder().encode(":encryptionTag:" + inode);
-let keyPreimage = new Uint8Array(seed.length + truncHash.length + encryptionTag.length);
+const truncHash = fullData.slice(0, 16);
+const encryptionTag = new TextEncoder().encode(":encryptionTag:" + inode);
+const keyPreimage = new Uint8Array(seed.length + truncHash.length + encryptionTag.length);
 keyPreimage.set(seed, 0);
 keyPreimage.set(truncHash, seed.length);
 keyPreimage.set(encryptionTag, seed.length + truncHash.length);
-let encryptionKey = sha512(keyPreimage).slice(0, 16);
+const encryptionKey = sha512(keyPreimage).slice(0, 16);
 // Perform the decryption. otpEncrypt is just a fancy XOR, so it can be
@@ -30,3 +30,3 @@ // called for decryption.
 // decrypted data and comparing it to the truncHash.
-let verify = sha512(fullData.slice(16, fullData.length));
+const verify = sha512(fullData.slice(16, fullData.length));
 for (let i = 0; i < 16; i++) {
@@ -38,17 +38,17 @@ if (verify[i] !== truncHash[i]) {
 // Pull out the length prefixes for the metadata and data.
-let [metadataBI, errDU641] = decodeU64(fullData.slice(24, 32));
+const [metadataBI, errDU641] = decodeU64(fullData.slice(24, 32));
 if (errDU641 !== null) {
 return [{}, new Uint8Array(0), addContextToErr(errDU641, "unable to decode metadata length")];
 }
-let metadataLen = Number(metadataBI);
-let [fileDataBI, errDU642] = decodeU64(fullData.slice(32, 40));
+const metadataLen = Number(metadataBI);
+const [fileDataBI, errDU642] = decodeU64(fullData.slice(32, 40));
 if (errDU642 !== null) {
 return [{}, new Uint8Array(0), addContextToErr(errDU642, "unable to decode file data length")];
 }
-let fileDataLen = Number(fileDataBI);
+const fileDataLen = Number(fileDataBI);
 // Parse the metadata into an object. Note that parseJSON will read all
 // incoming numbers as bigints.
-let metadataBytes = fullData.slice(40, 40 + metadataLen);
-let metadataStr = new TextDecoder().decode(metadataBytes);
-let [metadata, errPJ] = parseJSON(metadataStr);
+const metadataBytes = fullData.slice(40, 40 + metadataLen);
+const metadataStr = new TextDecoder().decode(metadataBytes);
+const [metadata, errPJ] = parseJSON(metadataStr);
 if (errPJ !== null) {
@@ -58,3 +58,3 @@ return [{}, new Uint8Array(0), addContextToErr(errPJ, "unable to parse metadata")];
 // Extract the fileData and return
-let fileData = fullData.slice(40 + metadataLen, 40 + metadataLen + fileDataLen);
+const fileData = fullData.slice(40 + metadataLen, 40 + metadataLen + fileDataLen);
 return [metadata, fileData, null];
@@ -98,7 +98,7 @@ }
 // metadata before allocating the full data for the file.
-let [metadataStr, errJS] = jsonStringify(metadata);
+const [metadataStr, errJS] = jsonStringify(metadata);
 if (errJS !== null) {
 return [new Uint8Array(0), addContextToErr(errJS, "unable to stringify the metadata")];
 }
-let metadataBytes = new TextEncoder().encode(metadataStr);
+const metadataBytes = new TextEncoder().encode(metadataStr);
 // Establish the size of the raw file. There's 16 bytes for the hash of the
@@ -116,3 +116,3 @@ // data, then 8 bytes to establish the length of the metadata, then 8 bytes
 // and if they know the secret they can decrypt the full file anyway.
-let rawSize = BigInt(16 + 8 + 8 + 8 + metadataBytes.length + fileData.length);
+const rawSize = BigInt(16 + 8 + 8 + 8 + metadataBytes.length + fileData.length);
 // Get the padded size of the file and create the full data array. If a
@@ -125,3 +125,3 @@ // minFullSize has been passed in by the caller, ensure that the fullData
 }
-let fullData = new Uint8Array(Number(paddedSize));
+const fullData = new Uint8Array(Number(paddedSize));
 // Create the prefixes that we need for the full data. This includes the
@@ -133,11 +133,11 @@ // revision number, because the revision number is used as an extra step of
 // state.
-let [encodedRevision, errEU643] = encodeU64(revision);
+const [encodedRevision, errEU643] = encodeU64(revision);
 if (errEU643 !== null) {
 return [new Uint8Array(), addContextToErr(errEU643, "unable to encode revision number")];
 }
-let [encodedMetadataSize, errEU642] = encodeU64(BigInt(metadataBytes.length));
+const [encodedMetadataSize, errEU642] = encodeU64(BigInt(metadataBytes.length));
 if (errEU642 !== null) {
 return [new Uint8Array(), addContextToErr(errEU642, "unable to encode metadata size")];
 }
-let [encodedFileSize, errEU641] = encodeU64(BigInt(fileData.length));
+const [encodedFileSize, errEU641] = encodeU64(BigInt(fileData.length));
 if (errEU641 !== null) {
@@ -153,4 +153,4 @@ return [new Uint8Array(), addContextToErr(errEU641, "unable to encode file data size")];
 // Get the hash of the full data and set it in the metadata.
-let fullHash = sha512(fullData.slice(16, fullData.length));
-let truncHash = fullHash.slice(0, 16);
+const fullHash = sha512(fullData.slice(16, fullData.length));
+const truncHash = fullHash.slice(0, 16);
 fullData.set(truncHash, 0);
@@ -161,8 +161,8 @@ // Create the encryption key. We need to use the seed, the inode, and the
 // data will also change the encryption key.
-let encryptionTag = new TextEncoder().encode(":encryptionTag:" + inode);
-let keyPreimage = new Uint8Array(seed.length + truncHash.length + encryptionTag.length);
+const encryptionTag = new TextEncoder().encode(":encryptionTag:" + inode);
+const keyPreimage = new Uint8Array(seed.length + truncHash.length + encryptionTag.length);
 keyPreimage.set(seed, 0);
 keyPreimage.set(truncHash, seed.length);
 keyPreimage.set(encryptionTag, seed.length + truncHash.length);
-let encryptionKey = sha512(keyPreimage).slice(0, 16);
+const encryptionKey = sha512(keyPreimage).slice(0, 16);
 // Encrypt the file. Don't encrypt the truncHash, which needs to be visible

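The slices in the decode path imply an on-disk layout for an encrypted file. The offsets below are reconstructed from the code above; the revision field at [16, 24) is inferred from rawSize and the hash coverage, since that write isn't shown directly in the diff:

    // [ 0, 16)  truncHash: first 16 bytes of sha512(fullData[16..])
    // [16, 24)  encodedRevision (u64, inferred position)
    // [24, 32)  metadata length (u64)
    // [32, 40)  file data length (u64)
    // [40, 40 + mdLen)                      metadata, JSON-encoded
    // [40 + mdLen, 40 + mdLen + fileLen)    file data
    // ...       padding up to paddedSize
    //
    // Decryption XORs everything after the truncHash, then re-hashes that
    // region and compares against the truncHash to verify integrity.
    function extractFileData(fullData: Uint8Array): Uint8Array {
      const view = new DataView(fullData.buffer, fullData.byteOffset);
      const mdLen = Number(view.getBigUint64(24, true)); // little-endian u64 assumed
      const fileLen = Number(view.getBigUint64(32, true));
      return fullData.slice(40 + mdLen, 40 + mdLen + fileLen);
    }
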
@@ -25,2 +25,2 @@ export { downloadSkylink } from "./apidownloadskylink.js";
 export { jsonStringify } from "./stringifyjson.js";
-export { DataFn, Err, ErrFn, ErrTuple, KernelAuthStatus, RequestOverrideResponse } from "./types.js";
+export { DataFn, Err, ErrFn, ErrTuple, KernelAuthStatus, RequestOverrideResponse, SkynetPortal } from "./types.js";

@@ -21,3 +21,3 @@ import { blake2b } from "./blake2b.js";
 // cannot be added.
-let maxHeight = ps.subtreeHeights[ps.subtreeHeights.length - 1];
+const maxHeight = ps.subtreeHeights[ps.subtreeHeights.length - 1];
 if (subtreeHeight > maxHeight) {
@@ -36,9 +36,9 @@ return `cannot add a subtree that is taller ${subtreeHeight} than the smallest ${maxHeight} subtree in the stack`;
 // subtree, and push the result.
-let oldSTR = ps.subtreeRoots.pop();
+const oldSTR = ps.subtreeRoots.pop();
 ps.subtreeHeights.pop(); // We already have the height.
-let combinedRoot = new Uint8Array(65);
+const combinedRoot = new Uint8Array(65);
 combinedRoot[0] = 1;
 combinedRoot.set(oldSTR, 1);
 combinedRoot.set(subtreeRoot, 33);
-let newSubtreeRoot = blake2b(combinedRoot);
+const newSubtreeRoot = blake2b(combinedRoot);
 return blake2bAddSubtreeToProofStack(ps, newSubtreeRoot, subtreeHeight + 1n);
@@ -51,5 +51,5 @@ }
 }
-let taggedBytes = new Uint8Array(65);
+const taggedBytes = new Uint8Array(65);
 taggedBytes.set(leafBytes, 1);
-let subtreeRoot = blake2b(taggedBytes);
+const subtreeRoot = blake2b(taggedBytes);
 return blake2bAddSubtreeToProofStack(ps, subtreeRoot, 1n);
@@ -68,4 +68,4 @@ }
 while (ps.subtreeRoots.length !== 0) {
-let nextSubtreeRoot = ps.subtreeRoots.pop();
-let combinedRoot = new Uint8Array(65);
+const nextSubtreeRoot = ps.subtreeRoots.pop();
+const combinedRoot = new Uint8Array(65);
 combinedRoot[0] = 1;
@@ -107,3 +107,3 @@ combinedRoot.set(baseSubtreeRoot, 1);
 let maxTreeSize = 1n;
-let range = end - start + 1n;
+const range = end - start + 1n;
 while (maxTreeSize * 2n < range) {
@@ -128,3 +128,3 @@ maxTreeHeight++;
 // Compute the Merkle root.
-let ps = {
+const ps = {
 subtreeRoots: [],
@@ -172,3 +172,3 @@ subtreeHeights: [],
 // data.
-let ps = {
+const ps = {
 subtreeRoots: [],
@@ -183,10 +183,10 @@ subtreeHeights: [],
 }
-let [height, size, errNST] = nextSubtreeHeight(currentOffset / 64n, rangeStart / 64n);
+const [height, size, errNST] = nextSubtreeHeight(currentOffset / 64n, rangeStart / 64n);
 if (errNST !== null) {
 return addContextToErr(errNST, "error computing subtree height of initial proof stack");
 }
-let newSubtreeRoot = new Uint8Array(32);
+const newSubtreeRoot = new Uint8Array(32);
 newSubtreeRoot.set(proof.slice(proofOffset, proofOffset + 32), 0);
 proofOffset += 32;
-let errSPS = blake2bAddSubtreeToProofStack(ps, newSubtreeRoot, height);
+const errSPS = blake2bAddSubtreeToProofStack(ps, newSubtreeRoot, height);
 if (errSPS !== null) {
@@ -200,3 +200,3 @@ return addContextToErr(errSPS, "error adding subtree to initial proof stack");
 while (data.length > dataOffset) {
-let errLBPS = blake2bAddLeafBytesToProofStack(ps, data.slice(dataOffset, dataOffset + 64));
+const errLBPS = blake2bAddLeafBytesToProofStack(ps, data.slice(dataOffset, dataOffset + 64));
 if (errLBPS !== null) {
@@ -209,3 +209,3 @@ return addContextToErr(errLBPS, "error adding leaves to proof stack");
 // Consume proof elements until the entire sector is proven.
-let sectorEnd = 4194304n;
+const sectorEnd = 4194304n;
 while (currentOffset < sectorEnd) {
@@ -215,10 +215,10 @@ if (proof.length < proofOffset + 32) {
 }
-let [height, size, errNST] = nextSubtreeHeight(currentOffset / 64n, sectorEnd / 64n);
+const [height, size, errNST] = nextSubtreeHeight(currentOffset / 64n, sectorEnd / 64n);
 if (errNST !== null) {
 return addContextToErr(errNST, "error computing subtree height of trailing proof stack");
 }
-let newSubtreeRoot = new Uint8Array(32);
+const newSubtreeRoot = new Uint8Array(32);
 newSubtreeRoot.set(proof.slice(proofOffset, proofOffset + 32), 0);
 proofOffset += 32;
-let errSPS = blake2bAddSubtreeToProofStack(ps, newSubtreeRoot, height);
+const errSPS = blake2bAddSubtreeToProofStack(ps, newSubtreeRoot, height);
 if (errSPS !== null) {

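The proof-stack hunks make the node-hashing convention visible: a 64-byte leaf is hashed inside a 65-byte buffer whose first byte stays 0x00, while two subtree roots are joined under a 0x01 prefix. A sketch with the hash passed in, since the library ships its own blake2b:

    type Hash32 = (data: Uint8Array) => Uint8Array; // 32-byte digest, e.g. blake2b

    function leafRoot(hash: Hash32, leafBytes: Uint8Array): Uint8Array {
      const tagged = new Uint8Array(65); // tagged[0] is left 0x00: the leaf tag
      tagged.set(leafBytes, 1);
      return hash(tagged);
    }

    function joinSubtrees(hash: Hash32, left: Uint8Array, right: Uint8Array): Uint8Array {
      const combined = new Uint8Array(65);
      combined[0] = 1; // interior-node tag
      combined.set(left, 1);
      combined.set(right, 33);
      return hash(combined);
    }

The prefix byte is what prevents a leaf from being confused with an interior node, so a proof cannot pass off file contents as tree structure or vice versa.
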
@@ -19,7 +19,7 @@ import { blake2b } from "./blake2b.js";
 // Build the encoded data.
-let [encodedData, errEPB] = encodePrefixedBytes(data);
+const [encodedData, errEPB] = encodePrefixedBytes(data);
 if (errEPB !== null) {
 return [nu8, addContextToErr(errEPB, "unable to encode provided registry data")];
 }
-let [encodedRevision, errEU64] = encodeU64(revision);
+const [encodedRevision, errEU64] = encodeU64(revision);
 if (errEU64 !== null) {
@@ -29,9 +29,9 @@ return [nu8, addContextToErr(errEU64, "unable to encode the revision number")];
 // Build the signing data.
-let dataToSign = new Uint8Array(32 + 8 + data.length + 8);
+const dataToSign = new Uint8Array(32 + 8 + data.length + 8);
 dataToSign.set(dataKey, 0);
 dataToSign.set(encodedData, 32);
 dataToSign.set(encodedRevision, 32 + 8 + data.length);
-let sigHash = blake2b(dataToSign);
+const sigHash = blake2b(dataToSign);
 // Sign the data.
-let [sig, errS] = ed25519Sign(sigHash, secretKey);
+const [sig, errS] = ed25519Sign(sigHash, secretKey);
 if (errS !== null) {
@@ -56,3 +56,3 @@ return [nu8, addContextToErr(errS, "unable to sign registry entry")];
 // determined by the Sia protocol.
-let encoding = new Uint8Array(16 + 8 + 32 + 32);
+const encoding = new Uint8Array(16 + 8 + 32 + 32);
 // Set the specifier.
@@ -67,3 +67,3 @@ encoding[0] = "e".charCodeAt(0);
 // Set the pubkey.
-let [encodedLen, errU64] = encodeU64(32n);
+const [encodedLen, errU64] = encodeU64(32n);
 if (errU64 !== null) {
@@ -76,3 +76,3 @@ return [nu8, addContextToErr(errU64, "unable to encode pubkey length")];
 // Get the final ID by hashing the encoded data.
-let id = blake2b(encoding);
+const id = blake2b(encoding);
 return [id, null];
@@ -82,3 +82,3 @@ }
 function entryIDToSkylink(entryID) {
-let v2Skylink = new Uint8Array(34);
+const v2Skylink = new Uint8Array(34);
 v2Skylink.set(entryID, 2);
@@ -122,7 +122,7 @@ v2Skylink[0] = 1;
 // Generate a unique set of entropy using the seed and keypairTag.
-let keypairTag = new TextEncoder().encode(keypairTagStr);
-let entropyInput = new Uint8Array(keypairTag.length + seed.length);
+const keypairTag = new TextEncoder().encode(keypairTagStr);
+const entropyInput = new Uint8Array(keypairTag.length + seed.length);
 entropyInput.set(seed, 0);
 entropyInput.set(keypairTag, seed.length);
-let keypairEntropy = sha512(entropyInput);
+const keypairEntropy = sha512(entropyInput);
 // Use the seed to dervie the datakey for the registry entry. We use
@@ -138,5 +138,5 @@ // a different tag to ensure that the datakey is independently random, such
 // "321"] which could end up with the same datakey.
-let datakeyTag = new TextEncoder().encode(datakeyTagStr);
-let datakeyInput = new Uint8Array(seed.length + 1 + keypairTag.length + datakeyTag.length);
-let keypairLen = new Uint8Array(1);
+const datakeyTag = new TextEncoder().encode(datakeyTagStr);
+const datakeyInput = new Uint8Array(seed.length + 1 + keypairTag.length + datakeyTag.length);
+const keypairLen = new Uint8Array(1);
 keypairLen[0] = keypairTag.length;
@@ -147,9 +147,9 @@ datakeyInput.set(seed);
 datakeyInput.set(datakeyTag, seed.length + 1 + keypairTag.length);
-let datakeyEntropy = sha512(datakeyInput);
+const datakeyEntropy = sha512(datakeyInput);
 // Create the private key for the registry entry.
-let [keypair, errKPFE] = ed25519KeypairFromEntropy(keypairEntropy.slice(0, 32));
+const [keypair, errKPFE] = ed25519KeypairFromEntropy(keypairEntropy.slice(0, 32));
 if (errKPFE !== null) {
 return [nkp, nu8, addContextToErr(errKPFE, "unable to derive keypair")];
 }
-let datakey = datakeyEntropy.slice(0, 32);
+const datakey = datakeyEntropy.slice(0, 32);
 return [keypair, datakey, null];
@@ -159,17 +159,17 @@ }
 function verifyRegistrySignature(pubkey, datakey, data, revision, sig) {
-let [encodedData, errEPB] = encodePrefixedBytes(data);
+const [encodedData, errEPB] = encodePrefixedBytes(data);
 if (errEPB !== null) {
 return false;
 }
-let [encodedRevision, errU64] = encodeU64(revision);
+const [encodedRevision, errU64] = encodeU64(revision);
 if (errU64 !== null) {
 return false;
 }
-let dataToVerify = new Uint8Array(32 + 8 + data.length + 8);
+const dataToVerify = new Uint8Array(32 + 8 + data.length + 8);
 dataToVerify.set(datakey, 0);
 dataToVerify.set(encodedData, 32);
 dataToVerify.set(encodedRevision, 32 + 8 + data.length);
-let sigHash = blake2b(dataToVerify);
+const sigHash = blake2b(dataToVerify);
 return ed25519Verify(sigHash, sig, pubkey);
 }
 export { computeRegistrySignature, deriveRegistryEntryID, entryIDToSkylink, skylinkToResolverEntryData, taggedRegistryEntryKeys, verifyRegistrySignature, };

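From computeRegistrySignature and verifyRegistrySignature above, the signed message is blake2b over the datakey, a length-prefixed copy of the entry data, and the revision. A sketch of that preimage, assuming encodePrefixedBytes and encodeU64 are little-endian (the hash is passed in since the library ships its own blake2b):

    type Hash = (data: Uint8Array) => Uint8Array;

    function registrySigHash(
      blake2b: Hash,
      datakey: Uint8Array, // 32 bytes
      data: Uint8Array,
      revision: bigint
    ): Uint8Array {
      const buf = new Uint8Array(32 + 8 + data.length + 8);
      const view = new DataView(buf.buffer);
      buf.set(datakey, 0);
      view.setBigUint64(32, BigInt(data.length), true); // u64 length prefix (LE assumed)
      buf.set(data, 40);
      view.setBigUint64(40 + data.length, revision, true); // u64 revision (LE assumed)
      return blake2b(buf);
    }

The resulting 32-byte hash is what gets signed with ed25519, so verification only needs the same preimage plus the public key.
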
@@ -135,3 +135,6 @@ // seed.ts implements mysky seed as defined in
 if (seedWords.length !== SEED_ENTROPY_WORDS) {
-return [new Uint8Array(0), `Seed words should have length ${SEED_ENTROPY_WORDS} but has length ${seedWords.length}`];
+return [
+new Uint8Array(0),
+`Seed words should have length ${SEED_ENTROPY_WORDS} but has length ${seedWords.length}`,
+];
 }

@@ -14,3 +14,3 @@ // Helper consts that make it easier to return empty values when returning an
 // Extract the version.
-let version = (bitfield & 3) + 1;
+const version = (bitfield & 3) + 1;
 // Only versions 1 and 2 are recognized.
@@ -47,3 +47,3 @@ if (version !== 1 && version !== 2) {
 // Determine the offset and fetchSize increment.
-let offsetIncrement = 4096 << mode;
+const offsetIncrement = 4096 << mode;
 let fetchSizeIncrement = 4096;
@@ -58,6 +58,6 @@ let fetchSizeStart = 0;
 fetchSizeBits++; // semantic upstep, range should be [1,8] not [0,8).
-let fetchSize = fetchSizeBits * fetchSizeIncrement + fetchSizeStart;
+const fetchSize = fetchSizeBits * fetchSizeIncrement + fetchSizeStart;
 bitfield = bitfield >> 3;
 // The remaining bits determine the offset.
-let offset = bitfield * offsetIncrement;
+const offset = bitfield * offsetIncrement;
 if (offset + fetchSize > 1 << 22) {
@@ -77,3 +77,3 @@ return [0n, 0n, 0n, "provided skylink has an invalid v1 bitfield"];
 }
-let dataSize = Number(dataSizeBI);
+const dataSize = Number(dataSizeBI);
 // Determine the mode for the file. The mode is determined by the
@@ -93,4 +93,4 @@ // dataSize.
 else {
-let step = 1 << (11 + mode);
-let target = dataSize - (1 << (14 + mode));
+const step = 1 << (11 + mode);
+const target = dataSize - (1 << (14 + mode));
 if (target !== 0) {
@@ -105,3 +105,3 @@ downloadNumber = Math.floor((target - 1) / step);
 // simplify the code here.
-let bitfield = new Uint8Array(2);
+const bitfield = new Uint8Array(2);
 if (mode === 7) {
@@ -157,3 +157,3 @@ // 0 0 0 X X X 0 1|1 1 1 1 1 1 0 0
 // Swap the byte order.
-let zero = bitfield[0];
+const zero = bitfield[0];
 bitfield[0] = bitfield[1];

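The v1 arithmetic in these hunks can be worked by hand. The mode extraction isn't shown in this diff, so assume mode = 0 and fetchSizeStart = 0; the 4 MiB cap (1 << 22) comes from the range check above:

    const mode = 0;
    const offsetIncrement = 4096 << mode;      // 4096-byte offset granularity
    let fetchSizeBits = 7;                     // example value in [0, 8)
    fetchSizeBits++;                           // semantic upstep to [1, 8]
    const fetchSizeIncrement = 4096;
    const fetchSizeStart = 0;                  // 0 for mode 0
    const fetchSize = fetchSizeBits * fetchSizeIncrement + fetchSizeStart; // 32768
    const offsetBits = 10;                     // whatever remains in the bitfield
    const offset = offsetBits * offsetIncrement; // 40960
    console.log(offset + fetchSize <= 1 << 22);  // true: inside the 4 MiB sector

Larger modes trade offset granularity for larger fetch sizes: each step up doubles offsetIncrement, so the same number of offset bits can address the whole sector at coarser alignment.
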
@@ -25,3 +25,3 @@ import { addContextToErr } from "./err.js";
 }
-let pathElems = path.split("/");
+const pathElems = path.split("/");
 for (let i = 0; i < pathElems.length; i++) {
@@ -50,3 +50,3 @@ if (pathElems[i] === ".") {
 }
-let errVSP = validateSkyfilePath(metadata.Filename);
+const errVSP = validateSkyfilePath(metadata.Filename);
 if (errVSP !== null) {
@@ -100,3 +100,3 @@ return addContextToErr(errVSP, "metadata.Filename does not have a valid path");
 if (typeof skylink === "string") {
-let [buf, err] = b64ToBuf(skylink);
+const [buf, err] = b64ToBuf(skylink);
 if (err !== null) {
@@ -114,3 +114,3 @@ return false;
 }
-let [, , , errPSB] = parseSkylinkBitfield(u8Skylink);
+const [, , , errPSB] = parseSkylinkBitfield(u8Skylink);
 if (errPSB !== null) {

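Condensed, the string-skylink acceptance path above is: base64-decode, require the 34-byte length (2 bitfield bytes plus a 32-byte root), then parse the bitfield. A self-contained sketch; the base64url handling and the little-endian bitfield read are assumptions standing in for b64ToBuf and parseSkylinkBitfield:

    function decodeSkylink(skylink: string): Uint8Array | null {
      try {
        // Skylinks use base64url; normalize before decoding with atob.
        const b64 = skylink.replace(/-/g, "+").replace(/_/g, "/");
        return Uint8Array.from(atob(b64), (c) => c.charCodeAt(0));
      } catch {
        return null; // not valid base64
      }
    }

    function validateSkylinkString(skylink: string): boolean {
      const buf = decodeSkylink(skylink);
      if (buf === null || buf.length !== 34) {
        return false; // 2 bitfield bytes + 32-byte root
      }
      const bitfield = buf[0] | (buf[1] << 8); // u16, little-endian (assumed)
      const version = (bitfield & 3) + 1;
      return version === 1 || version === 2;
    }
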
@@ -34,7 +34,7 @@ import { b64ToBuf, hexToBuf } from "./encoding.js";
 }
-let dataStr = proof.data;
+const dataStr = proof.data;
 if (typeof proof.datakey !== "string") {
 return [nu8, "datakey is malformed"];
 }
-let datakeyStr = proof.datakey;
+const datakeyStr = proof.datakey;
 if (proof.publickey.algorithm !== "ed25519") {
@@ -46,3 +46,3 @@ return [nu8, "pubkey has unrecognized algorithm"];
 }
-let pubkeyStr = proof.publickey.key;
+const pubkeyStr = proof.publickey.key;
 if (typeof proof.signature !== "string") {
@@ -54,22 +54,22 @@ return [nu8, "signature is malformed"];
 }
-let sigStr = proof.signature;
+const sigStr = proof.signature;
 if (typeof proof.revision !== "bigint") {
 return [nu8, "revision is malformed"];
 }
-let revision = proof.revision;
+const revision = proof.revision;
 // Decode all of the fields. They are presented in varied types and
 // encodings.
-let [data, errD] = hexToBuf(dataStr);
+const [data, errD] = hexToBuf(dataStr);
 if (errD !== null) {
 return [nu8, addContextToErr(errD, "data is invalid hex")];
 }
-let [datakey, errDK] = hexToBuf(datakeyStr);
+const [datakey, errDK] = hexToBuf(datakeyStr);
 if (errDK !== null) {
 return [nu8, addContextToErr(errDK, "datakey is invalid hex")];
 }
-let [pubkey, errPK] = b64ToBuf(pubkeyStr);
+const [pubkey, errPK] = b64ToBuf(pubkeyStr);
 if (errPK !== null) {
 return [nu8, addContextToErr(errPK, "pubkey key is invalid base64")];
 }
-let [sig, errS] = hexToBuf(sigStr);
+const [sig, errS] = hexToBuf(sigStr);
 if (errS !== null) {
@@ -85,7 +85,7 @@ return [nu8, addContextToErr(errS, "signature is invalid hex")];
 // the skylink.
-let [entryID, errREID] = deriveRegistryEntryID(pubkey, datakey);
+const [entryID, errREID] = deriveRegistryEntryID(pubkey, datakey);
 if (errREID !== null) {
 return [nu8, addContextToErr(errREID, "proof pubkey is malformed")];
 }
-let linkID = skylink.slice(2, 34);
+const linkID = skylink.slice(2, 34);
 for (let i = 0; i < entryID.length; i++) {
@@ -131,3 +131,3 @@ if (entryID[i] !== linkID[i]) {
 }
-let [version, , , errPSB] = parseSkylinkBitfield(skylink);
+const [version, , , errPSB] = parseSkylinkBitfield(skylink);
 if (errPSB !== null) {

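Reduced to its chain of obligations, the resolver-proof check above is: decode the proof fields, confirm the proof's keypair hashes to the entry ID embedded in the v2 skylink, then verify the registry signature over the proof's data and revision. A sketch with the library's primitives injected and error handling collapsed to booleans:

    interface RegistryProof {
      data: Uint8Array;      // decoded from hex
      datakey: Uint8Array;   // decoded from hex
      pubkey: Uint8Array;    // decoded from base64
      sig: Uint8Array;       // decoded from hex
      revision: bigint;
    }

    function verifyResolverProofSketch(
      skylink: Uint8Array, // 34-byte v2 skylink
      proof: RegistryProof,
      deriveRegistryEntryID: (pubkey: Uint8Array, datakey: Uint8Array) => Uint8Array,
      verifyRegistrySignature: (p: RegistryProof) => boolean
    ): boolean {
      // 1. The proof's keypair must hash to the entry ID inside the skylink.
      const entryID = deriveRegistryEntryID(proof.pubkey, proof.datakey);
      const linkID = skylink.slice(2, 34);
      for (let i = 0; i < entryID.length; i++) {
        if (entryID[i] !== linkID[i]) return false;
      }
      // 2. The registry signature must be valid for (datakey, data, revision).
      return verifyRegistrySignature(proof);
    }
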
@@ -7,3 +7,3 @@ import { addContextToErr } from "./err.js";
 try {
-let str = JSON.stringify(obj, (_, v) => {
+const str = JSON.stringify(obj, (_, v) => {
 if (typeof v === "bigint") {

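The replacer here is what lets jsonStringify handle bigints, which plain JSON.stringify rejects. A sketch with an example; note that converting through Number() loses precision past 2^53 - 1, a tradeoff this sketch shares with the code above:

    function jsonStringifySketch(obj: unknown): string {
      return JSON.stringify(obj, (_, v) => (typeof v === "bigint" ? Number(v) : v));
    }

    console.log(jsonStringifySketch({ revision: 5n, name: "inode-1" }));
    // {"revision":5,"name":"inode-1"}
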
@@ -10,2 +10,6 @@ declare type DataFn = (data?: any) => void;
 }
+interface SkynetPortal {
+url: string;
+name: string;
+}
 interface RequestOverrideResponse {
@@ -16,2 +20,2 @@ override: boolean;
 }
-export { DataFn, ErrFn, Err, ErrTuple, KernelAuthStatus, RequestOverrideResponse };
+export { DataFn, ErrFn, Err, ErrTuple, KernelAuthStatus, RequestOverrideResponse, SkynetPortal };

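The only new API surface in this release is the SkynetPortal shape above. A usage sketch; the example value is illustrative, since the library does not ship a portal list:

    interface SkynetPortal {
      url: string;
      name: string;
    }

    const examplePortal: SkynetPortal = {
      url: "https://example-portal.net", // hypothetical portal URL
      name: "example",
    };
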
package.json
 {
   "name": "libskynet",
-  "version": "0.0.63",
+  "version": "0.0.64",
   "author": "Skynet Labs",
   "description": "helper library to interact with skynet's low level primitives",
   "main": "dist/index.js",
   "type": "module",
   "types": "dist/index.d.js",
   "publishConfig": {
     "access": "public"
   },
   "files": [
     "/dist"
   ],
   "scripts": {
+    "lint": "prettier -l 'src' 'src-test' '*.js' '*.json' && eslint 'src' 'src-test' '*.js' '*.json'",
     "clean": "node ./clean.js",
-    "prettier": "prettier --write .",
-    "eslint": "eslint src --fix",
-    "lint": "npm run clean && npm run prettier && npm run eslint",
+    "deps": "npm audit fix",
+    "update-deps": "npm run clean && npm run deps && npm install",
+    "update-lint": "prettier -w 'src' 'src-test' '*.js' '*.json' && eslint 'src' 'src-test' '*.js' '*.json' --fix",
+    "update": "npm run update-deps && npm run update-lint",
     "test": "tsc && tsc --project tsconfig.test.json && node ./dist-test/src-test/test.js",
-    "prepare": "npm run lint && npm run test"
+    "build": "npm run clean && npm install && npm run lint && tsc",
+    "prepublishOnly": "npm run clean && npm install && npm run lint && npm run test"
   },
   "devDependencies": {
     "@types/node": "^17.0.23",
     "@typescript-eslint/eslint-plugin": "^5.19.0",
     "eslint": "^8.13.0",
     "prettier": "^2.6.2",
     "skynet-js": "^4.3.0",
     "tweetnacl": "^1.0.3"
   }
 }

License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package