bcb-stream-parser
Comparing version 0.1.1 to 0.1.2
package.json
 {
 "name": "bcb-stream-parser",
-"version": "0.1.1",
+"version": "0.1.2",
 "description": "Parse BitCoin-Block via NodeJS.ReadableStream",
@@ -5,0 +5,0 @@ "main": "index.cjs",
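Only the version field changes here; `main` already points at `index.cjs`, and the substantive change is in the generated parser.js below, which is rewritten from ES-module syntax into a CommonJS bundle with explicit `exports` assignments. A minimal consumer-side sketch, assuming `index.cjs` re-exports the names that the rewritten file attaches to `exports` (see the final lines of the parser.js diff); the `dumpBlock` helper and the `read` wiring are illustrative, not part of the package:

'use strict';
const { parser } = require('bcb-stream-parser');

// `read` is assumed to be an async byte reader, (size) => Promise<Buffer>;
// wiring it to a real ReadableStream (e.g. via async-readable) is out of scope here.
async function dumpBlock(read) {
    // The first value yielded is the block header: in the diff below,
    // parser() calls readHeader(read)() first and then `yield { ...header }`.
    for await (const entry of parser({ read })) {
        console.log(entry);
    }
}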
parser.js
@@ -1,6 +0,53 @@
-import { bond } from 'proxy-bind';
-import { toReadableStream } from 'async-readable';
-import { apply, complement, compose, identity, forEach, head, isEmpty, not, tap, thunkify, times, } from 'ramda';
-import { thunkLooping, mapIter, toHex, copy, blockHash, reverseBuffer, } from './utils';
-export function readCompactSize(read) {
+'use strict';
+Object.defineProperty(exports, '__esModule', { value: true });
+const proxyBind = require('proxy-bind');
+const asyncReadable = require('async-readable');
+const ramda = require('ramda');
+const crypto = require('crypto');
+function thunkLooping(thunk) {
+return Object.freeze({
+async array(size) {
+const list = [];
+while (size--) {
+list.push(await thunk());
+}
+return list;
+},
+async *generator(size) {
+while (size--) {
+yield thunk();
+}
+},
+});
+}
+function mapIter(mapper) {
+return async function* (source) {
+let index = 0;
+for await (const item of source) {
+yield mapper(item, index++);
+}
+};
+}
+function toHex(buffer, prefix = '') {
+return prefix + buffer.toString('hex');
+}
+function copy(buffer) {
+const tmp = Buffer.allocUnsafe(buffer.length);
+buffer.copy(tmp);
+return tmp;
+}
+function sha256(content) {
+return crypto.createHash('sha256').update(content).digest();
+}
+function blockHash(content) {
+return toHex(reverseBuffer(sha256(sha256(content))));
+}
+function reverseBuffer(buffer) {
+return buffer.reverse();
+}
+function readCompactSize(read) {
 return async function () {
@@ -16,3 +63,3 @@ const size = (await read(1)).readUInt8(0);
 }
-export function readVerHex(read) {
+function readVerHex(read) {
 const compactSizeThunk = readCompactSize(read);
@@ -27,3 +74,3 @@ return async function () {
 }
-export function readInput(read) {
+function readInput(read) {
 const verStrThunk = readVerHex(read);
@@ -44,3 +91,3 @@ return async function () {
 }
-export function readOutput(read) {
+function readOutput(read) {
 const verStrThunk = readVerHex(read);
@@ -56,3 +103,3 @@ return async function () {
 }
-export function readWitness(read) {
+function readWitness(read) {
 const compactSizeThunk = readCompactSize(read);
@@ -65,3 +112,3 @@ const verStrThunk = readVerHex(read);
 }
-export function readTransaction(readOrigin) {
+function readTransaction(readOrigin) {
 const acc = bufferCounter(readOrigin);
@@ -85,3 +132,3 @@ const { read } = acc;
 }
-const { map: mapInputs } = bond(await loopInput(inputLen));
+const { map: mapInputs } = proxyBind.bond(await loopInput(inputLen));
 const outputs = await loopOutput(await compactSizeThunk());
@@ -115,3 +162,3 @@ let inputsWithWitness;
 hasWitness: false,
-inputs: mapInputs(identity),
+inputs: mapInputs(ramda.identity),
 };
@@ -121,10 +168,10 @@ }
 }
-export function bufferCounter(read) {
+function bufferCounter(read) {
 const chunks = [];
 const marker = [];
 let flag = false;
-const notEmpty = complement(isEmpty);
-const mirror = tap(bond(chunks).push);
+const notEmpty = ramda.complement(ramda.isEmpty);
+const mirror = ramda.tap(proxyBind.bond(chunks).push);
 const concatChunks = () => Buffer.concat(chunks);
-const patchChunksBy = forEach(((x) => (i) => chunks[i] = x)(Buffer.alloc(0)));
+const patchChunksBy = ramda.forEach(((x) => (i) => chunks[i] = x)(Buffer.alloc(0)));
 const markChunksFromBack = (offset) => marker.push(chunks.length - 1 - offset);
@@ -143,3 +190,3 @@ return Object.freeze({
 pop(n) {
-times(markChunksFromBack, n);
+ramda.times(markChunksFromBack, n);
 },
@@ -171,6 +218,6 @@ reset() {
 }
-export function parseCoinbase(transaction) {
+function parseCoinbase(transaction) {
 const { hash, inputs: [input], outputs: [output] } = transaction;
 const { txId, vOut, script } = input;
-if (not(/^0{64}$/.test(txId) && vOut === -1)) {
+if (ramda.not(/^0{64}$/.test(txId) && vOut === -1)) {
 return;
@@ -189,3 +236,3 @@ }
 }
-export function readHeader(read) {
+function readHeader(read) {
 const pointer = (i) => (step) => (i += step, [i - step, i]);
@@ -195,6 +242,6 @@ const compactSizeThunk = readCompactSize(read);
 const chunk = await read(80);
-const { readUInt32LE, slice } = bond(chunk);
+const { readUInt32LE, slice } = proxyBind.bond(chunk);
 const p = pointer(0);
-const bytesHex = compose(toHex, reverseBuffer, apply(slice), p);
-const uint32LE = thunkify(compose(readUInt32LE, head, p))(4);
+const bytesHex = ramda.compose(toHex, reverseBuffer, ramda.apply(slice), p);
+const uint32LE = ramda.thunkify(ramda.compose(readUInt32LE, ramda.head, p))(4);
 return {
@@ -213,4 +260,4 @@ type: 'HEADER',
 }
-export const reader = toReadableStream(parser);
-export async function* parser({ read, off = () => { } }) {
+const reader = asyncReadable.toReadableStream(parser);
+async function* parser({ read, off = () => { } }) {
 const header = await readHeader(read)();
@@ -232,1 +279,13 @@ yield { ...header };
 }
+exports.bufferCounter = bufferCounter;
+exports.parseCoinbase = parseCoinbase;
+exports.parser = parser;
+exports.readCompactSize = readCompactSize;
+exports.readHeader = readHeader;
+exports.readInput = readInput;
+exports.readOutput = readOutput;
+exports.readTransaction = readTransaction;
+exports.readVerHex = readVerHex;
+exports.readWitness = readWitness;
+exports.reader = reader;
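The rewrite also inlines the helpers that 0.1.1 imported from `./utils`. The `blockHash` chain above (SHA-256 applied twice, byte-reversed, hex-encoded) follows the standard Bitcoin block-hash convention for the 80-byte header; a standalone sketch of the same computation, using a zero-filled placeholder header instead of real block data:

'use strict';
const crypto = require('crypto');

const sha256 = (buf) => crypto.createHash('sha256').update(buf).digest();

// Double SHA-256 of the 80-byte header; the digest is a fresh Buffer,
// so reversing it in place (internal byte order -> display order) is safe.
function blockHash(header) {
    return sha256(sha256(header)).reverse().toString('hex');
}

// Placeholder 80-byte header; in the parser this comes from `await read(80)`.
const rawHeader = Buffer.alloc(80);
console.log(blockHash(rawHeader));

The inlined `reverseBuffer` in the diff relies on the same in-place `Buffer.prototype.reverse()`.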
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package