@ipld/car - npm package version comparison

Comparing version 0.0.1 to 0.1.0


cjs/browser-test/test-writer.js

@@ -8,21 +8,25 @@ 'use strict';

const {toHex} = multiformats.bytes;
function concatBytes(chunks) {
const length = chunks.reduce((p, c) => p + c.length, 0);
const bytes = new Uint8Array(length);
let off = 0;
for (const chunk of chunks) {
bytes.set(chunk, off);
off += chunk.length;
}
return bytes;
}
function collector(iterable) {
return (async () => {
const chunks = [];
let length = 0;
const chunks = [];
const cfn = (async () => {
for await (const chunk of iterable) {
chunks.push(chunk);
length += chunk.length;
}
const bytes = new Uint8Array(length);
length = 0;
for (const chunk of chunks) {
bytes.set(chunk, length);
length += chunk.length;
}
return bytes;
return concatBytes(chunks);
})();
return cfn;
}
describe('CarWriter', () => {
let cborBlocks;
let allBlocks;
let allBlocksFlattened;

@@ -36,2 +40,3 @@ let roots;

cborBlocks = data.cborBlocks;
allBlocks = data.allBlocks;
allBlocksFlattened = data.allBlocksFlattened;

@@ -90,3 +95,3 @@ roots = [

common.assert.strictEqual(written, false);
await new Promise(resolve => resolve());
await Promise.resolve();
assertCarData(bytes);

@@ -151,2 +156,30 @@ });

});
it('appender', async () => {
let writerOut = writer['default'].create(roots);
let collection = collector(writerOut.out);
await writerOut.writer.close();
const headerBytes = await collection;
const append = async index => {
writerOut = writer['default'].createAppender();
collection = collector(writerOut.out);
for (const block of allBlocks[index][1]) {
await writerOut.writer.put(block);
}
await writerOut.writer.close();
return collection;
};
const rawBytes = await append(0);
const pbBytes = await append(1);
const cborBytes = await append(2);
common.assert(rawBytes.length > 0);
common.assert(pbBytes.length > 0);
common.assert(cborBytes.length > 0);
const reassembled = concatBytes([
headerBytes,
rawBytes,
pbBytes,
cborBytes
]);
common.assert.strictEqual(toHex(reassembled), toHex(common.carBytes));
});
it('bad argument for create()', () => {

@@ -153,0 +186,0 @@ for (const arg of [

@@ -54,2 +54,12 @@ 'use strict';

}
static createAppender() {
const {encoder, iterator} = encodeWriter();
encoder.setRoots = () => Promise.resolve();
const writer = new CarWriter([], encoder);
const out = new CarWriterOut(iterator);
return {
writer,
out
};
}
}

@@ -56,0 +66,0 @@ class CarWriterOut {

@@ -8,21 +8,25 @@ 'use strict';

const {toHex} = multiformats.bytes;
function concatBytes(chunks) {
const length = chunks.reduce((p, c) => p + c.length, 0);
const bytes = new Uint8Array(length);
let off = 0;
for (const chunk of chunks) {
bytes.set(chunk, off);
off += chunk.length;
}
return bytes;
}
function collector(iterable) {
return (async () => {
const chunks = [];
let length = 0;
const chunks = [];
const cfn = (async () => {
for await (const chunk of iterable) {
chunks.push(chunk);
length += chunk.length;
}
const bytes = new Uint8Array(length);
length = 0;
for (const chunk of chunks) {
bytes.set(chunk, length);
length += chunk.length;
}
return bytes;
return concatBytes(chunks);
})();
return cfn;
}
describe('CarWriter', () => {
let cborBlocks;
let allBlocks;
let allBlocksFlattened;

@@ -36,2 +40,3 @@ let roots;

cborBlocks = data.cborBlocks;
allBlocks = data.allBlocks;
allBlocksFlattened = data.allBlocksFlattened;

@@ -90,3 +95,3 @@ roots = [

common.assert.strictEqual(written, false);
await new Promise(resolve => resolve());
await Promise.resolve();
assertCarData(bytes);

@@ -151,2 +156,30 @@ });

});
it('appender', async () => {
let writerOut = writer['default'].create(roots);
let collection = collector(writerOut.out);
await writerOut.writer.close();
const headerBytes = await collection;
const append = async index => {
writerOut = writer['default'].createAppender();
collection = collector(writerOut.out);
for (const block of allBlocks[index][1]) {
await writerOut.writer.put(block);
}
await writerOut.writer.close();
return collection;
};
const rawBytes = await append(0);
const pbBytes = await append(1);
const cborBytes = await append(2);
common.assert(rawBytes.length > 0);
common.assert(pbBytes.length > 0);
common.assert(cborBytes.length > 0);
const reassembled = concatBytes([
headerBytes,
rawBytes,
pbBytes,
cborBytes
]);
common.assert.strictEqual(toHex(reassembled), toHex(common.carBytes));
});
it('bad argument for create()', () => {

@@ -153,0 +186,0 @@ for (const arg of [

@@ -10,21 +10,25 @@ import CarWriter from '../lib/writer.js';

const {toHex} = bytes;
function concatBytes(chunks) {
const length = chunks.reduce((p, c) => p + c.length, 0);
const bytes = new Uint8Array(length);
let off = 0;
for (const chunk of chunks) {
bytes.set(chunk, off);
off += chunk.length;
}
return bytes;
}
function collector(iterable) {
return (async () => {
const chunks = [];
let length = 0;
const chunks = [];
const cfn = (async () => {
for await (const chunk of iterable) {
chunks.push(chunk);
length += chunk.length;
}
const bytes = new Uint8Array(length);
length = 0;
for (const chunk of chunks) {
bytes.set(chunk, length);
length += chunk.length;
}
return bytes;
return concatBytes(chunks);
})();
return cfn;
}
describe('CarWriter', () => {
let cborBlocks;
let allBlocks;
let allBlocksFlattened;

@@ -38,2 +42,3 @@ let roots;

cborBlocks = data.cborBlocks;
allBlocks = data.allBlocks;
allBlocksFlattened = data.allBlocksFlattened;

@@ -92,3 +97,3 @@ roots = [

assert.strictEqual(written, false);
await new Promise(resolve => resolve());
await Promise.resolve();
assertCarData(bytes);

@@ -153,2 +158,30 @@ });

});
it('appender', async () => {
let writerOut = CarWriter.create(roots);
let collection = collector(writerOut.out);
await writerOut.writer.close();
const headerBytes = await collection;
const append = async index => {
writerOut = CarWriter.createAppender();
collection = collector(writerOut.out);
for (const block of allBlocks[index][1]) {
await writerOut.writer.put(block);
}
await writerOut.writer.close();
return collection;
};
const rawBytes = await append(0);
const pbBytes = await append(1);
const cborBytes = await append(2);
assert(rawBytes.length > 0);
assert(pbBytes.length > 0);
assert(cborBytes.length > 0);
const reassembled = concatBytes([
headerBytes,
rawBytes,
pbBytes,
cborBytes
]);
assert.strictEqual(toHex(reassembled), toHex(carBytes));
});
it('bad argument for create()', () => {

@@ -155,0 +188,0 @@ for (const arg of [

@@ -45,2 +45,12 @@ import CID from 'multiformats/cid';

}
static createAppender() {
const {encoder, iterator} = encodeWriter();
encoder.setRoots = () => Promise.resolve();
const writer = new CarWriter([], encoder);
const out = new CarWriterOut(iterator);
return {
writer,
out
};
}
}

@@ -47,0 +57,0 @@ export class CarWriterOut {

@@ -10,21 +10,25 @@ import CarWriter from '../lib/writer.js';

const {toHex} = bytes;
function concatBytes(chunks) {
const length = chunks.reduce((p, c) => p + c.length, 0);
const bytes = new Uint8Array(length);
let off = 0;
for (const chunk of chunks) {
bytes.set(chunk, off);
off += chunk.length;
}
return bytes;
}
function collector(iterable) {
return (async () => {
const chunks = [];
let length = 0;
const chunks = [];
const cfn = (async () => {
for await (const chunk of iterable) {
chunks.push(chunk);
length += chunk.length;
}
const bytes = new Uint8Array(length);
length = 0;
for (const chunk of chunks) {
bytes.set(chunk, length);
length += chunk.length;
}
return bytes;
return concatBytes(chunks);
})();
return cfn;
}
describe('CarWriter', () => {
let cborBlocks;
let allBlocks;
let allBlocksFlattened;

@@ -38,2 +42,3 @@ let roots;

cborBlocks = data.cborBlocks;
allBlocks = data.allBlocks;
allBlocksFlattened = data.allBlocksFlattened;

@@ -92,3 +97,3 @@ roots = [

assert.strictEqual(written, false);
await new Promise(resolve => resolve());
await Promise.resolve();
assertCarData(bytes);

@@ -153,2 +158,30 @@ });

});
it('appender', async () => {
let writerOut = CarWriter.create(roots);
let collection = collector(writerOut.out);
await writerOut.writer.close();
const headerBytes = await collection;
const append = async index => {
writerOut = CarWriter.createAppender();
collection = collector(writerOut.out);
for (const block of allBlocks[index][1]) {
await writerOut.writer.put(block);
}
await writerOut.writer.close();
return collection;
};
const rawBytes = await append(0);
const pbBytes = await append(1);
const cborBytes = await append(2);
assert(rawBytes.length > 0);
assert(pbBytes.length > 0);
assert(cborBytes.length > 0);
const reassembled = concatBytes([
headerBytes,
rawBytes,
pbBytes,
cborBytes
]);
assert.strictEqual(toHex(reassembled), toHex(carBytes));
});
it('bad argument for create()', () => {

@@ -155,0 +188,0 @@ for (const arg of [

 {
   "name": "@ipld/car",
-  "version": "0.0.1",
+  "version": "0.1.0",
   "description": "Content Addressable aRchive format reader and writer",

@@ -39,6 +39,6 @@ "directories": {

"@ipld/dag-pb": "^0.0.1",
"@types/mocha": "^8.0.3",
"@types/node": "^14.14.6",
"@typescript-eslint/eslint-plugin": "^4.6.0",
"@typescript-eslint/parser": "^4.6.0",
"@types/mocha": "^8.0.4",
"@types/node": "^14.14.9",
"@typescript-eslint/eslint-plugin": "^4.8.1",
"@typescript-eslint/parser": "^4.8.1",
"chai": "^4.2.0",

@@ -52,8 +52,8 @@ "chai-as-promised": "^7.1.1",

"polendina": "^1.1.0",
"standard": "^16.0.0",
"typescript": "^4.0.5"
"standard": "^16.0.3",
"typescript": "^4.1.2"
},
"dependencies": {
"@ipld/dag-cbor": "^2.0.2",
"multiformats": "^4.3.2",
"@ipld/dag-cbor": "^2.0.3",
"multiformats": "^4.4.1",
"varint": "^6.0.0"

@@ -60,0 +60,0 @@ },

@@ -71,6 +71,3 @@ /// <reference types="node" />

 _roots: CID[];
-_index: Map<string, {
-    blockLength: number;
-    blockOffset: number;
-}>;
+_index: Map<string, RawLocation>;
 _order: string[];

@@ -77,0 +74,0 @@ _fd: fs.promises.FileHandle | null;

@@ -57,2 +57,17 @@ /**

/**
* Create a new CAR appender "channel" which consists of a
* `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }` pair.
* This appender does not consider roots and does not produce a CAR header.
* It is designed to append blocks to an _existing_ CAR archive. It is
* expected that `out` will be concatenated onto the end of an existing
* archive that already has a properly formatted header.
*
* @async
* @static
* @memberof CarWriter
* @returns {WriterChannel} The channel takes the form of
* `{ writer:CarWriter, out:AsyncIterable<Uint8Array> }`.
*/
static createAppender(): WriterChannel;
/**
* @param {CID[]} roots

@@ -63,2 +78,3 @@ * @param {CarEncoder} encoder

_encoder: import("./coding.js").CarEncoder;
/** @type {Promise<void>} */
_mutex: Promise<void>;

@@ -65,0 +81,0 @@ _ended: boolean;
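
As a rough, hypothetical sketch only (not taken from the package's documentation), the following shows one way the createAppender() channel described in the JSDoc above could be used from Node.js to add a single block to a CAR file that already contains a valid header. The import path, the fs-append approach, and the appendBlock helper name are all assumptions.

// Hypothetical usage sketch, not part of @ipld/car itself.
import fs from 'fs';
import { Readable } from 'stream';
import CarWriter from '@ipld/car/lib/writer.js'; // import path assumed

async function appendBlock (path, block) {
  // createAppender() returns { writer, out } but never emits a CAR header,
  // so `out` can be appended directly to an archive that already has one.
  const { writer, out } = CarWriter.createAppender();
  const sink = Readable.from(out).pipe(fs.createWriteStream(path, { flags: 'a' }));
  await writer.put(block); // block is a { cid, bytes } pair
  await writer.close();
  await new Promise((resolve, reject) => {
    sink.on('finish', resolve);
    sink.on('error', reject);
  });
}

This mirrors the 'appender' test above, which closes a normal writer to capture the header bytes and then concatenates the header-less appender output onto them.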

