@ndn/segmented-object - npm package version comparison

Comparing version 0.0.20210930 to 0.0.20220501

LICENSE

lib/convention.d.ts

@@ -1,2 +1,2 @@

-import { NamingConvention } from "@ndn/packet";
+import type { NamingConvention } from "@ndn/packet";
 export declare type VersionConvention = NamingConvention<any>;

@@ -3,0 +3,0 @@ export declare type VersionConventionFromNumber = NamingConvention<number, unknown>;
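
Most of the `.d.ts` changes in this release follow one pattern: plain imports become type-only imports. A type-only import is erased during compilation, so it can never trigger a runtime module load. A minimal sketch of the two forms involved:

```ts
// Erased entirely from the emitted JavaScript:
import type { NamingConvention } from "@ndn/packet";

// TypeScript 4.5+ inline form: only Data remains a runtime import;
// entries carrying the type modifier are erased.
import { type NameLike, Data } from "@ndn/packet";
```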

@@ -1,3 +1,3 @@

-import { Name } from "@ndn/packet";
-import { SegmentConvention, VersionConvention } from "./convention";
+import { type Name } from "@ndn/packet";
+import { type SegmentConvention, type VersionConvention } from "./convention";
 import { fetch } from "./fetch/mod";

@@ -4,0 +4,0 @@ /** Discover version with CanBePrefix. */

@@ -1,3 +1,2 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
+import { assert } from "@ndn/util";
 import { EventEmitter } from "events";

@@ -4,0 +3,0 @@ const CWND = Symbol("CongestionAvoidance.CWND");

@@ -1,3 +1,2 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
+import { assert } from "@ndn/util";
 import { EventEmitter } from "node:events";

@@ -4,0 +3,0 @@ const CWND = Symbol("CongestionAvoidance.CWND");

 import type TypedEmitter from "typed-emitter";
-interface Events {
+declare type Events = {
 cwndupdate: (cwnd: number) => void;
-}
+};
 declare const CWND: unique symbol;

@@ -6,0 +6,0 @@ declare const CongestionAvoidance_base: new () => TypedEmitter<Events>;
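
The `interface Events` to `declare type Events = {...}` change recurs throughout the typings and tracks the typed-emitter upgrade (^1.3.1 to ^2.1.0 in package.json): a type alias gets an implicit index signature and can satisfy an event-map constraint, while an interface cannot. A sketch of the mixin pattern these declarations describe, assuming typed-emitter v2 (the class name here is illustrative):

```ts
import { EventEmitter } from "node:events";
import type TypedEmitter from "typed-emitter";

// A type alias (not an interface), so it satisfies typed-emitter v2's
// event-map constraint via its implicit index signature.
type Events = {
  cwndupdate: (cwnd: number) => void;
};

// Mirrors the emitted "declare const CongestionAvoidance_base: new () => TypedEmitter<Events>;"
class CongestionAvoidanceLike extends (EventEmitter as new () => TypedEmitter<Events>) {
  protected updateCwnd(cwnd: number): void {
    this.emit("cwndupdate", cwnd); // event name and payload are type-checked
  }
}
```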

 import { __importDefault, __importStar } from "tslib";
 import { Name } from "@ndn/packet";
+import { assert } from "@ndn/util";
 import _cjsDefaultImport0 from "event-iterator"; const EventIterator = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
 import { collect, map, writeToStream } from "streaming-iterables";

@@ -62,5 +62,5 @@ import { Fetcher } from "./fetcher_browser.js";

 }
+// eslint-disable-next-line unicorn/no-thenable
 then(onfulfilled, onrejected) {
-this.promise ?? (this.promise = this.startPromise());
+this.promise ??= this.startPromise();
+// eslint-disable-next-line promise/prefer-await-to-then
 return this.promise.then(onfulfilled, onrejected);
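
The `??=` rewrite is possible because the compilation target now includes ES2021 logical assignment operators; behavior is unchanged. A self-contained sketch of the memoization idiom (the `Lazy` class is illustrative, not part of the package):

```ts
class Lazy<T> {
  private promise?: Promise<T>;

  constructor(private readonly start: () => Promise<T>) {}

  value(): Promise<T> {
    // Assigns only when this.promise is null or undefined,
    // so start() runs at most once. Equivalent to:
    //   this.promise ?? (this.promise = this.start());
    this.promise ??= this.start();
    return this.promise;
  }
}
```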

@@ -74,3 +74,3 @@ }

 export function fetch(name, opts = {}) {
-return new FetchResult(new Name(name), opts);
+return new FetchResult(Name.from(name), opts);
 }
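
`new Name(name)` becomes `Name.from(name)` here and in serve(). Presumably `Name.from` short-circuits when the `NameLike` input is already a `Name`, avoiding a needless copy, while still handling URI strings:

```ts
import { Name } from "@ndn/packet";

const n = new Name("/P/object");
const a = Name.from(n);           // can reuse the existing Name instance
const b = Name.from("/P/object"); // still parses a URI string
const c = new Name(n);            // always constructs a fresh object
```
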
 import { __importDefault, __importStar } from "tslib";
 import { Name } from "@ndn/packet";
+import { assert } from "@ndn/util";
 import _cjsDefaultImport0 from "event-iterator"; const EventIterator = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
 import { collect, map, writeToStream } from "streaming-iterables";

@@ -62,5 +62,5 @@ import { Fetcher } from "./fetcher_node.js";

 }
+// eslint-disable-next-line unicorn/no-thenable
 then(onfulfilled, onrejected) {
-this.promise ?? (this.promise = this.startPromise());
+this.promise ??= this.startPromise();
+// eslint-disable-next-line promise/prefer-await-to-then
 return this.promise.then(onfulfilled, onrejected);

@@ -74,3 +74,3 @@ }

 export function fetch(name, opts = {}) {
-return new FetchResult(new Name(name), opts);
+return new FetchResult(Name.from(name), opts);
 }
 /// <reference types="node" />
-import { Data, NameLike } from "@ndn/packet";
+import { type NameLike, Data } from "@ndn/packet";
 import { Fetcher } from "./fetcher";

@@ -4,0 +4,0 @@ /** Fetch a segment object as AsyncIterable of payload. */

@@ -1,10 +1,7 @@

-/// <reference types="node" />
-/// <reference types="web" />
 import { Endpoint } from "@ndn/endpoint";
-import { Data, Interest, Name, Verifier } from "@ndn/packet";
-import type { AbortSignal } from "abort-controller";
+import { type Name, type Verifier, Data, Interest } from "@ndn/packet";
 import type TypedEmitter from "typed-emitter";
-import { SegmentConvention } from "../convention";
+import { type SegmentConvention } from "../convention";
 import { FetchLogic } from "./logic";
-interface Events {
+declare type Events = {
 /** Emitted when a Data segment arrives. */

@@ -16,3 +13,3 @@ segment: (seg: Fetcher.SegmentData) => void;

 error: (err: Error) => void;
-}
+};
declare const Fetcher_base: new () => TypedEmitter<Events>;

@@ -54,3 +51,3 @@ /** Fetch Data packets as guided by FetchLogic. */

 /** AbortSignal that allows canceling the Interest via AbortController. */
-signal?: AbortSignal | globalThis.AbortSignal;
+signal?: AbortSignal;
/**

@@ -57,0 +54,0 @@ * InterestLifetime added to RTO.
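
With the `abort-controller` polyfill dropped from the dependencies, `signal` is typed against the built-in `AbortSignal` of modern Node.js and browsers. A consumer-side sketch, assuming fetch() forwards the Fetcher options shown above (the name `/P/object` is illustrative):

```ts
import { fetch } from "@ndn/segmented-object";

const abort = new AbortController();
const result = fetch("/P/object", { signal: abort.signal });

// Cancel outstanding Interests if retrieval takes longer than 5 seconds.
const timer = setTimeout(() => abort.abort(), 5000);
try {
  const payload = await result;
  console.log("retrieved", payload.byteLength, "bytes");
} finally {
  clearTimeout(timer);
}
```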

 import { __importDefault, __importStar } from "tslib";
+import { assert } from "@ndn/util";
 import _cjsDefaultImport0 from "hirestime"; const hirestime = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
-import _cjsDefaultImport2 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport2).default;
+import _cjsDefaultImport1 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport1).default;
 import { EventEmitter } from "events";
-import _cjsDefaultImport3 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport3).default;
+import pDefer from "p-defer";
 import { RttEstimator } from "./rtt-estimator_browser.js";

@@ -8,0 +8,0 @@ import { TcpCubic } from "./tcp-cubic_browser.js";

 import { __importDefault, __importStar } from "tslib";
+import { assert } from "@ndn/util";
 import _cjsDefaultImport0 from "hirestime"; const hirestime = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
-import _cjsDefaultImport2 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport2).default;
+import _cjsDefaultImport1 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport1).default;
 import { EventEmitter } from "node:events";
-import _cjsDefaultImport3 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport3).default;
+import pDefer from "p-defer";
 import { RttEstimator } from "./rtt-estimator_node.js";

@@ -8,0 +8,0 @@ import { TcpCubic } from "./tcp-cubic_node.js";

 /// <reference types="node" />
 import type TypedEmitter from "typed-emitter";
-import { CongestionAvoidance } from "./congestion-avoidance";
+import type { CongestionAvoidance } from "./congestion-avoidance";
 import { RttEstimator } from "./rtt-estimator";

@@ -19,3 +19,3 @@ declare class SegState {

 declare const UNBLOCK: unique symbol;
-interface Events {
+declare type Events = {
 [UNBLOCK]: () => void;

@@ -26,3 +26,3 @@ /** Fetching finished. */

 exceedRetxLimit: (segNum: number) => void;
-}
+};
declare const FetchLogic_base: new () => TypedEmitter<Events>;

@@ -36,4 +36,4 @@ /** Congestion control logic. */

 private readonly tl;
-private pending;
-private retxQueue;
+private readonly pending;
+private readonly retxQueue;
 private readonly retxLimit;

@@ -40,0 +40,0 @@ private hiInterestSegNum;

@@ -1,3 +1,2 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
+import { assert } from "@ndn/util";
 /** A token-based throttle limiter. */

@@ -16,3 +15,3 @@ export class TokenLimiter {

 assert(v >= 0);
-this.capacity_ = Math.floor(v);
+this.capacity_ = Math.trunc(v);
 this.unblock();

@@ -19,0 +18,0 @@ }
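
`Math.floor` to `Math.trunc` is a semantic clean-up rather than a behavior change: the preceding `assert(v >= 0)` already excludes the negative inputs on which the two differ:

```ts
Math.floor(3.7);  // 3
Math.trunc(3.7);  // 3
Math.floor(-3.7); // -4  rounds toward -Infinity
Math.trunc(-3.7); // -3  drops the fractional part, rounding toward zero
```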

@@ -1,3 +1,2 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
+import { assert } from "@ndn/util";
 /** A token-based throttle limiter. */

@@ -16,3 +15,3 @@ export class TokenLimiter {

 assert(v >= 0);
-this.capacity_ = Math.floor(v);
+this.capacity_ = Math.trunc(v);
 this.unblock();

@@ -19,0 +18,0 @@ }

@@ -6,3 +6,3 @@ import { Component, Name } from "@ndn/packet";

 let versionComp;
-const { version = Date.now() } = opts;
+let { version = Date.now(), producerPrefix } = opts;
 if (typeof version === "number") {

@@ -15,5 +15,5 @@ const { versionConvention = defaultVersionConvention } = opts;

 }
-const producerPrefix = new Name(prefixInput);
-const prefix = producerPrefix.append(versionComp);
-return serve(prefix, source, {
+const prefix = Name.from(prefixInput);
+producerPrefix ??= prefix;
+return serve(prefix.append(versionComp), source, {
 ...opts,

@@ -20,0 +20,0 @@ producerPrefix,

@@ -6,3 +6,3 @@ import { Component, Name } from "@ndn/packet";

 let versionComp;
-const { version = Date.now() } = opts;
+let { version = Date.now(), producerPrefix } = opts;
 if (typeof version === "number") {

@@ -15,5 +15,5 @@ const { versionConvention = defaultVersionConvention } = opts;

 }
-const producerPrefix = new Name(prefixInput);
-const prefix = producerPrefix.append(versionComp);
-return serve(prefix, source, {
+const prefix = Name.from(prefixInput);
+producerPrefix ??= prefix;
+return serve(prefix.append(versionComp), source, {
 ...opts,

@@ -20,0 +20,0 @@ producerPrefix,

@@ -1,13 +1,20 @@

-import { ComponentLike, NameLike } from "@ndn/packet";
-import { VersionConventionFromNumber } from "./convention";
-import { ChunkSource, ServeOptions, Server } from "./serve/mod";
+import { type ComponentLike, type NameLike } from "@ndn/packet";
+import { type VersionConventionFromNumber } from "./convention";
+import { type ChunkSource, type ServeOptions, type Server } from "./serve/mod";
 declare type GivenVersionOptions = {
+/** Version number component. */
 version: ComponentLike;
 };
 declare type MakeVersionOptions = {
+/**
+* Choose a version number naming convention.
+* Default is Version from @ndn/naming-convention2 package.
+*/
 versionConvention?: VersionConventionFromNumber;
+/** Version number. */
 version?: number;
 };
-export declare type ServeVersionedOptions = Omit<ServeOptions, "producerPrefix"> & (GivenVersionOptions | MakeVersionOptions);
+/** Options to serveVersioned(). */
+export declare type ServeVersionedOptions = ServeOptions & (GivenVersionOptions | MakeVersionOptions);
 export declare function serveVersioned(prefixInput: NameLike, source: ChunkSource, opts?: ServeVersionedOptions): Server;
 export {};
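
The net effect of the serve-versioned changes: `producerPrefix` is no longer stripped from the options. It still defaults to the unversioned prefix (`producerPrefix ??= prefix`), which keeps CanBePrefix version discovery working, but callers can now override it. A usage sketch under that reading (payload and names are illustrative):

```ts
import { Name } from "@ndn/packet";
import { BufferChunkSource, serveVersioned } from "@ndn/segmented-object";

const payload = new TextEncoder().encode("hello NDN");

// Data packets are named /P/doc/<version>/<segment>. With no explicit
// producerPrefix, the producer also answers on the unversioned /P/doc.
const server = serveVersioned("/P/doc", new BufferChunkSource(payload), {
  version: Date.now(),
  producerPrefix: new Name("/P/doc"), // now overridable instead of always implied
});
```
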
 /* eslint-env browser */
 import { getMaxChunkSize, KnownSizeChunkSource } from "./common_browser.js";
-function readBlobAsBuffer(blob) {
-return blob.arrayBuffer();
-}
-function readBlobFileReader(blob) {
-return new Promise((resolve, reject) => {
-const reader = new FileReader();
-reader.readAsArrayBuffer(blob);
-reader.addEventListener("load", () => {
-const buffer = reader.result;
-resolve(buffer);
-});
-reader.addEventListener("error", () => {
-reject(reader.error);
-});
-});
-}
 /** Generate chunks from a Blob (from W3C File API, browser only). */

@@ -24,9 +8,9 @@ export class BlobChunkSource extends KnownSizeChunkSource {

 this.blob = blob;
-this.readBlob = typeof blob.arrayBuffer === "function" ? readBlobAsBuffer : readBlobFileReader;
 }
 async getPayload(i, offset, chunkSize) {
+void i;
 const sliced = this.blob.slice(offset, offset + chunkSize);
-const buffer = await this.readBlob(sliced);
+const buffer = await sliced.arrayBuffer();
 return new Uint8Array(buffer);
 }
 }
 /* eslint-env browser */
 import { getMaxChunkSize, KnownSizeChunkSource } from "./common_node.js";
-function readBlobAsBuffer(blob) {
-return blob.arrayBuffer();
-}
-function readBlobFileReader(blob) {
-return new Promise((resolve, reject) => {
-const reader = new FileReader();
-reader.readAsArrayBuffer(blob);
-reader.addEventListener("load", () => {
-const buffer = reader.result;
-resolve(buffer);
-});
-reader.addEventListener("error", () => {
-reject(reader.error);
-});
-});
-}
 /** Generate chunks from a Blob (from W3C File API, browser only). */

@@ -24,9 +8,9 @@ export class BlobChunkSource extends KnownSizeChunkSource {

 this.blob = blob;
-this.readBlob = typeof blob.arrayBuffer === "function" ? readBlobAsBuffer : readBlobFileReader;
 }
 async getPayload(i, offset, chunkSize) {
+void i;
 const sliced = this.blob.slice(offset, offset + chunkSize);
-const buffer = await this.readBlob(sliced);
+const buffer = await sliced.arrayBuffer();
 return new Uint8Array(buffer);
 }
 }
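
Both Blob chunk-source variants lose the `FileReader` fallback: `Blob.prototype.arrayBuffer()` is standard in current browsers and in the Node.js `Blob` implementation, so slicing and reading collapses to two lines. The equivalent standalone helper (the name `readChunk` is illustrative):

```ts
async function readChunk(blob: Blob, offset: number, chunkSize: number): Promise<Uint8Array> {
  const sliced = blob.slice(offset, offset + chunkSize); // view of the byte range
  return new Uint8Array(await sliced.arrayBuffer());     // no FileReader required
}
```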

@@ -1,4 +1,2 @@

-/// <reference types="node" />
-/// <reference types="web" />
-import { ChunkOptions, ChunkSource, KnownSizeChunkSource } from "./common";
+import { type ChunkOptions, type ChunkSource, KnownSizeChunkSource } from "./common";
 /** Generate chunks from a Blob (from W3C File API, browser only). */

@@ -8,4 +6,3 @@ export declare class BlobChunkSource extends KnownSizeChunkSource implements ChunkSource {

 constructor(blob: Blob, opts?: ChunkOptions);
-private readonly readBlob;
 protected getPayload(i: number, offset: number, chunkSize: number): Promise<Uint8Array>;
 }

@@ -9,4 +9,5 @@ import { getMaxChunkSize, KnownSizeChunkSource } from "./common_browser.js";

 async getPayload(i, offset, chunkSize) {
+void i;
 return this.input.subarray(offset, offset + chunkSize);
 }
 }

@@ -9,4 +9,5 @@ import { getMaxChunkSize, KnownSizeChunkSource } from "./common_node.js";

 async getPayload(i, offset, chunkSize) {
+void i;
 return this.input.subarray(offset, offset + chunkSize);
 }
 }

@@ -1,2 +1,2 @@

-import { ChunkOptions, ChunkSource, KnownSizeChunkSource } from "./common";
+import { type ChunkOptions, type ChunkSource, KnownSizeChunkSource } from "./common";
 /** Generate chunks from a fixed buffer. */

@@ -3,0 +3,0 @@ export declare class BufferChunkSource extends KnownSizeChunkSource implements ChunkSource {

@@ -52,2 +52,3 @@ /** Index and payload of a chunk. */

 }
+/** Chunk sizing options. */
 export declare type ChunkOptions = ChunkSizeRange | ChunkSizeExact;

@@ -54,0 +55,0 @@ export declare function getMinChunkSize(opts: ChunkOptions): number;

@@ -1,2 +1,2 @@

-import { Chunk, ChunkOptions, ChunkSource } from "./common";
+import { type Chunk, type ChunkOptions, type ChunkSource } from "./common";
 /**

@@ -3,0 +3,0 @@ * Generate chunks from a file.

 import { __importDefault, __importStar } from "tslib";
 import _cjsDefaultImport0 from "graceful-fs"; const { promises: fs } = __importStar(_cjsDefaultImport0);
-import _cjsDefaultImport1 from "p-lazy"; const PLazy = __importDefault(_cjsDefaultImport1).default;
+import PLazy from "p-lazy";
 import { getMaxChunkSize, KnownSizeChunkSource } from "./common_node.js";

@@ -5,0 +5,0 @@ class FileHandleChunkSource extends KnownSizeChunkSource {

@@ -1,1 +1,2 @@

+import { assert } from "@ndn/util";
 import { getMaxChunkSize, getMinChunkSize } from "./common_browser.js";

@@ -22,11 +23,18 @@ /** Gather chunks of acceptable size from scattered buffers. */

 // fast path when first buffer has acceptable size
-const firstSize = this.vector[0].byteLength;
-if (firstSize >= this.minSize && firstSize <= this.maxSize) {
-this.length -= firstSize;
+let buf = this.vector[0];
+if (buf.byteLength >= this.minSize && buf.byteLength <= this.maxSize) {
+this.length -= buf.byteLength;
 return this.vector.shift();
 }
+// fast path when first buffer has enough payload
+if (buf.byteLength > this.maxSize) {
+const output = buf.subarray(0, this.maxSize);
+this.length -= this.maxSize;
+this.vector[0] = buf.subarray(this.maxSize);
+return output;
+}
 // slow path that combines multiple buffers
 const output = new Uint8Array(Math.min(this.maxSize, this.length));
 for (let offset = 0; offset < output.byteLength;) {
-const buf = this.vector.shift();
-this.length -= buf.byteLength;
+buf = this.vector[0];
 const rem = output.byteLength - offset;

@@ -36,5 +44,3 @@ if (buf.byteLength > rem) {

 offset += rem;
-const excess = buf.subarray(rem);
-this.vector.unshift(excess);
-this.length += excess.byteLength;
+this.vector[0] = buf.subarray(rem);
 }

@@ -44,23 +50,27 @@ else {

 offset += buf.byteLength;
+this.vector.shift();
 }
 }
+this.length -= output.byteLength;
 return output;
 }
 }
-/** Generate chunks from an Iterable or AsyncIterable of Uint8Arrays. */
+/**
+* Generate chunks from an Iterable or AsyncIterable of Uint8Arrays.
+* This also accepts NodeJS stream.Readable, which is an AsyncIterable of Buffers.
+*/
 export class IterableChunkSource {
 constructor(input, opts = {}) {
 this.input = input;
-this.opts = opts;
+this.minSize = getMinChunkSize(opts);
+this.maxSize = getMaxChunkSize(opts);
 }
 async *listChunks() {
 let i = -1;
-const scattered = new ScatteredChunk(getMinChunkSize(this.opts), getMaxChunkSize(this.opts));
+const scattered = new ScatteredChunk(this.minSize, this.maxSize);
 for await (const buf of this.input) {
+assert(buf instanceof Uint8Array);
 scattered.append(buf);
-while (true) {
-const payload = scattered.gather();
-if (!payload) {
-break;
-}
+let payload;
+while (payload = scattered.gather()) { // eslint-disable-line no-cond-assign
 ++i;

@@ -74,1 +84,3 @@ yield { i, payload };

 }
+/** Alias of IterableChunkSource, which accepts NodeJS stream.Readable. */
+export const StreamChunkSource = IterableChunkSource;

@@ -1,1 +1,2 @@

+import { assert } from "@ndn/util";
 import { getMaxChunkSize, getMinChunkSize } from "./common_node.js";

@@ -22,11 +23,18 @@ /** Gather chunks of acceptable size from scattered buffers. */

 // fast path when first buffer has acceptable size
-const firstSize = this.vector[0].byteLength;
-if (firstSize >= this.minSize && firstSize <= this.maxSize) {
-this.length -= firstSize;
+let buf = this.vector[0];
+if (buf.byteLength >= this.minSize && buf.byteLength <= this.maxSize) {
+this.length -= buf.byteLength;
 return this.vector.shift();
 }
+// fast path when first buffer has enough payload
+if (buf.byteLength > this.maxSize) {
+const output = buf.subarray(0, this.maxSize);
+this.length -= this.maxSize;
+this.vector[0] = buf.subarray(this.maxSize);
+return output;
+}
 // slow path that combines multiple buffers
 const output = new Uint8Array(Math.min(this.maxSize, this.length));
 for (let offset = 0; offset < output.byteLength;) {
-const buf = this.vector.shift();
-this.length -= buf.byteLength;
+buf = this.vector[0];
 const rem = output.byteLength - offset;

@@ -36,5 +44,3 @@ if (buf.byteLength > rem) {

 offset += rem;
-const excess = buf.subarray(rem);
-this.vector.unshift(excess);
-this.length += excess.byteLength;
+this.vector[0] = buf.subarray(rem);
 }

@@ -44,23 +50,27 @@ else {

 offset += buf.byteLength;
+this.vector.shift();
 }
 }
+this.length -= output.byteLength;
 return output;
 }
 }
-/** Generate chunks from an Iterable or AsyncIterable of Uint8Arrays. */
+/**
+* Generate chunks from an Iterable or AsyncIterable of Uint8Arrays.
+* This also accepts NodeJS stream.Readable, which is an AsyncIterable of Buffers.
+*/
 export class IterableChunkSource {
 constructor(input, opts = {}) {
 this.input = input;
-this.opts = opts;
+this.minSize = getMinChunkSize(opts);
+this.maxSize = getMaxChunkSize(opts);
 }
 async *listChunks() {
 let i = -1;
-const scattered = new ScatteredChunk(getMinChunkSize(this.opts), getMaxChunkSize(this.opts));
+const scattered = new ScatteredChunk(this.minSize, this.maxSize);
 for await (const buf of this.input) {
+assert(buf instanceof Uint8Array);
 scattered.append(buf);
-while (true) {
-const payload = scattered.gather();
-if (!payload) {
-break;
-}
+let payload;
+while (payload = scattered.gather()) { // eslint-disable-line no-cond-assign
 ++i;

@@ -74,1 +84,3 @@ yield { i, payload };

 }
+/** Alias of IterableChunkSource, which accepts NodeJS stream.Readable. */
+export const StreamChunkSource = IterableChunkSource;
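
The reworked `gather()` adds a fast path for an oversized first buffer and, in the slow path, replaces the shift/unshift round-trip with an in-place `subarray` view, so no bytes are copied when splitting. The core trick in isolation (`takeMax` is an illustrative name, not package API):

```ts
// Split maxSize bytes off the head of the vector without copying:
// the remainder stays at index 0 as a subarray view of the same memory.
function takeMax(vector: Uint8Array[], maxSize: number): Uint8Array {
  const buf = vector[0]!;
  const output = buf.subarray(0, maxSize);
  vector[0] = buf.subarray(maxSize);
  return output;
}
```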

@@ -1,9 +1,16 @@

+/// <reference types="node" />
 import type { AnyIterable } from "streaming-iterables";
-import { Chunk, ChunkOptions, ChunkSource } from "./common";
-/** Generate chunks from an Iterable or AsyncIterable of Uint8Arrays. */
+import { type Chunk, type ChunkOptions, type ChunkSource } from "./common";
+/**
+* Generate chunks from an Iterable or AsyncIterable of Uint8Arrays.
+* This also accepts NodeJS stream.Readable, which is an AsyncIterable of Buffers.
+*/
 export declare class IterableChunkSource implements ChunkSource {
+constructor(input: AnyIterable<Uint8Array> | NodeJS.ReadableStream, opts?: ChunkOptions);
 private readonly input;
-private readonly opts;
-constructor(input: AnyIterable<Uint8Array>, opts?: ChunkOptions);
+private readonly minSize;
+private readonly maxSize;
 listChunks(): AsyncIterable<Chunk>;
 }
+/** Alias of IterableChunkSource, which accepts NodeJS stream.Readable. */
+export declare const StreamChunkSource: typeof IterableChunkSource;

@@ -1,6 +1,3 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "is-stream"; const { readable: isReadableStream } = __importStar(_cjsDefaultImport0);
 import { BufferChunkSource } from "./buffer_browser.js";
 import { IterableChunkSource } from "./iterable_browser.js";
-import { StreamChunkSource } from "./stream_browser.js";
 /**

@@ -12,10 +9,7 @@ * Create a chunk source, auto detecting input type.

 */
-export function makeChunkSource(input, opts = {}) {
+export function makeChunkSource(input, opts) {
 if (input instanceof Uint8Array) {
 return new BufferChunkSource(input, opts);
 }
-if (isReadableStream(input)) {
-return new StreamChunkSource(input, opts);
-}
 return new IterableChunkSource(input, opts);
 }

@@ -1,6 +1,3 @@

-import { __importDefault, __importStar } from "tslib";
-import _cjsDefaultImport0 from "is-stream"; const { readable: isReadableStream } = __importStar(_cjsDefaultImport0);
 import { BufferChunkSource } from "./buffer_node.js";
 import { IterableChunkSource } from "./iterable_node.js";
-import { StreamChunkSource } from "./stream_node.js";
 /**

@@ -12,10 +9,7 @@ * Create a chunk source, auto detecting input type.

 */
-export function makeChunkSource(input, opts = {}) {
+export function makeChunkSource(input, opts) {
 if (input instanceof Uint8Array) {
 return new BufferChunkSource(input, opts);
 }
-if (isReadableStream(input)) {
-return new StreamChunkSource(input, opts);
-}
 return new IterableChunkSource(input, opts);
 }

@@ -6,5 +6,3 @@ /// <reference types="node" />

 import { IterableChunkSource } from "./iterable";
-import { StreamChunkSource } from "./stream";
 export declare function makeChunkSource(input: Uint8Array, opts?: ChunkOptions): BufferChunkSource;
-export declare function makeChunkSource(input: AnyIterable<Uint8Array>, opts?: ChunkOptions): IterableChunkSource;
-export declare function makeChunkSource(input: NodeJS.ReadableStream, opts?: ChunkOptions): StreamChunkSource;
+export declare function makeChunkSource(input: AnyIterable<Uint8Array> | NodeJS.ReadableStream, opts?: ChunkOptions): IterableChunkSource;
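
The `is-stream` sniffing and the separate stream implementation are gone because a Node.js `stream.Readable` is itself an `AsyncIterable<Buffer>`, which `IterableChunkSource` consumes directly; `StreamChunkSource` survives only as an alias. A sketch (the file path is illustrative):

```ts
import { createReadStream } from "node:fs";
import { makeChunkSource } from "@ndn/segmented-object";

// A Readable stream is an AsyncIterable<Buffer>, so it now takes the
// same code path as any other iterable of Uint8Array chunks.
const source = makeChunkSource(createReadStream("/tmp/payload.bin"));
```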

@@ -5,3 +5,2 @@ export * from "./blob_browser.js";

 export * from "./iterable_browser.js";
-export * from "./stream_browser.js";
 export * from "./make_browser.js";

@@ -5,3 +5,2 @@ export * from "./blob_node.js";

 export * from "./iterable_node.js";
-export * from "./stream_node.js";
 export * from "./make_node.js";

@@ -6,3 +6,2 @@ export type { Chunk, ChunkSource, ChunkOptions } from "./common";

 export * from "./iterable";
-export * from "./stream";
 export * from "./make";
 import { __importDefault, __importStar } from "tslib";
 import { Data, digestSigning } from "@ndn/packet";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport1).default;
-import _cjsDefaultImport2 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport2).default;
+import { assert } from "@ndn/util";
+import _cjsDefaultImport0 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport0).default;
+import pDefer from "p-defer";
 import { getIterator } from "streaming-iterables";

@@ -37,5 +37,4 @@ import { defaultSegmentConvention } from "../convention_browser.js";

 }
 throw new Error("invalid Interest name");
 }
-if (canBePrefix && name.isPrefixOf(this.prefix)) {
+else if (canBePrefix && name.isPrefixOf(this.prefix)) {
 return 0;

@@ -101,7 +100,6 @@ }

 while (true) {
-const { done, value } = await this.generator.next();
-if (done || value === false) {
+const { done, value: chunk } = await this.generator.next();
+if (done) {
 break;
 }
-const chunk = value;
 ++i;

@@ -108,0 +106,0 @@ assert(chunk.i === i, "unexpected chunk number");

 import { __importDefault, __importStar } from "tslib";
 import { Data, digestSigning } from "@ndn/packet";
-import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
-import _cjsDefaultImport1 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport1).default;
-import _cjsDefaultImport2 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport2).default;
+import { assert } from "@ndn/util";
+import _cjsDefaultImport0 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport0).default;
+import pDefer from "p-defer";
 import { getIterator } from "streaming-iterables";

@@ -37,5 +37,4 @@ import { defaultSegmentConvention } from "../convention_node.js";

 }
 throw new Error("invalid Interest name");
 }
-if (canBePrefix && name.isPrefixOf(this.prefix)) {
+else if (canBePrefix && name.isPrefixOf(this.prefix)) {
 return 0;

@@ -101,7 +100,6 @@ }

 while (true) {
-const { done, value } = await this.generator.next();
-if (done || value === false) {
+const { done, value: chunk } = await this.generator.next();
+if (done) {
 break;
 }
-const chunk = value;
 ++i;

@@ -108,0 +106,0 @@ assert(chunk.i === i, "unexpected chunk number");

 import type { ProducerHandler } from "@ndn/endpoint";
-import { Data, Name, Signer } from "@ndn/packet";
-import { SegmentConvention } from "../convention";
+import { type Signer, Data, Name } from "@ndn/packet";
+import { type SegmentConvention } from "../convention";
 import type { Chunk, ChunkSource } from "./chunk-source/mod";

@@ -5,0 +5,0 @@ /** Produce Data for requested segment. */

@@ -5,12 +5,15 @@ import { Endpoint } from "@ndn/endpoint";

 /**
-* Start serving an segmented object.
+* Start serving a segmented object.
 * @param prefixInput Data prefix excluding segment number.
 * @param source where to read segment payload chunks.
 * @param opts other options.
+*
+* This function does not automatically add a version component to the name prefix.
+* If a version component is desired, use serveVersioned() function instead.
 */
 export function serve(prefixInput, source, opts = {}) {
-const prefix = new Name(prefixInput);
+const prefix = Name.from(prefixInput);
 const { endpoint = new Endpoint() } = opts;
-const producer = DataProducer.create(source, prefix, opts);
-const prod = endpoint.produce(opts.producerPrefix ?? prefix, producer.processInterest, {
+const dp = DataProducer.create(source, prefix, opts);
+const ep = endpoint.produce(opts.producerPrefix ?? prefix, dp.processInterest, {
 concurrency: 16,

@@ -22,10 +25,8 @@ describe: opts.describe ?? `serve(${prefix})`,

 prefix,
-processInterest(interest) {
-return prod.processInterest(interest);
-},
+processInterest: ep.processInterest,
 close() {
-producer.close();
-prod.close();
+dp.close();
+ep.close();
 },
 };
 }

@@ -5,12 +5,15 @@ import { Endpoint } from "@ndn/endpoint";

 /**
-* Start serving an segmented object.
+* Start serving a segmented object.
 * @param prefixInput Data prefix excluding segment number.
 * @param source where to read segment payload chunks.
 * @param opts other options.
+*
+* This function does not automatically add a version component to the name prefix.
+* If a version component is desired, use serveVersioned() function instead.
 */
 export function serve(prefixInput, source, opts = {}) {
-const prefix = new Name(prefixInput);
+const prefix = Name.from(prefixInput);
 const { endpoint = new Endpoint() } = opts;
-const producer = DataProducer.create(source, prefix, opts);
-const prod = endpoint.produce(opts.producerPrefix ?? prefix, producer.processInterest, {
+const dp = DataProducer.create(source, prefix, opts);
+const ep = endpoint.produce(opts.producerPrefix ?? prefix, dp.processInterest, {
 concurrency: 16,

@@ -22,10 +25,8 @@ describe: opts.describe ?? `serve(${prefix})`,

 prefix,
-processInterest(interest) {
-return prod.processInterest(interest);
-},
+processInterest: ep.processInterest,
 close() {
-producer.close();
-prod.close();
+dp.close();
+ep.close();
 },
 };
 }
 import { Endpoint } from "@ndn/endpoint";
-import { Data, Interest, Name, NameLike } from "@ndn/packet";
+import { type Interest, type NameLike, Data, Name } from "@ndn/packet";
 import type { ChunkSource } from "./chunk-source/mod";

@@ -23,13 +23,25 @@ import { DataProducer } from "./data-producer";

 };
+/** Producer that serves a segmented object. */
 export interface Server {
+/** Data prefix excluding segment number. */
 readonly prefix: Name;
+/**
+* Process an Interest.
+*
+* The producer handler is already attached to the Endpoint and will react to incoming Interests.
+* It's usually unnecessary to call this function manually.
+*/
 processInterest: (interest: Interest) => Promise<Data | undefined>;
+/** Stop the producer. */
 close: () => void;
 }
 /**
-* Start serving an segmented object.
+* Start serving a segmented object.
 * @param prefixInput Data prefix excluding segment number.
 * @param source where to read segment payload chunks.
 * @param opts other options.
+*
+* This function does not automatically add a version component to the name prefix.
+* If a version component is desired, use serveVersioned() function instead.
 */
 export declare function serve(prefixInput: NameLike, source: ChunkSource, opts?: ServeOptions): Server;
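
Taken together, `Server.processInterest` now simply re-exposes the Endpoint producer's handler, and `close()` shuts down both the `DataProducer` and the producer registration. A minimal end-to-end sketch of the public API (assumes an NDN forwarder/Endpoint is already configured elsewhere; names are illustrative):

```ts
import { fetch, makeChunkSource, serve } from "@ndn/segmented-object";

// Producer: segment a payload under /P/obj (no version component is added).
const server = serve("/P/obj", makeChunkSource(new TextEncoder().encode("demo payload")));

// Consumer: FetchResult is thenable (hence the unicorn/no-thenable suppression
// in fetch.js), so awaiting it yields the reassembled payload.
const payload = await fetch("/P/obj");
console.log(new TextDecoder().decode(payload));

server.close();
```
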
package.json

 {
 "name": "@ndn/segmented-object",
-"version": "0.0.20210930",
+"version": "0.0.20220501",
 "description": "NDNts: Segmented Object",

@@ -25,20 +25,19 @@ "keywords": [

 "dependencies": {
-"@ndn/endpoint": "0.0.20210930",
-"@ndn/fw": "0.0.20210930",
-"@ndn/naming-convention2": "0.0.20210930",
-"@ndn/packet": "0.0.20210930",
-"abort-controller": "^3.0.0",
+"@ndn/endpoint": "0.0.20220501",
+"@ndn/fw": "0.0.20220501",
+"@ndn/naming-convention2": "0.0.20220501",
+"@ndn/packet": "0.0.20220501",
+"@ndn/util": "0.0.20220501",
 "event-iterator": "^2.0.0",
-"graceful-fs": "^4.2.8",
+"graceful-fs": "^4.2.10",
 "hirestime": "^6.1.0",
-"is-stream": "^2.0.1",
-"mnemonist": "^0.38.4",
-"minimalistic-assert": "^1.0.1",
-"p-defer": "^3.0.0",
-"p-lazy": "^3.1.0",
-"streaming-iterables": "^6.0.0",
-"tslib": "^2.3.1",
-"typed-emitter": "^1.3.1"
+"mnemonist": "^0.39.1",
+"p-defer": "^4.0.0",
+"p-lazy": "^4.0.0",
+"streaming-iterables": "^7.0.2",
+"tslib": "^2.4.0",
+"typed-emitter": "^2.1.0"
 },
-"types": "lib/mod.d.ts"
+"types": "lib/mod.d.ts",
+"readme": "# @ndn/segmented-object\n\nThis package is part of [NDNts](https://yoursunny.com/p/NDNts/), Named Data Networking libraries for the modern web.\n\nThis package implements functions to publish and retrieve segmented objects.\n`@ndn/cat` package is a command line program that uses this package, and serves as an example.\n\nThe consumer functionality:\n\n* [X] supports version discovery via CanBePrefix.\n* [X] supports version discovery via RDR protocol (in `@ndn/rdr` package).\n* [ ] supports manifest.\n* [X] allows specifying segment range.\n* [X] supports segment numbers.\n* [ ] supports byte offsets.\n* [X] supports multiple naming conventions.\n* [X] has Interest pipelining, congestion control, and loss recovery.\n* [X] verifies packets with a `Verifier` (fixed key or trust schema).\n* [X] emits events as segments arrive.\n* [X] outputs in-order data chunks as a readable stream.\n* [X] outputs completely reassembled object via Promise.\n\nThe producer functionality:\n\n* [X] takes input from `Uint8Array`.\n* [X] takes input from readable streams.\n* [X] takes input from files (filename in Node.js, `Blob` in browser).\n* [X] generates segments of fixed size.\n* [ ] generates segments of available data as Interest arrives, to minimize delivery latency.\n* [X] responds to version discovery Interests with CanBePrefix.\n* [X] responds to RDR protocol (in `@ndn/rdr` package).\n* [ ] generates manifest.\n* [X] supports segment numbers.\n* [ ] supports byte offsets.\n* [X] supports multiple naming conventions.\n* [X] signs packets with a `Signer` (fixed key or trust schema).\n* [ ] reports when all segments have been retrieved at least once.\n"
 }
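
Reading the dependency diff as a whole: the 0.0.20220501 release swaps `minimalistic-assert` for the new `@ndn/util` workspace package, drops the `abort-controller` and `is-stream` shims in favor of platform built-ins and native async iteration, and moves `p-defer` and `p-lazy` to major versions consumed here as native ESM, which is why their `__importDefault` wrappers disappear from the compiled output.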