New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

@ndn/segmented-object

Package Overview
Dependencies
Maintainers
1
Versions
10
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ndn/segmented-object - npm Package Compare versions

Comparing version 0.0.20210203 to 0.0.20210930

0

lib/convention.d.ts

@@ -0,0 +0,0 @@ import { NamingConvention } from "@ndn/packet";

17

lib/discover-version_browser.js

@@ -5,7 +5,7 @@ import { Endpoint } from "@ndn/endpoint";

/** Discover version with CanBePrefix. */
export async function discoverVersion(name, opts = {}) {
const { endpoint = new Endpoint(), describe, versionConvention = defaultVersionConvention, segmentNumConvention = defaultSegmentConvention, expectedSuffixLen = 2, modifyInterest, retxLimit = 2, signal, verifier, } = opts;
export async function discoverVersion(name, { endpoint = new Endpoint(), describe, versionConvention = defaultVersionConvention, segmentNumConvention = defaultSegmentConvention, conventions: conventionsInput = [], expectedSuffixLen = 2, modifyInterest, retxLimit = 2, signal, verifier, } = {}) {
const conventions = conventionsInput.length > 0 ? conventionsInput : [[versionConvention, segmentNumConvention]];
const interest = new Interest(name, Interest.CanBePrefix, Interest.MustBeFresh);
const data = await endpoint.consume(interest, {
describe: describe !== null && describe !== void 0 ? describe : `discoverVersion(${name})`,
describe: describe ?? `discoverVersion(${name})`,
modifyInterest,

@@ -16,9 +16,14 @@ retx: retxLimit,

});
const vComp = data.name.get(-2);
const sComp = data.name.get(-1);
let conventionIndex;
if ((expectedSuffixLen !== discoverVersion.ANY_SUFFIX_LEN &&
data.name.length !== name.length + expectedSuffixLen) ||
!versionConvention.match(data.name.get(-2)) ||
!segmentNumConvention.match(data.name.get(-1))) {
(conventionIndex = conventions.findIndex(([v, s]) => v.match(vComp) && s.match(sComp))) < 0) {
throw new Error(`cannot extract version from ${data.name}`);
}
return data.name.getPrefix(-1);
return Object.defineProperties(data.name.getPrefix(-1), {
versionConvention: { value: conventions[conventionIndex][0] },
segmentNumConvention: { value: conventions[conventionIndex][1] },
});
}

@@ -25,0 +30,0 @@ (function (discoverVersion) {

@@ -5,7 +5,7 @@ import { Endpoint } from "@ndn/endpoint";

/** Discover version with CanBePrefix. */
export async function discoverVersion(name, opts = {}) {
const { endpoint = new Endpoint(), describe, versionConvention = defaultVersionConvention, segmentNumConvention = defaultSegmentConvention, expectedSuffixLen = 2, modifyInterest, retxLimit = 2, signal, verifier, } = opts;
export async function discoverVersion(name, { endpoint = new Endpoint(), describe, versionConvention = defaultVersionConvention, segmentNumConvention = defaultSegmentConvention, conventions: conventionsInput = [], expectedSuffixLen = 2, modifyInterest, retxLimit = 2, signal, verifier, } = {}) {
const conventions = conventionsInput.length > 0 ? conventionsInput : [[versionConvention, segmentNumConvention]];
const interest = new Interest(name, Interest.CanBePrefix, Interest.MustBeFresh);
const data = await endpoint.consume(interest, {
describe: describe !== null && describe !== void 0 ? describe : `discoverVersion(${name})`,
describe: describe ?? `discoverVersion(${name})`,
modifyInterest,

@@ -16,9 +16,14 @@ retx: retxLimit,

});
const vComp = data.name.get(-2);
const sComp = data.name.get(-1);
let conventionIndex;
if ((expectedSuffixLen !== discoverVersion.ANY_SUFFIX_LEN &&
data.name.length !== name.length + expectedSuffixLen) ||
!versionConvention.match(data.name.get(-2)) ||
!segmentNumConvention.match(data.name.get(-1))) {
(conventionIndex = conventions.findIndex(([v, s]) => v.match(vComp) && s.match(sComp))) < 0) {
throw new Error(`cannot extract version from ${data.name}`);
}
return data.name.getPrefix(-1);
return Object.defineProperties(data.name.getPrefix(-1), {
versionConvention: { value: conventions[conventionIndex][0] },
segmentNumConvention: { value: conventions[conventionIndex][1] },
});
}

@@ -25,0 +30,0 @@ (function (discoverVersion) {

import { Name } from "@ndn/packet";
import { VersionConvention } from "./convention";
import { SegmentConvention, VersionConvention } from "./convention";
import { fetch } from "./fetch/mod";
/** Discover version with CanBePrefix. */
export declare function discoverVersion(name: Name, opts?: discoverVersion.Options): Promise<Name>;
export declare function discoverVersion(name: Name, { endpoint, describe, versionConvention, segmentNumConvention, conventions: conventionsInput, expectedSuffixLen, modifyInterest, retxLimit, signal, verifier, }?: discoverVersion.Options): Promise<discoverVersion.Result>;
export declare namespace discoverVersion {

@@ -15,2 +15,7 @@ const ANY_SUFFIX_LEN: unique symbol;

/**
* List of acceptable version+segment naming convention combinations.
* If this is specified and non-empty, it overrides versionConvention,segmentNumConvention.
*/
conventions?: ReadonlyArray<[VersionConvention, SegmentConvention]>;
/**
* Expected number of suffix components, including Version and Segment.

@@ -22,2 +27,8 @@ * Minimum and default are 2, i.e. Version and Segment components.

}
type Result = Name & {
/** Recognized version naming convention. */
versionConvention: VersionConvention;
/** Recognized segment number naming convention. */
segmentNumConvention: SegmentConvention;
};
}
import { __importDefault, __importStar } from "tslib";
import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
import { EventEmitter } from "events";
import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
const CWND = Symbol("CongestionAvoidance.CWND");

@@ -5,0 +5,0 @@ /** Congestion avoidance algorithm. */

import { __importDefault, __importStar } from "tslib";
import { EventEmitter } from "events";
import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
import { EventEmitter } from "node:events";
const CWND = Symbol("CongestionAvoidance.CWND");

@@ -5,0 +5,0 @@ /** Congestion avoidance algorithm. */

@@ -0,0 +0,0 @@ import type TypedEmitter from "typed-emitter";

import { __importDefault, __importStar } from "tslib";
import _cjsDefaultImport0 from "it-pushable"; const pushable = __importDefault(_cjsDefaultImport0).default;
import { Name } from "@ndn/packet";
import _cjsDefaultImport0 from "event-iterator"; const EventIterator = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
import { map, writeToStream } from "streaming-iterables";
import { collect, map, writeToStream } from "streaming-iterables";
import { Fetcher } from "./fetcher_browser.js";

@@ -11,39 +12,33 @@ import { Reorder } from "./reorder_browser.js";

this.opts = opts;
this.count = 0;
this.unused = true;
}
makeFetcher() {
assert(this.unused, "fetch.Result is already used");
this.unused = false;
return new Fetcher(this.name, this.opts);
get count() { return this.ctx?.count ?? 0; }
startFetcher() {
assert(!this.ctx, "fetch.Result is already used");
const ctx = new Fetcher(this.name, this.opts);
this.ctx = ctx;
return new EventIterator(({ push, stop, fail, on }) => {
let resume;
on("highWater", () => { resume = ctx.pause(); });
on("lowWater", () => { resume?.(); });
ctx.on("segment", push);
ctx.on("end", stop);
ctx.on("error", fail);
return () => {
resume?.();
ctx.off("segment", push);
ctx.off("end", stop);
ctx.off("error", fail);
};
});
}
unordered() {
const ctx = this.makeFetcher();
const it = pushable();
ctx.on("segment", (segNum, data) => {
it.push(data);
++this.count;
});
ctx.on("end", () => it.end());
ctx.on("error", (err) => it.end(err));
return it;
return map(({ data }) => data, this.startFetcher());
}
ordered() {
var _a;
const ctx = this.makeFetcher();
const reorder = new Reorder((_a = this.opts.segmentRange) === null || _a === void 0 ? void 0 : _a[0]);
const it = pushable();
ctx.on("segment", (segNum, data) => {
async *ordered() {
const reorder = new Reorder(this.opts.segmentRange?.[0]);
for await (const { segNum, data } of this.startFetcher()) {
const ordered = reorder.push(segNum, data);
ordered.forEach((data) => {
it.push(data);
++this.count;
});
});
ctx.on("end", () => {
assert(reorder.empty);
it.end();
});
ctx.on("error", (err) => it.end(err));
return it;
yield* ordered;
}
assert(reorder.empty);
}

@@ -57,14 +52,10 @@ chunks() {

async startPromise() {
const chunks = [];
let totalLength = 0;
for await (const chunk of this.chunks()) {
chunks.push(chunk);
totalLength += chunk.length;
}
const chunks = await collect(this.chunks());
const totalLength = chunks.map((chunk) => chunk.length).reduce((a, b) => a + b);
const output = new Uint8Array(totalLength);
let offset = 0;
chunks.forEach((chunk) => {
for (const chunk of chunks) {
output.set(chunk, offset);
offset += chunk.length;
});
}
assert.equal(offset, totalLength);

@@ -74,5 +65,4 @@ return output;

then(onfulfilled, onrejected) {
if (!this.promise) {
this.promise = this.startPromise();
}
this.promise ?? (this.promise = this.startPromise());
// eslint-disable-next-line promise/prefer-await-to-then
return this.promise.then(onfulfilled, onrejected);

@@ -86,3 +76,3 @@ }

export function fetch(name, opts = {}) {
return new FetchResult(name, opts);
return new FetchResult(new Name(name), opts);
}
import { __importDefault, __importStar } from "tslib";
import _cjsDefaultImport0 from "it-pushable"; const pushable = __importDefault(_cjsDefaultImport0).default;
import { Name } from "@ndn/packet";
import _cjsDefaultImport0 from "event-iterator"; const EventIterator = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
import { map, writeToStream } from "streaming-iterables";
import { collect, map, writeToStream } from "streaming-iterables";
import { Fetcher } from "./fetcher_node.js";

@@ -11,39 +12,33 @@ import { Reorder } from "./reorder_node.js";

this.opts = opts;
this.count = 0;
this.unused = true;
}
makeFetcher() {
assert(this.unused, "fetch.Result is already used");
this.unused = false;
return new Fetcher(this.name, this.opts);
get count() { return this.ctx?.count ?? 0; }
startFetcher() {
assert(!this.ctx, "fetch.Result is already used");
const ctx = new Fetcher(this.name, this.opts);
this.ctx = ctx;
return new EventIterator(({ push, stop, fail, on }) => {
let resume;
on("highWater", () => { resume = ctx.pause(); });
on("lowWater", () => { resume?.(); });
ctx.on("segment", push);
ctx.on("end", stop);
ctx.on("error", fail);
return () => {
resume?.();
ctx.off("segment", push);
ctx.off("end", stop);
ctx.off("error", fail);
};
});
}
unordered() {
const ctx = this.makeFetcher();
const it = pushable();
ctx.on("segment", (segNum, data) => {
it.push(data);
++this.count;
});
ctx.on("end", () => it.end());
ctx.on("error", (err) => it.end(err));
return it;
return map(({ data }) => data, this.startFetcher());
}
ordered() {
var _a;
const ctx = this.makeFetcher();
const reorder = new Reorder((_a = this.opts.segmentRange) === null || _a === void 0 ? void 0 : _a[0]);
const it = pushable();
ctx.on("segment", (segNum, data) => {
async *ordered() {
const reorder = new Reorder(this.opts.segmentRange?.[0]);
for await (const { segNum, data } of this.startFetcher()) {
const ordered = reorder.push(segNum, data);
ordered.forEach((data) => {
it.push(data);
++this.count;
});
});
ctx.on("end", () => {
assert(reorder.empty);
it.end();
});
ctx.on("error", (err) => it.end(err));
return it;
yield* ordered;
}
assert(reorder.empty);
}

@@ -57,14 +52,10 @@ chunks() {

async startPromise() {
const chunks = [];
let totalLength = 0;
for await (const chunk of this.chunks()) {
chunks.push(chunk);
totalLength += chunk.length;
}
const chunks = await collect(this.chunks());
const totalLength = chunks.map((chunk) => chunk.length).reduce((a, b) => a + b);
const output = new Uint8Array(totalLength);
let offset = 0;
chunks.forEach((chunk) => {
for (const chunk of chunks) {
output.set(chunk, offset);
offset += chunk.length;
});
}
assert.equal(offset, totalLength);

@@ -74,5 +65,4 @@ return output;

then(onfulfilled, onrejected) {
if (!this.promise) {
this.promise = this.startPromise();
}
this.promise ?? (this.promise = this.startPromise());
// eslint-disable-next-line promise/prefer-await-to-then
return this.promise.then(onfulfilled, onrejected);

@@ -86,3 +76,3 @@ }

export function fetch(name, opts = {}) {
return new FetchResult(name, opts);
return new FetchResult(new Name(name), opts);
}
/// <reference types="node" />
import type { Data, Name } from "@ndn/packet";
import { Data, NameLike } from "@ndn/packet";
import { Fetcher } from "./fetcher";
/** Fetch a segment object as AsyncIterable of payload. */
export declare function fetch(name: Name, opts?: fetch.Options): fetch.Result;
export declare function fetch(name: NameLike, opts?: fetch.Options): fetch.Result;
export declare namespace fetch {

@@ -27,5 +27,5 @@ type Options = Fetcher.Options;

pipe: (dest: NodeJS.WritableStream) => Promise<void>;
/** Number of segmented retrieved. */
/** Number of segments retrieved so far. */
readonly count: number;
}
}

@@ -6,11 +6,12 @@ import { CancelInterest, Forwarder, FwPacket } from "@ndn/fw";

import { FetchLogic } from "./logic_browser.js";
/** Fetch Data packets as guided by FetchLogic. */
export class Fetcher extends EventEmitter {
constructor(name, opts) {
var _a, _b, _c, _d;
super();
this.name = name;
this.opts = opts;
this.count_ = 0;
this.rx = async (iterable) => {
for await (const { l3, token } of iterable) {
if (l3 instanceof Data && typeof token === "number") {
if (l3 instanceof Data && typeof token === "number" && l3.contentType === 0) {
void this.handleData(l3, token);

@@ -21,4 +22,3 @@ }

this.handleAbort = () => {
this.emit("error", new Error("abort"));
this.close();
this.fail(new Error("abort"));
};

@@ -28,19 +28,23 @@ this.logic = new FetchLogic(opts);

this.logic.on("exceedRetxLimit", (segNum) => {
this.emit("error", new Error(`cannot retrieve segment ${segNum}`));
this.close();
this.fail(new Error(`cannot retrieve segment ${segNum}`));
});
this.face = ((_b = (_a = opts.endpoint) === null || _a === void 0 ? void 0 : _a.fw) !== null && _b !== void 0 ? _b : Forwarder.getDefault()).addFace({
this.face = (opts.endpoint?.fw ?? Forwarder.getDefault()).addFace({
rx: this.tx(),
tx: this.rx,
}, {
describe: (_c = opts.describe) !== null && _c !== void 0 ? _c : `fetch(${name})`,
describe: opts.describe ?? `fetch(${name})`,
});
(_d = opts.signal) === null || _d === void 0 ? void 0 : _d.addEventListener("abort", this.handleAbort);
opts.signal?.addEventListener("abort", this.handleAbort);
}
/** Number of segments retrieved so far. */
get count() { return this.count_; }
close() {
var _a;
(_a = this.opts.signal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", this.handleAbort);
this.on("error", () => undefined); // ignore further errors
this.opts.signal?.removeEventListener("abort", this.handleAbort);
this.logic.close();
this.face.close();
}
pause() {
return this.logic.pause();
}
tx() {

@@ -53,15 +57,11 @@ const { segmentNumConvention = defaultSegmentConvention, modifyInterest, lifetimeAfterRto = 1000, } = this.opts;

return FwPacket.create(interest, segNum);
}, ({ interest: { l3, token } }) => {
return new CancelInterest(l3, token);
});
}, ({ interest: { l3, token } }) => new CancelInterest(l3, token));
}
async handleData(data, segNum) {
var _a, _b;
const now = this.logic.now();
try {
await ((_a = this.opts.verifier) === null || _a === void 0 ? void 0 : _a.verify(data));
await this.opts.verifier?.verify(data);
}
catch (err) {
this.emit("error", new Error(`cannot verify segment ${segNum}: ${err}`));
this.close();
this.fail(new Error(`cannot verify segment ${segNum}: ${err}`));
return;

@@ -74,9 +74,16 @@ }

else {
const { segmentNumConvention = defaultSegmentConvention, } = this.opts;
if ((_b = data.finalBlockId) === null || _b === void 0 ? void 0 : _b.is(segmentNumConvention)) {
this.logic.setFinalSegNum(data.finalBlockId.as(segmentNumConvention), true);
let segmentConvention;
if (data.finalBlockId?.is((segmentConvention = this.opts.segmentNumConvention ?? defaultSegmentConvention))) {
this.logic.setFinalSegNum(data.finalBlockId.as(segmentConvention), true);
}
}
this.emit("segment", segNum, data);
++this.count_;
this.emit("segment", { segNum, data });
}
fail(err) {
setTimeout(() => {
this.emit("error", err);
this.close();
}, 0);
}
}
import { CancelInterest, Forwarder, FwPacket } from "@ndn/fw";
import { Data, Interest } from "@ndn/packet";
import { EventEmitter } from "events";
import { EventEmitter } from "node:events";
import { defaultSegmentConvention } from "../convention_node.js";
import { FetchLogic } from "./logic_node.js";
/** Fetch Data packets as guided by FetchLogic. */
export class Fetcher extends EventEmitter {
constructor(name, opts) {
var _a, _b, _c, _d;
super();
this.name = name;
this.opts = opts;
this.count_ = 0;
this.rx = async (iterable) => {
for await (const { l3, token } of iterable) {
if (l3 instanceof Data && typeof token === "number") {
if (l3 instanceof Data && typeof token === "number" && l3.contentType === 0) {
void this.handleData(l3, token);

@@ -20,4 +21,3 @@ }

this.handleAbort = () => {
this.emit("error", new Error("abort"));
this.close();
this.fail(new Error("abort"));
};

@@ -27,19 +27,23 @@ this.logic = new FetchLogic(opts);

this.logic.on("exceedRetxLimit", (segNum) => {
this.emit("error", new Error(`cannot retrieve segment ${segNum}`));
this.close();
this.fail(new Error(`cannot retrieve segment ${segNum}`));
});
this.face = ((_b = (_a = opts.endpoint) === null || _a === void 0 ? void 0 : _a.fw) !== null && _b !== void 0 ? _b : Forwarder.getDefault()).addFace({
this.face = (opts.endpoint?.fw ?? Forwarder.getDefault()).addFace({
rx: this.tx(),
tx: this.rx,
}, {
describe: (_c = opts.describe) !== null && _c !== void 0 ? _c : `fetch(${name})`,
describe: opts.describe ?? `fetch(${name})`,
});
(_d = opts.signal) === null || _d === void 0 ? void 0 : _d.addEventListener("abort", this.handleAbort);
opts.signal?.addEventListener("abort", this.handleAbort);
}
/** Number of segments retrieved so far. */
get count() { return this.count_; }
close() {
var _a;
(_a = this.opts.signal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", this.handleAbort);
this.on("error", () => undefined); // ignore further errors
this.opts.signal?.removeEventListener("abort", this.handleAbort);
this.logic.close();
this.face.close();
}
pause() {
return this.logic.pause();
}
tx() {

@@ -52,15 +56,11 @@ const { segmentNumConvention = defaultSegmentConvention, modifyInterest, lifetimeAfterRto = 1000, } = this.opts;

return FwPacket.create(interest, segNum);
}, ({ interest: { l3, token } }) => {
return new CancelInterest(l3, token);
});
}, ({ interest: { l3, token } }) => new CancelInterest(l3, token));
}
async handleData(data, segNum) {
var _a, _b;
const now = this.logic.now();
try {
await ((_a = this.opts.verifier) === null || _a === void 0 ? void 0 : _a.verify(data));
await this.opts.verifier?.verify(data);
}
catch (err) {
this.emit("error", new Error(`cannot verify segment ${segNum}: ${err}`));
this.close();
this.fail(new Error(`cannot verify segment ${segNum}: ${err}`));
return;

@@ -73,9 +73,16 @@ }

else {
const { segmentNumConvention = defaultSegmentConvention, } = this.opts;
if ((_b = data.finalBlockId) === null || _b === void 0 ? void 0 : _b.is(segmentNumConvention)) {
this.logic.setFinalSegNum(data.finalBlockId.as(segmentNumConvention), true);
let segmentConvention;
if (data.finalBlockId?.is((segmentConvention = this.opts.segmentNumConvention ?? defaultSegmentConvention))) {
this.logic.setFinalSegNum(data.finalBlockId.as(segmentConvention), true);
}
}
this.emit("segment", segNum, data);
++this.count_;
this.emit("segment", { segNum, data });
}
fail(err) {
setTimeout(() => {
this.emit("error", err);
this.close();
}, 0);
}
}

@@ -0,1 +1,3 @@

/// <reference types="node" />
/// <reference types="web" />
import { Endpoint } from "@ndn/endpoint";

@@ -9,3 +11,3 @@ import { Data, Interest, Name, Verifier } from "@ndn/packet";

/** Emitted when a Data segment arrives. */
segment: (segNum: number, data: Data) => void;
segment: (seg: Fetcher.SegmentData) => void;
/** Emitted after all data chunks arrive. */

@@ -17,5 +19,9 @@ end: () => void;

declare const Fetcher_base: new () => TypedEmitter<Events>;
/** Fetch Data packets as guided by FetchLogic. */
export declare class Fetcher extends Fetcher_base {
private readonly name;
private readonly opts;
/** Number of segments retrieved so far. */
get count(): number;
private count_;
private readonly logic;

@@ -25,6 +31,8 @@ private readonly face;

close(): void;
pause(): () => void;
private tx;
private rx;
private readonly rx;
private handleData;
private handleAbort;
private fail;
private readonly handleAbort;
}

@@ -58,3 +66,7 @@ export declare namespace Fetcher {

}
interface SegmentData {
segNum: number;
data: Data;
}
}
export {};
import { __importDefault, __importStar } from "tslib";
import { EventEmitter } from "events";
import _cjsDefaultImport0 from "hirestime"; const hirestime = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport2 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport2).default;
import { EventEmitter } from "events";
import _cjsDefaultImport3 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport3).default;
import { RttEstimator } from "./rtt-estimator_browser.js";

@@ -28,3 +29,2 @@ import { TcpCubic } from "./tcp-cubic_browser.js";

constructor({ rtte, ca = new TcpCubic(), segmentRange = [0, undefined], estimatedFinalSegNum, retxLimit = 15, }) {
var _a;
super();

@@ -44,5 +44,5 @@ /** Internal clock. */

this.hiInterestSegNum = segmentRange[0] - 1;
this.finalSegNum = ((_a = segmentRange[1]) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER) - 1;
this.finalSegNum = (segmentRange[1] ?? Number.MAX_SAFE_INTEGER) - 1;
assert(this.hiInterestSegNum < this.finalSegNum, "invalid segmentRange");
this.estimatedFinalSegNum = estimatedFinalSegNum !== null && estimatedFinalSegNum !== void 0 ? estimatedFinalSegNum : this.finalSegNum;
this.estimatedFinalSegNum = estimatedFinalSegNum ?? this.finalSegNum;
}

@@ -58,5 +58,18 @@ /** Abort. */

}
/**
* Pause outgoing Interests, for backpressure from Data consumer.
* Return a function for resuming.
*/
pause() {
const defer = pDefer();
this.paused = defer.promise;
return () => {
defer.resolve();
this.paused = undefined;
};
}
/** Generate stream of outgoing requests. */
async *outgoing(makeInterest, cancelInterest) {
while (this.running) {
await this.paused;
await this.tl.take();

@@ -63,0 +76,0 @@ if (!this.running) {

import { __importDefault, __importStar } from "tslib";
import { EventEmitter } from "events";
import _cjsDefaultImport0 from "hirestime"; const hirestime = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport2 from "mnemonist/default-weak-map.js"; const DefaultWeakMap = __importDefault(_cjsDefaultImport2).default;
import { EventEmitter } from "node:events";
import _cjsDefaultImport3 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport3).default;
import { RttEstimator } from "./rtt-estimator_node.js";

@@ -28,3 +29,2 @@ import { TcpCubic } from "./tcp-cubic_node.js";

constructor({ rtte, ca = new TcpCubic(), segmentRange = [0, undefined], estimatedFinalSegNum, retxLimit = 15, }) {
var _a;
super();

@@ -44,5 +44,5 @@ /** Internal clock. */

this.hiInterestSegNum = segmentRange[0] - 1;
this.finalSegNum = ((_a = segmentRange[1]) !== null && _a !== void 0 ? _a : Number.MAX_SAFE_INTEGER) - 1;
this.finalSegNum = (segmentRange[1] ?? Number.MAX_SAFE_INTEGER) - 1;
assert(this.hiInterestSegNum < this.finalSegNum, "invalid segmentRange");
this.estimatedFinalSegNum = estimatedFinalSegNum !== null && estimatedFinalSegNum !== void 0 ? estimatedFinalSegNum : this.finalSegNum;
this.estimatedFinalSegNum = estimatedFinalSegNum ?? this.finalSegNum;
}

@@ -58,5 +58,18 @@ /** Abort. */

}
/**
* Pause outgoing Interests, for backpressure from Data consumer.
* Return a function for resuming.
*/
pause() {
const defer = pDefer();
this.paused = defer.promise;
return () => {
defer.resolve();
this.paused = undefined;
};
}
/** Generate stream of outgoing requests. */
async *outgoing(makeInterest, cancelInterest) {
while (this.running) {
await this.paused;
await this.tl.take();

@@ -63,0 +76,0 @@ if (!this.running) {

@@ -44,5 +44,11 @@ /// <reference types="node" />

private processCancels;
private paused?;
constructor({ rtte, ca, segmentRange, estimatedFinalSegNum, retxLimit, }: FetchLogic.Options);
/** Abort. */
close(): void;
/**
* Pause outgoing Interests, for backpressure from Data consumer.
* Return a function for resuming.
*/
pause(): () => void;
/** Generate stream of outgoing requests. */

@@ -49,0 +55,0 @@ outgoing<T, C>(makeInterest: (req: SegRequest<unknown>) => T, cancelInterest: (req: SegRequest<T>) => C): AsyncGenerator<T | C>;

@@ -0,0 +0,0 @@ export { RttEstimator } from "./rtt-estimator";

@@ -0,1 +1,2 @@

/** Reorder items according to their index numbers. */
export class Reorder {

@@ -6,3 +7,7 @@ constructor(first = 0) {

}
/** Return number of items in buffer. */
get size() { return this.buffer.size; }
/** Determine whether buffer is empty, i.e. all items emitted. */
get empty() { return this.buffer.size === 0; }
/** Add a new item, and return in-order items. */
push(index, obj) {

@@ -16,11 +21,6 @@ if (index >= this.next) {

const result = [];
for (;;) {
const obj = this.buffer.get(this.next);
if (obj) {
result.push(obj);
this.buffer.delete(this.next++);
}
else {
break;
}
let obj;
while ((obj = this.buffer.get(this.next)) !== undefined) {
result.push(obj);
this.buffer.delete(this.next++);
}

@@ -27,0 +27,0 @@ return result;

@@ -0,1 +1,2 @@

/** Reorder items according to their index numbers. */
export class Reorder {

@@ -6,3 +7,7 @@ constructor(first = 0) {

}
/** Return number of items in buffer. */
get size() { return this.buffer.size; }
/** Determine whether buffer is empty, i.e. all items emitted. */
get empty() { return this.buffer.size === 0; }
/** Add a new item, and return in-order items. */
push(index, obj) {

@@ -16,11 +21,6 @@ if (index >= this.next) {

const result = [];
for (;;) {
const obj = this.buffer.get(this.next);
if (obj) {
result.push(obj);
this.buffer.delete(this.next++);
}
else {
break;
}
let obj;
while ((obj = this.buffer.get(this.next)) !== undefined) {
result.push(obj);
this.buffer.delete(this.next++);
}

@@ -27,0 +27,0 @@ return result;

@@ -0,8 +1,13 @@

/** Reorder items according to their index numbers. */
export declare class Reorder<T> {
private next;
private buffer;
private readonly buffer;
constructor(first?: number);
/** Return number of items in buffer. */
get size(): number;
/** Determine whether buffer is empty, i.e. all items emitted. */
get empty(): boolean;
/** Add a new item, and return in-order items. */
push(index: number, obj: T): T[];
private pop;
}

@@ -0,0 +0,0 @@ interface Parameters {

@@ -0,0 +0,0 @@ import { CongestionAvoidance } from "./congestion-avoidance";

@@ -0,0 +0,0 @@ /** A token-based throttle limiter. */

@@ -0,0 +0,0 @@ export * from "./fetch/mod";

@@ -0,0 +0,0 @@ import { ComponentLike, NameLike } from "@ndn/packet";

@@ -0,1 +1,3 @@

/// <reference types="node" />
/// <reference types="web" />
import { ChunkOptions, ChunkSource, KnownSizeChunkSource } from "./common";

@@ -2,0 +4,0 @@ /** Generate chunks from a Blob (from W3C File API, browser only). */

@@ -0,0 +0,0 @@ import { ChunkOptions, ChunkSource, KnownSizeChunkSource } from "./common";

@@ -34,8 +34,6 @@ export class KnownSizeChunkSource {

export function getMinChunkSize(opts) {
var _a, _b;
return (_b = (_a = opts.minChunkSize) !== null && _a !== void 0 ? _a : opts.chunkSize) !== null && _b !== void 0 ? _b : 64;
return opts.minChunkSize ?? opts.chunkSize ?? 64;
}
export function getMaxChunkSize(opts) {
var _a, _b;
return (_b = (_a = opts.maxChunkSize) !== null && _a !== void 0 ? _a : opts.chunkSize) !== null && _b !== void 0 ? _b : 4096;
return opts.maxChunkSize ?? opts.chunkSize ?? 4096;
}

@@ -34,8 +34,6 @@ export class KnownSizeChunkSource {

export function getMinChunkSize(opts) {
var _a, _b;
return (_b = (_a = opts.minChunkSize) !== null && _a !== void 0 ? _a : opts.chunkSize) !== null && _b !== void 0 ? _b : 64;
return opts.minChunkSize ?? opts.chunkSize ?? 64;
}
export function getMaxChunkSize(opts) {
var _a, _b;
return (_b = (_a = opts.maxChunkSize) !== null && _a !== void 0 ? _a : opts.chunkSize) !== null && _b !== void 0 ? _b : 4096;
return opts.maxChunkSize ?? opts.chunkSize ?? 4096;
}

@@ -0,0 +0,0 @@ /** Index and payload of a chunk. */

@@ -9,3 +9,3 @@ import { Chunk, ChunkOptions, ChunkSource } from "./common";

constructor(path: string, opts?: ChunkOptions);
private opening;
private readonly opening;
listChunks(): AsyncIterable<Chunk>;

@@ -12,0 +12,0 @@ getChunk(i: number): Promise<Chunk | undefined>;

@@ -11,2 +11,3 @@ import { __importDefault, __importStar } from "tslib";

async getPayload(i, offset, chunkSize) {
void i;
const payload = new Uint8Array(chunkSize);

@@ -36,13 +37,13 @@ await this.fh.read(payload, 0, chunkSize, offset);

async *listChunks() {
const inner = await this.opening;
yield* inner.listChunks();
const h = await this.opening;
yield* h.listChunks();
}
async getChunk(i) {
const inner = await this.opening;
return inner.getChunk(i);
const h = await this.opening;
return h.getChunk(i);
}
async close() {
const inner = await this.opening;
await inner.close();
const h = await this.opening;
await h.close();
}
}

@@ -58,3 +58,3 @@ import { getMaxChunkSize, getMinChunkSize } from "./common_browser.js";

scattered.append(buf);
for (;;) {
while (true) {
const payload = scattered.gather();

@@ -61,0 +61,0 @@ if (!payload) {

@@ -58,3 +58,3 @@ import { getMaxChunkSize, getMinChunkSize } from "./common_node.js";

scattered.append(buf);
for (;;) {
while (true) {
const payload = scattered.gather();

@@ -61,0 +61,0 @@ if (!payload) {

@@ -0,0 +0,0 @@ import type { AnyIterable } from "streaming-iterables";

@@ -0,0 +0,0 @@ /// <reference types="node" />

@@ -0,0 +0,0 @@ export type { Chunk, ChunkSource, ChunkOptions } from "./common";

@@ -0,0 +0,0 @@ /// <reference types="node" />

import { __importDefault, __importStar } from "tslib";
import { Data, digestSigning } from "@ndn/packet";
import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport1 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport2 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport2).default;
import { getIterator } from "streaming-iterables";

@@ -52,4 +53,3 @@ import { defaultSegmentConvention } from "../convention_browser.js";

close() {
var _a, _b;
(_b = (_a = this.source).close) === null || _b === void 0 ? void 0 : _b.call(_a);
this.source.close?.();
}

@@ -64,9 +64,7 @@ }

this.buffer = new Map();
this.waitlist = new Map();
this.stop = pDefer();
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.produce(opts);
this.waitlist = new DefaultMap(() => pDefer());
this.generator = this.listChunks();
void this.produce(opts);
}
async getData(i) {
var _a;
if (i > this.final) {

@@ -77,3 +75,3 @@ return undefined;

this.requested = i;
(_a = this.pause) === null || _a === void 0 ? void 0 : _a.resolve();
this.pause?.resolve();
}

@@ -84,20 +82,25 @@ const data = this.buffer.get(i);

}
let wait = this.waitlist.get(i);
if (!wait) {
wait = pDefer();
this.waitlist.set(i, wait);
}
await wait.promise;
await this.waitlist.get(i).promise;
return this.buffer.get(i);
}
async *listChunks() {
const iterator = getIterator(this.source.listChunks());
try {
while (true) {
const { done, value } = await iterator.next();
if (done) {
return false;
}
yield value;
}
}
finally {
void iterator.return?.();
}
}
async produce({ bufferBehind = Infinity, bufferAhead = 16 }) {
const iterator = getIterator(this.source.listChunks());
let i = -1;
for (;;) {
const { done, value } = await Promise.race([iterator.next(), this.stop.promise]);
if (done) {
if (iterator.return) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
iterator.return();
}
while (true) {
const { done, value } = await this.generator.next();
if (done || value === false) {
break;

@@ -118,3 +121,3 @@ }

}
const w = this.waitlist.get(i);
const w = this.waitlist.peek(i);
if (w) {

@@ -128,9 +131,8 @@ this.waitlist.delete(i);

close() {
var _a;
super.close();
void this.generator.return(false);
for (const w of this.waitlist.values()) {
w.resolve();
}
(_a = this.pause) === null || _a === void 0 ? void 0 : _a.resolve();
this.stop.resolve({ done: true, value: undefined });
this.pause?.resolve();
}

@@ -137,0 +139,0 @@ }

import { __importDefault, __importStar } from "tslib";
import { Data, digestSigning } from "@ndn/packet";
import _cjsDefaultImport0 from "minimalistic-assert"; const assert = __importDefault(_cjsDefaultImport0).default;
import _cjsDefaultImport1 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport1 from "mnemonist/default-map.js"; const DefaultMap = __importDefault(_cjsDefaultImport1).default;
import _cjsDefaultImport2 from "p-defer"; const pDefer = __importDefault(_cjsDefaultImport2).default;
import { getIterator } from "streaming-iterables";

@@ -52,4 +53,3 @@ import { defaultSegmentConvention } from "../convention_node.js";

close() {
var _a, _b;
(_b = (_a = this.source).close) === null || _b === void 0 ? void 0 : _b.call(_a);
this.source.close?.();
}

@@ -64,9 +64,7 @@ }

this.buffer = new Map();
this.waitlist = new Map();
this.stop = pDefer();
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.produce(opts);
this.waitlist = new DefaultMap(() => pDefer());
this.generator = this.listChunks();
void this.produce(opts);
}
async getData(i) {
var _a;
if (i > this.final) {

@@ -77,3 +75,3 @@ return undefined;

this.requested = i;
(_a = this.pause) === null || _a === void 0 ? void 0 : _a.resolve();
this.pause?.resolve();
}

@@ -84,20 +82,25 @@ const data = this.buffer.get(i);

}
let wait = this.waitlist.get(i);
if (!wait) {
wait = pDefer();
this.waitlist.set(i, wait);
}
await wait.promise;
await this.waitlist.get(i).promise;
return this.buffer.get(i);
}
async *listChunks() {
const iterator = getIterator(this.source.listChunks());
try {
while (true) {
const { done, value } = await iterator.next();
if (done) {
return false;
}
yield value;
}
}
finally {
void iterator.return?.();
}
}
async produce({ bufferBehind = Infinity, bufferAhead = 16 }) {
const iterator = getIterator(this.source.listChunks());
let i = -1;
for (;;) {
const { done, value } = await Promise.race([iterator.next(), this.stop.promise]);
if (done) {
if (iterator.return) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
iterator.return();
}
while (true) {
const { done, value } = await this.generator.next();
if (done || value === false) {
break;

@@ -118,3 +121,3 @@ }

}
const w = this.waitlist.get(i);
const w = this.waitlist.peek(i);
if (w) {

@@ -128,9 +131,8 @@ this.waitlist.delete(i);

close() {
var _a;
super.close();
void this.generator.return(false);
for (const w of this.waitlist.values()) {
w.resolve();
}
(_a = this.pause) === null || _a === void 0 ? void 0 : _a.resolve();
this.stop.resolve({ done: true, value: undefined });
this.pause?.resolve();
}

@@ -137,0 +139,0 @@ }

@@ -0,0 +0,0 @@ import type { ProducerHandler } from "@ndn/endpoint";

export * from "./chunk-source/mod";
export * from "./data-producer";
export * from "./serve";

@@ -11,9 +11,8 @@ import { Endpoint } from "@ndn/endpoint";

export function serve(prefixInput, source, opts = {}) {
var _a, _b;
const prefix = new Name(prefixInput);
const { endpoint = new Endpoint() } = opts;
const producer = DataProducer.create(source, prefix, opts);
const prod = endpoint.produce((_a = opts.producerPrefix) !== null && _a !== void 0 ? _a : prefix, producer.processInterest, {
const prod = endpoint.produce(opts.producerPrefix ?? prefix, producer.processInterest, {
concurrency: 16,
describe: (_b = opts.describe) !== null && _b !== void 0 ? _b : `serve(${prefix})`,
describe: opts.describe ?? `serve(${prefix})`,
announcement: opts.announcement,

@@ -20,0 +19,0 @@ });

@@ -11,9 +11,8 @@ import { Endpoint } from "@ndn/endpoint";

export function serve(prefixInput, source, opts = {}) {
var _a, _b;
const prefix = new Name(prefixInput);
const { endpoint = new Endpoint() } = opts;
const producer = DataProducer.create(source, prefix, opts);
const prod = endpoint.produce((_a = opts.producerPrefix) !== null && _a !== void 0 ? _a : prefix, producer.processInterest, {
const prod = endpoint.produce(opts.producerPrefix ?? prefix, producer.processInterest, {
concurrency: 16,
describe: (_b = opts.describe) !== null && _b !== void 0 ? _b : `serve(${prefix})`,
describe: opts.describe ?? `serve(${prefix})`,
announcement: opts.announcement,

@@ -20,0 +19,0 @@ });

@@ -0,0 +0,0 @@ import { Endpoint } from "@ndn/endpoint";

{
"name": "@ndn/segmented-object",
"version": "0.0.20210203",
"version": "0.0.20210930",
"description": "NDNts: Segmented Object",

@@ -25,17 +25,17 @@ "keywords": [

"dependencies": {
"@ndn/endpoint": "0.0.20210203",
"@ndn/fw": "0.0.20210203",
"@ndn/naming-convention2": "0.0.20210203",
"@ndn/packet": "0.0.20210203",
"@ndn/endpoint": "0.0.20210930",
"@ndn/fw": "0.0.20210930",
"@ndn/naming-convention2": "0.0.20210930",
"@ndn/packet": "0.0.20210930",
"abort-controller": "^3.0.0",
"graceful-fs": "^4.2.4",
"event-iterator": "^2.0.0",
"graceful-fs": "^4.2.8",
"hirestime": "^6.1.0",
"is-stream": "^2.0.0",
"it-pushable": "1.4.0",
"mnemonist": "^0.38.1",
"is-stream": "^2.0.1",
"mnemonist": "^0.38.4",
"minimalistic-assert": "^1.0.1",
"p-defer": "^3.0.0",
"p-lazy": "^3.1.0",
"streaming-iterables": "^5.0.4",
"tslib": "^2.1.0",
"streaming-iterables": "^6.0.0",
"tslib": "^2.3.1",
"typed-emitter": "^1.3.1"

@@ -42,0 +42,0 @@ },

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc