New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@ndn/lp

Package Overview
Dependencies
Maintainers
1
Versions
10
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ndn/lp - npm Package Compare versions

Comparing version 0.0.20230121 to 0.0.20240113

18

lib/an.d.ts
export declare const TT: {
LpPacket: number;
LpPayload: number;
LpSeqNum: number;
FragIndex: number;
FragCount: number;
PitToken: number;
Nack: number;
NackReason: number;
CongestionMark: number;
readonly LpPacket: 100;
readonly LpPayload: 80;
readonly LpSeqNum: 81;
readonly FragIndex: 82;
readonly FragCount: 83;
readonly PitToken: 98;
readonly Nack: 800;
readonly NackReason: 801;
readonly CongestionMark: 832;
};

@@ -38,3 +38,3 @@ import { Encoder } from "@ndn/tlv";

const first = new LpPacket();
first.copyL3HeadersFrom(full);
Object.assign(first, full.l3);
first.fragSeqNum = this.seqNumGen.next();

@@ -41,0 +41,0 @@ first.payload = full.payload.subarray(0, sizeofFirstFragment);

@@ -38,3 +38,3 @@ import { Encoder } from "@ndn/tlv";

const first = new LpPacket();
first.copyL3HeadersFrom(full);
Object.assign(first, full.l3);
first.fragSeqNum = this.seqNumGen.next();

@@ -41,0 +41,0 @@ first.payload = full.payload.subarray(0, sizeofFirstFragment);

import { NackHeader } from "@ndn/packet";
import { Encoder, EvDecoder, NNI } from "@ndn/tlv";
import { asDataView } from "@ndn/util";
import { assert } from "@ndn/util";
import { TT } from "./an_browser.js";

@@ -10,3 +10,3 @@ function isCritical(tt) {

.setIsCritical(isCritical)
.add(TT.LpSeqNum, (t, { value }) => t.fragSeqNum = asDataView(value).getBigUint64(0))
.add(TT.LpSeqNum, (t, { nniBig }) => t.fragSeqNum = nniBig)
.add(TT.FragIndex, (t, { nni }) => t.fragIndex = nni)

@@ -16,2 +16,3 @@ .add(TT.FragCount, (t, { nni }) => t.fragCount = nni)

.add(TT.Nack, (t, { decoder }) => t.nack = decoder.decode(NackHeader))
.add(TT.CongestionMark, (t, { nni }) => t.congestionMark = nni)
.add(TT.LpPayload, (t, { value }) => t.payload = value);

@@ -26,18 +27,41 @@ /** NDNLPv2 packet. */

fragCount = 1;
pitToken;
nack;
payload;
/**
* Return L3 fields only.
* They may be copied to another LpPacket via Object.assign().
*/
get l3() {
const t = {};
for (const k of ["pitToken", "nack", "congestionMark"]) {
t[k] = this[k];
}
return t;
}
encodeTo(encoder) {
encoder.prependTlv(TT.LpPacket, this.fragSeqNum !== undefined && [TT.LpSeqNum, NNI(this.fragSeqNum, { len: 8 })], this.fragIndex > 0 && [TT.FragIndex, NNI(this.fragIndex)], this.fragCount > 1 && [TT.FragCount, NNI(this.fragCount)], ...this.encodeL3Headers(), [TT.LpPayload, Encoder.OmitEmpty, this.payload]);
encoder.prependTlv(TT.LpPacket, ...this.encodeFragHeaders(), ...this.encodeL3Headers(), [TT.LpPayload, Encoder.OmitEmpty, this.payload]);
}
encodeFragHeaders() {
assert(this.fragIndex < this.fragCount);
if (this.fragSeqNum === undefined) {
assert(this.fragCount === 1);
return [];
}
return [
[TT.LpSeqNum, NNI(this.fragSeqNum, { len: 8 })],
this.fragIndex > 0 && [TT.FragIndex, NNI(this.fragIndex)],
this.fragCount > 1 && [TT.FragCount, NNI(this.fragCount)],
];
}
hasL3Headers() {
const { congestionMark = 0 } = this;
return this.pitToken !== undefined || this.nack !== undefined || congestionMark > 0;
}
encodeL3Headers() {
const { congestionMark = 0 } = this;
return [
[TT.PitToken, Encoder.OmitEmpty, this.pitToken],
this.nack,
congestionMark > 0 && [TT.CongestionMark, NNI(congestionMark)],
];
}
copyL3HeadersFrom(src) {
this.pitToken = src.pitToken;
this.nack = src.nack;
}
}
import { NackHeader } from "@ndn/packet";
import { Encoder, EvDecoder, NNI } from "@ndn/tlv";
import { asDataView } from "@ndn/util";
import { assert } from "@ndn/util";
import { TT } from "./an_node.js";

@@ -10,3 +10,3 @@ function isCritical(tt) {

.setIsCritical(isCritical)
.add(TT.LpSeqNum, (t, { value }) => t.fragSeqNum = asDataView(value).getBigUint64(0))
.add(TT.LpSeqNum, (t, { nniBig }) => t.fragSeqNum = nniBig)
.add(TT.FragIndex, (t, { nni }) => t.fragIndex = nni)

@@ -16,2 +16,3 @@ .add(TT.FragCount, (t, { nni }) => t.fragCount = nni)

.add(TT.Nack, (t, { decoder }) => t.nack = decoder.decode(NackHeader))
.add(TT.CongestionMark, (t, { nni }) => t.congestionMark = nni)
.add(TT.LpPayload, (t, { value }) => t.payload = value);

@@ -26,18 +27,41 @@ /** NDNLPv2 packet. */

fragCount = 1;
pitToken;
nack;
payload;
/**
* Return L3 fields only.
* They may be copied to another LpPacket via Object.assign().
*/
get l3() {
const t = {};
for (const k of ["pitToken", "nack", "congestionMark"]) {
t[k] = this[k];
}
return t;
}
encodeTo(encoder) {
encoder.prependTlv(TT.LpPacket, this.fragSeqNum !== undefined && [TT.LpSeqNum, NNI(this.fragSeqNum, { len: 8 })], this.fragIndex > 0 && [TT.FragIndex, NNI(this.fragIndex)], this.fragCount > 1 && [TT.FragCount, NNI(this.fragCount)], ...this.encodeL3Headers(), [TT.LpPayload, Encoder.OmitEmpty, this.payload]);
encoder.prependTlv(TT.LpPacket, ...this.encodeFragHeaders(), ...this.encodeL3Headers(), [TT.LpPayload, Encoder.OmitEmpty, this.payload]);
}
encodeFragHeaders() {
assert(this.fragIndex < this.fragCount);
if (this.fragSeqNum === undefined) {
assert(this.fragCount === 1);
return [];
}
return [
[TT.LpSeqNum, NNI(this.fragSeqNum, { len: 8 })],
this.fragIndex > 0 && [TT.FragIndex, NNI(this.fragIndex)],
this.fragCount > 1 && [TT.FragCount, NNI(this.fragCount)],
];
}
hasL3Headers() {
const { congestionMark = 0 } = this;
return this.pitToken !== undefined || this.nack !== undefined || congestionMark > 0;
}
encodeL3Headers() {
const { congestionMark = 0 } = this;
return [
[TT.PitToken, Encoder.OmitEmpty, this.pitToken],
this.nack,
congestionMark > 0 && [TT.CongestionMark, NNI(congestionMark)],
];
}
copyL3HeadersFrom(src) {
this.pitToken = src.pitToken;
this.nack = src.nack;
}
}

@@ -9,8 +9,20 @@ import { NackHeader } from "@ndn/packet";

fragCount: number;
pitToken?: Uint8Array;
nack?: NackHeader;
payload?: Uint8Array;
/**
* Return L3 fields only.
* They may be copied to another LpPacket via Object.assign().
*/
get l3(): LpL3;
encodeTo(encoder: Encoder): void;
private encodeFragHeaders;
hasL3Headers(): boolean;
encodeL3Headers(): Encodable[];
copyL3HeadersFrom(src: LpPacket): void;
}
export interface LpPacket extends LpL3 {
}
/** L3 fields in LpPacket. */
export interface LpL3 {
pitToken?: Uint8Array;
nack?: NackHeader;
congestionMark?: number;
}

@@ -1,2 +0,2 @@

import { concatBuffers } from "@ndn/util";
import { concatBuffers, evict } from "@ndn/util";
import { LpPacket } from "./packet_browser.js";

@@ -35,7 +35,6 @@ class PartialPacket {

reassemble() {
const full = new LpPacket();
full.copyL3HeadersFrom(this.buffer[0]);
const full = Object.assign(new LpPacket(), this.buffer[0].l3);
const parts = [];
for (const fragment of this.buffer) {
const part = fragment?.payload;
const part = fragment.payload;
if (part) {

@@ -89,10 +88,4 @@ parts.push(part);

this.partials.set(partial.seqNumBase, partial);
if (this.partials.size > this.capacity) { // exceed capacity, delete oldest
// eslint-disable-next-line no-unreachable-loop
for (const key of this.partials.keys()) {
this.partials.delete(key);
break;
}
}
evict(this.capacity, this.partials);
}
}

@@ -1,2 +0,2 @@

import { concatBuffers } from "@ndn/util";
import { concatBuffers, evict } from "@ndn/util";
import { LpPacket } from "./packet_node.js";

@@ -35,7 +35,6 @@ class PartialPacket {

reassemble() {
const full = new LpPacket();
full.copyL3HeadersFrom(this.buffer[0]);
const full = Object.assign(new LpPacket(), this.buffer[0].l3);
const parts = [];
for (const fragment of this.buffer) {
const part = fragment?.payload;
const part = fragment.payload;
if (part) {

@@ -89,10 +88,4 @@ parts.push(part);

this.partials.set(partial.seqNumBase, partial);
if (this.partials.size > this.capacity) { // exceed capacity, delete oldest
// eslint-disable-next-line no-unreachable-loop
for (const key of this.partials.keys()) {
this.partials.delete(key);
break;
}
}
evict(this.capacity, this.partials);
}
}
import { __importDefault, __importStar } from "tslib";
import { Data, Interest, Nack, TT as l3TT } from "@ndn/packet";
import { Decoder, Encoder, printTT } from "@ndn/tlv";
import { assert, flatMapOnce, toHex } from "@ndn/util";
import { flatMapOnce, toHex } from "@ndn/util";
import _cjsDefaultImport0 from "it-keepalive"; const itKeepAlive = __importDefault(_cjsDefaultImport0).default;

@@ -49,2 +49,3 @@ import { TT } from "./an_browser.js";

l3pkt.token = lpp.pitToken;
l3pkt.congestionMark = lpp.congestionMark;
yield l3pkt;

@@ -58,8 +59,11 @@ }

switch (type) {
case l3TT.Interest:
case l3TT.Interest: {
return { l3: decoder.decode(Interest) };
case l3TT.Data:
}
case l3TT.Data: {
return { l3: decoder.decode(Data) };
default:
}
default: {
throw new Error(`unrecognized TLV-TYPE ${printTT(type)} as L3Pkt`);
}
}

@@ -76,23 +80,22 @@ }

const mtu = Math.min(this.mtu, this.transport.mtu);
const { l3, token } = pkt;
const { l3 } = pkt;
const lpp = new LpPacket();
lpp.pitToken = token;
lpp.pitToken = pkt.token;
lpp.congestionMark = pkt.congestionMark;
try {
if (l3 instanceof Interest || l3 instanceof Data) {
const payload = Encoder.encode(l3);
if (!token && payload.length <= mtu) {
return yield payload;
}
lpp.payload = payload;
}
else {
assert(l3 instanceof Nack);
if (l3 instanceof Nack) {
lpp.nack = l3.header;
lpp.payload = Encoder.encode(l3.interest);
}
else {
lpp.payload = Encoder.encode(l3);
}
}
catch (err) {
return yield new LpService.TxError(err, l3);
return yield new LpService.TxError(err, pkt.l3);
}
if (Number.isFinite(mtu)) {
if (!lpp.hasL3Headers() && lpp.payload.length <= mtu) {
yield lpp.payload;
}
else if (Number.isFinite(mtu)) {
yield* this.fragmenter.fragment(lpp, mtu).map((fragment) => Encoder.encode(fragment, mtu));

@@ -99,0 +102,0 @@ }

import { __importDefault, __importStar } from "tslib";
import { Data, Interest, Nack, TT as l3TT } from "@ndn/packet";
import { Decoder, Encoder, printTT } from "@ndn/tlv";
import { assert, flatMapOnce, toHex } from "@ndn/util";
import { flatMapOnce, toHex } from "@ndn/util";
import _cjsDefaultImport0 from "it-keepalive"; const itKeepAlive = __importDefault(_cjsDefaultImport0).default;

@@ -49,2 +49,3 @@ import { TT } from "./an_node.js";

l3pkt.token = lpp.pitToken;
l3pkt.congestionMark = lpp.congestionMark;
yield l3pkt;

@@ -58,8 +59,11 @@ }

switch (type) {
case l3TT.Interest:
case l3TT.Interest: {
return { l3: decoder.decode(Interest) };
case l3TT.Data:
}
case l3TT.Data: {
return { l3: decoder.decode(Data) };
default:
}
default: {
throw new Error(`unrecognized TLV-TYPE ${printTT(type)} as L3Pkt`);
}
}

@@ -76,23 +80,22 @@ }

const mtu = Math.min(this.mtu, this.transport.mtu);
const { l3, token } = pkt;
const { l3 } = pkt;
const lpp = new LpPacket();
lpp.pitToken = token;
lpp.pitToken = pkt.token;
lpp.congestionMark = pkt.congestionMark;
try {
if (l3 instanceof Interest || l3 instanceof Data) {
const payload = Encoder.encode(l3);
if (!token && payload.length <= mtu) {
return yield payload;
}
lpp.payload = payload;
}
else {
assert(l3 instanceof Nack);
if (l3 instanceof Nack) {
lpp.nack = l3.header;
lpp.payload = Encoder.encode(l3.interest);
}
else {
lpp.payload = Encoder.encode(l3);
}
}
catch (err) {
return yield new LpService.TxError(err, l3);
return yield new LpService.TxError(err, pkt.l3);
}
if (Number.isFinite(mtu)) {
if (!lpp.hasL3Headers() && lpp.payload.length <= mtu) {
yield lpp.payload;
}
else if (Number.isFinite(mtu)) {
yield* this.fragmenter.fragment(lpp, mtu).map((fragment) => Encoder.encode(fragment, mtu));

@@ -99,0 +102,0 @@ }

@@ -11,3 +11,3 @@ import { Data, Interest, Nack } from "@ndn/packet";

private readonly reassembler;
rx: (iterable: AsyncIterable<Decoder.Tlv>) => AsyncIterable<LpService.Packet | LpService.RxError>;
readonly rx: (iterable: AsyncIterable<Decoder.Tlv>) => AsyncIterable<LpService.Packet | LpService.RxError>;
private decode;

@@ -21,5 +21,3 @@ private decodeL3;

export interface Transport {
/**
* Return current transport MTU.
*/
/** Return current transport MTU. */
readonly mtu: number;

@@ -50,2 +48,3 @@ }

token?: Uint8Array;
congestionMark?: number;
}

@@ -52,0 +51,0 @@ export class RxError extends Error {

{
"name": "@ndn/lp",
"version": "0.0.20230121",
"version": "0.0.20240113",
"description": "NDNts: NDNLP",

@@ -25,9 +25,9 @@ "keywords": [

"dependencies": {
"@ndn/packet": "0.0.20230121",
"@ndn/tlv": "0.0.20230121",
"@ndn/util": "0.0.20230121",
"@ndn/packet": "0.0.20240113",
"@ndn/tlv": "0.0.20240113",
"@ndn/util": "0.0.20240113",
"it-keepalive": "^1.2.0",
"tslib": "^2.4.1"
"tslib": "^2.6.2"
},
"types": "lib/mod.d.ts"
}

@@ -5,4 +5,4 @@ # @ndn/lp

This package implements [NDNLPv2](https://redmine.named-data.net/projects/nfd/wiki/NDNLPv2) link protocol.
Currently, this is a minimal implementation that understands:
This package partially implements [NDNLPv2](https://redmine.named-data.net/projects/nfd/wiki/NDNLPv2) link protocol.
Current implementation understands:

@@ -12,1 +12,2 @@ * Fragmentation and reassembly.

* PIT token.
* CongestionMark.
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc