@rpch/sdk - npm package version comparison

Comparing version 1.6.0 to 1.7.1

build/node-path.d.ts


build/exit-data.d.ts

@@ -11,3 +11,3 @@ import * as PerfData from './perf-data';

version?: string;
infoLatSec: number;
infoLatMs: number;
counterOffset: number;

@@ -22,3 +22,4 @@ };

version?: string;
infoLatSec?: number;
infoLatMs?: number;
infoLatStarted?: number;
};

@@ -25,0 +26,0 @@ export declare function create(): ExitData;
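The declaration change above replaces the seconds-based infoLatSec with millisecond fields (infoLatMs plus an infoLatStarted timestamp). As the node-pair changes further below show, info latency is now measured directly around the info round trip; a condensed sketch of that measurement, using the names from this diff:

// right before the info request is sent (see requestInfo below)
exitData.infoLatStarted = Date.now();
// ...and when the matching info response arrives (see incInfoResps below)
exitData.infoLatMs = exitData.infoLatStarted && Date.now() - exitData.infoLatStarted;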


build/exit-data.js

@@ -77,3 +77,3 @@ "use strict";

const avgLats = (0, utils_1.average)(lats);
const infoLatSec = xd.infoLatSec || -1;
const infoLatMs = xd.infoLatMs || -1;
const infoFail = !!xd.infoFail;

@@ -88,3 +88,3 @@ const counterOffset = xd.counterOffset || 0;

avgLats,
infoLatSec,
infoLatMs,
infoFail,

@@ -91,0 +91,0 @@ version,

@@ -30,4 +30,4 @@ "use strict";

const shortPid = utils.shortPeerId(peerId);
return `ExitNode[${shortPid}(${peerId}),v${version},c:${counter}]`;
return `ExitNode[x${shortPid},v${version},c:${counter}]`;
}
exports.prettyPrint = prettyPrint;

@@ -48,2 +48,3 @@ import * as DPapi from './dp-api';

readonly debugScope?: string;
readonly forceManualRelaying?: boolean;
};

@@ -50,0 +51,0 @@ /**

@@ -69,3 +69,4 @@ "use strict";

forceZeroHop: false,
segmentLimit: 0, // disable segment limit
segmentLimit: 0,
forceManualRelaying: false,
};

@@ -142,2 +143,5 @@ const log = Utils.logger(['sdk']);

const { entryNode, exitNode, counterOffset } = resNodes;
const { reqRelayPeerId, respRelayPeerId } = this.ops.forceManualRelaying
? resNodes
: { reqRelayPeerId: undefined, respRelayPeerId: undefined };
const id = RequestCache.generateId(this.requestCache);

@@ -155,5 +159,7 @@ const resReq = Request.create({

hops: this.hops,
reqRelayPeerId,
respRelayPeerId,
});
if (Res.isErr(resReq)) {
log.error('Error creating request', resReq.error);
log.error('error creating request', resReq.error);
return reject('Unable to create request object');

@@ -170,3 +176,3 @@ }

const timer = setTimeout(() => {
log.error('Request %s expired after %dms timeout', request.id, reqOps.timeout);
log.error('%s expired after %dms timeout', Request.prettyPrint(request), reqOps.timeout);
this.removeRequest(request);

@@ -185,3 +191,3 @@ return reject('Request timed out');

// send request to hoprd
log.info('sending request %s', request.id);
log.info('sending request %s', Request.prettyPrint(request));
// queue segment sending for all of them

@@ -200,2 +206,3 @@ segments.forEach((s) => setTimeout(() => {

hops: request.hops,
relay: request.reqRelayPeerId,
};

@@ -250,2 +257,4 @@ NodeAPI.sendMessage(conn, {

hops: origReq.hops,
reqRelayPeerId: fallback.reqRelayPeerId,
respRelayPeerId: fallback.respRelayPeerId,
});

@@ -274,3 +283,3 @@ if (Res.isErr(resReq)) {

// send request to hoprd
log.info('resending request %s', request.id, 'for original', origReq.id);
log.info('resending request %s', Request.prettyPrint(request));
// send segments sequentially

@@ -285,2 +294,3 @@ segments.forEach((s) => setTimeout(() => this.resendSegment(s, request, entryNode, newCacheEntry)));

hops: request.hops,
relay: request.reqRelayPeerId,
}, {

@@ -409,2 +419,3 @@ recipient: request.exitPeerId,

debugScope: ops.debugScope,
forceManualRelaying: ops.forceManualRelaying ?? defaultOps.forceManualRelaying,
};
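The new forceManualRelaying flag flows from caller-supplied options into route selection, defaulting to false. A minimal usage sketch, assuming the package's default export is the SDK class and that it takes a client id plus an options object of this shape (the entry point itself is not part of this diff; per the changelog, the flag is intended to be driven by an environment variable):

import SDK from '@rpch/sdk';

// hypothetical wiring; option names come from the diff, constructor shape is assumed
const sdk = new SDK(process.env.CLIENT_ID ?? '', {
  forceZeroHop: false,
  forceManualRelaying: process.env.FORCE_MANUAL_RELAYING === 'true',
});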

@@ -422,3 +433,3 @@ };

this.fetchChainId = async (provider) => {
const res = await ProviderAPI.fetchChainId(provider).catch((err) => log.error('Error fetching chainId for', provider, JSON.stringify(err)));
const res = await ProviderAPI.fetchChainId(provider).catch((err) => log.error('error fetching chainId for %s: %s[%o]', provider, JSON.stringify(err), err));
if (!res) {

@@ -428,3 +439,3 @@ return;

if (JRPC.isError(res)) {
log.info('Unable to resolve chainId for', provider, JSON.stringify(res.error));
log.info('unable to resolve chainId for %s: %s', provider, JSON.stringify(res.error));
return;

@@ -472,3 +483,3 @@ }

if (limit > 0 && segLength > limit) {
log.error('Request exceeds maximum amount of segments[%i] with %i segments', limit, segLength);
log.error('request exceeds maximum amount of segments[%i] with %i segments', limit, segLength);
const maxSize = Segment.MaxSegmentBody * limit;

@@ -475,0 +486,0 @@ return `Request exceeds maximum size of ${maxSize}b`;

@@ -20,2 +20,3 @@ import WebSocket = require('isomorphic-ws');

peerId: string;
peerAddress: string;
multiAddr: string;

@@ -45,2 +46,9 @@ heartbeats: Heartbeats[];

};
export type PartChannel = {
type: 'incoming' | 'outgoing';
id: string;
peerAddress: string;
status: string;
balance: string;
};
export type NodeError = {

@@ -50,3 +58,3 @@ status: string;

};
export type Channels = {
export type AllChannels = {
all: Channel[];

@@ -56,5 +64,11 @@ incoming: [];

};
export type NodeChannels = {
all: [];
incoming: PartChannel[];
outgoing: PartChannel[];
};
export declare function connectWS(conn: ConnInfo): WebSocket;
export declare function sendMessage(conn: ConnInfo & {
hops?: number;
relay?: string;
}, { recipient, tag, message }: {

@@ -75,3 +89,4 @@ recipient: string;

export declare function getPeers(conn: ConnInfo): Promise<Peers | NodeError>;
export declare function getChannels(conn: ConnInfo): Promise<Channels>;
export declare function getAllChannels(conn: ConnInfo): Promise<AllChannels>;
export declare function getNodeChannels(conn: ConnInfo): Promise<NodeChannels>;
export declare function isError(payload: NonNullable<unknown> | NodeError): payload is NodeError;
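For reference, a hedged usage sketch of the renamed and newly added channel helpers declared above. The ConnInfo fields (apiEndpoint, accessToken) are taken from the implementation shown below; the endpoint, token, and import path are placeholders and assumptions, not part of this diff:

import * as NodeAPI from '@rpch/sdk/build/node-api';

const conn = { apiEndpoint: 'http://127.0.0.1:3001', accessToken: '<hoprd-api-token>' };
const all = await NodeAPI.getAllChannels(conn);   // full topology (formerly getChannels)
const own = await NodeAPI.getNodeChannels(conn);  // this node's incoming/outgoing channels
const openOutgoing = own.outgoing.filter((c) => c.status === 'Open');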
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isError = exports.getChannels = exports.getPeers = exports.accountAddresses = exports.deleteMessages = exports.retrieveMessages = exports.version = exports.sendMessage = exports.connectWS = void 0;
exports.isError = exports.getNodeChannels = exports.getAllChannels = exports.getPeers = exports.accountAddresses = exports.deleteMessages = exports.retrieveMessages = exports.version = exports.sendMessage = exports.connectWS = void 0;
const WebSocket = require("isomorphic-ws");

@@ -28,7 +28,19 @@ function connectWS(conn) {

else {
// default to one hop for now
payload.hops = 1;
// default to one hop
if (conn.relay) {
payload.path = [conn.relay];
}
else {
payload.hops = 1;
}
}
const body = JSON.stringify(payload);
return fetch(url, { method: 'POST', headers, body }).then((res) => res.json());
return new Promise((resolve, reject) => {
return fetch(url, { method: 'POST', headers, body }).then((res) => {
if (res.status !== 202) {
return reject(`Unexpected response status code: ${res.status}`);
}
resolve(res.json());
});
});
}
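With the change above, sendMessage routes over an explicit one-element path when conn.relay is set (falling back to hops: 1 otherwise) and now rejects unless hoprd answers 202 Accepted, so callers should handle the rejection. A hedged call sketch; the parameter shape follows the declaration above, while endpoint, token, peer id, and tag values are placeholders:

NodeAPI.sendMessage(
  { apiEndpoint: 'http://127.0.0.1:3001', accessToken: '<hoprd-api-token>', relay: '<relayPeerId>' },
  { recipient: '<exitPeerId>', tag: 4242, message: '<message body>' },
).catch((err) => console.error('sending message failed:', err));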

@@ -100,3 +112,3 @@ exports.sendMessage = sendMessage;

exports.getPeers = getPeers;
function getChannels(conn) {
function getAllChannels(conn) {
const url = new URL('/api/v3/channels', conn.apiEndpoint);

@@ -111,3 +123,13 @@ url.searchParams.set('fullTopology', 'true');

}
exports.getChannels = getChannels;
exports.getAllChannels = getAllChannels;
function getNodeChannels(conn) {
const url = new URL('/api/v3/channels', conn.apiEndpoint);
const headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
'x-auth-token': conn.accessToken,
};
return fetch(url, { headers }).then((res) => res.json());
}
exports.getNodeChannels = getNodeChannels;
function isError(payload) {

@@ -114,0 +136,0 @@ return 'error' in payload;

@@ -10,3 +10,5 @@ import type { EntryNode } from './entry-node';

exitNode: ExitNode;
reqRelayPeerId?: string;
respRelayPeerId?: string;
counterOffset: number;
};

@@ -15,6 +15,8 @@ import * as EntryData from './entry-data';

exitDatas: Map<string, ExitData.ExitData>;
relays: string[];
applicationTag: number;
hops?: number;
messageListener: MessageListener;
fetchInterval?: ReturnType<typeof setInterval>;
fetchTimeout?: ReturnType<typeof setTimeout>;
infoTimeout?: ReturnType<typeof setTimeout>;
fetchMessagesOngoing: boolean;

@@ -34,3 +36,3 @@ log: ReturnType<typeof logger>;

* Run initial discovery steps.
* Ping entry node version.
* Request peers from entry node.
* Request info msg from exit nodes.

@@ -37,0 +39,0 @@ */

@@ -39,2 +39,3 @@ "use strict";

const InfoResponseTimeout = 10e3; // 10s
const RelayNodesCompatVersions = ['2.0.4'];
function create(entryNode, exitNodesIt, applicationTag, messageListener, hops) {

@@ -53,2 +54,3 @@ const entryData = EntryData.create();

exitDatas,
relays: [],
applicationTag,

@@ -63,4 +65,4 @@ messageListener,

function destruct(np) {
clearInterval(np.fetchInterval);
np.fetchInterval = undefined;
clearTimeout(np.fetchTimeout);
np.fetchTimeout = undefined;
}

@@ -75,3 +77,3 @@ exports.destruct = destruct;

if (!data) {
np.log.error('requestStarted', Request.prettyPrint(req), 'cannot track on missing exitId', prettyPrint(np));
np.log.error('started %s on missing exit data', Request.prettyPrint(req));
return;

@@ -81,4 +83,4 @@ }

ExitData.addOngoing(data, req);
if (!np.fetchInterval) {
np.fetchInterval = setInterval(() => fetchMessages(np), MessagesFetchInterval);
if (!np.fetchTimeout) {
np.fetchTimeout = setTimeout(() => fetchMessages(np), MessagesFetchInterval);
}

@@ -90,3 +92,3 @@ }

if (!data) {
np.log.error('requestSucceeded', Request.prettyPrint(req), 'cannot track on missing exitId', prettyPrint(np));
np.log.error('successful %s on missing exit data', Request.prettyPrint(req));
return;

@@ -102,3 +104,3 @@ }

if (!data) {
np.log.error('requestFailed', Request.prettyPrint(req), 'cannot track on missing exitId', prettyPrint(np));
np.log.error('failed %s on missing exit data', Request.prettyPrint(req));
return;

@@ -114,4 +116,4 @@ }

if (np.entryData.requestsOngoing === 0 && np.entryData.infoOngoing === 0) {
clearInterval(np.fetchInterval);
np.fetchInterval = undefined;
clearTimeout(np.fetchTimeout);
np.fetchTimeout = undefined;
}

@@ -133,3 +135,3 @@ }

* Run initial discovery steps.
* Ping entry node version.
* Request peers from entry node.
* Request info msg from exit nodes.

@@ -139,5 +141,18 @@ */

const startPingTime = Date.now();
NodeAPI.version(np.entryNode).then((_) => {
np.entryData.pingDuration = Date.now() - startPingTime;
});
if (np.hops === 0) {
NodeAPI.version(np.entryNode)
.then(() => {
np.entryData.pingDuration = Date.now() - startPingTime;
})
.catch((err) => {
np.log.error('error fetching version: %s[%o]', JSON.stringify(err), err);
});
}
else {
NodeAPI.getPeers(np.entryNode)
.then((r) => incPeers(np, r, startPingTime))
.catch((err) => {
np.log.error('error fetching peers: %s[%o]', JSON.stringify(err), err);
});
}
Array.from(np.exitNodes.values()).map((x, idx) => {

@@ -150,2 +165,7 @@ setTimeout(() => requestInfo(np, x), idx);

const message = `info-${np.entryNode.id}-${np.hops ?? '_'}`;
const exitData = np.exitDatas.get(exitNode.id);
if (!exitData) {
return np.log.error('missing exit data for %s before info req', exitNode.id);
}
exitData.infoLatStarted = Date.now();
NodeAPI.sendMessage({

@@ -160,8 +180,9 @@ ...np.entryNode,

EntryData.addOngoingInfo(np.entryData);
if (!np.fetchInterval) {
np.fetchInterval = setInterval(() => fetchMessages(np), MessagesFetchInterval);
if (!np.fetchTimeout) {
np.fetchTimeout = setTimeout(() => fetchMessages(np), MessagesFetchInterval);
}
// stop checking for info resp at after this
// will still be able to receive info resp if messages went over this route
setTimeout(() => {
np.infoTimeout = setTimeout(() => {
np.log.warn('timeout (%dms) waiting for info response', InfoResponseTimeout);
EntryData.removeOngoingInfo(np.entryData);

@@ -171,3 +192,3 @@ checkStopInterval(np);

if (!exitData) {
return np.log.error('requestInfo ExitData mismatch for %s', exitNode.id);
return np.log.error('missing exit data for %s during info resp timeout', exitNode.id);
}

@@ -189,4 +210,4 @@ exitData.infoFail = true;

const v = d.version;
const ctrOff = d.counterOffset?.toFixed(2) || 0;
const info = d.infoFail ? 'fail' : `${d.infoLatSec}s`;
const ctrOff = d.counterOffset?.toFixed(0) || 0;
const info = d.infoFail ? 'fail' : `${d.infoLatMs?.toFixed(0)}ms`;
const o = d.requestsOngoing.length;

@@ -249,12 +270,18 @@ const tot = d.requestsHistory.length;

.catch((err) => {
np.log.error('Error fetching node messages: %s[%o]', JSON.stringify(err), err);
np.log.error('error fetching node messages: %s[%o]', JSON.stringify(err), err);
np.entryData.fetchMessagesErrors++;
})
.finally(() => {
// if not canceled fetch again
if (np.fetchTimeout) {
np.fetchTimeout = setTimeout(() => fetchMessages(np), MessagesFetchInterval);
}
});
}
function incInfoResps(np, infoResps) {
infoResps.forEach(({ body, receivedAt }) => {
infoResps.forEach(({ body }) => {
const [, payload] = body.split('-');
const resDec = Payload.decodeInfo(payload);
if (Res.isErr(resDec)) {
return np.log.error('Error decoding info payload:', resDec.error);
return np.log.error('error decoding info payload:', resDec.error);
}

@@ -265,15 +292,45 @@ const { peerId, version, counter } = resDec.res;

if (!exitNode) {
return np.log.info('Received untracked info from %s', nodeLog);
return np.log.info('info response for missing exit node %s', nodeLog);
}
const exitData = np.exitDatas.get(peerId);
if (!exitData) {
return np.log.error('ExitData mismatch for %s', nodeLog);
return np.log.error('info response missing exit data %s', nodeLog);
}
np.log.verbose('got exit node info: %s', nodeLog);
exitData.version = version;
exitData.counterOffset = Date.now() - counter;
exitData.infoLatSec = Math.abs(receivedAt - Math.floor(counter / 1000));
exitData.infoLatMs = exitData.infoLatStarted && Date.now() - exitData.infoLatStarted;
exitData.infoFail = false;
EntryData.removeOngoingInfo(np.entryData);
clearTimeout(np.infoTimeout);
np.infoTimeout = undefined;
});
checkStopInterval(np);
}
function incPeers(np, res, startPingTime) {
if (NodeAPI.isError(res)) {
np.log.error('error node internal: %o', res);
return;
}
// available peers
const relays = res.connected
.filter(({ reportedVersion }) => RelayNodesCompatVersions.some((v) => reportedVersion.startsWith(v)))
.map(({ peerId, peerAddress }) => ({ peerId, peerAddress }));
NodeAPI.getNodeChannels(np.entryNode)
.then((ch) => incChannels(np, ch, relays, startPingTime))
.catch((err) => {
np.log.error('error fetching channels: %s[%o]', JSON.stringify(err), err);
});
}
function incChannels(np, channels, relays, startPingTime) {
np.entryData.pingDuration = Date.now() - startPingTime;
// open channels
const openChannelsArr = channels.outgoing
.filter(({ status }) => status === 'Open')
.map(({ peerAddress }) => peerAddress);
const openChannels = new Set(openChannelsArr);
np.relays = relays
.filter(({ peerAddress }) => openChannels.has(peerAddress))
.map(({ peerId }) => peerId);
np.log.info('found %d potential relays', np.relays.length);
}
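Taken together, incPeers and incChannels compute np.relays as the connected peers that both report a compatible hoprd version and have an open outgoing channel from the entry node. A condensed, self-contained sketch of that intersection, with types simplified from the diff:

const RelayNodesCompatVersions = ['2.0.4'];

function eligibleRelays(
  connected: { peerId: string; peerAddress: string; reportedVersion: string }[],
  outgoing: { peerAddress: string; status: string }[],
): string[] {
  // peers running a compatible relay version
  const compat = connected.filter(({ reportedVersion }) =>
    RelayNodesCompatVersions.some((v) => reportedVersion.startsWith(v)));
  // addresses reachable through an open outgoing channel
  const open = new Set(outgoing.filter(({ status }) => status === 'Open').map((c) => c.peerAddress));
  return compat.filter(({ peerAddress }) => open.has(peerAddress)).map(({ peerId }) => peerId);
}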

@@ -31,3 +31,3 @@ "use strict";

const utils_1 = require("./utils");
const ExitNodesCompatVersions = ['0.13'];
const ExitNodesCompatVersions = ['0.13', '0.14'];
/**

@@ -54,5 +54,14 @@ * Try to distribute evenly with best route pairs preferred.

function prettyPrint(sel) {
const eId = (0, utils_1.shortPeerId)(sel.match.entryNode.id);
const xId = (0, utils_1.shortPeerId)(sel.match.exitNode.id);
return `${eId} > ${xId} (via ${sel.via})`;
const { entryNode, exitNode, reqRelayPeerId, respRelayPeerId } = sel.match;
const eId = (0, utils_1.shortPeerId)(entryNode.id);
const xId = (0, utils_1.shortPeerId)(exitNode.id);
const path = [`e${eId}`];
if (reqRelayPeerId) {
path.push(`r${(0, utils_1.shortPeerId)(reqRelayPeerId)}`);
}
path.push(`x${xId}`);
if (respRelayPeerId) {
path.push(`r${(0, utils_1.shortPeerId)(respRelayPeerId)}`);
}
return `${path.join('>')} (via ${sel.via})`;
}

@@ -130,5 +139,5 @@ exports.prettyPrint = prettyPrint;

}
function success({ entryNode, exitNode, counterOffset }, via) {
function success({ entryNode, exitNode, counterOffset, reqRelayPeerId, respRelayPeerId }, via) {
return Res.ok({
match: { entryNode, exitNode, counterOffset },
match: { entryNode, exitNode, counterOffset, reqRelayPeerId, respRelayPeerId },
via,

@@ -138,8 +147,16 @@ });

function createRoutePerfs(nodePairs) {
// TODO better relay selection
return Array.from(nodePairs.values()).reduce((acc, np) => {
const perfs = Array.from(np.exitDatas).map(([xId, xd]) => ({
...ExitData.perf(xd),
entryNode: np.entryNode,
exitNode: np.exitNodes.get(xId),
}));
const perfs = Array.from(np.exitDatas).map(([xId, xd]) => {
const relays = np.relays.filter((rId) => rId !== xId && rId !== np.entryNode.id);
const reqRelayPeerId = (0, utils_1.randomEl)(relays);
const respRelayPeerId = (0, utils_1.randomEl)(relays);
return {
...ExitData.perf(xd),
entryNode: np.entryNode,
exitNode: np.exitNodes.get(xId),
reqRelayPeerId,
respRelayPeerId,
};
});
return acc.concat(perfs);

@@ -189,6 +206,4 @@ }, []);

function bestInfoLatencies(routePerfs) {
// have some leeway here since info lat is in seconds and compared to ms
// treat 1 sec diff as 0 sec diff
const haveLats = routePerfs.filter(({ infoLatSec }) => infoLatSec > 1);
haveLats.sort((l, r) => Math.min(l.infoLatSec, 0) - Math.min(r.infoLatSec, 0));
const haveLats = routePerfs.filter(({ infoLatMs }) => infoLatMs > 0);
haveLats.sort((l, r) => l.infoLatMs - r.infoLatMs);
return haveLats;

@@ -195,0 +210,0 @@ }

@@ -9,2 +9,3 @@ import * as JRPC from './jrpc';

hops?: number;
relayPeerId?: string;
};

@@ -11,0 +12,0 @@ export declare enum RespType {

@@ -16,2 +16,4 @@ import * as compatCrypto from '@rpch/compat-crypto';

hops?: number;
reqRelayPeerId?: string;
respRelayPeerId?: string;
};

@@ -25,3 +27,3 @@ export type UnboxRequest = {

*/
export declare function create({ id, originalId, provider, req, clientId, entryPeerId, exitPeerId, exitPublicKey, counterOffset, headers, hops, }: {
export declare function create({ id, originalId, provider, req, clientId, entryPeerId, exitPeerId, exitPublicKey, counterOffset, headers, hops, reqRelayPeerId, respRelayPeerId, }: {
id: string;

@@ -38,2 +40,4 @@ originalId?: string;

hops?: number;
reqRelayPeerId?: string;
respRelayPeerId?: string;
}): Res.Result<{

@@ -56,2 +60,2 @@ request: Request;

*/
export declare function prettyPrint(req: Request, id?: string): string;
export declare function prettyPrint(req: Request): string;

@@ -36,3 +36,3 @@ "use strict";

*/
function create({ id, originalId, provider, req, clientId, entryPeerId, exitPeerId, exitPublicKey, counterOffset, headers, hops, }) {
function create({ id, originalId, provider, req, clientId, entryPeerId, exitPeerId, exitPublicKey, counterOffset, headers, hops, reqRelayPeerId, respRelayPeerId, }) {
const resEncode = Payload.encodeReq({

@@ -44,2 +44,3 @@ provider,

hops,
relayPeerId: respRelayPeerId,
});

@@ -72,2 +73,4 @@ if (Res.isErr(resEncode)) {

hops,
reqRelayPeerId,
respRelayPeerId,
},

@@ -118,13 +121,20 @@ session: resBox.session,

*/
function prettyPrint(req, id) {
function prettyPrint(req) {
const eId = (0, utils_1.shortPeerId)(req.entryPeerId);
const xId = (0, utils_1.shortPeerId)(req.exitPeerId);
const path = [`e${eId}`];
if (req.reqRelayPeerId) {
path.push(`r${(0, utils_1.shortPeerId)(req.reqRelayPeerId)}`);
}
else if (req.hops !== 0) {
path.push('(r)');
}
path.push(`x${xId}`);
if (req.respRelayPeerId) {
path.push(`r${(0, utils_1.shortPeerId)(req.respRelayPeerId)}`);
}
const id = req.id;
const prov = req.provider;
const attrs = [req.id, `${eId}>${xId}`];
if (id) {
attrs.push(id);
}
attrs.push(prov);
return `request[${attrs.join(',')}]`;
return `request[${id}, ${path.join('>')}, ${prov}]`;
}
exports.prettyPrint = prettyPrint;
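For illustration, the new prettyPrint renders request log lines in the following shape (angle-bracket parts are placeholders for the request id, shortened peer ids, and the provider URL):

// request[<id>, e<entry>>r<reqRelay>>x<exit>>r<respRelay>, <provider>]   -- manual relays set
// request[<id>, e<entry>>(r)>x<exit>, <provider>]                        -- hops !== 0, no manual request relay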

@@ -1,2 +0,2 @@

declare const _default: "1.6.0";
declare const _default: "1.7.1";
export default _default;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = '1.6.0';
exports.default = '1.7.1';
# @rpch/sdk
## 1.7.1
### Patch Changes
- fix exit nodes compats
## 1.7.0
### Minor Changes
- d2b7a70: Correctly determine info ping latency.
Determine potential relays for 1hop messages.
Force manual relaying only via env var.
## 1.6.0

@@ -4,0 +18,0 @@

{
"name": "@rpch/sdk",
"version": "1.6.0",
"version": "1.7.1",
"license": "LGPL-3.0",

@@ -5,0 +5,0 @@ "main": "./build/index.js",
