Comparing version 1.7.1 to 1.9.0
import type { EntryNode } from './entry-node'; | ||
import type { ExitNode } from './exit-node'; | ||
export declare const NoMoreNodes = "no more nodes"; | ||
export declare const Unauthorized = "unauthorized"; | ||
/** | ||
@@ -5,0 +6,0 @@ * This module contains all communication with the discovery platform. |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.fetchQuota = exports.fetchNodes = exports.NoMoreNodes = void 0; | ||
const retry = require("async-retry"); | ||
const utils_1 = require("./utils"); | ||
exports.fetchQuota = exports.fetchNodes = exports.Unauthorized = exports.NoMoreNodes = void 0; | ||
exports.NoMoreNodes = 'no more nodes'; | ||
const DefaultBackoff = { | ||
retries: 5, | ||
factor: 3, | ||
minTimeout: 1e3, | ||
maxTimeout: 60e3, | ||
randomize: true, | ||
}; | ||
const log = (0, utils_1.logger)(['sdk', 'dp-api']); | ||
exports.Unauthorized = 'unauthorized'; | ||
function fetchNodes(ops, amount, since) { | ||
@@ -25,19 +16,12 @@ const url = new URL('/api/v1/nodes/pairings', ops.discoveryPlatformEndpoint); | ||
}; | ||
return retry(async (bail, num) => { | ||
if (num > 1) { | ||
log.verbose('Retrying', url.host.toString(), 'after', num - 1, 'failure(s)'); | ||
} | ||
const res = await fetch(url, { headers }); | ||
if (res.status !== 200) { | ||
log.info('Fetching nodes returned', res.status); | ||
} | ||
return fetch(url, { headers }).then((res) => { | ||
switch (res.status) { | ||
case 204: // none found | ||
return bail(new Error(exports.NoMoreNodes)); | ||
case 400: // validation errors | ||
throw new Error(exports.NoMoreNodes); | ||
case 403: // unauthorized | ||
return bail(new Error((await res.json()))); | ||
throw new Error(exports.Unauthorized); | ||
default: | ||
return res.json(); | ||
} | ||
return res.json(); | ||
}, DefaultBackoff); | ||
}); | ||
} | ||
@@ -44,0 +28,0 @@ exports.fetchNodes = fetchNodes; |
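With async-retry gone, `fetchNodes` now settles once and signals special cases through sentinel error messages: `NoMoreNodes` for 204/400 responses and `Unauthorized` for 403. Below is a minimal TypeScript sketch of a caller branching on those sentinels, mirroring the handling added in nodes-collector.js further down in this diff; the `ops` fields and values are illustrative placeholders, not the full `Ops` type.

```ts
import * as DPapi from './dp-api';

// Placeholder ops - a real caller supplies its own endpoint and credentials.
const ops = {
    discoveryPlatformEndpoint: 'https://discovery.example.org',
    clientId: 'my-client-id',
    forceZeroHop: false,
};

DPapi.fetchNodes(ops, 10, new Date(0))
    .then((nodes) => console.log('fetched %d entry nodes', nodes.entryNodes.length))
    .catch((err: Error) => {
        if (err.message === DPapi.Unauthorized) {
            console.error('client ID rejected by the discovery platform');
        } else if (err.message === DPapi.NoMoreNodes) {
            console.warn('no (new) node pairs available');
        } else {
            console.error('unexpected error fetching nodes:', err);
        }
    });
```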
@@ -17,2 +17,3 @@ import * as PerfData from './perf-data'; | ||
requestsHistory: string[]; | ||
shRelays?: string[]; | ||
requests: Map<string, PerfData.PerfData>; | ||
@@ -19,0 +20,0 @@ infoFail?: boolean; |
@@ -5,2 +5,2 @@ export type ExitNode = { | ||
}; | ||
export declare function prettyPrint(peerId: string, version: string, counter: number): string; | ||
export declare function prettyPrint(peerId: string, version: string, counter: number, relays?: string[]): string; |
@@ -28,6 +28,10 @@ "use strict"; | ||
const utils = __importStar(require("./utils")); | ||
function prettyPrint(peerId, version, counter) { | ||
function prettyPrint(peerId, version, counter, relays) { | ||
const shortPid = utils.shortPeerId(peerId); | ||
return `ExitNode[x${shortPid},v${version},c:${counter}]`; | ||
const attrs = [`x${shortPid}`, `v${version}`, `c:${counter}`]; | ||
if (relays) { | ||
attrs.push(`r:${relays.length}`); | ||
} | ||
return `ExitNode[${attrs.join(',')}]`; | ||
} | ||
exports.prettyPrint = prettyPrint; |
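With the optional `relays` argument the attribute list gains a relay count. A quick usage sketch with a made-up peer id (`shortPeerId`, shown in the utils.js diff below, keeps only the last four characters):

```ts
import { prettyPrint } from './exit-node';

// Made-up values for illustration only.
const line = prettyPrint('16Uiu2HAmMadeUpPeerIdWXYZ', '1.0.0', 7, ['relayA', 'relayB']);
console.log(line); // -> "ExitNode[x.WXYZ,v1.0.0,c:7,r:2]"
```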
@@ -36,2 +36,4 @@ import * as DPapi from './dp-api'; | ||
* @param debugScope - programmatically set debug scope for SDK | ||
* @param debugLevel - only print debug statements that match at least the desired level: verbose < info < warn < error | ||
* @param forceManualRelaying - determine relay nodes for requests/responses and enforce them for one-hop messages; cannot be used with zero hop | ||
*/ | ||
@@ -49,2 +51,3 @@ export type Ops = { | ||
readonly debugScope?: string; | ||
readonly debugLevel?: string; | ||
readonly forceManualRelaying?: boolean; | ||
@@ -51,0 +54,0 @@ }; |
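The two new options surface in the SDK constructor ops. A hedged construction sketch: it assumes the package's default export is the SDK class from index.js and that the constructor takes a client id followed by the Ops object, as the constructor body further down (`this.clientId`, `this.sdkOps(ops)`) suggests.

```ts
import SDK from '@rpch/sdk';

// 'my-client-id' is a placeholder; use a valid Client ID from the discovery platform.
const sdk = new SDK('my-client-id', {
    forceZeroHop: false,
    forceManualRelaying: true, // new: pick and enforce relays for one-hop traffic
    debugLevel: 'warn',        // new: minimum log level (verbose < info < warn < error)
});
```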
@@ -71,2 +71,3 @@ "use strict"; | ||
forceManualRelaying: false, | ||
debugLevel: 'info', | ||
}; | ||
@@ -130,5 +131,3 @@ const log = Utils.logger(['sdk']); | ||
// gather entry - exit node pair | ||
const resNodes = await this.nodesColl | ||
.requestNodePair(reqOps.timeout) | ||
.catch((err) => { | ||
const resNodes = await this.nodesColl.requestNodePair(reqOps.timeout).catch((err) => { | ||
log.error('Error finding node pair', err); | ||
@@ -191,6 +190,6 @@ return reject(`Could not find node pair in ${reqOps.timeout} ms`); | ||
// queue segment sending for all of them | ||
segments.forEach((s) => setTimeout(() => { | ||
segments.forEach((s) => { | ||
this.nodesColl.segmentStarted(request, s); | ||
this.sendSegment(request, s, entryNode, entry); | ||
})); | ||
}); | ||
}); | ||
@@ -281,3 +280,3 @@ }; | ||
// send segments sequentially | ||
segments.forEach((s) => setTimeout(() => this.resendSegment(s, request, entryNode, newCacheEntry))); | ||
segments.forEach((s) => this.resendSegment(s, request, entryNode, newCacheEntry)); | ||
}; | ||
@@ -404,4 +403,9 @@ this.resendSegment = (segment, request, entryNode, cacheEntry) => { | ||
this.sdkOps = (ops) => { | ||
const discoveryPlatformEndpoint = ops.discoveryPlatformEndpoint || defaultOps.discoveryPlatformEndpoint; | ||
const forceZeroHop = ops.forceZeroHop ?? defaultOps.forceZeroHop; | ||
const forceManualRelaying = forceZeroHop | ||
? false | ||
: ops.forceManualRelaying ?? defaultOps.forceManualRelaying; | ||
return { | ||
discoveryPlatformEndpoint: ops.discoveryPlatformEndpoint || defaultOps.discoveryPlatformEndpoint, | ||
discoveryPlatformEndpoint, | ||
timeout: ops.timeout || defaultOps.timeout, | ||
@@ -411,7 +415,9 @@ provider: ops.provider || defaultOps.provider, | ||
mevProtectionProvider: ops.mevProtectionProvider || defaultOps.mevProtectionProvider, | ||
forceZeroHop: ops.forceZeroHop ?? defaultOps.forceZeroHop, | ||
mevKickbackAddress: ops.mevKickbackAddress, | ||
forceZeroHop, | ||
segmentLimit: ops.segmentLimit ?? defaultOps.segmentLimit, | ||
versionListener: ops.versionListener, | ||
debugScope: ops.debugScope, | ||
forceManualRelaying: ops.forceManualRelaying ?? defaultOps.forceManualRelaying, | ||
debugLevel: ops.debugLevel || (process.env.DEBUG ? undefined : defaultOps.debugLevel), | ||
forceManualRelaying, | ||
}; | ||
@@ -460,6 +466,6 @@ }; | ||
this.determineHops = (forceZeroHop) => { | ||
// defaults to multihop (no value) | ||
if (forceZeroHop) { | ||
return 0; | ||
} | ||
return 1; | ||
}; | ||
@@ -511,7 +517,8 @@ this.populateChainIds = (provider) => { | ||
this.ops = this.sdkOps(ops); | ||
this.ops.debugScope && Utils.setDebugScope(this.ops.debugScope); | ||
(this.ops.debugScope || this.ops.debugLevel) && | ||
Utils.setDebugScopeLevel(this.ops.debugScope, this.ops.debugLevel); | ||
this.requestCache = RequestCache.init(); | ||
this.segmentCache = SegmentCache.init(); | ||
this.hops = this.determineHops(!!this.ops.forceZeroHop); | ||
this.nodesColl = new nodes_collector_1.default(this.ops.discoveryPlatformEndpoint, this.clientId, ApplicationTag, this.onMessages, this.onVersions, this.hops); | ||
this.nodesColl = new nodes_collector_1.default(this.ops.discoveryPlatformEndpoint, this.clientId, ApplicationTag, this.onMessages, this.onVersions, this.hops, this.ops.forceManualRelaying); | ||
this.fetchChainId(this.ops.provider); | ||
@@ -518,0 +525,0 @@ log.info('RPCh SDK[v%s] started', version_1.default); |
@@ -74,3 +74,3 @@ import WebSocket = require('isomorphic-ws'); | ||
message: string; | ||
}): Promise<string>; | ||
}): Promise<string | NodeError>; | ||
export declare function version(conn: ConnInfo): Promise<any>; | ||
@@ -77,0 +77,0 @@ export declare function retrieveMessages(conn: ConnInfo, tag: number): Promise<{ |
@@ -37,10 +37,3 @@ "use strict"; | ||
const body = JSON.stringify(payload); | ||
return new Promise((resolve, reject) => { | ||
return fetch(url, { method: 'POST', headers, body }).then((res) => { | ||
if (res.status !== 202) { | ||
return reject(`Unexpected response status code: ${res.status}`); | ||
} | ||
resolve(res.json()); | ||
}); | ||
}); | ||
return fetch(url, { method: 'POST', headers, body }).then((res) => res.json()); | ||
} | ||
@@ -47,0 +40,0 @@ exports.sendMessage = sendMessage; |
@@ -15,2 +15,3 @@ import * as EntryData from './entry-data'; | ||
exitDatas: Map<string, ExitData.ExitData>; | ||
peers: string[]; | ||
relays: string[]; | ||
@@ -21,7 +22,10 @@ applicationTag: number; | ||
fetchTimeout?: ReturnType<typeof setTimeout>; | ||
infoTimeout?: ReturnType<typeof setTimeout>; | ||
infoTimeouts: Map<string, ReturnType<typeof setTimeout>>; | ||
fetchMessagesOngoing: boolean; | ||
log: ReturnType<typeof logger>; | ||
forceManualRelaying: boolean; | ||
}; | ||
export declare function create(entryNode: EntryNode, exitNodesIt: Iterable<ExitNode.ExitNode>, applicationTag: number, messageListener: MessageListener, hops?: number): NodePair; | ||
export declare function create(entryNode: EntryNode, exitNodes: ExitNode.ExitNode[], applicationTag: number, messageListener: MessageListener, hops: number, forceManualRelaying: boolean): NodePair; | ||
export declare function addExitNodes(np: NodePair, exitNodes: ExitNode.ExitNode[]): void; | ||
export declare function removeExitNode(np: NodePair, xId: string): void; | ||
export declare function destruct(np: NodePair): void; | ||
@@ -28,0 +32,0 @@ export declare function id(np: NodePair): string; |
@@ -26,3 +26,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.prettyPrint = exports.discover = exports.segmentFailed = exports.segmentSucceeded = exports.segmentStarted = exports.requestFailed = exports.requestSucceeded = exports.requestStarted = exports.id = exports.destruct = exports.create = void 0; | ||
exports.prettyPrint = exports.discover = exports.segmentFailed = exports.segmentSucceeded = exports.segmentStarted = exports.requestFailed = exports.requestSucceeded = exports.requestStarted = exports.id = exports.destruct = exports.removeExitNode = exports.addExitNodes = exports.create = void 0; | ||
const EntryData = __importStar(require("./entry-data")); | ||
@@ -40,18 +40,18 @@ const ExitData = __importStar(require("./exit-data")); | ||
const InfoResponseTimeout = 10e3; // 10s | ||
const RelayNodesCompatVersions = ['2.0.4']; | ||
function create(entryNode, exitNodesIt, applicationTag, messageListener, hops) { | ||
const RelayNodesCompatVersions = ['2.0.6']; | ||
function create(entryNode, exitNodes, applicationTag, messageListener, hops, forceManualRelaying) { | ||
const entryData = EntryData.create(); | ||
const shortId = (0, utils_1.shortPeerId)(entryNode.id); | ||
const log = (0, utils_1.logger)(['sdk', `nodepair${shortId}(${entryNode.apiEndpoint})`]); | ||
// ensure entry node not included in exits | ||
const exits = Array.from(exitNodesIt).filter((n) => entryNode.id !== n.id); | ||
const exitNodes = new Map(exits.map((n) => [n.id, n])); | ||
const exitDatas = new Map(exits.map((n) => [n.id, ExitData.create()])); | ||
const exitNodesMap = new Map(exitNodes.map((n) => [n.id, n])); | ||
const exitDatasMap = new Map(exitNodes.map((n) => [n.id, ExitData.create()])); | ||
return { | ||
entryNode, | ||
entryData, | ||
exitNodes, | ||
exitDatas, | ||
exitNodes: exitNodesMap, | ||
exitDatas: exitDatasMap, | ||
peers: [], | ||
relays: [], | ||
applicationTag, | ||
infoTimeouts: new Map(), | ||
messageListener, | ||
@@ -61,5 +61,20 @@ fetchMessagesOngoing: false, | ||
hops, | ||
forceManualRelaying, | ||
}; | ||
} | ||
exports.create = create; | ||
function addExitNodes(np, exitNodes) { | ||
exitNodes.forEach((x) => { | ||
if (!np.exitNodes.has(x.id)) { | ||
np.exitNodes.set(x.id, x); | ||
np.exitDatas.set(x.id, ExitData.create()); | ||
} | ||
}); | ||
} | ||
exports.addExitNodes = addExitNodes; | ||
function removeExitNode(np, xId) { | ||
np.exitNodes.delete(xId); | ||
np.exitDatas.delete(xId); | ||
} | ||
exports.removeExitNode = removeExitNode; | ||
function destruct(np) { | ||
@@ -135,6 +150,7 @@ clearTimeout(np.fetchTimeout); | ||
const startPingTime = Date.now(); | ||
if (np.hops === 0) { | ||
if (np.hops === 0 || !np.forceManualRelaying) { | ||
NodeAPI.version(np.entryNode) | ||
.then(() => { | ||
np.entryData.pingDuration = Date.now() - startPingTime; | ||
np.log.verbose('version ping took %dms', np.entryData.pingDuration); | ||
}) | ||
@@ -158,3 +174,3 @@ .catch((err) => { | ||
function requestInfo(np, exitNode) { | ||
const message = `info-${np.entryNode.id}-${np.hops ?? '_'}`; | ||
const message = `info-${np.entryNode.id}-${np.hops ?? '_'}-${np.forceManualRelaying ? 'r' : '_'}`; | ||
const exitData = np.exitDatas.get(exitNode.id); | ||
@@ -179,4 +195,4 @@ if (!exitData) { | ||
// will still be able to receive info resp if messages went over this route | ||
np.infoTimeout = setTimeout(() => { | ||
np.log.warn('timeout (%dms) waiting for info response', InfoResponseTimeout); | ||
const timeout = setTimeout(() => { | ||
np.log.warn('timeout (%dms) waiting for info response from x%s', InfoResponseTimeout, (0, utils_1.shortPeerId)(exitNode.id)); | ||
EntryData.removeOngoingInfo(np.entryData); | ||
@@ -186,6 +202,7 @@ checkStopInterval(np); | ||
if (!exitData) { | ||
return np.log.error('missing exit data for %s during info resp timeout', exitNode.id); | ||
return np.log.error('missing exit data for x%s during info resp timeout', (0, utils_1.shortPeerId)(exitNode.id)); | ||
} | ||
exitData.infoFail = true; | ||
}, InfoResponseTimeout); | ||
np.infoTimeouts.set(exitNode.id, timeout); | ||
} | ||
@@ -275,3 +292,3 @@ function prettyPrint(np) { | ||
infoResps.forEach(({ body }) => { | ||
const [, payload] = body.split('-'); | ||
const payload = body.slice(body.indexOf('-') + 1); | ||
const resDec = Payload.decodeInfo(payload); | ||
@@ -281,4 +298,4 @@ if (Res.isErr(resDec)) { | ||
} | ||
const { peerId, version, counter } = resDec.res; | ||
const nodeLog = ExitNode.prettyPrint(peerId, version, counter); | ||
const { peerId, version, counter, shRelays } = resDec.res; | ||
const nodeLog = ExitNode.prettyPrint(peerId, version, counter, shRelays); | ||
const exitNode = np.exitNodes.get(peerId); | ||
@@ -297,5 +314,7 @@ if (!exitNode) { | ||
exitData.infoFail = false; | ||
exitData.shRelays = shRelays; | ||
EntryData.removeOngoingInfo(np.entryData); | ||
clearTimeout(np.infoTimeout); | ||
np.infoTimeout = undefined; | ||
const t = np.infoTimeouts.get(peerId); | ||
clearTimeout(t); | ||
np.infoTimeouts.delete(peerId); | ||
}); | ||
@@ -310,7 +329,9 @@ checkStopInterval(np); | ||
// available peers | ||
const relays = res.connected | ||
const peers = res.connected | ||
.filter(({ reportedVersion }) => RelayNodesCompatVersions.some((v) => reportedVersion.startsWith(v))) | ||
.map(({ peerId, peerAddress }) => ({ peerId, peerAddress })); | ||
NodeAPI.getNodeChannels(np.entryNode) | ||
.then((ch) => incChannels(np, ch, relays, startPingTime)) | ||
.then((ch) => { | ||
incChannels(np, ch, peers, startPingTime); | ||
}) | ||
.catch((err) => { | ||
@@ -320,4 +341,5 @@ np.log.error('error fetching channels: %s[%o]', JSON.stringify(err), err); | ||
} | ||
function incChannels(np, channels, relays, startPingTime) { | ||
function incChannels(np, channels, peers, startPingTime) { | ||
np.entryData.pingDuration = Date.now() - startPingTime; | ||
np.log.verbose('channel ping took %dms', np.entryData.pingDuration); | ||
// open channels | ||
@@ -328,6 +350,7 @@ const openChannelsArr = channels.outgoing | ||
const openChannels = new Set(openChannelsArr); | ||
np.relays = relays | ||
np.peers = peers.map(({ peerId }) => peerId); | ||
np.relays = peers | ||
.filter(({ peerAddress }) => openChannels.has(peerAddress)) | ||
.map(({ peerId }) => peerId); | ||
np.log.info('found %d potential relays', np.relays.length); | ||
np.log.verbose('found %d potential relays', np.relays.length); | ||
} |
@@ -9,2 +9,3 @@ import * as NodeMatch from './node-match'; | ||
}; | ||
export type NodesSorting = Map<string, Set<string>>; | ||
/** | ||
@@ -14,3 +15,3 @@ * Try to distribute evenly with best route pairs preferred. | ||
*/ | ||
export declare function routePair(nodePairs: Map<string, NodePair.NodePair>): Res.Result<NodeSelection>; | ||
export declare function routePair(nodePairs: Map<string, NodePair.NodePair>, forceManualRelaying: boolean): Res.Result<NodeSelection>; | ||
/** | ||
@@ -21,3 +22,3 @@ * Try to distribute evenly with best route pairs preferred. | ||
*/ | ||
export declare function fallbackRoutePair(nodePairs: Map<string, NodePair.NodePair>, exclude: EntryNode): Res.Result<NodeSelection>; | ||
export declare function fallbackRoutePair(nodePairs: Map<string, NodePair.NodePair>, exclude: EntryNode, forceManualRelaying: boolean): Res.Result<NodeSelection>; | ||
export declare function prettyPrint(sel: NodeSelection): string; |
@@ -31,3 +31,3 @@ "use strict"; | ||
const utils_1 = require("./utils"); | ||
const ExitNodesCompatVersions = ['0.13', '0.14']; | ||
const ExitNodesCompatVersions = ['1.']; | ||
/** | ||
@@ -37,4 +37,4 @@ * Try to distribute evenly with best route pairs preferred. | ||
*/ | ||
function routePair(nodePairs) { | ||
const routePerfs = createRoutePerfs(nodePairs); | ||
function routePair(nodePairs, forceManualRelaying) { | ||
const routePerfs = createRoutePerfs(nodePairs, forceManualRelaying); | ||
return match(nodePairs, routePerfs); | ||
@@ -48,4 +48,4 @@ } | ||
*/ | ||
function fallbackRoutePair(nodePairs, exclude) { | ||
const routePerfs = createRoutePerfs(nodePairs); | ||
function fallbackRoutePair(nodePairs, exclude, forceManualRelaying) { | ||
const routePerfs = createRoutePerfs(nodePairs, forceManualRelaying); | ||
const filtered = routePerfs.filter(({ entryNode }) => entryNode.id !== exclude.id); | ||
@@ -146,9 +146,6 @@ return match(nodePairs, filtered); | ||
} | ||
function createRoutePerfs(nodePairs) { | ||
// TODO better relay selection | ||
function createRoutePerfs(nodePairs, forceManualRelaying) { | ||
return Array.from(nodePairs.values()).reduce((acc, np) => { | ||
const perfs = Array.from(np.exitDatas).map(([xId, xd]) => { | ||
const relays = np.relays.filter((rId) => rId !== xId && rId !== np.entryNode.id); | ||
const reqRelayPeerId = (0, utils_1.randomEl)(relays); | ||
const respRelayPeerId = (0, utils_1.randomEl)(relays); | ||
const [reqRelayPeerId, respRelayPeerId] = determineRelays(np, xId, xd, forceManualRelaying); | ||
return { | ||
@@ -162,22 +159,26 @@ ...ExitData.perf(xd), | ||
}); | ||
if (forceManualRelaying) { | ||
const withRelays = perfs.filter(({ reqRelayPeerId, respRelayPeerId }) => reqRelayPeerId && respRelayPeerId); | ||
return acc.concat(withRelays); | ||
} | ||
return acc.concat(perfs); | ||
}, []); | ||
} | ||
function noInfoFails(routePerfs) { | ||
// boolean sort: false first | ||
routePerfs.sort((l, r) => { | ||
if (l.infoFail === r.infoFail) { | ||
return 0; | ||
} | ||
if (l.infoFail) { | ||
return 1; | ||
} | ||
return -1; | ||
}); | ||
const idx = routePerfs.findIndex(({ infoFail }) => infoFail); | ||
if (idx > 0) { | ||
return routePerfs.slice(0, idx); | ||
function determineRelays(np, xId, xd, forceManualRelaying) { | ||
if (!forceManualRelaying) { | ||
return []; | ||
} | ||
return routePerfs; | ||
if (!xd.shRelays) { | ||
return []; | ||
} | ||
const shRelays = xd.shRelays; | ||
const relays = np.relays.filter((rId) => rId !== xId && rId !== np.entryNode.id); | ||
const reqRelayPeerId = (0, utils_1.randomEl)(relays); | ||
const respRelays = np.peers.filter((pId) => pId !== xId && shRelays.find((shId) => pId.endsWith(shId))); | ||
const respRelayPeerId = (0, utils_1.randomEl)(respRelays); | ||
return [reqRelayPeerId, respRelayPeerId]; | ||
} | ||
function noInfoFails(routePerfs) { | ||
return routePerfs.filter(({ infoFail }) => !infoFail); | ||
} | ||
function versionMatches(routePerfs) { | ||
@@ -184,0 +185,0 @@ return routePerfs.filter(({ version }) => { |
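The response-relay selection in `determineRelays` matches the exit node's reported short relay ids (`shRelays`) against full peer ids by suffix. A small self-contained illustration of that filter with made-up ids:

```ts
// Made-up ids for illustration.
const peers = ['16Uiu2peerAAAA', '16Uiu2peerBBBB', '16Uiu2peerCCCC'];
const exitId = '16Uiu2peerCCCC';
const shRelays = ['BBBB']; // short relay ids reported by the exit node

// Same filter expression as determineRelays above: drop the exit node itself,
// keep peers whose id ends with one of the reported short relay ids.
const respRelays = peers.filter(
    (pId) => pId !== exitId && shRelays.find((shId) => pId.endsWith(shId)),
);
console.log(respRelays); // -> ['16Uiu2peerBBBB']
```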
@@ -14,8 +14,7 @@ import * as DPapi from './dp-api'; | ||
private readonly versionListener; | ||
private readonly hops?; | ||
private readonly hops; | ||
private readonly forceManualRelaying; | ||
private readonly nodePairs; | ||
private lastFetchNodePairs; | ||
private lastMatchedAt; | ||
private ongoingFetchPairs; | ||
constructor(discoveryPlatformEndpoint: string, clientId: string, applicationTag: number, messageListener: MessageListener, versionListener: VersionListener, hops?: number | undefined); | ||
constructor(discoveryPlatformEndpoint: string, clientId: string, applicationTag: number, messageListener: MessageListener, versionListener: VersionListener, hops: number, forceManualRelaying: boolean); | ||
destruct: () => void; | ||
@@ -40,4 +39,8 @@ /** | ||
segmentFailed: (req: Request.Request, seg: Segment.Segment) => void; | ||
private fetchNodePairs; | ||
private fetchRoutes; | ||
private initNodes; | ||
private scheduleFetchRoutes; | ||
private logUnauthorized; | ||
private logNoNodes; | ||
private removeRedundant; | ||
} |
@@ -32,8 +32,8 @@ "use strict"; | ||
const Segment = __importStar(require("./segment")); | ||
const utils_1 = require("./utils"); | ||
const log = (0, utils_1.logger)(['sdk', 'nodes-collector']); | ||
const NodePairFetchTimeout = 10e3; // 10 seconds downtime to avoid repeatedly querying DP | ||
const NodePairAmount = 10; // how many routes do we fetch | ||
const Utils = __importStar(require("./utils")); | ||
const log = Utils.logger(['sdk', 'nodes-collector']); | ||
const RoutesFetchInterval = 1e3 * 60 * 10; // 10 min | ||
const RoutesAmount = 10; // fetch 10 routes | ||
class NodesCollector { | ||
constructor(discoveryPlatformEndpoint, clientId, applicationTag, messageListener, versionListener, hops) { | ||
constructor(discoveryPlatformEndpoint, clientId, applicationTag, messageListener, versionListener, hops, forceManualRelaying) { | ||
this.discoveryPlatformEndpoint = discoveryPlatformEndpoint; | ||
@@ -45,6 +45,5 @@ this.clientId = clientId; | ||
this.hops = hops; | ||
this.forceManualRelaying = forceManualRelaying; | ||
this.nodePairs = new Map(); | ||
this.lastFetchNodePairs = 0; | ||
this.lastMatchedAt = new Date(0); | ||
this.ongoingFetchPairs = false; | ||
this.destruct = () => { | ||
@@ -65,3 +64,3 @@ for (const np of this.nodePairs.values()) { | ||
const elapsed = now - start; | ||
const res = NodeSel.routePair(this.nodePairs); | ||
const res = NodeSel.routePair(this.nodePairs, this.forceManualRelaying); | ||
if (Res.isOk(res)) { | ||
@@ -89,3 +88,3 @@ log.verbose('ready with route pair: %s', NodeSel.prettyPrint(res.res)); | ||
const elapsed = now - start; | ||
const res = NodeSel.routePair(this.nodePairs); | ||
const res = NodeSel.routePair(this.nodePairs, this.forceManualRelaying); | ||
if (Res.isOk(res)) { | ||
@@ -108,3 +107,3 @@ log.verbose('found route pair: %s', NodeSel.prettyPrint(res.res)); | ||
this.fallbackNodePair = (exclude) => { | ||
const res = NodeSel.fallbackRoutePair(this.nodePairs, exclude); | ||
const res = NodeSel.fallbackRoutePair(this.nodePairs, exclude, this.forceManualRelaying); | ||
if (Res.isOk(res)) { | ||
@@ -169,12 +168,3 @@ log.verbose('found fallback route pair: %s', NodeSel.prettyPrint(res.res)); | ||
}; | ||
this.fetchNodePairs = () => { | ||
if (this.ongoingFetchPairs) { | ||
log.verbose('discovering node pairs ongoing'); | ||
return; | ||
} | ||
const diff = Date.now() - this.lastFetchNodePairs; | ||
if (diff < NodePairFetchTimeout) { | ||
log.verbose('discovering node pairs too early - need to wait %dms', NodePairFetchTimeout - diff); | ||
} | ||
this.ongoingFetchPairs = true; | ||
this.fetchRoutes = () => { | ||
DPapi.fetchNodes({ | ||
@@ -184,8 +174,14 @@ discoveryPlatformEndpoint: this.discoveryPlatformEndpoint, | ||
forceZeroHop: this.hops === 0, | ||
}, NodePairAmount, this.lastMatchedAt) | ||
}, RoutesAmount, this.lastMatchedAt) | ||
.then(this.initNodes) | ||
.catch((err) => { | ||
if (err.message === DPapi.NoMoreNodes) { | ||
log.warn('no node pairs available'); | ||
if (err.message === DPapi.Unauthorized) { | ||
this.logUnauthorized(); | ||
} | ||
else if (err.message === DPapi.NoMoreNodes && this.nodePairs.size === 0) { | ||
this.logNoNodes(); | ||
} | ||
else if (err.message === DPapi.NoMoreNodes) { | ||
log.info('no new nodes found'); | ||
} | ||
else { | ||
@@ -196,4 +192,3 @@ log.error('error fetching node pairs: %s[%o]', JSON.stringify(err), err); | ||
.finally(() => { | ||
this.lastFetchNodePairs = Date.now(); | ||
this.ongoingFetchPairs = false; | ||
this.scheduleFetchRoutes(); | ||
}); | ||
@@ -203,17 +198,103 @@ }; | ||
const lookupExitNodes = new Map(nodes.exitNodes.map((x) => [x.id, x])); | ||
nodes.entryNodes | ||
.filter((en) => !this.nodePairs.has(en.id)) | ||
.forEach((en) => { | ||
const exitNodes = en.recommendedExits.map((id) => lookupExitNodes.get(id)); | ||
const np = NodePair.create(en, exitNodes, this.applicationTag, this.messageListener, this.hops); | ||
this.nodePairs.set(NodePair.id(np), np); | ||
nodes.entryNodes.forEach((en) => { | ||
const exitNodes = en.recommendedExits | ||
.map((id) => lookupExitNodes.get(id)) | ||
// ensure entry node not included in exits | ||
.filter((x) => x.id !== en.id); | ||
if (exitNodes.length === 0) { | ||
return; | ||
} | ||
if (this.nodePairs.has(en.id)) { | ||
const np = this.nodePairs.get(en.id); | ||
NodePair.addExitNodes(np, exitNodes); | ||
} | ||
else { | ||
const np = NodePair.create(en, exitNodes, this.applicationTag, this.messageListener, this.hops, this.forceManualRelaying); | ||
this.nodePairs.set(NodePair.id(np), np); | ||
} | ||
}); | ||
// reping all nodes | ||
this.removeRedundant(); | ||
// ping all nodes | ||
this.nodePairs.forEach((np) => NodePair.discover(np)); | ||
log.info('discovered %d node-pairs with %d exits', this.nodePairs.size, lookupExitNodes.size); | ||
this.lastMatchedAt = new Date(nodes.matchedAt); | ||
log.info('discovered %d node-pairs with %d exits, matched at %s', this.nodePairs.size, lookupExitNodes.size, this.lastMatchedAt); | ||
this.versionListener(nodes.versions); | ||
}; | ||
this.fetchNodePairs(); | ||
this.scheduleFetchRoutes = () => { | ||
// schedule next run somewhere between 10min and 12min | ||
const next = RoutesFetchInterval + Math.floor(Math.random() * 2 * 60e3); | ||
const logM = Math.floor(next / 1000 / 60); | ||
const logS = Math.round(next / 1000) - logM * 60; | ||
log.info('scheduling next node pair fetching in %dm%ds', logM, logS); | ||
setTimeout(() => this.fetchRoutes(), next); | ||
}; | ||
this.logUnauthorized = () => { | ||
const errMessage = [ | ||
'***', | ||
'Authentication failed', | ||
'-', | ||
'Client ID is not valid.', | ||
'Visit https://degen.rpch.net to get a valid Client ID!', | ||
'***', | ||
].join(' '); | ||
const errDeco = Array.from({ length: errMessage.length }, () => '*').join(''); | ||
log.error(''); | ||
log.error(`!!! ${errDeco} !!!`); | ||
log.error(`!!! ${errMessage} !!!`); | ||
log.error(`!!! ${errDeco} !!!`); | ||
log.error(''); | ||
this.destruct(); | ||
}; | ||
this.logNoNodes = () => { | ||
const errMessage = [ | ||
'***', | ||
'No node pairs available.', | ||
'Contact support at https://degen.rpch.net to report this problem!', | ||
'***', | ||
].join(' '); | ||
const errDeco = Array.from({ length: errMessage.length }, () => '*').join(''); | ||
log.error(''); | ||
log.error(`!!! ${errDeco} !!!`); | ||
log.error(`!!! ${errMessage} !!!`); | ||
log.error(`!!! ${errDeco} !!!`); | ||
log.error(''); | ||
}; | ||
this.removeRedundant = () => { | ||
const count = Array.from(this.nodePairs).reduce((acc, [_, np]) => np.exitNodes.size + acc, 0); | ||
let toRemove = count - RoutesAmount; | ||
if (toRemove <= 0) { | ||
return; | ||
} | ||
const removablePairs = Array.from(this.nodePairs).reduce((acc, [eId, np]) => { | ||
const removableExitIds = Array.from(np.exitNodes.keys()).filter((xId) => { | ||
const exitData = np.exitDatas.get(xId); | ||
if (!exitData) { | ||
return true; | ||
} | ||
return exitData.requestsOngoing.length === 0; | ||
}); | ||
return acc.concat(removableExitIds.map((xId) => [eId, xId])); | ||
}, []); | ||
log.verbose('removing %d routes: %s', removablePairs.length, removablePairs); | ||
while (removablePairs.length > 0 && toRemove > 0) { | ||
const idx = Utils.randomIdx(removablePairs); | ||
const [eId, xId] = removablePairs[idx]; | ||
const np = this.nodePairs.get(eId); | ||
if (np) { | ||
NodePair.removeExitNode(np, xId); | ||
if (np.exitNodes.size === 0) { | ||
NodePair.destruct(np); | ||
this.nodePairs.delete(eId); | ||
} | ||
toRemove--; | ||
removablePairs.splice(idx, 1); | ||
if (toRemove === 0) { | ||
return; | ||
} | ||
} | ||
} | ||
}; | ||
this.fetchRoutes(); | ||
} | ||
} | ||
exports.default = NodesCollector; |
@@ -38,2 +38,3 @@ import * as JRPC from './jrpc'; | ||
counter: number; | ||
shRelays?: string[]; | ||
}; | ||
@@ -40,0 +41,0 @@ export declare function encodeReq(payload: ReqPayload): Res.Result<string>; |
@@ -86,3 +86,3 @@ "use strict"; | ||
catch (ex) { | ||
return Res.err('Error encoding info payload'); | ||
return Res.err(`Error encoding info payload: ${ex}`); | ||
} | ||
@@ -97,5 +97,5 @@ } | ||
catch (ex) { | ||
return Res.err('Error encoding info payload'); | ||
return Res.err(`Error decoding info payload: ${ex}`); | ||
} | ||
} | ||
exports.decodeInfo = decodeInfo; |
@@ -11,2 +11,3 @@ import debug from 'debug'; | ||
export declare function randomEl<T>(arr: T[]): T; | ||
export declare function randomIdx<T>(arr: T[]): number; | ||
export declare function average(arr: number[]): number; | ||
@@ -22,1 +23,2 @@ export declare function isValidURL(url: string): boolean; | ||
export declare function setDebugScope(scope: string): void; | ||
export declare function setDebugScopeLevel(scope?: string, level?: string): void; |
@@ -29,3 +29,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.setDebugScope = exports.versionCompare = exports.logger = exports.isValidURL = exports.average = exports.randomEl = exports.shortPeerId = exports.VrsnCmp = void 0; | ||
exports.setDebugScopeLevel = exports.setDebugScope = exports.versionCompare = exports.logger = exports.isValidURL = exports.average = exports.randomIdx = exports.randomEl = exports.shortPeerId = exports.VrsnCmp = void 0; | ||
const debug_1 = __importDefault(require("debug")); | ||
@@ -40,2 +40,3 @@ const Res = __importStar(require("./result")); | ||
})(VrsnCmp || (exports.VrsnCmp = VrsnCmp = {})); | ||
const DefaultLogLevel = 'info'; | ||
function shortPeerId(peerId) { | ||
@@ -46,5 +47,9 @@ return `.${peerId.substring(peerId.length - 4)}`; | ||
function randomEl(arr) { | ||
return arr[Math.floor(Math.random() * arr.length)]; | ||
return arr[randomIdx(arr)]; | ||
} | ||
exports.randomEl = randomEl; | ||
function randomIdx(arr) { | ||
return Math.floor(Math.random() * arr.length); | ||
} | ||
exports.randomIdx = randomIdx; | ||
function average(arr) { | ||
@@ -118,1 +123,22 @@ const sum = arr.reduce((acc, l) => acc + l, 0); | ||
exports.setDebugScope = setDebugScope; | ||
function setDebugScopeLevel(scope, level) { | ||
const scp = scope ? scope : '*'; | ||
const lvl = debugLevel(level); | ||
debug_1.default.enable([scp, lvl].join(',')); | ||
} | ||
exports.setDebugScopeLevel = setDebugScopeLevel; | ||
function debugLevel(level) { | ||
const lvl = level ? level : DefaultLogLevel; | ||
switch (lvl.toLowerCase()) { | ||
case 'error': | ||
return '-*:warn,-*:info,-*:verbose'; | ||
case 'warn': | ||
return '-*:info,-*:verbose'; | ||
case 'info': | ||
return '-*:verbose'; | ||
case 'verbose': | ||
return ''; | ||
default: | ||
return debugLevel(DefaultLogLevel); | ||
} | ||
} |
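Given the mapping in `debugLevel`, `setDebugScopeLevel` just joins the scope (or `'*'`) with the level's negation pattern and hands the result to `debug.enable`. For example (the scope string here is hypothetical):

```ts
import { setDebugScopeLevel } from './utils';

// Hypothetical scope 'rpch*': equivalent to debug.enable('rpch*,-*:info,-*:verbose'),
// i.e. only warn and error statements inside that scope are printed.
setDebugScopeLevel('rpch*', 'warn');

// No arguments: falls back to scope '*' and the default 'info' level,
// i.e. debug.enable('*,-*:verbose').
setDebugScopeLevel();
```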
@@ -1,2 +0,2 @@ | ||
declare const _default: "1.7.1"; | ||
declare const _default: "1.9.0"; | ||
export default _default; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.default = '1.7.1'; | ||
exports.default = '1.9.0'; |
# @rpch/sdk | ||
## 1.9.0 | ||
### Minor Changes | ||
- 978f729: Increase performance of sending large requests | ||
Fix impact of relay handling when not using manual relaying | ||
## 1.8.0 | ||
### Minor Changes | ||
- 85d02e3: Better highlight node fetching errors and their mitigations, specifically invalid client IDs and no available nodes. | ||
Allow SDK relay path specification (via `FORCE_MANUAL_RELAYING`) for request and response paths. This means the SDK, with help from the exit node, determines quality peers to use as relays. | ||
Fix an issue where the SDK would falsely report info response timeouts. | ||
Repeatedly ask the Discovery Platform for new nodes and update routes accordingly. | ||
Fix an issue when decoding compressed info responses from the exit node. | ||
### Patch Changes | ||
- 655f519: Expose `DEBUG_LEVEL` in the RPC server and allow a debugLevel ops parameter in the SDK. | ||
This sets a minimum debug level and can be used together with the debug scope to better control logging output. | ||
SDK and RPC server now default to the `info` log level. | ||
The default log level is only applied if `DEBUG` is not set. | ||
## 1.7.1 | ||
@@ -4,0 +28,0 @@ |
{ | ||
"name": "@rpch/sdk", | ||
"version": "1.7.1", | ||
"version": "1.9.0", | ||
"license": "LGPL-3.0", | ||
@@ -34,3 +34,2 @@ "main": "./build/index.js", | ||
"devDependencies": { | ||
"@types/async-retry": "^1.4.5", | ||
"@types/debug": "^4.1.10" | ||
@@ -40,3 +39,2 @@ }, | ||
"@rpch/compat-crypto": "^0.8.0", | ||
"async-retry": "^1.3.3", | ||
"debug": "^4.3.4", | ||
@@ -43,0 +41,0 @@ "ethers": "^5.7.2", |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
- Removed async-retry@^1.3.3
- Removed async-retry@1.3.3 (transitive)
- Removed retry@0.13.1 (transitive)