tardis-machine - npm Package Compare versions

Comparing version 2.0.5 to 2.0.6


dist/http/replay.js

@@ -56,9 +56,3 @@ "use strict";

             // also instead of converting messages to string or parsing them let's manually stich together desired json response using buffers which is faster
-            buffers.push(...[
-                responsePrefixBuffer,
-                messageWithTimestamp.localTimestamp,
-                responseMiddleBuffer,
-                messageWithTimestamp.message,
-                responseSuffixBuffer
-            ]);
+            buffers.push(responsePrefixBuffer, messageWithTimestamp.localTimestamp, responseMiddleBuffer, messageWithTimestamp.message, responseSuffixBuffer);
             if (buffers.length == BATCH_SIZE * 5) {

@@ -65,0 +59,0 @@ const ok = res.write(Buffer.concat(buffers));
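The change above keeps the existing strategy for the HTTP replay endpoint: each response line is stitched together from pre-built buffers and flushed with a single res.write per batch, and only the spread-into-push call is replaced with a plain variadic push, which drops the throwaway array built for every message. A minimal TypeScript sketch of that pattern follows; BATCH_SIZE, the JSON layout, the port and the fakeMessages() source are illustrative assumptions, not tardis-machine's actual values.

import { createServer } from 'http'

// illustrative values; the real buffers and batch size live in tardis-machine
const BATCH_SIZE = 32
const responsePrefixBuffer = Buffer.from('{"localTimestamp":"')
const responseMiddleBuffer = Buffer.from('","message":')
const responseSuffixBuffer = Buffer.from('}\n')

// stand-in for the real replay iterator: yields raw buffers, never strings
async function* fakeMessages() {
  yield {
    localTimestamp: Buffer.from(new Date().toISOString()),
    message: Buffer.from('{"type":"trade"}')
  }
}

createServer(async (_req, res) => {
  const buffers: Buffer[] = []

  for await (const messageWithTimestamp of fakeMessages()) {
    // five buffers per message, hence the BATCH_SIZE * 5 length check
    buffers.push(
      responsePrefixBuffer,
      messageWithTimestamp.localTimestamp,
      responseMiddleBuffer,
      messageWithTimestamp.message,
      responseSuffixBuffer
    )

    if (buffers.length === BATCH_SIZE * 5) {
      // single write per batch; honor backpressure via the 'drain' event
      const ok = res.write(Buffer.concat(buffers))
      buffers.length = 0
      if (!ok) {
        await new Promise((resolve) => res.once('drain', resolve))
      }
    }
  }

  if (buffers.length > 0) {
    res.write(Buffer.concat(buffers))
  }
  res.end()
}).listen(8000)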


dist/ws/replay.js

@@ -78,5 +78,5 @@ "use strict";

         }
-        // map connection to replay messages streams enhanced with addtional ws field so
-        // when we combine streams by localTimestamp we'll know which ws we should send given message via
-        const messagesWithConnections = this._connections.map(async function* (connection) {
+        // fast path for case when there is only single WS connection for given replay session
+        if (this._connections.length === 1) {
+            const connection = this._connections[0];
             const messages = tardis_dev_1.replay({

@@ -87,20 +87,42 @@ ...connection.replayOptions,

             });
-            for await (const { localTimestamp, message } of messages) {
-                yield {
-                    ws: connection.ws,
-                    localTimestamp: new Date(localTimestamp.toString()),
-                    message
-                };
+            for await (const { message } of messages) {
+                // wait until buffer is empty
+                // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
+                // https://nodejs.org/api/net.html#net_socket_buffersize
+                while (connection.ws.bufferedAmount > 0) {
+                    await helpers_1.wait(1);
+                }
+                connection.ws.send(message, {
+                    binary: false
+                });
             }
-        });
-        for await (const { ws, message } of tardis_dev_1.combine(...messagesWithConnections)) {
-            // wait until buffer is empty
-            // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
-            // https://nodejs.org/api/net.html#net_socket_buffersize
-            while (ws.bufferedAmount > 0) {
-                await helpers_1.wait(1);
+        }
+        else {
+            // map connections to replay messages streams enhanced with addtional ws field so
+            // when we combine streams by localTimestamp we'll know which ws we should send given message via
+            const messagesWithConnections = this._connections.map(async function* (connection) {
+                const messages = tardis_dev_1.replay({
+                    ...connection.replayOptions,
+                    skipDecoding: true,
+                    withDisconnects: false
+                });
+                for await (const { localTimestamp, message } of messages) {
+                    yield {
+                        ws: connection.ws,
+                        localTimestamp: new Date(localTimestamp.toString()),
+                        message
+                    };
+                }
+            });
+            for await (const { ws, message } of tardis_dev_1.combine(...messagesWithConnections)) {
+                // wait until buffer is empty
+                // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
+                // https://nodejs.org/api/net.html#net_socket_buffersize
+                while (ws.bufferedAmount > 0) {
+                    await helpers_1.wait(1);
+                }
+                ws.send(message, {
+                    binary: false
+                });
             }
-            ws.send(message, {
-                binary: false
-            });
         }

@@ -107,0 +129,0 @@ await this._closeAllConnections();
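Version 2.0.6 splits the WebSocket replay into two branches: a fast path that sends directly when a replay session has only one connection (no combine() pass and no per-message Date allocation), and the previous path that tags each stream with its ws and merges them by localTimestamp when there are several. Both branches use the same backpressure throttle: before every send, the loop waits while ws.bufferedAmount is non-zero so a slow client cannot grow the socket buffer without bound. A stripped-down sketch of that throttle, assuming the ws package and a hypothetical wait() helper:

import WebSocket from 'ws'

// hypothetical helper, analogous to the wait() used in the diff
const wait = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms))

// send an async stream of messages to one socket, backing off while the
// socket's internal buffer still holds unsent data
async function sendThrottled(ws: WebSocket, messages: AsyncIterable<Buffer>) {
  for await (const message of messages) {
    while (ws.bufferedAmount > 0) {
      await wait(1)
    }
    ws.send(message, { binary: false })
  }
}

With a single connection this loop is all the fast path needs; with several, the per-connection streams are first tagged with their target ws and merged by combine(), as in the else branch above.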

package.json

 {
   "name": "tardis-machine",
-  "version": "2.0.5",
+  "version": "2.0.6",
   "engines": {

@@ -18,3 +18,3 @@ "node": ">=12"

"prepare": "npm run build",
"release": "cross-var \"npm run build && git commit -am $npm_package_version && git tag $npm_package_version && git push && git push --tags && npm publish --access=public\""
"release": "cross-var \"npm run test && npm run build && git commit -am $npm_package_version && git tag $npm_package_version && git push && git push --tags && npm publish --access=public\""
},

@@ -45,27 +45,27 @@ "bin": {

"dependencies": {
"@types/ws": "^6.0.3",
"@types/ws": "^7.2.2",
"debug": "^4.1.1",
"find-my-way": "^2.2.1",
"is-docker": "^2.0.0",
"tardis-dev": "^8.0.3",
"ws": "^7.2.0",
"yargs": "^14.2.2"
"tardis-dev": "^8.0.4",
"ws": "^7.2.1",
"yargs": "^15.1.0"
},
"devDependencies": {
"@types/debug": "^4.1.5",
"@types/jest": "^24.0.23",
"@types/node": "^12.12.12",
"@types/node-fetch": "^2.5.4",
"@types/jest": "^25.1.3",
"@types/node": "^13.7.7",
"@types/node-fetch": "^2.5.5",
"@types/split2": "^2.1.6",
"@types/yargs": "^13.0.3",
"@types/yargs": "^15.0.4",
"bitmex-realtime-api": "^0.4.3",
"cross-var": "^1.1.0",
"husky": "^3.1.0",
"jest": "^24.9.0",
"lint-staged": "^9.4.3",
"husky": "^4.2.3",
"jest": "^25.1.0",
"lint-staged": "^10.0.8",
"node-fetch": "^2.6.0",
"prettier": "^1.19.1",
"split2": "^3.1.1",
"ts-jest": "^24.2.0",
"typescript": "^3.7.2"
"ts-jest": "^25.2.1",
"typescript": "^3.8.3"
},

@@ -72,0 +72,0 @@ "lint-staged": {

src/http/replay.ts

@@ -68,9 +68,7 @@ import { once } from 'events'

       buffers.push(
-        ...[
-          responsePrefixBuffer,
-          messageWithTimestamp.localTimestamp,
-          responseMiddleBuffer,
-          messageWithTimestamp.message,
-          responseSuffixBuffer
-        ]
+        responsePrefixBuffer,
+        messageWithTimestamp.localTimestamp,
+        responseMiddleBuffer,
+        messageWithTimestamp.message,
+        responseSuffixBuffer
       )

@@ -77,0 +75,0 @@

src/ws/replay.ts

@@ -93,5 +93,6 @@ import { IncomingMessage } from 'http'

-    // map connection to replay messages streams enhanced with addtional ws field so
-    // when we combine streams by localTimestamp we'll know which ws we should send given message via
-    const messagesWithConnections = this._connections.map(async function*(connection) {
+    // fast path for case when there is only single WS connection for given replay session
+    if (this._connections.length === 1) {
+      const connection = this._connections[0]
       const messages = replay({

@@ -103,22 +104,45 @@ ...connection.replayOptions,

       })
-      for await (const { localTimestamp, message } of messages) {
-        yield {
-          ws: connection.ws,
-          localTimestamp: new Date(localTimestamp.toString()),
-          message
-        }
+      for await (const { message } of messages) {
+        // wait until buffer is empty
+        // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
+        // https://nodejs.org/api/net.html#net_socket_buffersize
+        while (connection.ws.bufferedAmount > 0) {
+          await wait(1)
+        }
+        connection.ws.send(message, {
+          binary: false
+        })
       }
-    })
-    for await (const { ws, message } of combine(...messagesWithConnections)) {
-      // wait until buffer is empty
-      // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
-      // https://nodejs.org/api/net.html#net_socket_buffersize
-      while (ws.bufferedAmount > 0) {
-        await wait(1)
+    } else {
+      // map connections to replay messages streams enhanced with addtional ws field so
+      // when we combine streams by localTimestamp we'll know which ws we should send given message via
+      const messagesWithConnections = this._connections.map(async function*(connection) {
+        const messages = replay({
+          ...connection.replayOptions,
+          skipDecoding: true,
+          withDisconnects: false
+        })
+        for await (const { localTimestamp, message } of messages) {
+          yield {
+            ws: connection.ws,
+            localTimestamp: new Date(localTimestamp.toString()),
+            message
+          }
+        }
+      })
+      for await (const { ws, message } of combine(...messagesWithConnections)) {
+        // wait until buffer is empty
+        // sometimes slow clients can't keep up with messages arrival rate so we need to throttle
+        // https://nodejs.org/api/net.html#net_socket_buffersize
+        while (ws.bufferedAmount > 0) {
+          await wait(1)
+        }
+        ws.send(message, {
+          binary: false
+        })
       }
-      ws.send(message, {
-        binary: false
-      })
     }

@@ -125,0 +149,0 @@
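In the multi-connection branch, every per-connection replay() stream is wrapped in an async generator that tags each item with its target ws, and combine() merges those generators into one stream ordered by localTimestamp so each message can be routed to the right socket. A small self-contained sketch of that tagging-and-merging idea; fakeStream() is a stand-in for the tagged replay() streams, and it assumes combine() accepts any async iterables whose items carry a localTimestamp, which is how it is used in the diff above.

import { combine } from 'tardis-dev'

// stand-in for a per-connection stream already tagged with its target ws
async function* fakeStream(ws: { id: number }, startMs: number) {
  for (let i = 0; i < 3; i++) {
    yield {
      ws,
      localTimestamp: new Date(startMs + i * 100),
      message: `msg-${ws.id}-${i}`
    }
  }
}

async function run() {
  const streams = [fakeStream({ id: 1 }, 0), fakeStream({ id: 2 }, 50)]

  // messages from both streams arrive interleaved in localTimestamp order,
  // each still carrying the ws it should be sent on
  for await (const { ws, localTimestamp, message } of combine(...streams)) {
    console.log(ws.id, localTimestamp.toISOString(), message)
  }
}

run()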

