kafka-node
Advanced tools
Comparing version 4.0.4 to 4.1.0
# kafka-node CHANGELOG | ||
## 2019-04-07, Version 4.1.0 | ||
* Fixed wrong offset being assigned to compressed messages [#1226](https://github.com/SOHU-Co/kafka-node/pull/1226) | ||
* Update producer and consumer types as EventEmitters [#1223](https://github.com/SOHU-Co/kafka-node/pull/1223) | ||
* Add setting to control auto-reconnect when closed due to being idle [#1218](https://github.com/SOHU-Co/kafka-node/pull/1218) | ||
## 2019-03-28, Version 4.0.4 | ||
@@ -4,0 +9,0 @@ * Fixed unnecessary metadata refresh when socket is closed from being idle [#1216](https://github.com/SOHU-Co/kafka-node/pull/1216) |
@@ -32,2 +32,3 @@ 'use strict'; | ||
idleConnection: 5 * 60 * 1000, | ||
reconnectOnIdle: true, | ||
autoConnect: true, | ||
@@ -813,3 +814,3 @@ versions: { | ||
if (s.closing) return; | ||
if (brokerWrapper.isIdle()) { | ||
if (!self.options.reconnectOnIdle && brokerWrapper.isIdle()) { | ||
logger.debug(`${self.clientId} to ${socket.addr} is idle not reconnecting`); | ||
@@ -816,0 +817,0 @@ s.closing = true; |
@@ -314,4 +314,3 @@ 'use strict'; | ||
highWaterOffset, | ||
topics, | ||
0 // base offset | ||
topics | ||
); | ||
@@ -322,3 +321,3 @@ }); | ||
function decodeMessageSet (topic, partition, messageSet, enqueue, emit, highWaterOffset, topics, base) { | ||
function decodeMessageSet (topic, partition, messageSet, enqueue, emit, highWaterOffset, topics) { | ||
const messageSetSize = messageSet.length; | ||
@@ -368,9 +367,11 @@ // TODO: this is broken logic. It overwrites previous partitions HWO. | ||
if (vars.attributes === 0 && vars.messageSize > messageSetSize) { | ||
const offset = vars.offset + (base || 0); | ||
return enqueue((next) => { | ||
emit(new MessageSizeTooLarge({ | ||
topic: topic, | ||
offset: offset, | ||
partition: partition | ||
})); | ||
const offset = vars.offset; | ||
return enqueue(next => { | ||
emit( | ||
new MessageSizeTooLarge({ | ||
topic: topic, | ||
offset: offset, | ||
partition: partition | ||
}) | ||
); | ||
next(null); | ||
@@ -381,3 +382,3 @@ }); | ||
if (!partial && vars.offset !== null) { | ||
const offset = vars.offset + (base || 0); | ||
const offset = vars.offset; | ||
const value = vars.value; | ||
@@ -384,0 +385,0 @@ const key = vars.key; |
@@ -17,3 +17,3 @@ { | ||
"bugs": "https://github.com/SOHU-co/kafka-node/issues", | ||
"version": "4.0.4", | ||
"version": "4.1.0", | ||
"main": "kafka.js", | ||
@@ -20,0 +20,0 @@ "types": "types/index.d.ts", |
@@ -76,3 +76,4 @@ Kafka-node | ||
* `connectRetryOptions` : object hash that applies to the initial connection. see [retry](https://www.npmjs.com/package/retry) module for these options. | ||
* `idleConnection` : allows the broker to disconnect an idle connection from a client (otherwise the clients continues to reconnect after being disconnected). The value is elapsed time in ms without any data written to the TCP socket. default: 5 minutes | ||
* `idleConnection` : allows the broker to disconnect an idle connection from a client (otherwise the clients continues to reconnect after being disconnected). The value is elapsed time in ms without any data written to the TCP socket. default: 5 minutes | ||
* `reconnectOnIdle` : when the connection is closed due to client idling, client will attempt to auto-reconnect. default: true | ||
* `maxAsyncRequests` : maximum async operations at a time toward the kafka cluster. default: 10 | ||
@@ -79,0 +80,0 @@ * `sslOptions`: **Object**, options to be passed to the tls broker sockets, ex. `{ rejectUnauthorized: false }` (Kafka 0.9+) |
@@ -26,7 +26,7 @@ import { Readable, Writable } from 'stream'; | ||
export class Producer { | ||
export class Producer extends EventEmitter { | ||
constructor (client: KafkaClient, options?: ProducerOptions, customPartitioner?: CustomPartitioner); | ||
on (eventName: 'ready', cb: () => any): void; | ||
on (eventName: 'error', cb: (error: any) => any): void; | ||
on (eventName: 'ready', cb: () => any): this; | ||
on (eventName: 'error', cb: (error: any) => any): this; | ||
@@ -44,3 +44,3 @@ send (payloads: ProduceRequest[], cb: (error: any, data: any) => any): void; | ||
export class Consumer { | ||
export class Consumer extends EventEmitter { | ||
client: KafkaClient; | ||
@@ -50,4 +50,4 @@ | ||
on (eventName: 'message', cb: (message: Message) => any): void; | ||
on (eventName: 'error' | 'offsetOutOfRange', cb: (error: any) => any): void; | ||
on (eventName: 'message', cb: (message: Message) => any): this; | ||
on (eventName: 'error' | 'offsetOutOfRange', cb: (error: any) => any): this; | ||
@@ -182,2 +182,3 @@ addTopics<T extends string[] | Topic[]> (topics: T, cb: (error: any, added: T) => any, fromOffset?: boolean): void; | ||
idleConnection?: number; | ||
reconnectOnIdle?: boolean; | ||
maxAsyncRequests?: number; | ||
@@ -184,0 +185,0 @@ sasl?: any; |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
308699
6915
1272