@ovotech/avro-stream
Advanced tools
Comparing version 1.0.0 to 1.0.3
@@ -19,3 +19,4 @@ "use strict"; | ||
const schema = await this.resolver.fromId(schemaId); | ||
callback(undefined, avsc_1.Type.forSchema(schema).fromBuffer(buffer)); | ||
const transformedMessage = Object.assign({}, message, { schema, value: avsc_1.Type.forSchema(schema).fromBuffer(buffer) }); | ||
callback(undefined, transformedMessage); | ||
} | ||
@@ -28,1 +29,2 @@ catch (error) { | ||
exports.AvroDeserializer = AvroDeserializer; | ||
//# sourceMappingURL=AvroDeserializer.js.map |
/// <reference types="node" /> | ||
import { Schema } from 'avsc'; | ||
import { ProduceRequest } from 'kafka-node'; | ||
import { Transform, TransformCallback } from 'stream'; | ||
import { SchemaResolver } from './types'; | ||
export interface AvroProduceRequest extends ProduceRequest { | ||
schema: Schema; | ||
messages: any[]; | ||
} | ||
import { AvroProduceRequest, SchemaResolver } from './types'; | ||
export declare class AvroSerializer extends Transform { | ||
@@ -11,0 +5,0 @@ private resolver; |
@@ -24,1 +24,2 @@ "use strict"; | ||
exports.AvroSerializer = AvroSerializer; | ||
//# sourceMappingURL=AvroSerializer.js.map |
export { AvroDeserializer } from './AvroDeserializer'; | ||
export { AvroSerializer } from './AvroSerializer'; | ||
export { SchemaRegistryResolver } from './SchemaRegistryResolver'; | ||
export { SchemaResolver } from './types'; | ||
export { SchemaResolver, AvroMessage, AvroProduceRequest } from './types'; | ||
export { constructMessage, deconstructMessage } from './message'; |
@@ -12,1 +12,2 @@ "use strict"; | ||
exports.deconstructMessage = message_1.deconstructMessage; | ||
//# sourceMappingURL=index.js.map |
/// <reference types="node" /> | ||
export interface AvroMessage { | ||
export interface AvroBuffer { | ||
schemaId: number; | ||
buffer: Buffer; | ||
} | ||
export declare const deconstructMessage: (buffer: Buffer) => AvroMessage; | ||
export declare const constructMessage: ({ schemaId, buffer }: AvroMessage) => Buffer; | ||
export declare const deconstructMessage: (buffer: Buffer) => AvroBuffer; | ||
export declare const constructMessage: ({ schemaId, buffer }: AvroBuffer) => Buffer; |
@@ -12,1 +12,2 @@ "use strict"; | ||
}; | ||
//# sourceMappingURL=message.js.map |
@@ -20,1 +20,2 @@ "use strict"; | ||
exports.SchemaRegistryResolver = SchemaRegistryResolver; | ||
//# sourceMappingURL=SchemaRegistryResolver.js.map |
import { Schema } from 'avsc'; | ||
import { Message, ProduceRequest } from 'kafka-node'; | ||
export interface SchemaResolver { | ||
@@ -6,1 +7,9 @@ toId(topic: string, schema: Schema): Promise<number>; | ||
} | ||
export interface AvroProduceRequest extends ProduceRequest { | ||
schema: Schema; | ||
messages: any[]; | ||
} | ||
export interface AvroMessage extends Message { | ||
schema: Schema; | ||
value: any; | ||
} |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
//# sourceMappingURL=types.js.map |
{ | ||
"name": "@ovotech/avro-stream", | ||
"version": "1.0.0", | ||
"description": "Serialize/deserialize kafka-node streams with avro data, using confluent schema-registry to hold the schemas", | ||
"version": "1.0.3", | ||
"main": "dist/index.js", | ||
@@ -10,3 +11,3 @@ "source": "src/index.ts", | ||
"dependencies": { | ||
"@ovotech/schema-registry-api": "^1.0.1", | ||
"@ovotech/schema-registry-api": "^1.0.3", | ||
"avsc": "^5.4.7" | ||
@@ -21,2 +22,5 @@ }, | ||
}, | ||
"peerDependencies": { | ||
"kafka-node": "*" | ||
}, | ||
"devDependencies": { | ||
@@ -40,3 +44,3 @@ "@types/jest": "^23.3.10", | ||
}, | ||
"gitHead": "d6589b417ab91967602f916d00597043802d715d" | ||
"gitHead": "54ecb85f75519249bcfbbb6bb4cd12ad5ac9939b" | ||
} |
@@ -6,3 +6,3 @@ import { Type } from 'avsc'; | ||
import { SchemaRegistryResolver } from './SchemaRegistryResolver'; | ||
import { SchemaResolver } from './types'; | ||
import { AvroMessage, SchemaResolver } from './types'; | ||
@@ -25,3 +25,4 @@ export class AvroDeserializer extends Transform { | ||
const schema = await this.resolver.fromId(schemaId); | ||
callback(undefined, Type.forSchema(schema).fromBuffer(buffer)); | ||
const transformedMessage: AvroMessage = { ...message, schema, value: Type.forSchema(schema).fromBuffer(buffer) }; | ||
callback(undefined, transformedMessage); | ||
} catch (error) { | ||
@@ -28,0 +29,0 @@ callback(error); |
@@ -1,13 +0,7 @@ | ||
import { Schema, Type } from 'avsc'; | ||
import { ProduceRequest } from 'kafka-node'; | ||
import { Type } from 'avsc'; | ||
import { Transform, TransformCallback } from 'stream'; | ||
import { constructMessage } from './message'; | ||
import { SchemaRegistryResolver } from './SchemaRegistryResolver'; | ||
import { SchemaResolver } from './types'; | ||
import { AvroProduceRequest, SchemaResolver } from './types'; | ||
export interface AvroProduceRequest extends ProduceRequest { | ||
schema: Schema; | ||
messages: any[]; | ||
} | ||
export class AvroSerializer extends Transform { | ||
@@ -14,0 +8,0 @@ private resolver: SchemaResolver; |
export { AvroDeserializer } from './AvroDeserializer'; | ||
export { AvroSerializer } from './AvroSerializer'; | ||
export { SchemaRegistryResolver } from './SchemaRegistryResolver'; | ||
export { SchemaResolver } from './types'; | ||
export { SchemaResolver, AvroMessage, AvroProduceRequest } from './types'; | ||
export { constructMessage, deconstructMessage } from './message'; |
@@ -1,2 +0,2 @@ | ||
export interface AvroMessage { | ||
export interface AvroBuffer { | ||
schemaId: number; | ||
@@ -6,7 +6,7 @@ buffer: Buffer; | ||
export const deconstructMessage = (buffer: Buffer): AvroMessage => { | ||
export const deconstructMessage = (buffer: Buffer): AvroBuffer => { | ||
return { schemaId: buffer.readInt32BE(1), buffer: buffer.slice(5) }; | ||
}; | ||
export const constructMessage = ({ schemaId, buffer }: AvroMessage): Buffer => { | ||
export const constructMessage = ({ schemaId, buffer }: AvroBuffer): Buffer => { | ||
const prefix = Buffer.alloc(5); | ||
@@ -13,0 +13,0 @@ prefix.writeUInt8(0, 0); |
import { Schema } from 'avsc'; | ||
import { Message, ProduceRequest } from 'kafka-node'; | ||
@@ -7,1 +8,11 @@ export interface SchemaResolver { | ||
} | ||
export interface AvroProduceRequest extends ProduceRequest { | ||
schema: Schema; | ||
messages: any[]; | ||
} | ||
export interface AvroMessage extends Message { | ||
schema: Schema; | ||
value: any; | ||
} |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
No README
Quality: Package does not have a README. This may indicate a failed publish or a low quality package.
Found 1 instance in 1 package
87954
42
694
1
131
3
1
+ Addedansi-regex@2.1.1(transitive)
+ Addedaproba@1.2.0(transitive)
+ Addedare-we-there-yet@1.1.7(transitive)
+ Addedasync@2.6.4(transitive)
+ Addedbalanced-match@1.0.2(transitive)
+ Addedbinary@0.3.0(transitive)
+ Addedbindings@1.5.0(transitive)
+ Addedbl@1.2.3, 2.2.1(transitive)
+ Addedbrace-expansion@1.1.11(transitive)
+ Addedbuffer-alloc@1.2.0(transitive)
+ Addedbuffer-alloc-unsafe@1.1.0(transitive)
+ Addedbuffer-crc32@0.2.13(transitive)
+ Addedbuffer-fill@1.0.0(transitive)
+ Addedbuffermaker@1.2.1(transitive)
+ Addedbuffers@0.1.1(transitive)
+ Addedchainsaw@0.1.0(transitive)
+ Addedchownr@1.1.4(transitive)
+ Addedcode-point-at@1.1.0(transitive)
+ Addedconcat-map@0.0.1(transitive)
+ Addedconsole-control-strings@1.1.0(transitive)
+ Addedcore-util-is@1.0.3(transitive)
+ Addeddebug@2.6.9(transitive)
+ Addeddecompress-response@3.3.0(transitive)
+ Addeddeep-extend@0.6.0(transitive)
+ Addeddelegates@1.0.0(transitive)
+ Addeddenque@1.5.1(transitive)
+ Addeddetect-libc@1.0.3(transitive)
+ Addedend-of-stream@1.4.4(transitive)
+ Addedexpand-template@2.0.3(transitive)
+ Addedfile-uri-to-path@1.0.0(transitive)
+ Addedfs-constants@1.0.0(transitive)
+ Addedgauge@2.7.4(transitive)
+ Addedgithub-from-package@0.0.0(transitive)
+ Addedhas-unicode@2.0.1(transitive)
+ Addedinherits@2.0.4(transitive)
+ Addedini@1.3.8(transitive)
+ Addedis-fullwidth-code-point@1.0.0(transitive)
+ Addedisarray@1.0.0(transitive)
+ Addedkafka-node@5.0.0(transitive)
+ Addedlodash@4.17.21(transitive)
+ Addedlong@1.1.2(transitive)
+ Addedmimic-response@1.0.1(transitive)
+ Addedminimatch@3.1.2(transitive)
+ Addedminimist@1.2.8(transitive)
+ Addedmkdirp@0.5.6(transitive)
+ Addedms@2.0.0(transitive)
+ Addednan@2.22.0(transitive)
+ Addednapi-build-utils@1.0.2(transitive)
+ Addednested-error-stacks@2.1.1(transitive)
+ Addednode-abi@2.30.1(transitive)
+ Addednoop-logger@0.1.1(transitive)
+ Addednpmlog@4.1.2(transitive)
+ Addednumber-is-nan@1.0.1(transitive)
+ Addedobject-assign@4.1.1(transitive)
+ Addedonce@1.4.0(transitive)
+ Addedoptional@0.1.4(transitive)
+ Addedos-homedir@1.0.2(transitive)
+ Addedprebuild-install@5.3.0(transitive)
+ Addedprocess-nextick-args@2.0.1(transitive)
+ Addedpump@1.0.3, 2.0.1(transitive)
+ Addedrc@1.2.8(transitive)
+ Addedreadable-stream@2.3.8(transitive)
+ Addedretry@0.10.1(transitive)
+ Addedsafe-buffer@5.1.2, 5.2.1(transitive)
+ Addedsemver@5.7.2(transitive)
+ Addedset-blocking@2.0.0(transitive)
+ Addedsignal-exit@3.0.7(transitive)
+ Addedsimple-concat@1.0.1(transitive)
+ Addedsimple-get@2.8.2(transitive)
+ Addedsnappy@6.3.5(transitive)
+ Addedstring-width@1.0.2(transitive)
+ Addedstring_decoder@1.1.1(transitive)
+ Addedstrip-ansi@3.0.1(transitive)
+ Addedstrip-json-comments@2.0.1(transitive)
+ Addedtar-fs@1.16.4(transitive)
+ Addedtar-stream@1.6.2(transitive)
+ Addedto-buffer@1.1.1(transitive)
+ Addedtraverse@0.3.9(transitive)
+ Addedtunnel-agent@0.6.0(transitive)
+ Addedutil-deprecate@1.0.2(transitive)
+ Addeduuid@3.4.0(transitive)
+ Addedwhich-pm-runs@1.1.0(transitive)
+ Addedwide-align@1.1.5(transitive)
+ Addedwrappy@1.0.2(transitive)
+ Addedxtend@4.0.2(transitive)