json-text-sequence
Comparing version 0.1.1 to 0.2.0
@@ -1,12 +1,14 @@
-var fs = require('fs');
-var generator = require('..').generator;
+'use strict'
-var g = new generator();
-g.pipe(fs.createWriteStream('example.log'));
+const fs = require('fs')
+const generator = require('..').generator
-for (var i=0; i<10; i++) {
+const g = new generator()
+g.pipe(fs.createWriteStream('example.log'))
+for (let i=0; i<10; i++) {
   g.write({
     d: new Date(),
     count: i
-  });
+  })
 }
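Note (not part of the diff): in both versions, each g.write(obj) call lands in example.log as one RFC 7464 record, an RS byte (0x1E), the JSON text, and a trailing newline. A minimal sketch for inspecting that framing, assuming the example above has already finished writing example.log:

// Hedged sketch, not from the package: check the raw framing in example.log.
const fs = require('fs')
const raw = fs.readFileSync('example.log')
// Each record starts with RS (0x1e) and ends with a newline (0x0a).
console.log(raw[0] === 0x1e, raw[raw.length - 1] === 0x0a)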
@@ -1,18 +1,20 @@
-var parser = require('..').parser;
-var fs = require('fs');
+'use strict'
-var p = new parser()
-  .on('json', function(obj) {
-    console.log('JSON:', obj);
+const parser = require('..').parser
+const fs = require('fs')
+const p = new parser()
+  .on('json', (obj) => {
+    console.log('JSON:', obj)
   })
-  .on('truncated', function(buf) {
-    console.log('Truncated:', buf);
+  .on('truncated', (buf) => {
+    console.log('Truncated:', buf)
   })
-  .on('invalid', function(buf) {
-    console.log('Invalid:', buf);
+  .on('invalid', (buf) => {
+    console.log('Invalid:', buf)
   })
-  .on('finish', function() {
-    console.log('DONE');
-  });
+  .on('finish', () => {
+    console.log('DONE')
+  })
-fs.createReadStream('example.log').pipe(p);
+fs.createReadStream('example.log').pipe(p)
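Note (not part of the diff): the parser source below push()es each valid item, so valid records surface as 'data' events; the 0.2.0 README listens on 'data' rather than 'json'. A hedged sketch of driving the parser with an in-memory buffer instead of a file, using the published package name:

// Hedged sketch, not from the package: feed the parser a raw RS-delimited buffer.
const parser = require('json-text-sequence').parser
const p = new parser()
  .on('data', obj => console.log('item:', obj))
  .on('truncated', buf => console.warn('truncated:', buf))
  .on('invalid', buf => console.warn('invalid:', buf))
// Two well-formed records: RS + JSON text + newline.
p.end(Buffer.from('\x1e{"count":0}\n\x1e{"count":1}\n'))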
lib/index.js
@@ -1,93 +1,114 @@
-// Generated by CoffeeScript 1.10.0
-(function() {
-  var DelimitStream, JSONSequenceGenerator, JSONSequenceParser, RS, assert, error1, stream,
-    extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
-    hasProp = {}.hasOwnProperty;
+'use strict'
-  try {
-    require('source-map-support').install();
-  } catch (error1) {
+const assert = require('assert')
+const stream = require('stream')
+const DelimitStream = require('delimit-stream')
-  }
+const RS = (this.RS = '\x1e')
-  assert = require('assert');
+// Parse a JSON text sequence stream as defined in
+// {http://tools.ietf.org/html/draft-ietf-json-text-sequence
+// draft-ietf-json-text-sequence}.
+// If you read() from this stream, each read() will return a single valid object
+// from the stream. However, streaming mode is much more likely to be what you
+// want:
+//
+// Generates the following events in addition to those emitted by a normal
+// Transform stream:
+//
+// @event data(object) found a valid JSON item in the stream
+//   @param object [any] the value
+// @event truncated(Buffer) a JSON-text got truncated. The truncated Buffer
+//   is included in case you can do something with it. This is a recoverable
+//   error.
+// @event invalid(Buffer) an un-truncated, but otherwise invalid JSON-text was
+//   found in the stream. This is likely a programming error on the sending
+//   side, or some sort of horrible chocolate-in-peanutbutter interleaved I/O
+//   issue. This is still a recoverable error, but you might want to warn
+//   more loudly about these than the truncated ones.
+//
+// @example Parse stdin
+//   var parser = require('json-text-sequence').parser;
+//   var p = new parser()
+//     .on('json', function(obj) {
+//       console.log('Valid', obj);
+//     })
+//     .on('truncated', function(buf) {
+//       console.warn('Truncated', buf);
+//     })
+//     .on('invalid', function(buf) {
+//       console.warn('Invalid', buf);
+//     });
+//   process.stdin.pipe(p);
+class JSONSequenceParser extends stream.Transform {
+  // @nodoc
+  constructor() {
+    super()
+    this._readableState.objectMode = true
+    this._stream = new DelimitStream(RS)
+      // I can't figure out how to make 'error' happen. Maybe it can't?
+      .on('error', /* istanbul ignore next */ e => this.emit('error', e))
+      .on('data', d => {
+        // NOTE: delimit-stream will deal with repeated delimiters.
+        // d.length will always be > 0
+        assert.ok(d.length > 0)
-  stream = require('stream');
-  DelimitStream = require('delimit-stream');
-  RS = this.RS = '\x1e';
-  JSONSequenceParser = (function(superClass) {
-    extend(JSONSequenceParser, superClass);
-    function JSONSequenceParser() {
-      var that;
-      JSONSequenceParser.__super__.constructor.call(this);
-      this._readableState.objectMode = true;
-      that = this;
-      this._stream = new DelimitStream(RS).on('error', function(e) {
-        // istanbul ignore next
-        return that.emit('error', e);
-      }).on('data', function(d) {
-        var error, error2, j;
-        assert.ok(d.length > 0);
+        // if the entry doesn't end with \n, it got truncated
         if (d[d.length - 1] !== 0x0a) {
-          return that.emit('truncated', d);
+          return this.emit('truncated', d)
         } else {
           try {
-            j = JSON.parse(d);
-            return that.push(j);
-          } catch (error2) {
-            error = error2;
-            return that.emit('invalid', d);
+            const j = JSON.parse(d)
+            return this.push(j)
+          } catch (ignored) {
+            this.emit('invalid', d)
           }
         }
-      });
-    }
+      })
+  }
-    JSONSequenceParser.prototype._transform = function(chunk, encoding, cb) {
-      return this._stream._transform(chunk, encoding, cb);
-    };
+  // @nodoc
+  _transform(chunk, encoding, cb) {
+    return this._stream._transform(chunk, encoding, cb)
+  }
-    JSONSequenceParser.prototype._flush = function(cb) {
-      return this._stream._flush(cb);
-    };
+  // @nodoc
+  _flush(cb) {
+    return this._stream._flush(cb)
+  }
+}
+exports.parser = JSONSequenceParser
-    return JSONSequenceParser;
+// Generate a JSON text sequence stream as defined in
+// {http://tools.ietf.org/html/draft-ietf-json-text-sequence
+// draft-ietf-json-text-sequence}. Write objects to the stream, and pipe
+// the output to wherever it may make sense, such as a file.
+//
+// @example write to stdout
+//   var generator = require('json-text-sequence').generator;
+//   var g = new generator()
+//   g.pipe(process.stdout);
+//   g.write({foo: true, bar: 1})
+class JSONSequenceGenerator extends stream.Transform {
+  // @nodoc
+  constructor() {
+    super()
+    this._writableState.objectMode = true
+    this._readableState.objectMode = false
+  }
-  })(stream.Transform);
-  exports.parser = JSONSequenceParser;
-  JSONSequenceGenerator = (function(superClass) {
-    extend(JSONSequenceGenerator, superClass);
-    function JSONSequenceGenerator() {
-      JSONSequenceGenerator.__super__.constructor.call(this);
-      this._writableState.objectMode = true;
-      this._readableState.objectMode = false;
+  // @nodoc
+  _transform(chunk, encoding, cb) {
+    let s = null
+    try {
+      // this can fail on circular objects, for example
+      s = JSON.stringify(chunk, 'utf8')
+    } catch (error) {
+      return cb(error)
+    }
     }
-    JSONSequenceGenerator.prototype._transform = function(chunk, encoding, cb) {
-      var error, error2, s;
-      s = null;
-      try {
-        s = JSON.stringify(chunk, 'utf8');
-      } catch (error2) {
-        error = error2;
-        return cb(error);
-      }
-      this.push("" + RS + s + "\n", 'utf8');
-      return cb();
-    };
-    return JSONSequenceGenerator;
-  })(stream.Transform);
-  exports.generator = JSONSequenceGenerator;
-}).call(this);
-//# sourceMappingURL=index.js.map
+    this.push(`${RS}${s}\n`, 'utf8')
+    return cb()
+  }
+}
+exports.generator = JSONSequenceGenerator
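Note (not part of the diff): despite the CoffeeScript-to-ES6 rewrite, both versions frame records the same way, RS (0x1E) + JSON.stringify(obj) + '\n'. A hedged sketch that round-trips an object through the generator and parser in memory, assuming the published package name:

// Hedged sketch, not from the package: generator output piped straight into the parser.
const { generator, parser } = require('json-text-sequence')
const g = new generator()
const p = new parser()
// pipe() returns the destination, so the 'data' handler is attached to the parser.
g.pipe(p).on('data', obj => console.log('round-tripped:', obj))
// end(obj) writes one object-mode chunk, then ends both streams via the pipe.
g.end({ hello: 'world' })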
 {
   "name": "json-text-sequence",
-  "version": "0.1.1",
+  "version": "0.2.0",
   "description": "Parse and generate RS-delimited JSON sequences according to draft-ietf-json-text-sequence",
@@ -10,15 +10,6 @@ "main": "./lib/index.js",
   "scripts": {
-    "clean": "rm -rf coverage lib doc man",
-    "lint": "coffeelint src test",
-    "coffee": "coffee -cm -o lib/ src",
-    "coverage": "istanbul cover nodeunit test/*.coffee",
-    "precoverage": "npm run coffee -s",
-    "test": "nodeunit test/*.coffee",
-    "pretest": "npm run coffee -s",
-    "prepublish": "npm run coffee -s",
-    "watch": "watch 'npm run coverage' src/",
-    "release": "npm version patch && git push --follow-tags && npm publish",
-    "ci": "npm run coverage -s && cat ./coverage/lcov.info | coveralls",
-    "live": "live-server --port=4001 --ignorePattern='(js|css|png)$' coverage/lcov-report",
-    "dev": "npm-run-all -p --silent watch live"
+    "clean": "rm -rf coverage doc man",
+    "coverage": "nyc -r lcov npm test",
+    "test": "ava test/*.js",
+    "release": "npm version patch && git push --follow-tags && npm publish"
   },
@@ -41,16 +32,12 @@ "repository": {
   "homepage": "https://github.com/hildjj/json-text-sequence",
-  "dependencies": {
-    "delimit-stream": "0.1.0"
-  },
   "devDependencies": {
-    "coffee-script": "latest",
-    "coffeelint": "latest",
-    "coveralls": "latest",
-    "istanbul": "latest",
-    "live-server": "latest",
-    "nodeunit": "latest",
-    "npm-run-all": "latest",
-    "source-map-support": "latest",
-    "watch": "latest"
+    "ava": "^3.15.0",
+    "nyc": "^15.1.0"
+  },
+  "dependencies": {
+    "delimit-stream": "0.1.0"
+  },
+  "engines": {
+    "node": ">=10.18.0"
   }
 }
@@ -19,16 +19,16 @@ Parse and generate JSON text sequences as defined in [RFC 7464](https://tools.ietf.org/html/rfc7464).
-var parser = require('json-text-sequence').parser;
-var fs = require('fs');
+const parser = require('json-text-sequence').parser;
+const fs = require('fs');
-var p = new parser()
-  .on('json', function(obj) {
+const p = new parser()
+  .on('data', obj => {
     console.log('JSON:', obj);
   })
-  .on('truncated', function(buf) {
+  .on('truncated', buf => {
     console.log('Truncated:', buf);
   })
-  .on('invalid', function(buf) {
+  .on('invalid', buf => {
     console.log('Invalid:', buf);
   })
-  .on('finish', function() {
+  .on('finish', () => {
     console.log('DONE');
@@ -42,6 +42,6 @@ });
-var generator = require('json-text-sequence').generator;
-var fs = require('fs');
+const generator = require('json-text-sequence').generator;
+const fs = require('fs');
-var g = new generator();
+const g = new generator();
 g.pipe(fs.createWriteStream('example.log'));
@@ -54,3 +54,3 @@
-[![Build Status](https://travis-ci.org/hildjj/json-text-sequence.svg?branch=master)](https://travis-ci.org/hildjj/json-text-sequence)
+[![Tests](https://github.com/hildjj/json-text-sequence/workflows/Tests/badge.svg)](https://github.com/hildjj/json-text-sequence/actions?query=workflow%3ATests)
 [![Coverage Status](https://coveralls.io/repos/hildjj/json-text-sequence/badge.png?branch=master)](https://coveralls.io/r/hildjj/json-text-sequence?branch=master)
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package