Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@discoveryjs/json-ext

Package Overview
Dependencies
Maintainers
3
Versions
19
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@discoveryjs/json-ext - npm Package Compare versions

Comparing version 0.3.2 to 0.4.0

src/parse-chunked.js

5

CHANGELOG.md

@@ -0,1 +1,6 @@

## 0.4.0 (2020-12-04)
- Added `parseChunked()` method
- Fixed `stringifyInfo()` so it does not throw when it meets an unknown value type
## 0.3.2 (2020-10-26)

@@ -2,0 +7,0 @@

345

dist/json-ext.js

@@ -8,3 +8,3 @@ (function (global, factory) {

var name = "@discoveryjs/json-ext";
var version = "0.3.2";
var version = "0.4.0";
var description = "A set of utilities that extend the use of JSON";

@@ -273,3 +273,3 @@ var keywords = [

len = prevLeadingSurrogate
? len - 2 // surrogate pair (4 bytes), since we calulate prev leading surrogate as 6 bytes, substruct 2 bytes
? len - 2 // surrogate pair (4 bytes), since we calculate prev leading surrogate as 6 bytes, subtract 2 bytes
: len + 6; // \uXXXX

@@ -302,3 +302,3 @@ } else {

default:
throw new TypeError(`Do not know how to serialize a ${typeof value}`);
return 0;
}

@@ -313,3 +313,3 @@ }

var stringifyInfo = function jsonStringifyInfo(value, replacer, space, options) {
function walk(key, value) {
function walk(holder, key, value) {
if (stop) {

@@ -319,3 +319,3 @@ return;

value = replaceValue$1(this, key, value, replacer);
value = replaceValue$1(holder, key, value, replacer);

@@ -338,5 +338,5 @@ let type = getType(value);

case PRIMITIVE:
if (value !== undefined || Array.isArray(this)) {
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (this === root) {
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?

@@ -363,3 +363,3 @@ }

const prevLength = length;
walk.call(value, property, value[property]);
walk(value, property, value[property]);

@@ -404,3 +404,3 @@ if (prevLength !== length) {

for (let i = 0; i < value.length; i++) {
walk.call(value, String(i), value[i]);
walk(value, i, value[i]);
}

@@ -450,3 +450,3 @@

walk.call(root, '', value);
walk(root, '', value);

@@ -465,6 +465,329 @@ return {

// Stream-detection helper re-exported from the shared utils module.
const { isReadableStream: isReadableStream$1 } = utils;
// Markers stored in ChunkParser#stack, one per open container depth.
const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
// Shared UTF-8 decoder for Buffer/Uint8Array chunks (one decode per chunk).
const decoder = new TextDecoder();
// True for any non-null value whose typeof is "object" (arrays included).
// Note: typeof null === 'object', hence the explicit null guard.
function isObject(value) {
    if (value === null) {
        return false;
    }

    return typeof value === 'object';
}
// Rewrite the "at position N" offset inside a JSON.parse() SyntaxError
// message so it points into the whole input rather than into the small
// fragment that was actually handed to JSON.parse(). Mutates and returns
// the same error object.
function adjustPosition(error, parser) {
    const fromJsonParse = error.name === 'SyntaxError';

    if (fromJsonParse && parser.jsonParseOffset) {
        const shift = (_, pos) => 'at position ' + (Number(pos) + parser.jsonParseOffset);

        error.message = error.message.replace(/at position (\d+)/, shift);
    }

    return error;
}
// Parse JSON that arrives in chunks (string, Buffer or Uint8Array each).
// `chunkEmitter` is either a Node.js readable stream, or a function that
// returns an iterable / async iterable of chunks (e.g. a generator).
// Returns a Promise resolving to the parsed value. SyntaxError positions
// are adjusted via adjustPosition() to refer to the whole input.
var parseChunked = function(chunkEmitter) {
    let parser = new ChunkParser();

    if (isObject(chunkEmitter) && isReadableStream$1(chunkEmitter)) {
        return new Promise((resolve, reject) => {
            chunkEmitter
                .on('data', chunk => {
                    try {
                        parser.push(chunk);
                    } catch (e) {
                        // Reject with an input-relative position, then drop
                        // the parser so its buffers can be collected
                        reject(adjustPosition(e, parser));
                        parser = null;
                    }
                })
                .on('error', (e) => {
                    parser = null;
                    reject(e);
                })
                .on('end', () => {
                    // NOTE(review): if the 'data' handler already rejected,
                    // parser is null here and parser.finish() throws a
                    // TypeError; adjustPosition(e, null) in the catch would
                    // then throw as well. Confirm the stream cannot emit
                    // 'end' after a mid-stream parse failure.
                    try {
                        resolve(parser.finish());
                    } catch (e) {
                        reject(adjustPosition(e, parser));
                    } finally {
                        parser = null;
                    }
                });
        });
    }

    if (typeof chunkEmitter === 'function') {
        const iterator = chunkEmitter();

        // Accept generators, async generators and any function returning
        // an object with a (a)sync iteration protocol
        if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
            return new Promise(async (resolve, reject) => {
                try {
                    // for-await handles both sync and async iterables
                    for await (const chunk of iterator) {
                        parser.push(chunk);
                    }

                    resolve(parser.finish());
                } catch (e) {
                    reject(adjustPosition(e, parser));
                } finally {
                    parser = null;
                }
            });
        }
    }

    throw new Error(
        'Chunk emitter should be readable stream, generator, ' +
        'async generator or function returning an iterable object'
    );
};
// Incremental JSON parser. It scans incoming text for structural
// characters only (strings are skipped via a small state machine) and
// buffers everything else. Whenever a bracket/brace closes below the
// previously flushed depth — or on the final chunk — the buffered
// fragment is completed with synthetic opening/closing brackets, handed
// to JSON.parse(), and the result is grafted onto the value built so far.
// `stack` records per open depth whether it is an object or an array;
// `valueStack` is a linked list of the containers currently being filled.
class ChunkParser {
    constructor() {
        this.value = undefined;          // root parsed value (result of finish())
        this.valueStack = null;          // linked list { value, prev } of open containers
        this.stack = new Array(100);     // STACK_OBJECT / STACK_ARRAY per open depth
        this.lastFlushDepth = 0;         // depth at the end of the previous flush()
        this.flushDepth = 0;             // current bracket depth while scanning
        this.stateString = false;        // scanner is inside a string literal
        this.stateStringEscape = false;  // previous string char was a backslash
        this.pendingByteSeq = null;      // trailing bytes of a split UTF-8 sequence
        this.pendingChunk = null;        // scanned but not-yet-flushed text
        this.pos = 0;                    // absolute offset of flushed input (error reporting)
        this.jsonParseOffset = 0;        // maps JSON.parse() positions back to the input
    }
    // Parse chunk[start..end) (prefixed with any pending text) and merge the
    // result into the value built so far. The fragment is wrapped in {...}
    // or [...] as needed so JSON.parse() accepts a partial entry/element set.
    flush(chunk, start, end) {
        let fragment = chunk.slice(start, end);
        this.jsonParseOffset = this.pos; // using for position correction in JSON.parse() error if any
        // Prepend pending chunk if any
        if (this.pendingChunk !== null) {
            fragment = this.pendingChunk + fragment;
            this.pendingChunk = null;
        }
        // Skip a comma at the beginning if any
        if (fragment[0] === ',') {
            fragment = fragment.slice(1);
            this.jsonParseOffset++;
        }
        if (this.flushDepth === this.lastFlushDepth) {
            // Depth didn't changed, so it's a root value or entry/element set
            if (this.flushDepth > 0) {
                this.jsonParseOffset--;
                // Append new entries or elements
                if (this.stack[this.flushDepth - 1] === STACK_OBJECT) {
                    Object.assign(this.valueStack.value, JSON.parse('{' + fragment + '}'));
                } else {
                    this.valueStack.value.push(...JSON.parse('[' + fragment + ']'));
                }
            } else {
                // That's an entire value on a top level
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            }
        } else if (this.flushDepth > this.lastFlushDepth) {
            // Depth increased: containers opened but not yet closed.
            // Add missed closing brackets/parentheses
            for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
                fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
            }
            if (this.lastFlushDepth === 0) {
                // That's a root value
                this.value = JSON.parse(fragment);
                this.valueStack = {
                    value: this.value,
                    prev: null
                };
            } else {
                this.jsonParseOffset--;
                // Parse fragment and append to current value
                if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
                    Object.assign(this.valueStack.value, JSON.parse('{' + fragment + '}'));
                } else {
                    this.valueStack.value.push(...JSON.parse('[' + fragment + ']'));
                }
            }
            // Move down to the depths to the last object/array, which is current now
            for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
                let value = this.valueStack.value;
                if (this.stack[i - 1] === STACK_OBJECT) {
                    // find last entry
                    let key;
                    // eslint-disable-next-line curly
                    for (key in value);
                    value = value[key];
                } else {
                    // last element
                    value = value[value.length - 1];
                }
                this.valueStack = {
                    value,
                    prev: this.valueStack
                };
            }
        } else { // this.flushDepth < this.lastFlushDepth
            // Depth decreased: containers closed in this fragment.
            // Add missed opening brackets/parentheses
            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.jsonParseOffset--;
                fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
            }
            if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
                Object.assign(this.valueStack.value, JSON.parse(fragment));
            } else {
                this.valueStack.value.push(...JSON.parse(fragment));
            }
            // Pop one valueStack frame per container that just closed
            for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
                this.valueStack = this.valueStack.prev;
            }
        }
        this.pos += end - start;
        this.lastFlushDepth = this.flushDepth;
    }
    // Feed the next chunk. `last` marks the final chunk and forces a flush
    // of everything still buffered.
    push(chunk, last = false) {
        if (typeof chunk !== 'string') {
            // Suppose chunk is Buffer or Uint8Array
            // Prepend uncompleted byte sequence if any
            if (this.pendingByteSeq !== null) {
                const origRawChunk = chunk;
                chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
                chunk.set(this.pendingByteSeq);
                chunk.set(origRawChunk, this.pendingByteSeq.length);
                this.pendingByteSeq = null;
            }
            // In case Buffer/Uint8Array, an input is encoded in UTF8
            // Seek for parts of uncompleted UTF8 symbol on the ending
            // This makes sense only if we expect more chunks and last char is not multi-bytes
            if (!last && chunk[chunk.length - 1] > 127) {
                for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
                    const byte = chunk[chunk.length - 1 - seqLength];
                    // 10xxxxxx - 2nd, 3rd or 4th byte
                    // 110xxxxx – first byte of 2-byte sequence
                    // 1110xxxx - first byte of 3-byte sequence
                    // 11110xxx - first byte of 4-byte sequence
                    if (byte >> 6 === 3) {
                        seqLength++;
                        // If the sequence is really incomplete, then preserve it
                        // for the future chunk and cut off it from the current chunk
                        if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
                            (seqLength !== 3 && byte >> 4 === 0b1110) ||
                            (seqLength !== 2 && byte >> 5 === 0b110)) {
                            this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
                            chunk = chunk.slice(0, -seqLength);
                        }
                        break;
                    }
                }
            }
            // Convert chunk to a string, since single decode per chunk
            // is much effective than decode multiple small substrings
            chunk = decoder.decode(chunk);
        }
        const chunkLength = chunk.length;
        let lastFlushPoint = 0;
        let flushPoint = 0;
        // Main scan loop
        scan: for (let i = 0; i < chunkLength; i++) {
            if (this.stateString) {
                // Fast-forward to the end of the current string literal,
                // honouring backslash escapes; string state persists
                // across chunk boundaries
                for (; i < chunkLength; i++) {
                    if (this.stateStringEscape) {
                        this.stateStringEscape = false;
                    } else {
                        switch (chunk.charCodeAt(i)) {
                            case 0x22: /* " */
                                this.stateString = false;
                                continue scan;
                            case 0x5C: /* \ */
                                this.stateStringEscape = true;
                        }
                    }
                }
                break;
            }
            switch (chunk.charCodeAt(i)) {
                case 0x22: /* " */
                    this.stateString = true;
                    this.stateStringEscape = false;
                    break;
                case 0x2C: /* , */
                    flushPoint = i;
                    break;
                case 0x7B: /* { */
                    // begin object
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_OBJECT;
                    break;
                case 0x5B: /* [ */
                    // begin array
                    flushPoint = i + 1;
                    this.stack[this.flushDepth++] = STACK_ARRAY;
                    break;
                case 0x5D: /* ] */
                case 0x7D: /* } */
                    // end object or array
                    flushPoint = i + 1;
                    this.flushDepth--;
                    // Flush eagerly once we drop below the last flushed depth
                    if (this.flushDepth < this.lastFlushDepth) {
                        this.flush(chunk, lastFlushPoint, flushPoint);
                        lastFlushPoint = flushPoint;
                    }
                    break;
            }
        }
        // Flush the remainder: either new complete entries/elements were
        // seen, or this is the last chunk and something is still buffered
        if (flushPoint > lastFlushPoint || (last && (chunkLength > 0 || this.pendingChunk !== null))) {
            this.flush(chunk, lastFlushPoint, last ? chunkLength : flushPoint);
        }
        // Produce pendingChunk if any
        if (!last && flushPoint < chunkLength) {
            const newPending = chunk.slice(flushPoint, chunkLength);
            this.pendingChunk = this.pendingChunk !== null
                ? this.pendingChunk + newPending
                : newPending;
        }
    }
    // Flush whatever is still buffered and return the parsed root value.
    finish() {
        this.push('', true);
        return this.value;
    }
}
var src = {
version: require$$0.version,
stringifyInfo: stringifyInfo,
stringifyStream: browserMethodIsNotSupported
stringifyStream: browserMethodIsNotSupported,
parseChunked: parseChunked
};

@@ -471,0 +794,0 @@

2

dist/json-ext.min.js

@@ -1,1 +0,1 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).jsonExt=t()}(this,(function(){"use strict";function e(e){return"function"==typeof e.pipe&&"function"==typeof e._read&&"object"==typeof e._readableState&&null!==e._readableState}var t={escapableCharCodeSubstitution:{8:"\\b",9:"\\t",10:"\\n",12:"\\f",13:"\\r",34:'\\"',92:"\\\\"},isLeadingSurrogate:function(e){return e>=55296&&e<=56319},isTrailingSurrogate:function(e){return e>=56320&&e<=57343},type:{PRIMITIVE:1,PROMISE:4,ARRAY:3,OBJECT:2,STRING_STREAM:5,OBJECT_STREAM:6},isReadableStream:e,replaceValue:function(e,t,n,r){switch(n&&"function"==typeof n.toJSON&&(n=n.toJSON()),null!==r&&(n=r.call(e,String(t),n)),typeof n){case"function":case"symbol":n=void 0;break;case"object":if(null!==n){const e=n.constructor;e!==String&&e!==Number&&e!==Boolean||(n=n.valueOf())}}return n},getTypeNative:function(e){return null===e||"object"!=typeof e?1:Array.isArray(e)?3:2},getTypeAsync:function(t){return null===t||"object"!=typeof t?1:"function"==typeof t.then?4:e(t)?t._readableState.objectMode?6:5:Array.isArray(t)?3:2},normalizeReplacer:function(e){if("function"==typeof e)return e;if(Array.isArray(e)){const t=new Set(e.map((e=>"string"==typeof e||"number"==typeof e?String(e):null)).filter((e=>"string"==typeof e)));return t.add(""),(e,n)=>t.has(e)?n:void 0}return null},normalizeSpace:function(e){return"number"==typeof e?!(!Number.isFinite(e)||e<1)&&" ".repeat(Math.min(e,10)):"string"==typeof e&&e.slice(0,10)||!1}};const{normalizeReplacer:n,normalizeSpace:r,replaceValue:i,getTypeNative:o,getTypeAsync:a,isLeadingSurrogate:s,isTrailingSurrogate:c,escapableCharCodeSubstitution:u,type:{PRIMITIVE:l,OBJECT:f,ARRAY:d,PROMISE:p,STRING_STREAM:y,OBJECT_STREAM:g}}=t,h=Array.from({length:2048}).map(((e,t)=>u.hasOwnProperty(t)?2:t<32?6:t<128?1:2));function b(e){let t=0,n=!1;for(let r=0;r<e.length;r++){const 
i=e.charCodeAt(r);if(i<2048)t+=h[i];else{if(s(i)){t+=6,n=!0;continue}c(i)?t=n?t-2:t+6:t+=3}n=!1}return t+2}return{version:"0.3.2",stringifyInfo:function(e,t,s,c){t=n(t),s=function(e){return"string"==typeof(e=r(e))?e.length:0}(s),c=c||{};const u=new Map,h=new Set,S=new Set,m=new Set,T=new Set,A=c.async?a:o,w={"":e};let R=!1,E=0;return function e(n,r){if(R)return;r=i(this,n,r,t);let o=A(r);if(o!==l&&h.has(r))return m.add(r),E+=4,void(c.continueOnCircular||(R=!0));switch(o){case l:void 0!==r||Array.isArray(this)?E+=function(e){switch(typeof e){case"string":return b(e);case"number":return Number.isFinite(e)?String(e).length:4;case"boolean":return e?4:5;case"undefined":case"object":return 4;default:throw new TypeError("Do not know how to serialize a "+typeof e)}}(r):this===w&&(E+=9);break;case f:{if(u.has(r)){S.add(r),E+=u.get(r);break}const t=E;let n=0;E+=2,h.add(r);for(const t in r)if(hasOwnProperty.call(r,t)){const i=E;e.call(r,t,r[t]),i!==E&&(E+=b(t)+1,n++)}n>1&&(E+=n-1),h.delete(r),s>0&&n>0&&(E+=(1+(h.size+1)*s+1)*n,E+=1+h.size*s),u.set(r,E-t);break}case d:{if(u.has(r)){S.add(r),E+=u.get(r);break}const t=E;E+=2,h.add(r);for(let t=0;t<r.length;t++)e.call(r,String(t),r[t]);r.length>1&&(E+=r.length-1),h.delete(r),s>0&&r.length>0&&(E+=(1+(h.size+1)*s)*r.length,E+=1+h.size*s),u.set(r,E-t);break}case p:case y:T.add(r);break;case g:E+=2,T.add(r)}}.call(w,"",e),{minLength:isNaN(E)?1/0:E,circular:[...m],duplicate:[...S],async:[...T]}},stringifyStream:()=>{throw new Error("Method is not supported")}}}));
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).jsonExt=e()}(this,(function(){"use strict";function t(t){return"function"==typeof t.pipe&&"function"==typeof t._read&&"object"==typeof t._readableState&&null!==t._readableState}var e={escapableCharCodeSubstitution:{8:"\\b",9:"\\t",10:"\\n",12:"\\f",13:"\\r",34:'\\"',92:"\\\\"},isLeadingSurrogate:function(t){return t>=55296&&t<=56319},isTrailingSurrogate:function(t){return t>=56320&&t<=57343},type:{PRIMITIVE:1,PROMISE:4,ARRAY:3,OBJECT:2,STRING_STREAM:5,OBJECT_STREAM:6},isReadableStream:t,replaceValue:function(t,e,s,n){switch(s&&"function"==typeof s.toJSON&&(s=s.toJSON()),null!==n&&(s=n.call(t,String(e),s)),typeof s){case"function":case"symbol":s=void 0;break;case"object":if(null!==s){const t=s.constructor;t!==String&&t!==Number&&t!==Boolean||(s=s.valueOf())}}return s},getTypeNative:function(t){return null===t||"object"!=typeof t?1:Array.isArray(t)?3:2},getTypeAsync:function(e){return null===e||"object"!=typeof e?1:"function"==typeof e.then?4:t(e)?e._readableState.objectMode?6:5:Array.isArray(e)?3:2},normalizeReplacer:function(t){if("function"==typeof t)return t;if(Array.isArray(t)){const e=new Set(t.map((t=>"string"==typeof t||"number"==typeof t?String(t):null)).filter((t=>"string"==typeof t)));return e.add(""),(t,s)=>e.has(t)?s:void 0}return null},normalizeSpace:function(t){return"number"==typeof t?!(!Number.isFinite(t)||t<1)&&" ".repeat(Math.min(t,10)):"string"==typeof t&&t.slice(0,10)||!1}};const{normalizeReplacer:s,normalizeSpace:n,replaceValue:i,getTypeNative:a,getTypeAsync:r,isLeadingSurrogate:l,isTrailingSurrogate:h,escapableCharCodeSubstitution:u,type:{PRIMITIVE:o,OBJECT:c,ARRAY:f,PROMISE:p,STRING_STREAM:g,OBJECT_STREAM:d}}=e,S=Array.from({length:2048}).map(((t,e)=>u.hasOwnProperty(e)?2:e<32?6:e<128?1:2));function y(t){let e=0,s=!1;for(let n=0;n<t.length;n++){const 
i=t.charCodeAt(n);if(i<2048)e+=S[i];else{if(l(i)){e+=6,s=!0;continue}h(i)?e=s?e-2:e+6:e+=3}s=!1}return e+2}const{isReadableStream:b}=e,k=new TextDecoder;function v(t){return null!==t&&"object"==typeof t}function m(t,e){return"SyntaxError"===t.name&&e.jsonParseOffset&&(t.message=t.message.replace(/at position (\d+)/,((t,s)=>"at position "+(Number(s)+e.jsonParseOffset)))),t}class O{constructor(){this.value=void 0,this.valueStack=null,this.stack=new Array(100),this.lastFlushDepth=0,this.flushDepth=0,this.stateString=!1,this.stateStringEscape=!1,this.pendingByteSeq=null,this.pendingChunk=null,this.pos=0,this.jsonParseOffset=0}flush(t,e,s){let n=t.slice(e,s);if(this.jsonParseOffset=this.pos,null!==this.pendingChunk&&(n=this.pendingChunk+n,this.pendingChunk=null),","===n[0]&&(n=n.slice(1),this.jsonParseOffset++),this.flushDepth===this.lastFlushDepth)this.flushDepth>0?(this.jsonParseOffset--,1===this.stack[this.flushDepth-1]?Object.assign(this.valueStack.value,JSON.parse("{"+n+"}")):this.valueStack.value.push(...JSON.parse("["+n+"]"))):(this.value=JSON.parse(n),this.valueStack={value:this.value,prev:null});else if(this.flushDepth>this.lastFlushDepth){for(let t=this.flushDepth-1;t>=this.lastFlushDepth;t--)n+=1===this.stack[t]?"}":"]";0===this.lastFlushDepth?(this.value=JSON.parse(n),this.valueStack={value:this.value,prev:null}):(this.jsonParseOffset--,1===this.stack[this.lastFlushDepth-1]?Object.assign(this.valueStack.value,JSON.parse("{"+n+"}")):this.valueStack.value.push(...JSON.parse("["+n+"]")));for(let t=this.lastFlushDepth||1;t<this.flushDepth;t++){let e=this.valueStack.value;if(1===this.stack[t-1]){let t;for(t in e);e=e[t]}else e=e[e.length-1];this.valueStack={value:e,prev:this.valueStack}}}else{for(let t=this.lastFlushDepth-1;t>=this.flushDepth;t--)this.jsonParseOffset--,n=(1===this.stack[t]?"{":"[")+n;1===this.stack[this.lastFlushDepth-1]?Object.assign(this.valueStack.value,JSON.parse(n)):this.valueStack.value.push(...JSON.parse(n));for(let 
t=this.lastFlushDepth-1;t>=this.flushDepth;t--)this.valueStack=this.valueStack.prev}this.pos+=s-e,this.lastFlushDepth=this.flushDepth}push(t,e=!1){if("string"!=typeof t){if(null!==this.pendingByteSeq){const e=t;(t=new Uint8Array(this.pendingByteSeq.length+e.length)).set(this.pendingByteSeq),t.set(e,this.pendingByteSeq.length),this.pendingByteSeq=null}if(!e&&t[t.length-1]>127)for(let e=0;e<t.length;e++){const s=t[t.length-1-e];if(s>>6==3){e++,(4!==e&&s>>3==30||3!==e&&s>>4==14||2!==e&&s>>5==6)&&(this.pendingByteSeq=t.slice(t.length-e),t=t.slice(0,-e));break}}t=k.decode(t)}const s=t.length;let n=0,i=0;t:for(let e=0;e<s;e++){if(this.stateString){for(;e<s;e++)if(this.stateStringEscape)this.stateStringEscape=!1;else switch(t.charCodeAt(e)){case 34:this.stateString=!1;continue t;case 92:this.stateStringEscape=!0}break}switch(t.charCodeAt(e)){case 34:this.stateString=!0,this.stateStringEscape=!1;break;case 44:i=e;break;case 123:i=e+1,this.stack[this.flushDepth++]=1;break;case 91:i=e+1,this.stack[this.flushDepth++]=2;break;case 93:case 125:i=e+1,this.flushDepth--,this.flushDepth<this.lastFlushDepth&&(this.flush(t,n,i),n=i)}}if((i>n||e&&(s>0||null!==this.pendingChunk))&&this.flush(t,n,e?s:i),!e&&i<s){const e=t.slice(i,s);this.pendingChunk=null!==this.pendingChunk?this.pendingChunk+e:e}}finish(){return this.push("",!0),this.value}}return{version:"0.4.0",stringifyInfo:function(t,e,l,h){e=s(e),l=function(t){return"string"==typeof(t=n(t))?t.length:0}(l),h=h||{};const u=new Map,S=new Set,b=new Set,k=new Set,v=new Set,m=h.async?r:a,O={"":t};let D=!1,w=0;return function t(s,n,a){if(D)return;a=i(s,n,a,e);let r=m(a);if(r!==o&&S.has(a))return k.add(a),w+=4,void(h.continueOnCircular||(D=!0));switch(r){case o:void 0!==a||Array.isArray(s)?w+=function(t){switch(typeof t){case"string":return y(t);case"number":return Number.isFinite(t)?String(t).length:4;case"boolean":return t?4:5;case"undefined":case"object":return 4;default:return 0}}(a):s===O&&(w+=9);break;case 
c:{if(u.has(a)){b.add(a),w+=u.get(a);break}const e=w;let s=0;w+=2,S.add(a);for(const e in a)if(hasOwnProperty.call(a,e)){const n=w;t(a,e,a[e]),n!==w&&(w+=y(e)+1,s++)}s>1&&(w+=s-1),S.delete(a),l>0&&s>0&&(w+=(1+(S.size+1)*l+1)*s,w+=1+S.size*l),u.set(a,w-e);break}case f:{if(u.has(a)){b.add(a),w+=u.get(a);break}const e=w;w+=2,S.add(a);for(let e=0;e<a.length;e++)t(a,e,a[e]);a.length>1&&(w+=a.length-1),S.delete(a),l>0&&a.length>0&&(w+=(1+(S.size+1)*l)*a.length,w+=1+S.size*l),u.set(a,w-e);break}case p:case g:v.add(a);break;case d:w+=2,v.add(a)}}(O,"",t),{minLength:isNaN(w)?1/0:w,circular:[...k],duplicate:[...b],async:[...v]}},stringifyStream:()=>{throw new Error("Method is not supported")},parseChunked:function(t){let e=new O;if(v(t)&&b(t))return new Promise(((s,n)=>{t.on("data",(t=>{try{e.push(t)}catch(t){n(m(t,e)),e=null}})).on("error",(t=>{e=null,n(t)})).on("end",(()=>{try{s(e.finish())}catch(t){n(m(t,e))}finally{e=null}}))}));if("function"==typeof t){const s=t();if(v(s)&&(Symbol.iterator in s||Symbol.asyncIterator in s))return new Promise((async(t,n)=>{try{for await(const t of s)e.push(t);t(e.finish())}catch(t){n(m(t,e))}finally{e=null}}))}throw new Error("Chunk emitter should be readable stream, generator, async generator or function returning an iterable object")}}}));
{
"name": "@discoveryjs/json-ext",
"version": "0.3.2",
"version": "0.4.0",
"description": "A set of utilities that extend the use of JSON",

@@ -5,0 +5,0 @@ "keywords": [

@@ -11,7 +11,8 @@ # json-ext

- [x] `parseChunked()` – Parse JSON that comes by chunks (e.g. FS readable stream or fetch response stream)
- [x] `stringifyStream()` – Stringify stream (Node.js)
- [x] `stringifyInfo()` – Get estimated size and other facts of JSON.stringify() without converting a value to string
- [x] `stringifyStream()` – Stringify stream (Node.js)
- [ ] **TBD** Parse stream
- [ ] **TBD** Support for circular references
- [ ] **TBD** Binary representation
- [ ] **TBD** Binary representation [branch](https://github.com/discoveryjs/json-ext/tree/binary)
- [ ] **TBD** WHATWG [Streams](https://streams.spec.whatwg.org/) support

@@ -26,2 +27,3 @@ ## Install

- [parseChunked(chunkEmitter)](#parsechunked-chunkemitter)
- [stringifyStream(value[, replacer[, space]])](#stringifystreamvalue-replacer-space)

@@ -32,8 +34,95 @@ - [stringifyInfo(value[, replacer[, space[, options]]])](#stringifyinfovalue-replacer-space-options)

- [continueOnCircular](#continueoncircular)
- [version](#version)
### parseChunked(chunkEmitter)
Works the same as [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) but takes `chunkEmitter` instead of string and returns [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
> NOTE: `reviver` parameter is not supported yet, but will be added in next releases.
> NOTE: WHATWG streams aren't supported yet
When to use:
- It's required to avoid freezing the main thread during big JSON parsing, since this process can be distributed in time
- Huge JSON needs to be parsed (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.parse()` needs to receive the entire JSON before parsing it. With `parseChunked()` you may parse JSON as its first bytes arrive. This approach helps to avoid storing a huge string in memory at a single point in time and the subsequent GC.
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#parse-chunked)
Usage:
```js
const { parseChunked } = require('@discoveryjs/json-ext');
// as a regular Promise
parseChunked(chunkEmitter)
.then(data => {
/* data is parsed JSON */
});
// using await (keep in mind that not every runtime has a support for top level await)
const data = await parseChunked(chunkEmitter);
```
Parameter `chunkEmitter` can be:
- [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) (Node.js only)
```js
const fs = require('fs');
const { parseChunked } = require('@discoveryjs/json-ext');
parseChunked(fs.createReadStream('path/to/file.json'))
```
- Generator, async generator or function that returns iterable (chunks). Chunk might be a `string`, `Uint8Array` or `Buffer` (Node.js only):
```js
const { parseChunked } = require('@discoveryjs/json-ext');
const encoder = new TextEncoder();
// generator
parseChunked(function*() {
yield '{ "hello":';
yield Buffer.from(' "wor'); // Node.js only
yield encoder.encode('ld" }'); // returns Uint8Array(5) [ 108, 100, 34, 32, 125 ]
});
// async generator
parseChunked(async function*() {
for await (const chunk of someAsyncSource) {
yield chunk;
}
});
// function that returns iterable
parseChunked(() => ['{ "hello":', ' "world"}'])
```
Using with [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API):
```js
async function loadData(url) {
const response = await fetch(url);
const reader = response.body.getReader();
return parseChunked(async function*() {
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
yield value;
}
});
}
loadData('https://example.com/data.json')
.then(data => {
/* data is parsed JSON */
})
```
### stringifyStream(value[, replacer[, space]])
Works the same as [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an instance of `ReadableStream` instead of string.
Works the same as [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an instance of [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) instead of string.
[Compare with other solutions](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying) (benchmark)
> NOTE: WHATWG Streams aren't supported yet, so function available for Node.js only for now

@@ -46,2 +135,76 @@ Departs from JSON.stringify():

When to use:
- Huge JSON needs to be generated (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.stringify()` needs to generate the entire JSON before sending or writing it somewhere. With `stringifyStream()` you may start sending the result as soon as its first bytes appear. This approach helps to avoid storing a huge string in memory at a single point in time.
- The object being serialized contains Promises or Streams (see Usage for examples)
[Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying)
Usage:
```js
const { stringifyStream } = require('@discoveryjs/json-ext');
// handle events
stringifyStream(data)
.on('data', chunk => console.log(chunk))
    .on('error', error => console.error(error))
.on('finish', () => console.log('DONE!'));
// pipe into a stream
stringifyStream(data)
.pipe(writableStream);
```
Using Promise or ReadableStream in serializing object:
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
// output will be
// {"name":"example","willSerializeResolvedValue":42,"fromFile":[1, 2, 3],"at":{"any":{"level":"promise!"}}}
stringifyStream({
name: 'example',
willSerializeResolvedValue: Promise.resolve(42),
    fromFile: fs.createReadStream('path/to/file.json'), // suppose file content is "[1, 2, 3]", it'll be inserted as is
at: {
any: {
level: new Promise(resolve => setTimeout(() => resolve('promise!'), 100))
}
}
})
// in case several async requests are used in an object, it's preferred
// to put fastest requests first, because in this case
stringifyStream({
foo: fetch('http://example.com/request_takes_2s').then(req => req.json()),
bar: fetch('http://example.com/request_takes_5s').then(req => req.json())
});
```
Using with [`WritableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_writable_streams) (Node.js only):
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
// pipe into a console
stringifyStream(data)
.pipe(process.stdout);
// pipe into a file
stringifyStream(data)
    .pipe(fs.createWriteStream('path/to/file.json'));
// wrapping into a Promise
new Promise((resolve, reject) => {
stringifyStream(data)
.on('error', reject)
.pipe(stream)
.on('error', reject)
.on('finish', resolve);
});
```
### stringifyInfo(value[, replacer[, space[, options]]])

@@ -55,3 +218,3 @@

{
minLength: Number, // mininmal bytes when values is stringified
minLength: Number, // minimal bytes when values is stringified
circular: [...], // list of circular references

@@ -63,2 +226,14 @@ duplicate: [...], // list of objects that occur more than once

Example:
```js
const { stringifyInfo } = require('@discoveryjs/json-ext');
console.log(
stringifyInfo({ test: true }).minLength
);
// > 13
// that equals '{"test":true}'.length
```
#### Options

@@ -65,0 +240,0 @@

module.exports = {
version: require('../package.json').version,
stringifyInfo: require('./stringify-info'),
stringifyStream: require('./stringify-stream')
stringifyStream: require('./stringify-stream'),
parseChunked: require('./parse-chunked')
};

@@ -46,3 +46,3 @@ const {

len = prevLeadingSurrogate
? len - 2 // surrogate pair (4 bytes), since we calulate prev leading surrogate as 6 bytes, substruct 2 bytes
? len - 2 // surrogate pair (4 bytes), since we calculate prev leading surrogate as 6 bytes, subtract 2 bytes
: len + 6; // \uXXXX

@@ -75,3 +75,3 @@ } else {

default:
throw new TypeError(`Do not know how to serialize a ${typeof value}`);
return 0;
}

@@ -86,3 +86,3 @@ }

module.exports = function jsonStringifyInfo(value, replacer, space, options) {
function walk(key, value) {
function walk(holder, key, value) {
if (stop) {

@@ -92,3 +92,3 @@ return;

value = replaceValue(this, key, value, replacer);
value = replaceValue(holder, key, value, replacer);

@@ -111,5 +111,5 @@ let type = getType(value);

case PRIMITIVE:
if (value !== undefined || Array.isArray(this)) {
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (this === root) {
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?

@@ -136,3 +136,3 @@ }

const prevLength = length;
walk.call(value, property, value[property]);
walk(value, property, value[property]);

@@ -177,3 +177,3 @@ if (prevLength !== length) {

for (let i = 0; i < value.length; i++) {
walk.call(value, String(i), value[i]);
walk(value, i, value[i]);
}

@@ -223,3 +223,3 @@

walk.call(root, '', value);
walk(root, '', value);

@@ -226,0 +226,0 @@ return {

@@ -43,3 +43,3 @@ const { Readable } = require('stream');

default:
this.destroy(new TypeError(`Do not know how to serialize a ${typeof value}`));
this.destroy(new TypeError(`Do not know how to serialize a ${value.constructor && value.constructor.name || typeof value}`));
}

@@ -281,6 +281,3 @@ }

self.awaiting = false;
if (this._stack === self) {
this.processStack();
}
this.processStack();
}

@@ -359,9 +356,4 @@ };

_read(size) {
if (this._ended) {
return;
}
// start processing
this._readSize = size || this.readableHighWaterMark;
// start processing
this.processStack();

@@ -368,0 +360,0 @@ }

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc