Socket
Socket
Sign in · Demo · Install

@discoveryjs/json-ext

Package Overview
Dependencies
Maintainers
0
Versions
17
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@discoveryjs/json-ext - npm Package Compare versions

Comparing version 0.5.7 to 0.6.0

cjs/index.cjs

1397

dist/json-ext.js
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.jsonExt = factory());
})(this, (function () { 'use strict';
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.jsonExt = factory());
}(typeof globalThis != 'undefined' ? globalThis : typeof window != 'undefined' ? window : typeof global != 'undefined' ? global : typeof self != 'undefined' ? self : this, (function () {
var exports = (() => {
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines every entry of `all` on `target` as an enumerable getter,
// so exported bindings stay live (esbuild ESM->CJS helper).
var __export = (target, all) => {
    for (const name in all) {
        __defProp(target, name, { get: all[name], enumerable: true });
    }
};
// Copies own properties of `from` onto `to` as live getters, skipping keys
// already present on `to` and the single `except` key. Enumerability is taken
// from the source descriptor; `desc` is reused as a scratch variable inside
// the expression. Returns `to` (esbuild ESM->CJS helper).
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") {
for (let key of __getOwnPropNames(from))
if (!__hasOwnProp.call(to, key) && key !== except)
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
}
return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var version = "0.5.7";
// src/index.js
var src_exports = {};
__export(src_exports, {
createStringifyWebStream: () => createStringifyWebStream,
parseChunked: () => parseChunked,
parseFromWebStream: () => parseFromWebStream,
stringifyChunked: () => stringifyChunked,
stringifyInfo: () => stringifyInfo
});
const PrimitiveType = 1;
const ObjectType = 2;
const ArrayType = 3;
const PromiseType = 4;
const ReadableStringType = 5;
const ReadableObjectType = 6;
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution$1 = { // JSON Single Character Escape Sequences
0x08: '\\b',
0x09: '\\t',
0x0a: '\\n',
0x0c: '\\f',
0x0d: '\\r',
0x22: '\\\"',
0x5c: '\\\\'
};
function isLeadingSurrogate$1(code) {
return code >= 0xD800 && code <= 0xDBFF;
// src/utils.js
// Returns true when `value` can be consumed with for..of / for await..of:
// a non-null object exposing Symbol.iterator or Symbol.asyncIterator.
function isIterable(value) {
    if (value === null || typeof value !== "object") {
        return false;
    }

    return typeof value[Symbol.iterator] === "function" ||
        typeof value[Symbol.asyncIterator] === "function";
}
function replaceValue(holder, key, value, replacer) {
if (value && typeof value.toJSON === "function") {
value = value.toJSON();
}
function isTrailingSurrogate$1(code) {
return code >= 0xDC00 && code <= 0xDFFF;
if (replacer !== null) {
value = replacer.call(holder, String(key), value);
}
switch (typeof value) {
case "function":
case "symbol":
value = void 0;
break;
case "object":
if (value !== null) {
const cls = value.constructor;
if (cls === String || cls === Number || cls === Boolean) {
value = value.valueOf();
}
}
break;
}
return value;
}
// Normalizes a JSON.stringify-style replacer:
// - a function is used as-is;
// - an array becomes a deduplicated allowlist of string keys
//   (only String/Number items are kept, per the JSON.stringify spec);
// - anything else yields null (no replacer).
function normalizeReplacer(replacer) {
    if (typeof replacer === "function") {
        return replacer;
    }

    if (Array.isArray(replacer)) {
        const keys = new Set();

        for (const item of replacer) {
            const cls = item && item.constructor;

            if (cls === String || cls === Number) {
                keys.add(String(item));
            }
        }

        return [...keys];
    }

    return null;
}
// Normalizes a JSON.stringify-style `space` option to an indentation string
// (capped at 10 characters, as per the spec) or `false` when no
// indentation should be applied.
function normalizeSpace(space) {
    switch (typeof space) {
        case "number":
            return Number.isFinite(space) && space >= 1
                ? " ".repeat(Math.min(space, 10))
                : false;

        case "string":
            return space.slice(0, 10) || false;

        default:
            return false;
    }
}
function isReadableStream$1(value) {
return (
typeof value.pipe === 'function' &&
typeof value._read === 'function' &&
typeof value._readableState === 'object' && value._readableState !== null
);
// src/parse-chunked.js
var STACK_OBJECT = 1;
var STACK_ARRAY = 2;
var decoder = new TextDecoder();
function adjustPosition(error, parser) {
if (error.name === "SyntaxError" && parser.jsonParseOffset) {
error.message = error.message.replace(
/at position (\d+)/,
(_, pos) => "at position " + (Number(pos) + parser.jsonParseOffset)
);
}
function replaceValue$1(holder, key, value, replacer) {
if (value && typeof value.toJSON === 'function') {
value = value.toJSON();
return error;
}
// Appends all items of `elements` to `array` in place.
// Deliberately avoids array.push(...elements), which can throw
// "RangeError: Maximum call stack size exceeded" for very long inputs.
function append(array, elements) {
    const total = elements.length;
    const writeAt = array.length;

    array.length = writeAt + total;

    for (let readAt = 0; readAt < total; readAt++) {
        array[writeAt + readAt] = elements[readAt];
    }
}
async function parseChunked(chunkEmitter) {
const iterable = typeof chunkEmitter === "function" ? chunkEmitter() : chunkEmitter;
if (isIterable(iterable)) {
let parser = new ChunkParser();
try {
for await (const chunk of iterable) {
if (typeof chunk !== "string" && !ArrayBuffer.isView(chunk)) {
throw new TypeError("Invalid chunk: Expected string, TypedArray or Buffer");
}
parser.push(chunk);
}
if (replacer !== null) {
value = replacer.call(holder, String(key), value);
return parser.finish();
} catch (e) {
throw adjustPosition(e, parser);
}
}
throw new TypeError(
"Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, async generator, or a function returning an Iterable or AsyncIterable"
);
}
var ChunkParser = class {
constructor() {
this.value = void 0;
this.valueStack = null;
this.stack = new Array(100);
this.lastFlushDepth = 0;
this.flushDepth = 0;
this.stateString = false;
this.stateStringEscape = false;
this.pendingByteSeq = null;
this.pendingChunk = null;
this.chunkOffset = 0;
this.jsonParseOffset = 0;
}
parseAndAppend(fragment, wrap) {
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
if (wrap) {
this.jsonParseOffset--;
fragment = "{" + fragment + "}";
}
switch (typeof value) {
case 'function':
case 'symbol':
value = undefined;
break;
case 'object':
if (value !== null) {
const cls = value.constructor;
if (cls === String || cls === Number || cls === Boolean) {
value = value.valueOf();
}
}
break;
Object.assign(this.valueStack.value, JSON.parse(fragment));
} else {
if (wrap) {
this.jsonParseOffset--;
fragment = "[" + fragment + "]";
}
return value;
append(this.valueStack.value, JSON.parse(fragment));
}
}
function getTypeNative$1(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
prepareAddition(fragment) {
const { value } = this.valueStack;
const expectComma = Array.isArray(value) ? value.length !== 0 : Object.keys(value).length !== 0;
if (expectComma) {
if (fragment[0] === ",") {
this.jsonParseOffset++;
return fragment.slice(1);
}
if (Array.isArray(value)) {
return ArrayType;
if (fragment[0] !== "}" && fragment[0] !== "]") {
this.jsonParseOffset -= 3;
return "[[]" + fragment;
}
return ObjectType;
}
return fragment;
}
function getTypeAsync$1(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
flush(chunk, start, end) {
let fragment = chunk.slice(start, end);
this.jsonParseOffset = this.chunkOffset + start;
if (this.pendingChunk !== null) {
fragment = this.pendingChunk + fragment;
this.jsonParseOffset -= this.pendingChunk.length;
this.pendingChunk = null;
}
if (this.flushDepth === this.lastFlushDepth) {
if (this.flushDepth > 0) {
this.parseAndAppend(this.prepareAddition(fragment), true);
} else {
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
}
if (typeof value.then === 'function') {
return PromiseType;
} else if (this.flushDepth > this.lastFlushDepth) {
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
fragment += this.stack[i] === STACK_OBJECT ? "}" : "]";
}
if (isReadableStream$1(value)) {
return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
if (this.lastFlushDepth === 0) {
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
} else {
this.parseAndAppend(this.prepareAddition(fragment), true);
}
if (Array.isArray(value)) {
return ArrayType;
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
let value = this.valueStack.value;
if (this.stack[i - 1] === STACK_OBJECT) {
let key;
for (key in value) ;
value = value[key];
} else {
value = value[value.length - 1];
}
this.valueStack = {
value,
prev: this.valueStack
};
}
return ObjectType;
}
function normalizeReplacer$1(replacer) {
if (typeof replacer === 'function') {
return replacer;
} else {
fragment = this.prepareAddition(fragment);
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.jsonParseOffset--;
fragment = (this.stack[i] === STACK_OBJECT ? "{" : "[") + fragment;
}
if (Array.isArray(replacer)) {
const allowlist = new Set(replacer
.map(item => {
const cls = item && item.constructor;
return cls === String || cls === Number ? String(item) : null;
})
.filter(item => typeof item === 'string')
);
return [...allowlist];
this.parseAndAppend(fragment, false);
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.valueStack = this.valueStack.prev;
}
return null;
}
this.lastFlushDepth = this.flushDepth;
}
function normalizeSpace$1(space) {
if (typeof space === 'number') {
if (!Number.isFinite(space) || space < 1) {
return false;
push(chunk) {
if (typeof chunk !== "string") {
if (this.pendingByteSeq !== null) {
const origRawChunk = chunk;
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
chunk.set(this.pendingByteSeq);
chunk.set(origRawChunk, this.pendingByteSeq.length);
this.pendingByteSeq = null;
}
if (chunk[chunk.length - 1] > 127) {
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
const byte = chunk[chunk.length - 1 - seqLength];
if (byte >> 6 === 3) {
seqLength++;
if (seqLength !== 4 && byte >> 3 === 30 || seqLength !== 3 && byte >> 4 === 14 || seqLength !== 2 && byte >> 5 === 6) {
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
chunk = chunk.slice(0, -seqLength);
}
break;
}
return ' '.repeat(Math.min(space, 10));
}
}
if (typeof space === 'string') {
return space.slice(0, 10) || false;
}
return false;
}
var utils = {
escapableCharCodeSubstitution: escapableCharCodeSubstitution$1,
isLeadingSurrogate: isLeadingSurrogate$1,
isTrailingSurrogate: isTrailingSurrogate$1,
type: {
PRIMITIVE: PrimitiveType,
PROMISE: PromiseType,
ARRAY: ArrayType,
OBJECT: ObjectType,
STRING_STREAM: ReadableStringType,
OBJECT_STREAM: ReadableObjectType
},
isReadableStream: isReadableStream$1,
replaceValue: replaceValue$1,
getTypeNative: getTypeNative$1,
getTypeAsync: getTypeAsync$1,
normalizeReplacer: normalizeReplacer$1,
normalizeSpace: normalizeSpace$1
};
const {
normalizeReplacer,
normalizeSpace,
replaceValue,
getTypeNative,
getTypeAsync,
isLeadingSurrogate,
isTrailingSurrogate,
escapableCharCodeSubstitution,
type: {
PRIMITIVE,
OBJECT,
ARRAY,
PROMISE,
STRING_STREAM,
OBJECT_STREAM
}
} = utils;
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
return 2; // \X
}
if (code < 0x20) {
return 6; // \uXXXX
}
return code < 128 ? 1 : 2; // UTF8 bytes
});
function stringLength(str) {
let len = 0;
let prevLeadingSurrogate = false;
for (let i = 0; i < str.length; i++) {
const code = str.charCodeAt(i);
if (code < 2048) {
len += charLength2048[code];
} else if (isLeadingSurrogate(code)) {
len += 6; // \uXXXX since no pair with trailing surrogate yet
prevLeadingSurrogate = true;
continue;
} else if (isTrailingSurrogate(code)) {
len = prevLeadingSurrogate
? len - 2 // surrogate pair (4 bytes), since we calculate prev leading surrogate as 6 bytes, substruct 2 bytes
: len + 6; // \uXXXX
chunk = decoder.decode(chunk);
}
const chunkLength = chunk.length;
let lastFlushPoint = 0;
let flushPoint = 0;
scan: for (let i = 0; i < chunkLength; i++) {
if (this.stateString) {
for (; i < chunkLength; i++) {
if (this.stateStringEscape) {
this.stateStringEscape = false;
} else {
len += 3; // code >= 2048 is 3 bytes length for UTF8
switch (chunk.charCodeAt(i)) {
case 34:
this.stateString = false;
continue scan;
case 92:
this.stateStringEscape = true;
}
}
prevLeadingSurrogate = false;
}
break;
}
return len + 2; // +2 for quotes
}
function primitiveLength(value) {
switch (typeof value) {
case 'string':
return stringLength(value);
case 'number':
return Number.isFinite(value) ? String(value).length : 4 /* null */;
case 'boolean':
return value ? 4 /* true */ : 5 /* false */;
case 'undefined':
case 'object':
return 4; /* null */
default:
return 0;
}
}
function spaceLength(space) {
space = normalizeSpace(space);
return typeof space === 'string' ? space.length : 0;
}
var stringifyInfo = function jsonStringifyInfo(value, replacer, space, options) {
function walk(holder, key, value) {
if (stop) {
return;
switch (chunk.charCodeAt(i)) {
case 34:
this.stateString = true;
this.stateStringEscape = false;
break;
case 44:
flushPoint = i;
break;
case 123:
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_OBJECT;
break;
case 91:
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_ARRAY;
break;
case 93:
case 125:
flushPoint = i + 1;
this.flushDepth--;
if (this.flushDepth < this.lastFlushDepth) {
this.flush(chunk, lastFlushPoint, flushPoint);
lastFlushPoint = flushPoint;
}
value = replaceValue(holder, key, value, replacer);
let type = getType(value);
// check for circular structure
if (type !== PRIMITIVE && stack.has(value)) {
circular.add(value);
length += 4; // treat as null
if (!options.continueOnCircular) {
stop = true;
}
return;
break;
case 9:
case 10:
case 13:
case 32:
if (lastFlushPoint === i) {
lastFlushPoint++;
}
switch (type) {
case PRIMITIVE:
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
}
break;
case OBJECT: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
let entries = 0;
length += 2; // {}
stack.add(value);
for (const key in value) {
if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
const prevLength = length;
walk(value, key, value[key]);
if (prevLength !== length) {
// value is printed
length += stringLength(key) + 1; // "key":
entries++;
}
}
}
if (entries > 1) {
length += entries - 1; // commas
}
stack.delete(value);
if (space > 0 && entries > 0) {
length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
length += 1 + stack.size * space; // for }
}
visited.set(value, length - valueLength);
break;
}
case ARRAY: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
length += 2; // []
stack.add(value);
for (let i = 0; i < value.length; i++) {
walk(value, i, value[i]);
}
if (value.length > 1) {
length += value.length - 1; // commas
}
stack.delete(value);
if (space > 0 && value.length > 0) {
length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
length += 1 + stack.size * space; // for ]
}
visited.set(value, length - valueLength);
break;
}
case PROMISE:
case STRING_STREAM:
async.add(value);
break;
case OBJECT_STREAM:
length += 2; // []
async.add(value);
break;
if (flushPoint === i) {
flushPoint++;
}
break;
}
let allowlist = null;
replacer = normalizeReplacer(replacer);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
if (flushPoint > lastFlushPoint) {
this.flush(chunk, lastFlushPoint, flushPoint);
}
if (flushPoint < chunkLength) {
if (this.pendingChunk !== null) {
this.pendingChunk += chunk;
} else {
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
}
}
this.chunkOffset += chunkLength;
}
finish() {
if (this.pendingChunk !== null) {
this.flush("", 0, 0);
this.pendingChunk = null;
}
return this.value;
}
};
space = spaceLength(space);
options = options || {};
const visited = new Map();
const stack = new Set();
const duplicate = new Set();
const circular = new Set();
const async = new Set();
const getType = options.async ? getTypeAsync : getTypeNative;
const root = { '': value };
let stop = false;
let length = 0;
walk(root, '', value);
return {
minLength: isNaN(length) ? Infinity : length,
circular: [...circular],
duplicate: [...duplicate],
async: [...async]
};
};
var stringifyStreamBrowser = () => {
throw new Error('Method is not supported');
};
var textDecoderBrowser = TextDecoder;
const { isReadableStream } = utils;
const STACK_OBJECT = 1;
const STACK_ARRAY = 2;
const decoder = new textDecoderBrowser();
function isObject(value) {
return value !== null && typeof value === 'object';
// src/stringify-chunked.js
function encodeString(value) {
if (/[^\x20\x21\x23-\x5B\x5D-\uD799]/.test(value)) {
return JSON.stringify(value);
}
function adjustPosition(error, parser) {
if (error.name === 'SyntaxError' && parser.jsonParseOffset) {
error.message = error.message.replace(/at position (\d+)/, (_, pos) =>
'at position ' + (Number(pos) + parser.jsonParseOffset)
);
return '"' + value + '"';
}
function* stringifyChunked(value, optionsOrReplacer, space) {
if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== "object") {
optionsOrReplacer = {
replacer: optionsOrReplacer,
space
};
}
const highWaterMark = Number(optionsOrReplacer.highWaterMark) || 16384;
let replacer = normalizeReplacer(optionsOrReplacer.replacer);
space = normalizeSpace(optionsOrReplacer.space);
let buffer = "";
let depth = 0;
let stack = null;
let first = false;
let visited = /* @__PURE__ */ new WeakSet();
let processing = false;
let getKeys = Object.keys;
if (Array.isArray(replacer)) {
const allowlist = replacer;
getKeys = () => allowlist;
replacer = null;
}
pushStack(processRoot, value, null);
while (stack !== null) {
processing = true;
while (stack !== null && !stack.awaiting) {
stack.handler();
if (!processing) {
break;
}
return error;
}
processing = false;
yield buffer;
buffer = "";
}
function append(array, elements) {
// Note: Avoid to use array.push(...elements) since it may lead to
// "RangeError: Maximum call stack size exceeded" for a long arrays
const initialLength = array.length;
array.length += elements.length;
for (let i = 0; i < elements.length; i++) {
array[initialLength + i] = elements[i];
// Stack handler for the synthetic root frame: pops itself, then emits the
// root value using "" as its key (mirrors JSON.stringify's { "": value }
// holder convention). The no-op callback means no separator/key is emitted
// for the root.
function processRoot() {
const { value: value2 } = stack;
popStack();
processValue({ "": value2 }, "", value2, () => {
});
}
// Emits the separator and encoded key before an object entry's value.
// `first` tracks whether this is the first entry emitted in the current
// object (no leading comma for the first one).
function processObjectEntry(key) {
if (first === false) {
first = true;
} else {
push(",");
}
if (space) {
// pretty-print: newline + indent, then `"key": ` (with a space)
push(`
${space.repeat(depth)}${encodeString(key)}: `);
} else {
push(encodeString(key) + ":");
}
}
// Stack handler for objects: each invocation emits at most one key/value
// pair; once all keys are consumed it emits the closing brace (with closing
// indentation when pretty-printing and at least one entry was emitted) and
// pops the frame.
function processObject() {
const current = stack;
// all keys consumed -> close the object
if (current.index === current.keys.length) {
if (space && first) {
push(`
${space.repeat(depth - 1)}}`);
} else {
push("}");
}
popStack();
return;
}
const key = current.keys[current.index];
processValue(current.value, key, current.value[key], processObjectEntry);
current.index++;
}
var parseChunked = function(chunkEmitter) {
let parser = new ChunkParser();
if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
return new Promise((resolve, reject) => {
chunkEmitter
.on('data', chunk => {
try {
parser.push(chunk);
} catch (e) {
reject(adjustPosition(e, parser));
parser = null;
}
})
.on('error', (e) => {
parser = null;
reject(e);
})
.on('end', () => {
try {
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
});
// Emits the separator before an array element: a comma for every element
// after the first, plus newline + indentation when pretty-printing.
function processArrayItem(index) {
if (index !== 0) {
push(",");
}
if (space) {
push(`
${space.repeat(depth)}`);
}
}
function processArray() {
const current = stack;
if (current.index === current.value.length) {
if (space && current.index !== 0) {
push(`
${space.repeat(depth - 1)}]`);
} else {
push("]");
}
if (typeof chunkEmitter === 'function') {
const iterator = chunkEmitter();
if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
return new Promise(async (resolve, reject) => {
try {
for await (const chunk of iterator) {
parser.push(chunk);
}
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
}
popStack();
return;
}
processValue(current.value, current.index, current.value[current.index], processArrayItem);
current.index++;
}
// Applies the replacer/toJSON transform to `value2`, then either emits a
// primitive immediately or opens a new object/array frame on the stack.
// `callback` emits the key/separator first; for object entries it is skipped
// when the value resolves to undefined, dropping the entry entirely
// (array elements keep the slot — pushPrimitive serializes undefined as null).
function processValue(holder, key, value2, callback) {
value2 = replaceValue(holder, key, value2, replacer);
if (value2 === null || typeof value2 !== "object") {
if (callback !== processObjectEntry || value2 !== void 0) {
callback(key);
pushPrimitive(value2);
}
} else if (Array.isArray(value2)) {
callback(key);
circularCheck(value2);
depth++;
push("[");
pushStack(processArray, value2, null);
} else {
callback(key);
circularCheck(value2);
depth++;
push("{");
pushStack(processObject, value2, getKeys(value2));
}
}
// Guards against circular references: a value still present in `visited`
// is currently being serialized further up the stack (popStack removes it
// when its frame completes).
function circularCheck(value2) {
if (visited.has(value2)) {
throw new TypeError("Converting circular structure to JSON");
}
visited.add(value2);
}
// Serializes a primitive onto the output buffer, matching JSON.stringify:
// strings are escaped, non-finite numbers / undefined / null become "null",
// and unsupported types (bigint, etc.) raise a TypeError.
function pushPrimitive(value2) {
    const type = typeof value2;

    if (type === "string") {
        push(encodeString(value2));
    } else if (type === "number") {
        // finite numbers rely on string coercion inside push()
        push(Number.isFinite(value2) ? value2 : "null");
    } else if (type === "boolean") {
        push(value2 ? "true" : "false");
    } else if (type === "undefined" || type === "object") {
        push("null");
    } else {
        throw new TypeError(`Do not know how to serialize a ${value2.constructor?.name || typeof value2}`);
    }
}
// Pushes a new processing frame onto the linked-list stack; the main loop
// calls `handler` repeatedly until the frame pops itself. `keys` is only
// used by object frames. Resets `first` so the new scope starts without a
// leading comma.
function pushStack(handler, value2, keys) {
first = false;
return stack = {
handler,
value: value2,
index: 0,
keys,
prev: stack
};
}
// Pops the current frame. Object/array frames release their circular-check
// marker and restore the indentation depth. Setting `first = true` tells the
// parent scope that at least one item was emitted (affects closing-brace
// formatting and comma placement).
function popStack() {
const { handler, value: value2 } = stack;
if (handler === processObject || handler === processArray) {
visited.delete(value2);
depth--;
}
stack = stack.prev;
first = true;
}
// Appends data to the output buffer; clears the `processing` flag once the
// buffer reaches highWaterMark, which makes the generator yield a chunk.
function push(data) {
buffer += data;
processing = buffer.length < highWaterMark;
}
}
throw new Error(
'Chunk emitter should be readable stream, generator, ' +
'async generator or function returning an iterable object'
);
// src/stringify-info.js
// Object.hasOwn (ES2022) with a hasOwnProperty-based fallback for older runtimes.
var hasOwn = typeof Object.hasOwn === "function"
    ? Object.hasOwn
    : (object, key) => Object.hasOwnProperty.call(object, key);
// Char code -> JSON single-character escape sequence
// (https://tc39.es/ecma262/#table-json-single-character-escapes)
var escapableCharCodeSubstitution = {
// JSON Single Character Escape Sequences
8: "\\b", // backspace
9: "\\t", // tab
10: "\\n", // line feed
12: "\\f", // form feed
13: "\\r", // carriage return
34: '\\"', // double quote
92: "\\\\" // backslash
};
// Precomputed JSON-encoded UTF-8 byte length for every code point < 2048:
// 2 for single-character escapes (\n, \" ...), 6 for remaining control
// chars (\uXXXX), 1 for the rest of ASCII, 2 for two-byte UTF-8 sequences.
var charLength2048 = Array.from({ length: 2048 }, (_, code) => {
    if (hasOwn(escapableCharCodeSubstitution, code)) {
        return 2;
    }

    return code < 32 ? 6 : code < 128 ? 1 : 2;
});
// True for UTF-16 high (leading) surrogate code units, U+D800..U+DBFF.
function isLeadingSurrogate(code) {
    return code >= 0xD800 && code <= 0xDBFF;
}
// True for UTF-16 low (trailing) surrogate code units, U+DC00..U+DFFF.
function isTrailingSurrogate(code) {
    return code >= 0xDC00 && code <= 0xDFFF;
}
// Computes the UTF-8 byte length of `str` as it would appear JSON-encoded,
// including the two surrounding quotes. Surrogate pairs are counted as
// 4 bytes; lone surrogates as 6 (\uXXXX escape).
function stringLength(str) {
let len = 0;
let prevLeadingSurrogate = false;
for (let i = 0; i < str.length; i++) {
const code = str.charCodeAt(i);
if (code < 2048) {
len += charLength2048[code]; // precomputed: escapes / control / ASCII / 2-byte UTF-8
} else if (isLeadingSurrogate(code)) {
len += 6; // assume a lone surrogate (\uXXXX) until a trailing one pairs up
prevLeadingSurrogate = true;
continue;
} else if (isTrailingSurrogate(code)) {
// paired: the lead already added 6, so subtract 2 for a 4-byte total;
// a lone trailing surrogate costs 6 (\uXXXX)
len = prevLeadingSurrogate ? len - 2 : len + 6;
} else {
len += 3; // code >= 2048 encodes as 3 UTF-8 bytes
}
prevLeadingSurrogate = false;
}
return len + 2; // enclosing quotes
}
// Returns the JSON-encoded byte length of a primitive value.
// Non-finite numbers, undefined and null all serialize as "null" (4 bytes);
// functions/symbols contribute nothing.
function primitiveLength(value) {
    const type = typeof value;

    if (type === "string") {
        return stringLength(value);
    }

    if (type === "number") {
        return Number.isFinite(value) ? String(value).length : 4; // "null"
    }

    if (type === "boolean") {
        return value ? 4 : 5; // "true" / "false"
    }

    if (type === "undefined" || type === "object") {
        return 4; // "null"
    }

    return 0;
}
// Byte length contributed by one indentation step of the `space` option
// (0 when indentation is disabled).
function spaceLength(space) {
    const normalized = normalizeSpace(space);

    return typeof normalized === "string" ? normalized.length : 0;
}
function stringifyInfo(value, optionsOrReplacer, space) {
if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== "object") {
optionsOrReplacer = {
replacer: optionsOrReplacer,
space
};
}
let allowlist = null;
let replacer = normalizeReplacer(optionsOrReplacer.replacer);
const continueOnCircular = Boolean(optionsOrReplacer.continueOnCircular);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
space = spaceLength(space);
const visited = /* @__PURE__ */ new WeakMap();
const stack = /* @__PURE__ */ new Set();
const circular = /* @__PURE__ */ new Set();
const root = { "": value };
let stop = false;
let bytes = 0;
walk(root, "", value);
return {
bytes: isNaN(bytes) ? Infinity : bytes,
circular: [...circular]
};
class ChunkParser {
constructor() {
this.value = undefined;
this.valueStack = null;
this.stack = new Array(100);
this.lastFlushDepth = 0;
this.flushDepth = 0;
this.stateString = false;
this.stateStringEscape = false;
this.pendingByteSeq = null;
this.pendingChunk = null;
this.chunkOffset = 0;
this.jsonParseOffset = 0;
function walk(holder, key, value2) {
if (stop) {
return;
}
value2 = replaceValue(holder, key, value2, replacer);
if (value2 === null || typeof value2 !== "object") {
if (value2 !== void 0 || Array.isArray(holder)) {
bytes += primitiveLength(value2);
} else if (holder === root) {
bytes += 9;
}
parseAndAppend(fragment, wrap) {
// Append new entries or elements
if (this.stack[this.lastFlushDepth - 1] === STACK_OBJECT) {
if (wrap) {
this.jsonParseOffset--;
fragment = '{' + fragment + '}';
}
Object.assign(this.valueStack.value, JSON.parse(fragment));
} else {
if (wrap) {
this.jsonParseOffset--;
fragment = '[' + fragment + ']';
}
append(this.valueStack.value, JSON.parse(fragment));
}
} else {
if (stack.has(value2)) {
circular.add(value2);
bytes += 4;
if (!continueOnCircular) {
stop = true;
}
return;
}
prepareAddition(fragment) {
const { value } = this.valueStack;
const expectComma = Array.isArray(value)
? value.length !== 0
: Object.keys(value).length !== 0;
if (expectComma) {
// Skip a comma at the beginning of fragment, otherwise it would
// fail to parse
if (fragment[0] === ',') {
this.jsonParseOffset++;
return fragment.slice(1);
}
// When value (an object or array) is not empty and a fragment
// doesn't start with a comma, a single valid fragment starting
// is a closing bracket. If it's not, a prefix is adding to fail
// parsing. Otherwise, the sequence of chunks can be successfully
// parsed, although it should not, e.g. ["[{}", "{}]"]
if (fragment[0] !== '}' && fragment[0] !== ']') {
this.jsonParseOffset -= 3;
return '[[]' + fragment;
}
}
return fragment;
if (visited.has(value2)) {
bytes += visited.get(value2);
return;
}
flush(chunk, start, end) {
let fragment = chunk.slice(start, end);
// Save position correction an error in JSON.parse() if any
this.jsonParseOffset = this.chunkOffset + start;
// Prepend pending chunk if any
if (this.pendingChunk !== null) {
fragment = this.pendingChunk + fragment;
this.jsonParseOffset -= this.pendingChunk.length;
this.pendingChunk = null;
if (Array.isArray(value2)) {
const valueLength = bytes;
bytes += 2;
stack.add(value2);
for (let i = 0; i < value2.length; i++) {
walk(value2, i, value2[i]);
}
if (value2.length > 1) {
bytes += value2.length - 1;
}
stack.delete(value2);
if (space > 0 && value2.length > 0) {
bytes += (1 + (stack.size + 1) * space) * value2.length;
bytes += 1 + stack.size * space;
}
visited.set(value2, bytes - valueLength);
} else {
const valueLength = bytes;
let entries = 0;
bytes += 2;
stack.add(value2);
for (const key2 in value2) {
if (hasOwn(value2, key2) && (allowlist === null || allowlist.has(key2))) {
const prevLength = bytes;
walk(value2, key2, value2[key2]);
if (prevLength !== bytes) {
bytes += stringLength(key2) + 1;
entries++;
}
}
if (this.flushDepth === this.lastFlushDepth) {
// Depth didn't changed, so it's a root value or entry/element set
if (this.flushDepth > 0) {
this.parseAndAppend(this.prepareAddition(fragment), true);
} else {
// That's an entire value on a top level
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
}
} else if (this.flushDepth > this.lastFlushDepth) {
// Add missed closing brackets/parentheses
for (let i = this.flushDepth - 1; i >= this.lastFlushDepth; i--) {
fragment += this.stack[i] === STACK_OBJECT ? '}' : ']';
}
if (this.lastFlushDepth === 0) {
// That's a root value
this.value = JSON.parse(fragment);
this.valueStack = {
value: this.value,
prev: null
};
} else {
this.parseAndAppend(this.prepareAddition(fragment), true);
}
// Move down to the depths to the last object/array, which is current now
for (let i = this.lastFlushDepth || 1; i < this.flushDepth; i++) {
let value = this.valueStack.value;
if (this.stack[i - 1] === STACK_OBJECT) {
// find last entry
let key;
// eslint-disable-next-line curly
for (key in value);
value = value[key];
} else {
// last element
value = value[value.length - 1];
}
this.valueStack = {
value,
prev: this.valueStack
};
}
} else /* this.flushDepth < this.lastFlushDepth */ {
fragment = this.prepareAddition(fragment);
// Add missed opening brackets/parentheses
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.jsonParseOffset--;
fragment = (this.stack[i] === STACK_OBJECT ? '{' : '[') + fragment;
}
this.parseAndAppend(fragment, false);
for (let i = this.lastFlushDepth - 1; i >= this.flushDepth; i--) {
this.valueStack = this.valueStack.prev;
}
}
this.lastFlushDepth = this.flushDepth;
}
if (entries > 1) {
bytes += entries - 1;
}
stack.delete(value2);
if (space > 0 && entries > 0) {
bytes += (1 + (stack.size + 1) * space + 1) * entries;
bytes += 1 + stack.size * space;
}
visited.set(value2, bytes - valueLength);
}
}
}
}
push(chunk) {
if (typeof chunk !== 'string') {
// Suppose chunk is Buffer or Uint8Array
// Prepend uncompleted byte sequence if any
if (this.pendingByteSeq !== null) {
const origRawChunk = chunk;
chunk = new Uint8Array(this.pendingByteSeq.length + origRawChunk.length);
chunk.set(this.pendingByteSeq);
chunk.set(origRawChunk, this.pendingByteSeq.length);
this.pendingByteSeq = null;
}
// In case Buffer/Uint8Array, an input is encoded in UTF8
// Seek for parts of uncompleted UTF8 symbol on the ending
// This makes sense only if we expect more chunks and last char is not multi-bytes
if (chunk[chunk.length - 1] > 127) {
for (let seqLength = 0; seqLength < chunk.length; seqLength++) {
const byte = chunk[chunk.length - 1 - seqLength];
// 10xxxxxx - 2nd, 3rd or 4th byte
// 110xxxxx – first byte of 2-byte sequence
// 1110xxxx - first byte of 3-byte sequence
// 11110xxx - first byte of 4-byte sequence
if (byte >> 6 === 3) {
seqLength++;
// If the sequence is really incomplete, then preserve it
// for the future chunk and cut off it from the current chunk
if ((seqLength !== 4 && byte >> 3 === 0b11110) ||
(seqLength !== 3 && byte >> 4 === 0b1110) ||
(seqLength !== 2 && byte >> 5 === 0b110)) {
this.pendingByteSeq = chunk.slice(chunk.length - seqLength);
chunk = chunk.slice(0, -seqLength);
}
break;
}
}
}
// Convert chunk to a string, since single decode per chunk
// is much effective than decode multiple small substrings
chunk = decoder.decode(chunk);
}
const chunkLength = chunk.length;
let lastFlushPoint = 0;
let flushPoint = 0;
// Main scan loop
scan: for (let i = 0; i < chunkLength; i++) {
if (this.stateString) {
for (; i < chunkLength; i++) {
if (this.stateStringEscape) {
this.stateStringEscape = false;
} else {
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = false;
continue scan;
case 0x5C: /* \ */
this.stateStringEscape = true;
}
}
}
break;
}
switch (chunk.charCodeAt(i)) {
case 0x22: /* " */
this.stateString = true;
this.stateStringEscape = false;
break;
case 0x2C: /* , */
flushPoint = i;
break;
case 0x7B: /* { */
// Open an object
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_OBJECT;
break;
case 0x5B: /* [ */
// Open an array
flushPoint = i + 1;
this.stack[this.flushDepth++] = STACK_ARRAY;
break;
case 0x5D: /* ] */
case 0x7D: /* } */
// Close an object or array
flushPoint = i + 1;
this.flushDepth--;
if (this.flushDepth < this.lastFlushDepth) {
this.flush(chunk, lastFlushPoint, flushPoint);
lastFlushPoint = flushPoint;
}
break;
case 0x09: /* \t */
case 0x0A: /* \n */
case 0x0D: /* \r */
case 0x20: /* space */
// Move points forward when they points on current position and it's a whitespace
if (lastFlushPoint === i) {
lastFlushPoint++;
}
if (flushPoint === i) {
flushPoint++;
}
break;
}
}
if (flushPoint > lastFlushPoint) {
this.flush(chunk, lastFlushPoint, flushPoint);
}
// Produce pendingChunk if something left
if (flushPoint < chunkLength) {
if (this.pendingChunk !== null) {
// When there is already a pending chunk then no flush happened,
// appending entire chunk to pending one
this.pendingChunk += chunk;
} else {
// Create a pending chunk, it will start with non-whitespace since
// flushPoint was moved forward away from whitespaces on scan
this.pendingChunk = chunk.slice(flushPoint, chunkLength);
}
}
this.chunkOffset += chunkLength;
// src/web-streams.js
function parseFromWebStream(stream) {
    // Parse JSON from a WHATWG ReadableStream, resolving to the reconstructed value.
    // If the stream is already (async) iterable, hand it to parseChunked() directly;
    // otherwise adapt it by pulling chunks through a reader inside an async generator
    // (ReadableStream iterability was added to the Web platform later and is not
    // available in every environment).
    // NOTE(review): the diff-mangled original interleaved a stray `finish()` method
    // fragment here; this body matches the minified 0.6.0 build of this function.
    return parseChunked(isIterable(stream) ? stream : async function* () {
        const reader = stream.getReader();
        while (true) {
            const { value, done } = await reader.read();
            if (done) {
                break;
            }
            yield value;
        }
    });
}
function createStringifyWebStream(value, replacer, space) {
    // Create a WHATWG ReadableStream yielding the JSON chunks produced by
    // stringifyChunked(value, replacer, space).
    // Prefer the native static helper where the platform provides it.
    if (typeof ReadableStream.from === "function") {
        return ReadableStream.from(stringifyChunked(value, replacer, space));
    }
    // Fallback: drive the generator manually through a pull-based underlying source.
    // BUGFIX: in the mangled original, cancel() sat outside this object literal
    // (a syntax error); it belongs on the underlying source, as in the minified build.
    return new ReadableStream({
        start() {
            // Lazily create the generator when the stream starts.
            this.generator = stringifyChunked(value, replacer, space);
        },
        pull(controller) {
            const { value: chunk, done } = this.generator.next();
            if (done) {
                controller.close();
            } else {
                controller.enqueue(chunk);
            }
        },
        cancel() {
            // Drop the generator so no further chunks are produced.
            this.generator = null;
        }
    });
}
return __toCommonJS(src_exports);
})();
var src = {
version: version,
stringifyInfo: stringifyInfo,
stringifyStream: stringifyStreamBrowser,
parseChunked: parseChunked
};
return src;
}));
return exports;
})));

@@ -1,1 +0,14 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).jsonExt=t()}(this,(function(){"use strict";function e(e){return"function"==typeof e.pipe&&"function"==typeof e._read&&"object"==typeof e._readableState&&null!==e._readableState}var t={escapableCharCodeSubstitution:{8:"\\b",9:"\\t",10:"\\n",12:"\\f",13:"\\r",34:'\\"',92:"\\\\"},isLeadingSurrogate:function(e){return e>=55296&&e<=56319},isTrailingSurrogate:function(e){return e>=56320&&e<=57343},type:{PRIMITIVE:1,PROMISE:4,ARRAY:3,OBJECT:2,STRING_STREAM:5,OBJECT_STREAM:6},isReadableStream:e,replaceValue:function(e,t,s,n){switch(s&&"function"==typeof s.toJSON&&(s=s.toJSON()),null!==n&&(s=n.call(e,String(t),s)),typeof s){case"function":case"symbol":s=void 0;break;case"object":if(null!==s){const e=s.constructor;e!==String&&e!==Number&&e!==Boolean||(s=s.valueOf())}}return s},getTypeNative:function(e){return null===e||"object"!=typeof e?1:Array.isArray(e)?3:2},getTypeAsync:function(t){return null===t||"object"!=typeof t?1:"function"==typeof t.then?4:e(t)?t._readableState.objectMode?6:5:Array.isArray(t)?3:2},normalizeReplacer:function(e){return"function"==typeof e?e:Array.isArray(e)?[...new Set(e.map((e=>{const t=e&&e.constructor;return t===String||t===Number?String(e):null})).filter((e=>"string"==typeof e)))]:null},normalizeSpace:function(e){return"number"==typeof e?!(!Number.isFinite(e)||e<1)&&" ".repeat(Math.min(e,10)):"string"==typeof e&&e.slice(0,10)||!1}};const{normalizeReplacer:s,normalizeSpace:n,replaceValue:i,getTypeNative:r,getTypeAsync:a,isLeadingSurrogate:l,isTrailingSurrogate:h,escapableCharCodeSubstitution:u,type:{PRIMITIVE:o,OBJECT:c,ARRAY:f,PROMISE:p,STRING_STREAM:d,OBJECT_STREAM:g}}=t,y=Array.from({length:2048}).map(((e,t)=>u.hasOwnProperty(t)?2:t<32?6:t<128?1:2));function S(e){let t=0,s=!1;for(let n=0;n<e.length;n++){const 
i=e.charCodeAt(n);if(i<2048)t+=y[i];else{if(l(i)){t+=6,s=!0;continue}h(i)?t=s?t-2:t+6:t+=3}s=!1}return t+2}var b=TextDecoder;const{isReadableStream:k}=t,A=new b;function v(e){return null!==e&&"object"==typeof e}function m(e,t){return"SyntaxError"===e.name&&t.jsonParseOffset&&(e.message=e.message.replace(/at position (\d+)/,((e,s)=>"at position "+(Number(s)+t.jsonParseOffset)))),e}class O{constructor(){this.value=void 0,this.valueStack=null,this.stack=new Array(100),this.lastFlushDepth=0,this.flushDepth=0,this.stateString=!1,this.stateStringEscape=!1,this.pendingByteSeq=null,this.pendingChunk=null,this.chunkOffset=0,this.jsonParseOffset=0}parseAndAppend(e,t){1===this.stack[this.lastFlushDepth-1]?(t&&(this.jsonParseOffset--,e="{"+e+"}"),Object.assign(this.valueStack.value,JSON.parse(e))):(t&&(this.jsonParseOffset--,e="["+e+"]"),function(e,t){const s=e.length;e.length+=t.length;for(let n=0;n<t.length;n++)e[s+n]=t[n]}(this.valueStack.value,JSON.parse(e)))}prepareAddition(e){const{value:t}=this.valueStack;if(Array.isArray(t)?0!==t.length:0!==Object.keys(t).length){if(","===e[0])return this.jsonParseOffset++,e.slice(1);if("}"!==e[0]&&"]"!==e[0])return this.jsonParseOffset-=3,"[[]"+e}return e}flush(e,t,s){let n=e.slice(t,s);if(this.jsonParseOffset=this.chunkOffset+t,null!==this.pendingChunk&&(n=this.pendingChunk+n,this.jsonParseOffset-=this.pendingChunk.length,this.pendingChunk=null),this.flushDepth===this.lastFlushDepth)this.flushDepth>0?this.parseAndAppend(this.prepareAddition(n),!0):(this.value=JSON.parse(n),this.valueStack={value:this.value,prev:null});else if(this.flushDepth>this.lastFlushDepth){for(let e=this.flushDepth-1;e>=this.lastFlushDepth;e--)n+=1===this.stack[e]?"}":"]";0===this.lastFlushDepth?(this.value=JSON.parse(n),this.valueStack={value:this.value,prev:null}):this.parseAndAppend(this.prepareAddition(n),!0);for(let e=this.lastFlushDepth||1;e<this.flushDepth;e++){let t=this.valueStack.value;if(1===this.stack[e-1]){let e;for(e in t);t=t[e]}else 
t=t[t.length-1];this.valueStack={value:t,prev:this.valueStack}}}else{n=this.prepareAddition(n);for(let e=this.lastFlushDepth-1;e>=this.flushDepth;e--)this.jsonParseOffset--,n=(1===this.stack[e]?"{":"[")+n;this.parseAndAppend(n,!1);for(let e=this.lastFlushDepth-1;e>=this.flushDepth;e--)this.valueStack=this.valueStack.prev}this.lastFlushDepth=this.flushDepth}push(e){if("string"!=typeof e){if(null!==this.pendingByteSeq){const t=e;(e=new Uint8Array(this.pendingByteSeq.length+t.length)).set(this.pendingByteSeq),e.set(t,this.pendingByteSeq.length),this.pendingByteSeq=null}if(e[e.length-1]>127)for(let t=0;t<e.length;t++){const s=e[e.length-1-t];if(s>>6==3){t++,(4!==t&&s>>3==30||3!==t&&s>>4==14||2!==t&&s>>5==6)&&(this.pendingByteSeq=e.slice(e.length-t),e=e.slice(0,-t));break}}e=A.decode(e)}const t=e.length;let s=0,n=0;e:for(let i=0;i<t;i++){if(this.stateString){for(;i<t;i++)if(this.stateStringEscape)this.stateStringEscape=!1;else switch(e.charCodeAt(i)){case 34:this.stateString=!1;continue e;case 92:this.stateStringEscape=!0}break}switch(e.charCodeAt(i)){case 34:this.stateString=!0,this.stateStringEscape=!1;break;case 44:n=i;break;case 123:n=i+1,this.stack[this.flushDepth++]=1;break;case 91:n=i+1,this.stack[this.flushDepth++]=2;break;case 93:case 125:n=i+1,this.flushDepth--,this.flushDepth<this.lastFlushDepth&&(this.flush(e,s,n),s=n);break;case 9:case 10:case 13:case 32:s===i&&s++,n===i&&n++}}n>s&&this.flush(e,s,n),n<t&&(null!==this.pendingChunk?this.pendingChunk+=e:this.pendingChunk=e.slice(n,t)),this.chunkOffset+=t}finish(){return null!==this.pendingChunk&&(this.flush("",0,0),this.pendingChunk=null),this.value}}return{version:"0.5.7",stringifyInfo:function(e,t,l,h){let u=null;t=s(t),Array.isArray(t)&&(u=new Set(t),t=null),l=function(e){return"string"==typeof(e=n(e))?e.length:0}(l),h=h||{};const y=new Map,b=new Set,k=new Set,A=new Set,v=new Set,m=h.async?a:r,O={"":e};let w=!1,D=0;return function e(s,n,r){if(w)return;r=i(s,n,r,t);let a=m(r);if(a!==o&&b.has(r))return 
A.add(r),D+=4,void(h.continueOnCircular||(w=!0));switch(a){case o:void 0!==r||Array.isArray(s)?D+=function(e){switch(typeof e){case"string":return S(e);case"number":return Number.isFinite(e)?String(e).length:4;case"boolean":return e?4:5;case"undefined":case"object":return 4;default:return 0}}(r):s===O&&(D+=9);break;case c:{if(y.has(r)){k.add(r),D+=y.get(r);break}const t=D;let s=0;D+=2,b.add(r);for(const t in r)if(hasOwnProperty.call(r,t)&&(null===u||u.has(t))){const n=D;e(r,t,r[t]),n!==D&&(D+=S(t)+1,s++)}s>1&&(D+=s-1),b.delete(r),l>0&&s>0&&(D+=(1+(b.size+1)*l+1)*s,D+=1+b.size*l),y.set(r,D-t);break}case f:{if(y.has(r)){k.add(r),D+=y.get(r);break}const t=D;D+=2,b.add(r);for(let t=0;t<r.length;t++)e(r,t,r[t]);r.length>1&&(D+=r.length-1),b.delete(r),l>0&&r.length>0&&(D+=(1+(b.size+1)*l)*r.length,D+=1+b.size*l),y.set(r,D-t);break}case p:case d:v.add(r);break;case g:D+=2,v.add(r)}}(O,"",e),{minLength:isNaN(D)?1/0:D,circular:[...A],duplicate:[...k],async:[...v]}},stringifyStream:()=>{throw new Error("Method is not supported")},parseChunked:function(e){let t=new O;if(v(e)&&k(e))return new Promise(((s,n)=>{e.on("data",(e=>{try{t.push(e)}catch(e){n(m(e,t)),t=null}})).on("error",(e=>{t=null,n(e)})).on("end",(()=>{try{s(t.finish())}catch(e){n(m(e,t))}finally{t=null}}))}));if("function"==typeof e){const s=e();if(v(s)&&(Symbol.iterator in s||Symbol.asyncIterator in s))return new Promise((async(e,n)=>{try{for await(const e of s)t.push(e);e(t.finish())}catch(e){n(m(e,t))}finally{t=null}}))}throw new Error("Chunk emitter should be readable stream, generator, async generator or function returning an iterable object")}}}));
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.jsonExt = factory());
}(typeof globalThis != 'undefined' ? globalThis : typeof window != 'undefined' ? window : typeof global != 'undefined' ? global : typeof self != 'undefined' ? self : this, (function () {
var exports=(()=>{var L=Object.defineProperty;var M=Object.getOwnPropertyDescriptor;var _=Object.getOwnPropertyNames;var K=Object.prototype.hasOwnProperty;var U=(e,t)=>{for(var n in t)L(e,n,{get:t[n],enumerable:!0})},Y=(e,t,n,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let i of _(t))!K.call(e,i)&&i!==n&&L(e,i,{get:()=>t[i],enumerable:!(r=M(t,i))||r.enumerable});return e};var G=e=>Y(L({},"__esModule",{value:!0}),e);var st={};U(st,{createStringifyWebStream:()=>V,parseChunked:()=>B,parseFromWebStream:()=>$,stringifyChunked:()=>w,stringifyInfo:()=>W});function j(e){return typeof e=="object"&&e!==null&&(typeof e[Symbol.iterator]=="function"||typeof e[Symbol.asyncIterator]=="function")}function O(e,t,n,r){switch(n&&typeof n.toJSON=="function"&&(n=n.toJSON()),r!==null&&(n=r.call(e,String(t),n)),typeof n){case"function":case"symbol":n=void 0;break;case"object":if(n!==null){let i=n.constructor;(i===String||i===Number||i===Boolean)&&(n=n.valueOf())}break}return n}function F(e){return typeof e=="function"?e:Array.isArray(e)?[...new Set(e.map(n=>{let r=n&&n.constructor;return r===String||r===Number?String(n):null}).filter(n=>typeof n=="string"))]:null}function N(e){return typeof e=="number"?!Number.isFinite(e)||e<1?!1:" ".repeat(Math.min(e,10)):typeof e=="string"&&e.slice(0,10)||!1}var k=1,H=2,Q=new TextDecoder;function X(e,t){return e.name==="SyntaxError"&&t.jsonParseOffset&&(e.message=e.message.replace(/at position (\d+)/,(n,r)=>"at position "+(Number(r)+t.jsonParseOffset))),e}function Z(e,t){let n=e.length;e.length+=t.length;for(let r=0;r<t.length;r++)e[n+r]=t[r]}async function B(e){let t=typeof e=="function"?e():e;if(j(t)){let n=new z;try{for await(let r of t){if(typeof r!="string"&&!ArrayBuffer.isView(r))throw new TypeError("Invalid chunk: Expected string, TypedArray or Buffer");n.push(r)}return n.finish()}catch(r){throw X(r,n)}}throw new TypeError("Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, async generator, or a function returning 
an Iterable or AsyncIterable")}var z=class{constructor(){this.value=void 0,this.valueStack=null,this.stack=new Array(100),this.lastFlushDepth=0,this.flushDepth=0,this.stateString=!1,this.stateStringEscape=!1,this.pendingByteSeq=null,this.pendingChunk=null,this.chunkOffset=0,this.jsonParseOffset=0}parseAndAppend(t,n){this.stack[this.lastFlushDepth-1]===k?(n&&(this.jsonParseOffset--,t="{"+t+"}"),Object.assign(this.valueStack.value,JSON.parse(t))):(n&&(this.jsonParseOffset--,t="["+t+"]"),Z(this.valueStack.value,JSON.parse(t)))}prepareAddition(t){let{value:n}=this.valueStack;if(Array.isArray(n)?n.length!==0:Object.keys(n).length!==0){if(t[0]===",")return this.jsonParseOffset++,t.slice(1);if(t[0]!=="}"&&t[0]!=="]")return this.jsonParseOffset-=3,"[[]"+t}return t}flush(t,n,r){let i=t.slice(n,r);if(this.jsonParseOffset=this.chunkOffset+n,this.pendingChunk!==null&&(i=this.pendingChunk+i,this.jsonParseOffset-=this.pendingChunk.length,this.pendingChunk=null),this.flushDepth===this.lastFlushDepth)this.flushDepth>0?this.parseAndAppend(this.prepareAddition(i),!0):(this.value=JSON.parse(i),this.valueStack={value:this.value,prev:null});else if(this.flushDepth>this.lastFlushDepth){for(let s=this.flushDepth-1;s>=this.lastFlushDepth;s--)i+=this.stack[s]===k?"}":"]";this.lastFlushDepth===0?(this.value=JSON.parse(i),this.valueStack={value:this.value,prev:null}):this.parseAndAppend(this.prepareAddition(i),!0);for(let s=this.lastFlushDepth||1;s<this.flushDepth;s++){let o=this.valueStack.value;if(this.stack[s-1]===k){let h;for(h in o);o=o[h]}else o=o[o.length-1];this.valueStack={value:o,prev:this.valueStack}}}else{i=this.prepareAddition(i);for(let s=this.lastFlushDepth-1;s>=this.flushDepth;s--)this.jsonParseOffset--,i=(this.stack[s]===k?"{":"[")+i;this.parseAndAppend(i,!1);for(let s=this.lastFlushDepth-1;s>=this.flushDepth;s--)this.valueStack=this.valueStack.prev}this.lastFlushDepth=this.flushDepth}push(t){if(typeof t!="string"){if(this.pendingByteSeq!==null){let s=t;t=new 
Uint8Array(this.pendingByteSeq.length+s.length),t.set(this.pendingByteSeq),t.set(s,this.pendingByteSeq.length),this.pendingByteSeq=null}if(t[t.length-1]>127)for(let s=0;s<t.length;s++){let o=t[t.length-1-s];if(o>>6===3){s++,(s!==4&&o>>3===30||s!==3&&o>>4===14||s!==2&&o>>5===6)&&(this.pendingByteSeq=t.slice(t.length-s),t=t.slice(0,-s));break}}t=Q.decode(t)}let n=t.length,r=0,i=0;t:for(let s=0;s<n;s++){if(this.stateString){for(;s<n;s++)if(this.stateStringEscape)this.stateStringEscape=!1;else switch(t.charCodeAt(s)){case 34:this.stateString=!1;continue t;case 92:this.stateStringEscape=!0}break}switch(t.charCodeAt(s)){case 34:this.stateString=!0,this.stateStringEscape=!1;break;case 44:i=s;break;case 123:i=s+1,this.stack[this.flushDepth++]=k;break;case 91:i=s+1,this.stack[this.flushDepth++]=H;break;case 93:case 125:i=s+1,this.flushDepth--,this.flushDepth<this.lastFlushDepth&&(this.flush(t,r,i),r=i);break;case 9:case 10:case 13:case 32:r===s&&r++,i===s&&i++;break}}i>r&&this.flush(t,r,i),i<n&&(this.pendingChunk!==null?this.pendingChunk+=t:this.pendingChunk=t.slice(i,n)),this.chunkOffset+=n}finish(){return this.pendingChunk!==null&&(this.flush("",0,0),this.pendingChunk=null),this.value}};function T(e){return/[^\x20\x21\x23-\x5B\x5D-\uD799]/.test(e)?JSON.stringify(e):'"'+e+'"'}function*w(e,t,n){(t===null||Array.isArray(t)||typeof t!="object")&&(t={replacer:t,space:n});let r=Number(t.highWaterMark)||16384,i=F(t.replacer);n=N(t.space);let s="",o=0,h=null,g=!1,S=new WeakSet,b=!1,a=Object.keys;if(Array.isArray(i)){let f=i;a=()=>f,i=null}for(P(m,e,null);h!==null;){for(b=!0;h!==null&&!h.awaiting&&(h.handler(),!!b););b=!1,yield s,s=""}function m(){let{value:f}=h;E(),c({"":f},"",f,()=>{})}function x(f){g===!1?g=!0:u(","),u(n?`
${n.repeat(o)}${T(f)}: `:T(f)+":")}function D(){let f=h;if(f.index===f.keys.length){u(n&&g?`
${n.repeat(o-1)}}`:"}"),E();return}let d=f.keys[f.index];c(f.value,d,f.value[d],x),f.index++}function l(f){f!==0&&u(","),n&&u(`
${n.repeat(o)}`)}function A(){let f=h;if(f.index===f.value.length){n&&f.index!==0?u(`
${n.repeat(o-1)}]`):u("]"),E();return}c(f.value,f.index,f.value[f.index],l),f.index++}function c(f,d,p,C){p=O(f,d,p,i),p===null||typeof p!="object"?(C!==x||p!==void 0)&&(C(d),I(p)):Array.isArray(p)?(C(d),y(p),o++,u("["),P(A,p,null)):(C(d),y(p),o++,u("{"),P(D,p,a(p)))}function y(f){if(S.has(f))throw new TypeError("Converting circular structure to JSON");S.add(f)}function I(f){switch(typeof f){case"string":u(T(f));break;case"number":u(Number.isFinite(f)?f:"null");break;case"boolean":u(f?"true":"false");break;case"undefined":case"object":u("null");break;default:throw new TypeError(`Do not know how to serialize a ${f.constructor?.name||typeof f}`)}}function P(f,d,p){return g=!1,h={handler:f,value:d,index:0,keys:p,prev:h}}function E(){let{handler:f,value:d}=h;(f===D||f===A)&&(S.delete(d),o--),h=h.prev,g=!0}function u(f){s+=f,b=s.length<r}}var q=typeof Object.hasOwn=="function"?Object.hasOwn:(e,t)=>Object.hasOwnProperty.call(e,t),R={8:"\\b",9:"\\t",10:"\\n",12:"\\f",13:"\\r",34:'\\"',92:"\\\\"},v=Array.from({length:2048}).map((e,t)=>q(R,t)?2:t<32?6:t<128?1:2);function tt(e){return e>=55296&&e<=56319}function et(e){return e>=56320&&e<=57343}function J(e){let t=0,n=!1;for(let r=0;r<e.length;r++){let i=e.charCodeAt(r);if(i<2048)t+=v[i];else if(tt(i)){t+=6,n=!0;continue}else et(i)?t=n?t-2:t+6:t+=3;n=!1}return t+2}function nt(e){switch(typeof e){case"string":return J(e);case"number":return Number.isFinite(e)?String(e).length:4;case"boolean":return e?4:5;case"undefined":case"object":return 4;default:return 0}}function it(e){return e=N(e),typeof e=="string"?e.length:0}function W(e,t,n){(t===null||Array.isArray(t)||typeof t!="object")&&(t={replacer:t,space:n});let r=null,i=F(t.replacer),s=!!t.continueOnCircular;Array.isArray(i)&&(r=new Set(i),i=null),n=it(n);let o=new WeakMap,h=new Set,g=new Set,S={"":e},b=!1,a=0;return m(S,"",e),{bytes:isNaN(a)?1/0:a,circular:[...g]};function m(x,D,l){if(!b)if(l=O(x,D,l,i),l===null||typeof l!="object")l!==void 
0||Array.isArray(x)?a+=nt(l):x===S&&(a+=9);else{if(h.has(l)){g.add(l),a+=4,s||(b=!0);return}if(o.has(l)){a+=o.get(l);return}if(Array.isArray(l)){let A=a;a+=2,h.add(l);for(let c=0;c<l.length;c++)m(l,c,l[c]);l.length>1&&(a+=l.length-1),h.delete(l),n>0&&l.length>0&&(a+=(1+(h.size+1)*n)*l.length,a+=1+h.size*n),o.set(l,a-A)}else{let A=a,c=0;a+=2,h.add(l);for(let y in l)if(q(l,y)&&(r===null||r.has(y))){let I=a;m(l,y,l[y]),I!==a&&(a+=J(y)+1,c++)}c>1&&(a+=c-1),h.delete(l),n>0&&c>0&&(a+=(1+(h.size+1)*n+1)*c,a+=1+h.size*n),o.set(l,a-A)}}}}function $(e){return B(j(e)?e:async function*(){let t=e.getReader();for(;;){let{value:n,done:r}=await t.read();if(r)break;yield n}})}function V(e,t,n){return typeof ReadableStream.from=="function"?ReadableStream.from(w(e,t,n)):new ReadableStream({start(){this.generator=w(e,t,n)},pull(r){let{value:i,done:s}=this.generator.next();s?r.close():r.enqueue(i)},cancel(){this.generator=null}})}return G(st);})();
return exports;
})));
//# sourceMappingURL=json-ext.min.js.map
declare module '@discoveryjs/json-ext' {
import { Readable } from 'stream';
type TReplacer =
type Chunk = string | Uint8Array | Buffer;
type Replacer =
| ((this: any, key: string, value: any) => any)
| string[]
| number[]
| (string | number)[]
| null;
type TSpace = string | number | null;
type TChunk = string | Buffer | Uint8Array;
type Space = string | number | null;
type StringifyOptions = {
replacer?: Replacer;
space?: Space;
highWaterMark?: number;
};
type StringifyInfoOptions = {
replacer?: Replacer;
space?: Space;
continueOnCircular?: boolean;
}
type StringifyInfoResult = {
bytes: number;
circular: Object[];
};
export function parseChunked(input: Readable): Promise<any>;
export function parseChunked(input: () => (Iterable<TChunk> | AsyncIterable<TChunk>)): Promise<any>;
export function parseChunked(input: Iterable<Chunk> | AsyncIterable<Chunk>): Promise<any>;
export function parseChunked(input: () => (Iterable<Chunk> | AsyncIterable<Chunk>)): Promise<any>;
export function stringifyStream(value: any, replacer?: TReplacer, space?: TSpace): Readable;
export function stringifyChunked(value: any, replacer?: Replacer, space?: Space): Generator<string>;
export function stringifyChunked(value: any, options: StringifyOptions): Generator<string>;
export function stringifyInfo(
value: any,
replacer?: TReplacer,
space?: TSpace,
options?: {
async?: boolean;
continueOnCircular?: boolean;
}
): {
minLength: number;
circular: any[];
duplicate: any[];
async: any[];
};
export function stringifyInfo(value: any, replacer?: Replacer, space?: Space): StringifyInfoResult;
export function stringifyInfo(value: any, options?: StringifyInfoOptions): StringifyInfoResult;
// Web streams
export function parseFromWebStream(stream: ReadableStream<Chunk>): Promise<any>;
export function createStringifyWebStream(value: any, replacer?: Replacer, space?: Space): ReadableStream<string>;
export function createStringifyWebStream(value: any, options: StringifyOptions): ReadableStream<string>;
}
{
"name": "@discoveryjs/json-ext",
"version": "0.6.0",
"description": "A set of utilities that extend the use of JSON",

@@ -16,42 +16,54 @@ "keywords": [

"license": "MIT",
"repository": "discoveryjs/json-ext",
"main": "./src/index",
"browser": {
"./src/stringify-stream.js": "./src/stringify-stream-browser.js",
"./src/text-decoder.js": "./src/text-decoder-browser.js",
"./src/version.js": "./dist/version.js"
"repository": {
"type": "git",
"url": "git+https://github.com/discoveryjs/json-ext.git"
},
"engines": {
"node": ">=14.17.0"
},
"type": "module",
"main": "./cjs/index.cjs",
"module": "./src/index.js",
"types": "./index.d.ts",
"exports": {
".": {
"require": "./cjs/index.cjs",
"import": "./src/index.js",
"types": "./index.d.ts"
},
"./dist/*": "./dist/*",
"./package.json": "./package.json"
},
"scripts": {
"test": "npm run test:src",
"lint": "eslint src",
"lint-and-test": "npm run lint && npm test",
"bundle": "node scripts/bundle.js",
"transpile": "node scripts/transpile.cjs",
"test:all": "npm run test:src && npm run test:cjs && npm run test:dist && npm run test:e2e",
"test:src": "mocha --reporter progress src/*.test.js",
"test:cjs": "mocha --reporter progress cjs/*.test.cjs",
"test:e2e": "mocha --reporter progress test-e2e",
"test:dist": "mocha --reporter progress dist/test",
"test:deno": "node scripts/deno-adapt-test.js && mocha --reporter progress deno-tests/*.test.js",
"bundle-and-test": "npm run bundle && npm run test:dist",
"coverage": "c8 --reporter=lcovonly npm test",
"prepublishOnly": "npm run lint && npm run bundle && npm run transpile && npm run test:all"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^15.1.0",
"@rollup/plugin-json": "^4.1.0",
"@rollup/plugin-node-resolve": "^9.0.0",
"c8": "^7.10.0",
"chalk": "^4.1.0",
"cross-env": "^7.0.3",
"eslint": "^8.10.0",
"mocha": "^8.4.0",
"rollup": "^2.28.2",
"rollup-plugin-terser": "^7.0.2"
"esbuild": "^0.21.5",
"eslint": "^8.57.0",
"mocha": "^9.2.2",
"rollup": "^2.67.3"
},
"engines": {
"node": ">=10.0.0"
},
"files": [
"cjs",
"!cjs/*{.test,-cases}.cjs",
"dist",
"src",
"!src/*{.test,-cases}.js",
"index.d.ts"
]
}

@@ -5,16 +5,28 @@ # json-ext

[![Build Status](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml/badge.svg)](https://github.com/discoveryjs/json-ext/actions/workflows/ci.yml)
[![Coverage Status](https://coveralls.io/repos/github/discoveryjs/json-ext/badge.svg?branch=master)](https://coveralls.io/github/discoveryjs/json-ext)
[![NPM Downloads](https://img.shields.io/npm/dm/@discoveryjs/json-ext.svg)](https://www.npmjs.com/package/@discoveryjs/json-ext)
A set of utilities that extend the use of JSON. Designed to be fast and memory efficient
A set of utilities that extend the use of JSON:
- [parseChunked()](#parsechunked) – functions like [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) but iterates over chunks, reconstructing the result object.
- [stringifyChunked()](#stringifychunked) – functions like [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns a generator yielding strings instead of a single string.
- [stringifyInfo()](#stringifyinfo) – returns an object with the expected overall size of the stringify operation and any circular references.
- [parseFromWebStream()](#parsefromwebstream) – a helper function to consume chunks from a Web Stream.
- [createStringifyWebStream()](#createstringifywebstream) – a helper to create a Web Stream.
Features:
- [x] `parseChunked()` – Parse JSON that comes by chunks (e.g. FS readable stream or fetch response stream)
- [x] `stringifyStream()` – Stringify stream (Node.js)
- [x] `stringifyInfo()` – Get estimated size and other facts of JSON.stringify() without converting a value to string
- [ ] **TBD** Support for circular references
- [ ] **TBD** Binary representation [branch](https://github.com/discoveryjs/json-ext/tree/binary)
- [ ] **TBD** WHATWG [Streams](https://streams.spec.whatwg.org/) support
- Fast and memory-efficient
- Compatible with browsers, Node.js, Deno, Bun
- Supports Node.js and Web streams
- Dual package: ESM and CommonJS
- No dependencies
- Size: 9.4Kb (minified), 3.6Kb (min+gzip)
## Why?
- Prevents main thread freezing during large JSON parsing by distributing the process over time.
- Handles large JSON processing (e.g., V8 has a limitation for strings ~500MB, making JSON larger than 500MB unmanageable).
- Reduces memory pressure. `JSON.parse()` and `JSON.stringify()` require the entire JSON content before processing. `parseChunked()` and `stringifyChunked()` allow processing and sending data incrementally, avoiding large memory consumption at a single time point and reducing GC pressure.
## Install

@@ -28,21 +40,14 @@

- [parseChunked(chunkEmitter)](#parsechunkedchunkemitter)
- [stringifyStream(value[, replacer[, space]])](#stringifystreamvalue-replacer-space)
- [stringifyInfo(value[, replacer[, space[, options]]])](#stringifyinfovalue-replacer-space-options)
- [Options](#options)
- [async](#async)
- [continueOnCircular](#continueoncircular)
- [version](#version)
### parseChunked()
### parseChunked(chunkEmitter)
Functions like [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse), iterating over chunks to reconstruct the result object, and returns a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
Works the same as [`JSON.parse()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) but takes `chunkEmitter` instead of string and returns [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise).
> Note: `reviver` parameter is not supported yet.
> NOTE: `reviver` parameter is not supported yet, but will be added in next releases.
> NOTE: WHATWG streams aren't supported yet
```ts
function parseChunked(input: Iterable<Chunk> | AsyncIterable<Chunk>): Promise<any>;
function parseChunked(input: () => (Iterable<Chunk> | AsyncIterable<Chunk>)): Promise<any>;
When to use:
- It's required to avoid freezing the main thread during big JSON parsing, since this process can be distributed in time
- Huge JSON needs to be parsed (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.parse()` needs to receive the entire JSON before parsing it. With `parseChunked()` you may parse JSON as first bytes of it comes. This approach helps to avoid storing a huge string in the memory at a single time point and following GC.
type Chunk = string | Buffer | Uint8Array;
```

@@ -54,87 +59,70 @@ [Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#parse-chunked)

```js
const { parseChunked } = require('@discoveryjs/json-ext');
import { parseChunked } from '@discoveryjs/json-ext';
// as a regular Promise
parseChunked(chunkEmitter)
.then(data => {
/* data is parsed JSON */
});
// using await (keep in mind that not every runtime has a support for top level await)
const data = await parseChunked(chunkEmitter);
```
Parameter `chunkEmitter` can be:
- [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) (Node.js only)
```js
const fs = require('fs');
const { parseChunked } = require('@discoveryjs/json-ext');
Parameter `chunkEmitter` can be an iterable or async iterable that iterates over chunks, or a function returning such a value. A chunk can be a `string`, `Uint8Array`, or Node.js `Buffer`.
parseChunked(fs.createReadStream('path/to/file.json'))
```
- Generator, async generator or function that returns iterable (chunks). Chunk might be a `string`, `Uint8Array` or `Buffer` (Node.js only):
```js
const { parseChunked } = require('@discoveryjs/json-ext');
const encoder = new TextEncoder();
Examples:
// generator
parseChunked(function*() {
yield '{ "hello":';
yield Buffer.from(' "wor'); // Node.js only
yield encoder.encode('ld" }'); // returns Uint8Array(5) [ 108, 100, 34, 32, 125 ]
});
// async generator
parseChunked(async function*() {
for await (const chunk of someAsyncSource) {
yield chunk;
}
});
// function that returns iterable
parseChunked(() => ['{ "hello":', ' "world"}'])
```
Using with [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API):
```js
async function loadData(url) {
const response = await fetch(url);
const reader = response.body.getReader();
return parseChunked(async function*() {
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
yield value;
- Generator:
```js
parseChunked(function*() {
yield '{ "hello":';
yield Buffer.from(' "wor'); // Node.js only
yield new TextEncoder().encode('ld" }'); // returns Uint8Array
});
```
- Async generator:
```js
parseChunked(async function*() {
for await (const chunk of someAsyncSource) {
yield chunk;
}
});
}
```
- Array:
```js
parseChunked(['{ "hello":', ' "world"}'])
```
- Function returning iterable:
```js
parseChunked(() => ['{ "hello":', ' "world"}'])
```
- Node.js [`Readable`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) stream:
```js
import { parseChunked } from '@discoveryjs/json-ext';
import fs from 'node:fs';
loadData('https://example.com/data.json')
.then(data => {
/* data is parsed JSON */
})
```
parseChunked(fs.createReadStream('path/to/file.json'))
```
- Web stream (e.g., using [fetch()](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)):
> Note: Iterability for Web streams was added later in the Web platform, not all environments support it. Consider using `parseFromWebStream()` for broader compatibility.
```js
const response = await fetch('https://example.com/data.json');
const data = await parseChunked(response.body); // body is ReadableStream
```
### stringifyStream(value[, replacer[, space]])
### stringifyChunked()
Works the same as [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an instance of [`ReadableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_readable_streams) instead of string.
Functions like [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns a generator yielding strings instead of a single string.
> NOTE: WHATWG Streams aren't supported yet, so function available for Node.js only for now
> Note: Returns `"null"` when `JSON.stringify()` returns `undefined` (since a chunk cannot be `undefined`).
Departs from JSON.stringify():
- Outputs `null` when `JSON.stringify()` returns `undefined` (since streams may not emit `undefined`)
- A promise is resolving and the resulting value is stringifying as a regular one
- A stream in non-object mode is piping to output as is
- A stream in object mode is piping to output as an array of objects
```ts
function stringifyChunked(value: any, replacer?: Replacer, space?: Space): Generator<string, void, unknown>;
function stringifyChunked(value: any, options: StringifyOptions): Generator<string, void, unknown>;
When to use:
- Huge JSON needs to be generated (e.g. >500MB on Node.js)
- Needed to reduce memory pressure. `JSON.stringify()` needs to generate the entire JSON before send or write it to somewhere. With `stringifyStream()` you may send a result to somewhere as first bytes of the result appears. This approach helps to avoid storing a huge string in the memory at a single time point.
- The object being serialized contains Promises or Streams (see Usage for examples)
type Replacer =
| ((this: any, key: string, value: any) => any)
| (string | number)[]
| null;
type Space = string | number | null;
type StringifyOptions = {
replacer?: Replacer;
space?: Space;
highWaterMark?: number;
};
```

@@ -146,91 +134,90 @@ [Benchmark](https://github.com/discoveryjs/json-ext/tree/master/benchmarks#stream-stringifying)

```js
const { stringifyStream } = require('@discoveryjs/json-ext');
import { stringifyStream } from '@discoveryjs/json-ext';
// handle events
stringifyStream(data)
.on('data', chunk => console.log(chunk))
    .on('error', error => console.error(error))
.on('finish', () => console.log('DONE!'));
// pipe into a stream
stringifyStream(data)
.pipe(writableStream);
const chunks = [...stringifyChunked(data)];
// or
for (const chunk of stringifyChunked(data)) {
console.log(chunk);
}
```
Using Promise or ReadableStream in serializing object:
Examples:
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
- Streaming into a file (Node.js):
```js
Readable.from(stringifyChunked(data))
.pipe(fs.createWriteStream('path/to/file.json'));
```
- Wrapping into a `Promise` for piping into a writable Node.js stream:
```js
new Promise((resolve, reject) => {
Readable.from(stringifyChunked(data))
.on('error', reject)
.pipe(stream)
.on('error', reject)
.on('finish', resolve);
});
```
- Using with fetch (JSON streaming):
> Note: This feature has limited support in browsers, see [Streaming requests with the fetch API](https://developer.chrome.com/docs/capabilities/web-apis/fetch-streaming-requests)
// output will be
// {"name":"example","willSerializeResolvedValue":42,"fromFile":[1, 2, 3],"at":{"any":{"level":"promise!"}}}
stringifyStream({
name: 'example',
willSerializeResolvedValue: Promise.resolve(42),
    fromFile: fs.createReadStream('path/to/file.json'), // suppose file content is "[1, 2, 3]", it'll be inserted as is
at: {
any: {
level: new Promise(resolve => setTimeout(() => resolve('promise!'), 100))
> Note: `ReadableStream.from()` has limited [support in browsers](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static), use [`createStringifyWebStream()`](#createstringifywebstream) instead.
```js
fetch('http://example.com', {
method: 'POST',
duplex: 'half',
body: ReadableStream.from(stringifyChunked(data))
});
```
- Wrapping into `ReadableStream`:
> Note: Use `ReadableStream.from()` or [`createStringifyWebStream()`](#createstringifywebstream) when no extra logic is needed
```js
new ReadableStream({
start() {
this.generator = stringifyChunked(data);
},
pull(controller) {
const { value, done } = this.generator.next();
if (done) {
controller.close();
} else {
controller.enqueue(value);
}
},
cancel() {
this.generator = null;
}
}
})
});
```
// in case several async requests are used in an object, it's preferred
// to put the fastest requests first, because in this case
stringifyStream({
foo: fetch('http://example.com/request_takes_2s').then(req => req.json()),
bar: fetch('http://example.com/request_takes_5s').then(req => req.json())
});
```
### stringifyInfo()
Using with [`WritableStream`](https://nodejs.org/dist/latest-v14.x/docs/api/stream.html#stream_writable_streams) (Node.js only):
```ts
export function stringifyInfo(value: any, replacer?: Replacer, space?: Space): StringifyInfoResult;
export function stringifyInfo(value: any, options?: StringifyInfoOptions): StringifyInfoResult;
```js
const fs = require('fs');
const { stringifyStream } = require('@discoveryjs/json-ext');
// pipe into a console
stringifyStream(data)
.pipe(process.stdout);
// pipe into a file
stringifyStream(data)
.pipe(fs.createWriteStream('path/to/file.json'));
// wrapping into a Promise
new Promise((resolve, reject) => {
stringifyStream(data)
.on('error', reject)
.pipe(stream)
.on('error', reject)
.on('finish', resolve);
});
type StringifyInfoOptions = {
replacer?: Replacer;
space?: Space;
continueOnCircular?: boolean;
}
type StringifyInfoResult = {
minLength: number;
circular: Object[]; // list of circular references
};
```
### stringifyInfo(value[, replacer[, space[, options]]])
Functions like [`JSON.stringify()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify), but returns an object with the expected overall size of the stringify operation and a list of circular references.
`value`, `replacer` and `space` arguments are the same as for `JSON.stringify()`.
Result is an object:
```js
{
    minLength: Number, // minimal bytes when the value is stringified
circular: [...], // list of circular references
duplicate: [...], // list of objects that occur more than once
async: [...] // list of async values, i.e. promises and streams
}
```
Example:
```js
const { stringifyInfo } = require('@discoveryjs/json-ext');
import { stringifyInfo } from '@discoveryjs/json-ext';
console.log(
stringifyInfo({ test: true }).minLength
);
// > 13
// that equals '{"test":true}'.length
console.log(stringifyInfo({ test: true }));
// {
// bytes: 13, // Buffer.byteLength('{"test":true}')
// circular: []
// }
```

@@ -240,3 +227,3 @@

##### async
##### continueOnCircular

@@ -246,17 +233,30 @@ Type: `Boolean`

Collect async values (promises and streams) or not.
Determines whether to continue collecting info for a value when a circular reference is found. Setting this option to `true` allows finding all circular references.
##### continueOnCircular
### parseFromWebStream()
Type: `Boolean`
Default: `false`
A helper function to consume JSON from a Web Stream. You can use `parseChunked(stream)` instead, but `@@asyncIterator` on `ReadableStream` has limited support in browsers (see [ReadableStream](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) compatibility table).
Determines whether to stop collecting info for a value when a circular reference is found. Setting the option to `true` allows finding all circular references.
```js
import { parseFromWebStream } from '@discoveryjs/json-ext';
### version
const data = await parseFromWebStream(readableStream);
// equivalent to (when ReadableStream[@@asyncIterator] is supported):
// await parseChunked(readableStream);
```
The version of the library, e.g. `"0.3.1"`.
### createStringifyWebStream()
A helper function to convert `stringifyChunked()` into a `ReadableStream` (Web Stream). You can use `ReadableStream.from()` instead, but this method has limited support in browsers (see [ReadableStream.from()](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/from_static) compatibility table).
```js
import { createStringifyWebStream } from '@discoveryjs/json-ext';
createStringifyWebStream({ test: true });
// equivalent to (when ReadableStream.from() is supported):
// ReadableStream.from(stringifyChunked({ test: true }))
```
## License
MIT

@@ -1,6 +0,4 @@

module.exports = {
version: require('./version'),
stringifyInfo: require('./stringify-info'),
stringifyStream: require('./stringify-stream'),
parseChunked: require('./parse-chunked')
};
export { parseChunked } from './parse-chunked.js';
export { stringifyChunked } from './stringify-chunked.js';
export { stringifyInfo } from './stringify-info.js';
export { createStringifyWebStream, parseFromWebStream } from './web-streams.js';

@@ -1,3 +0,2 @@

const { isReadableStream } = require('./utils');
const TextDecoder = require('./text-decoder');
import { isIterable } from './utils.js';

@@ -8,6 +7,2 @@ const STACK_OBJECT = 1;

function isObject(value) {
return value !== null && typeof value === 'object';
}
function adjustPosition(error, parser) {

@@ -34,55 +29,28 @@ if (error.name === 'SyntaxError' && parser.jsonParseOffset) {

module.exports = function(chunkEmitter) {
let parser = new ChunkParser();
export async function parseChunked(chunkEmitter) {
const iterable = typeof chunkEmitter === 'function'
? chunkEmitter()
: chunkEmitter;
if (isObject(chunkEmitter) && isReadableStream(chunkEmitter)) {
return new Promise((resolve, reject) => {
chunkEmitter
.on('data', chunk => {
try {
parser.push(chunk);
} catch (e) {
reject(adjustPosition(e, parser));
parser = null;
}
})
.on('error', (e) => {
parser = null;
reject(e);
})
.on('end', () => {
try {
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
});
}
if (isIterable(iterable)) {
let parser = new ChunkParser();
if (typeof chunkEmitter === 'function') {
const iterator = chunkEmitter();
try {
for await (const chunk of iterable) {
if (typeof chunk !== 'string' && !ArrayBuffer.isView(chunk)) {
throw new TypeError('Invalid chunk: Expected string, TypedArray or Buffer');
}
if (isObject(iterator) && (Symbol.iterator in iterator || Symbol.asyncIterator in iterator)) {
return new Promise(async (resolve, reject) => {
try {
for await (const chunk of iterator) {
parser.push(chunk);
}
parser.push(chunk);
}
resolve(parser.finish());
} catch (e) {
reject(adjustPosition(e, parser));
} finally {
parser = null;
}
});
return parser.finish();
} catch (e) {
throw adjustPosition(e, parser);
}
}
throw new Error(
'Chunk emitter should be readable stream, generator, ' +
'async generator or function returning an iterable object'
throw new TypeError(
'Invalid chunk emitter: Expected an Iterable, AsyncIterable, generator, ' +
'async generator, or a function returning an Iterable or AsyncIterable'
);

@@ -89,0 +57,0 @@ };

@@ -1,21 +0,24 @@

const {
import {
normalizeReplacer,
normalizeSpace,
replaceValue,
getTypeNative,
getTypeAsync,
isLeadingSurrogate,
isTrailingSurrogate,
escapableCharCodeSubstitution,
type: {
PRIMITIVE,
OBJECT,
ARRAY,
PROMISE,
STRING_STREAM,
OBJECT_STREAM
}
} = require('./utils');
replaceValue
} from './utils.js';
const hasOwn = typeof Object.hasOwn === 'function'
? Object.hasOwn
: (object, key) => Object.hasOwnProperty.call(object, key);
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
0x08: '\\b',
0x09: '\\t',
0x0a: '\\n',
0x0c: '\\f',
0x0d: '\\r',
0x22: '\\\"',
0x5c: '\\\\'
};
const charLength2048 = Array.from({ length: 2048 }).map((_, code) => {
if (escapableCharCodeSubstitution.hasOwnProperty(code)) {
if (hasOwn(escapableCharCodeSubstitution, code)) {
return 2; // \X

@@ -31,2 +34,10 @@ }

function isLeadingSurrogate(code) {
return code >= 0xD800 && code <= 0xDBFF;
}
function isTrailingSurrogate(code) {
return code >= 0xDC00 && code <= 0xDFFF;
}
function stringLength(str) {

@@ -84,3 +95,35 @@ let len = 0;

module.exports = function jsonStringifyInfo(value, replacer, space, options) {
export function stringifyInfo(value, optionsOrReplacer, space) {
if (optionsOrReplacer === null || Array.isArray(optionsOrReplacer) || typeof optionsOrReplacer !== 'object') {
optionsOrReplacer = {
replacer: optionsOrReplacer,
space
};
}
let allowlist = null;
let replacer = normalizeReplacer(optionsOrReplacer.replacer);
const continueOnCircular = Boolean(optionsOrReplacer.continueOnCircular);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
space = spaceLength(space);
const visited = new WeakMap();
const stack = new Set();
const circular = new Set();
const root = { '': value };
let stop = false;
let bytes = 0;
walk(root, '', value);
return {
bytes: isNaN(bytes) ? Infinity : bytes,
circular: [...circular]
};
function walk(holder, key, value) {

@@ -93,36 +136,59 @@ if (stop) {

let type = getType(value);
if (value === null || typeof value !== 'object') {
// primitive
if (value !== undefined || Array.isArray(holder)) {
bytes += primitiveLength(value);
} else if (holder === root) {
bytes += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
}
} else {
// check for circular structure
if (stack.has(value)) {
circular.add(value);
bytes += 4; // treat as null
// check for circular structure
if (type !== PRIMITIVE && stack.has(value)) {
circular.add(value);
length += 4; // treat as null
if (!continueOnCircular) {
stop = true;
}
if (!options.continueOnCircular) {
stop = true;
return;
}
return;
}
// duplicates
if (visited.has(value)) {
bytes += visited.get(value);
switch (type) {
case PRIMITIVE:
if (value !== undefined || Array.isArray(holder)) {
length += primitiveLength(value);
} else if (holder === root) {
length += 9; // FIXME: that's the length of undefined, should we normalize behaviour to convert it to null?
return;
}
if (Array.isArray(value)) {
// array
const valueLength = bytes;
bytes += 2; // []
stack.add(value);
for (let i = 0; i < value.length; i++) {
walk(value, i, value[i]);
}
break;
case OBJECT: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
if (value.length > 1) {
bytes += value.length - 1; // commas
}
const valueLength = length;
stack.delete(value);
if (space > 0 && value.length > 0) {
bytes += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
bytes += 1 + stack.size * space; // for ]
}
visited.set(value, bytes - valueLength);
} else {
// object
const valueLength = bytes;
let entries = 0;
length += 2; // {}
bytes += 2; // {}

@@ -132,9 +198,9 @@ stack.add(value);

for (const key in value) {
if (hasOwnProperty.call(value, key) && (allowlist === null || allowlist.has(key))) {
const prevLength = length;
if (hasOwn(value, key) && (allowlist === null || allowlist.has(key))) {
const prevLength = bytes;
walk(value, key, value[key]);
if (prevLength !== length) {
if (prevLength !== bytes) {
// value is printed
length += stringLength(key) + 1; // "key":
bytes += stringLength(key) + 1; // "key":
entries++;

@@ -146,3 +212,3 @@ }

if (entries > 1) {
length += entries - 1; // commas
bytes += entries - 1; // commas
}

@@ -153,85 +219,10 @@

if (space > 0 && entries > 0) {
length += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
length += 1 + stack.size * space; // for }
bytes += (1 + (stack.size + 1) * space + 1) * entries; // for each key-value: \n{space}
bytes += 1 + stack.size * space; // for }
}
visited.set(value, length - valueLength);
break;
visited.set(value, bytes - valueLength);
}
case ARRAY: {
if (visited.has(value)) {
duplicate.add(value);
length += visited.get(value);
break;
}
const valueLength = length;
length += 2; // []
stack.add(value);
for (let i = 0; i < value.length; i++) {
walk(value, i, value[i]);
}
if (value.length > 1) {
length += value.length - 1; // commas
}
stack.delete(value);
if (space > 0 && value.length > 0) {
length += (1 + (stack.size + 1) * space) * value.length; // for each element: \n{space}
length += 1 + stack.size * space; // for ]
}
visited.set(value, length - valueLength);
break;
}
case PROMISE:
case STRING_STREAM:
async.add(value);
break;
case OBJECT_STREAM:
length += 2; // []
async.add(value);
break;
}
}
let allowlist = null;
replacer = normalizeReplacer(replacer);
if (Array.isArray(replacer)) {
allowlist = new Set(replacer);
replacer = null;
}
space = spaceLength(space);
options = options || {};
const visited = new Map();
const stack = new Set();
const duplicate = new Set();
const circular = new Set();
const async = new Set();
const getType = options.async ? getTypeAsync : getTypeNative;
const root = { '': value };
let stop = false;
let length = 0;
walk(root, '', value);
return {
minLength: isNaN(length) ? Infinity : length,
circular: [...circular],
duplicate: [...duplicate],
async: [...async]
};
};

@@ -1,35 +0,13 @@

const PrimitiveType = 1;
const ObjectType = 2;
const ArrayType = 3;
const PromiseType = 4;
const ReadableStringType = 5;
const ReadableObjectType = 6;
// https://tc39.es/ecma262/#table-json-single-character-escapes
const escapableCharCodeSubstitution = { // JSON Single Character Escape Sequences
0x08: '\\b',
0x09: '\\t',
0x0a: '\\n',
0x0c: '\\f',
0x0d: '\\r',
0x22: '\\\"',
0x5c: '\\\\'
};
function isLeadingSurrogate(code) {
return code >= 0xD800 && code <= 0xDBFF;
}
function isTrailingSurrogate(code) {
return code >= 0xDC00 && code <= 0xDFFF;
}
function isReadableStream(value) {
export function isIterable(value) {
return (
typeof value.pipe === 'function' &&
typeof value._read === 'function' &&
typeof value._readableState === 'object' && value._readableState !== null
typeof value === 'object' &&
value !== null &&
(
typeof value[Symbol.iterator] === 'function' ||
typeof value[Symbol.asyncIterator] === 'function'
)
);
}
function replaceValue(holder, key, value, replacer) {
export function replaceValue(holder, key, value, replacer) {
if (value && typeof value.toJSON === 'function') {

@@ -62,35 +40,3 @@ value = value.toJSON();

function getTypeNative(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function getTypeAsync(value) {
if (value === null || typeof value !== 'object') {
return PrimitiveType;
}
if (typeof value.then === 'function') {
return PromiseType;
}
if (isReadableStream(value)) {
return value._readableState.objectMode ? ReadableObjectType : ReadableStringType;
}
if (Array.isArray(value)) {
return ArrayType;
}
return ObjectType;
}
function normalizeReplacer(replacer) {
export function normalizeReplacer(replacer) {
if (typeof replacer === 'function') {

@@ -115,3 +61,3 @@ return replacer;

function normalizeSpace(space) {
export function normalizeSpace(space) {
if (typeof space === 'number') {

@@ -131,22 +77,1 @@ if (!Number.isFinite(space) || space < 1) {

}
module.exports = {
escapableCharCodeSubstitution,
isLeadingSurrogate,
isTrailingSurrogate,
type: {
PRIMITIVE: PrimitiveType,
PROMISE: PromiseType,
ARRAY: ArrayType,
OBJECT: ObjectType,
STRING_STREAM: ReadableStringType,
OBJECT_STREAM: ReadableObjectType
},
isReadableStream,
replaceValue,
getTypeNative,
getTypeAsync,
normalizeReplacer,
normalizeSpace
};
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc