+2
-2
@@ -1,2 +0,2 @@ | ||
| var o=Object.defineProperty;var C=Object.getOwnPropertyDescriptor;var O=Object.getOwnPropertyNames;var b=Object.prototype.hasOwnProperty;var d=(e,t)=>{for(var r in t)o(e,r,{get:t[r],enumerable:!0})},w=(e,t,r,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let a of O(t))!b.call(e,a)&&a!==r&&o(e,a,{get:()=>t[a],enumerable:!(n=C(t,a))||n.enumerable});return e};var D=e=>w(o({},"__esModule",{value:!0}),e);var v={};d(v,{default:()=>j,defaultOptions:()=>m,format:()=>i,formatArray:()=>h,formatField:()=>u,formatObject:()=>x});module.exports=D(v);var m={header:!0,newlineChar:`\r | ||
| `,delimiterChar:",",quoteChar:'"'},i=(e,t={})=>{let r={...m,enqueue:()=>{},...t};r.escapeChar??(r.escapeChar=r.quoteChar);let{enableReturn:n,enqueue:a}=r,l=Array.isArray(e[0]),y=l?h:x,g=r.header!==!1;typeof r.header=="boolean"&&!l&&(r.header=Object.keys(e[0]));let f=g?h(r.header,r):"";for(let c=0,A=e.length;c<A;c++){let s=y(e[c],r);a(s),n&&(f+=s)}return n&&f},h=(e,t)=>{let r="";for(let n=0,a=e.length;n<a;n++)r+=(n?t.delimiterChar:"")+u(e[n],null,t);return r+t.newlineChar},x=(e,t)=>{var n;let r="";for(let a=0,l=t.header.length;a<l;a++)r+=(a?t.delimiterChar:"")+u(e[t.header[a]],(n=t.quoteColumn)==null?void 0:n[a],t);return r+t.newlineChar},u=(e,t,{quoteChar:r,escapeChar:n,delimiterChar:a,newlineChar:l})=>e==null||e===""?"":e.constructor===Date?e.toISOString():(e=e.toString(),t===!1?e:(t=t||S(e,[a,l,r,"\uFEFF"])||e[0]===" "||e[e.length-1]===" ",t?r+e.replaceAll(r,n+r)+r:e)),S=(e,t)=>{for(let r of t)if(e.indexOf(r)>-1)return!0},j=i;0&&(module.exports={defaultOptions,format,formatArray,formatField,formatObject}); | ||
| var u=Object.defineProperty;var i=Object.getOwnPropertyDescriptor;var O=Object.getOwnPropertyNames;var D=Object.prototype.hasOwnProperty;var b=(r,t)=>{for(var e in t)u(r,e,{get:t[e],enumerable:!0})},w=(r,t,e,a)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of O(t))!D.call(r,n)&&n!==e&&u(r,n,{get:()=>t[n],enumerable:!(a=i(t,n))||a.enumerable});return r};var S=r=>w(u({},"__esModule",{value:!0}),r);var k={};b(k,{default:()=>v,defaultOptions:()=>f,format:()=>x,formatArray:()=>m,formatField:()=>o,formatObject:()=>y});module.exports=S(k);var f={header:!0,newlineChar:`\r | ||
| `,delimiterChar:",",quoteChar:'"'},x=(r,t={})=>{let e={...f,enqueue:()=>{},...t};e.escapeChar??(e.escapeChar=e.quoteChar);let{enableReturn:a,enqueue:n}=e,A=Array.isArray(r[0])?m:y,g=e.header!==!1;e.columns??(e.columns=Object.keys(r[0]));let s=g?m(e.columns,e):"";for(let c=0,C=r.length;c<C;c++){let h=A(r[c],e);n(h),a&&(s+=h)}return a&&s},m=(r,t)=>{let e="";for(let a=0,n=r.length;a<n;a++)e+=(a?t.delimiterChar:"")+o(r[a],null,t);return e+t.newlineChar},y=(r,t)=>{var a;let e="";for(let n=0,l=t.columns.length;n<l;n++)e+=(n?t.delimiterChar:"")+o(r[t.columns[n]],(a=t.quoteColumn)==null?void 0:a[n],t);return e+t.newlineChar},o=(r,t,{quoteChar:e,escapeChar:a,delimiterChar:n,newlineChar:l})=>r==null||r===""?"":r.constructor===Date?r.toISOString():(r=r.toString(),t===!1?r:(t=t||j(r,[n,l,e,"\uFEFF"])||r[0]===" "||r[r.length-1]===" ",t?e+r.replaceAll(e,a+e)+e:r)),j=(r,t)=>{for(let e of t)if(r.indexOf(e)>-1)return!0},v=x;0&&(module.exports={defaultOptions,format,formatArray,formatField,formatObject}); |
+1
-1
| var x={header:!0,newlineChar:`\r | ||
| `,delimiterChar:",",quoteChar:'"'},y=(e,t={})=>{let r={...x,enqueue:()=>{},...t};r.escapeChar??=r.quoteChar;let{enableReturn:n,enqueue:a}=r,l=Array.isArray(e[0]),f=l?u:g,m=r.header!==!1;typeof r.header=="boolean"&&!l&&(r.header=Object.keys(e[0]));let o=m?u(r.header,r):"";for(let c=0,i=e.length;c<i;c++){let h=f(e[c],r);a(h),n&&(o+=h)}return n&&o},u=(e,t)=>{let r="";for(let n=0,a=e.length;n<a;n++)r+=(n?t.delimiterChar:"")+s(e[n],null,t);return r+t.newlineChar},g=(e,t)=>{let r="";for(let n=0,a=t.header.length;n<a;n++)r+=(n?t.delimiterChar:"")+s(e[t.header[n]],t.quoteColumn?.[n],t);return r+t.newlineChar},s=(e,t,{quoteChar:r,escapeChar:n,delimiterChar:a,newlineChar:l})=>e==null||e===""?"":e.constructor===Date?e.toISOString():(e=e.toString(),t===!1?e:(t=t||A(e,[a,l,r,"\uFEFF"])||e[0]===" "||e[e.length-1]===" ",t?r+e.replaceAll(r,n+r)+r:e)),A=(e,t)=>{for(let r of t)if(e.indexOf(r)>-1)return!0},C=y;export{C as default,x as defaultOptions,y as format,u as formatArray,s as formatField,g as formatObject}; | ||
| `,delimiterChar:",",quoteChar:'"'},y=(r,t={})=>{let e={...x,enqueue:()=>{},...t};e.escapeChar??=e.quoteChar;let{enableReturn:n,enqueue:a}=e,h=Array.isArray(r[0])?s:A,f=e.header!==!1;e.columns??=Object.keys(r[0]);let o=f?s(e.columns,e):"";for(let l=0,i=r.length;l<i;l++){let u=h(r[l],e);a(u),n&&(o+=u)}return n&&o},s=(r,t)=>{let e="";for(let n=0,a=r.length;n<a;n++)e+=(n?t.delimiterChar:"")+m(r[n],null,t);return e+t.newlineChar},A=(r,t)=>{let e="";for(let n=0,a=t.columns.length;n<a;n++)e+=(n?t.delimiterChar:"")+m(r[t.columns[n]],t.quoteColumn?.[n],t);return e+t.newlineChar},m=(r,t,{quoteChar:e,escapeChar:n,delimiterChar:a,newlineChar:c})=>r==null||r===""?"":r.constructor===Date?r.toISOString():(r=r.toString(),t===!1?r:(t=t||g(r,[a,c,e,"\uFEFF"])||r[0]===" "||r[r.length-1]===" ",t?e+r.replaceAll(e,n+e)+e:r)),g=(r,t)=>{for(let e of t)if(r.indexOf(e)>-1)return!0},C=y;export{C as default,x as defaultOptions,y as format,s as formatArray,m as formatField,A as formatObject}; |
+3
-3
| { | ||
| "version": 3, | ||
| "sources": ["format.js"], | ||
| "sourcesContent": ["export const defaultOptions = {\n header: true, // false: return array; true: detect headers and return json; [...]: use defined headers and return json\n newlineChar: '\\r\\n', // undefined: detect newline from file; '\\r\\n': Windows; '\\n': Linux/Mac\n delimiterChar: ',', // TODO add in auto detect or function\n quoteChar: '\"'\n // escapeChar: '\"'\n\n // quoteColumn: undefined\n}\n\nexport const format = (input, opts = {}) => {\n const options = { ...defaultOptions, enqueue: () => {}, ...opts }\n options.escapeChar ??= options.quoteChar\n const { enableReturn, enqueue } = options\n\n const isArrayData = Array.isArray(input[0])\n const format = isArrayData ? formatArray : formatObject\n\n const includeHeader = options.header !== false\n if (typeof options.header === 'boolean' && !isArrayData) {\n options.header = Object.keys(input[0])\n }\n\n let res = includeHeader ? formatArray(options.header, options) : ''\n\n for (let i = 0, l = input.length; i < l; i++) {\n const data = format(input[i], options)\n enqueue(data)\n if (enableReturn) {\n res += data\n }\n }\n\n return enableReturn && res\n}\n\nexport const formatArray = (arr, options) => {\n let csv = ''\n for (let i = 0, l = arr.length; i < l; i++) {\n csv += (i ? options.delimiterChar : '') + formatField(arr[i], null, options)\n }\n return csv + options.newlineChar\n}\n\nexport const formatObject = (data, options) => {\n let csv = ''\n for (let i = 0, l = options.header.length; i < l; i++) {\n csv +=\n (i ? 
options.delimiterChar : '') +\n formatField(data[options.header[i]], options.quoteColumn?.[i], options)\n }\n return csv + options.newlineChar\n}\n\nexport const formatField = (\n field,\n needsQuotes,\n { quoteChar, escapeChar, delimiterChar, newlineChar }\n) => {\n if (field === undefined || field === null || field === '') {\n return ''\n }\n\n if (field.constructor === Date) {\n return field.toISOString() // JSON.stringify(str).slice(1, 25) faster??\n }\n\n field = field.toString()\n\n // Developer override using options.quotes\n if (needsQuotes === false) {\n return field\n }\n\n // Test if needs quote\n needsQuotes =\n needsQuotes ||\n hasAnyDelimiters(field, [\n delimiterChar,\n newlineChar,\n quoteChar,\n '\\ufeff'\n ]) ||\n field[0] === ' ' ||\n field[field.length - 1] === ' '\n\n return needsQuotes\n ? quoteChar +\n field.replaceAll(quoteChar, escapeChar + quoteChar) +\n quoteChar\n : field\n}\n\nconst hasAnyDelimiters = (field, delimiters) => {\n for (const delimiter of delimiters) {\n if (field.indexOf(delimiter) > -1) {\n return true\n }\n }\n}\n\nexport default format\n"], | ||
| "mappings": "AAAO,GAAM,GAAiB,CAC5B,OAAQ,GACR,YAAa;AAAA,EACb,cAAe,IACf,UAAW,GAIb,EAEa,EAAS,CAAC,EAAO,EAAO,CAAC,IAAM,CAC1C,GAAM,GAAU,CAAE,GAAG,EAAgB,QAAS,IAAM,CAAC,EAAG,GAAG,CAAK,EAChE,EAAQ,aAAe,EAAQ,UAC/B,GAAM,CAAE,eAAc,WAAY,EAE5B,EAAc,MAAM,QAAQ,EAAM,EAAE,EACpC,EAAS,EAAc,EAAc,EAErC,EAAgB,EAAQ,SAAW,GACzC,AAAI,MAAO,GAAQ,QAAW,WAAa,CAAC,GAC1C,GAAQ,OAAS,OAAO,KAAK,EAAM,EAAE,GAGvC,GAAI,GAAM,EAAgB,EAAY,EAAQ,OAAQ,CAAO,EAAI,GAEjE,OAAS,GAAI,EAAG,EAAI,EAAM,OAAQ,EAAI,EAAG,IAAK,CAC5C,GAAM,GAAO,EAAO,EAAM,GAAI,CAAO,EACrC,EAAQ,CAAI,EACR,GACF,IAAO,EAEX,CAEA,MAAO,IAAgB,CACzB,EAEa,EAAc,CAAC,EAAK,IAAY,CAC3C,GAAI,GAAM,GACV,OAAS,GAAI,EAAG,EAAI,EAAI,OAAQ,EAAI,EAAG,IACrC,GAAQ,GAAI,EAAQ,cAAgB,IAAM,EAAY,EAAI,GAAI,KAAM,CAAO,EAE7E,MAAO,GAAM,EAAQ,WACvB,EAEa,EAAe,CAAC,EAAM,IAAY,CAC7C,GAAI,GAAM,GACV,OAAS,GAAI,EAAG,EAAI,EAAQ,OAAO,OAAQ,EAAI,EAAG,IAChD,GACG,GAAI,EAAQ,cAAgB,IAC7B,EAAY,EAAK,EAAQ,OAAO,IAAK,EAAQ,cAAc,GAAI,CAAO,EAE1E,MAAO,GAAM,EAAQ,WACvB,EAEa,EAAc,CACzB,EACA,EACA,CAAE,YAAW,aAAY,gBAAe,iBAEpC,AAAuB,GAAU,MAAQ,IAAU,GAC9C,GAGL,EAAM,cAAgB,KACjB,EAAM,YAAY,EAG3B,GAAQ,EAAM,SAAS,EAGnB,IAAgB,GACX,EAIT,GACE,GACA,EAAiB,EAAO,CACtB,EACA,EACA,EACA,QACF,CAAC,GACD,EAAM,KAAO,KACb,EAAM,EAAM,OAAS,KAAO,IAEvB,EACH,EACE,EAAM,WAAW,EAAW,EAAa,CAAS,EAClD,EACF,IAGA,EAAmB,CAAC,EAAO,IAAe,CAC9C,OAAW,KAAa,GACtB,GAAI,EAAM,QAAQ,CAAS,EAAI,GAC7B,MAAO,EAGb,EAEO,EAAQ", | ||
| "names": [] | ||
| "sourcesContent": ["export const defaultOptions = {\n header: true, // false: don't log out header; true: log out header\n // columns: undefined, // [] to set order of headers and allowed columns\n newlineChar: '\\r\\n', // undefined: detect newline from file; '\\r\\n': Windows; '\\n': Linux/Mac\n delimiterChar: ',', // TODO add in auto detect or function\n quoteChar: '\"'\n // escapeChar: '\"'\n\n // quoteColumn: undefined\n}\n\nexport const format = (input, opts = {}) => {\n const options = { ...defaultOptions, enqueue: () => {}, ...opts }\n options.escapeChar ??= options.quoteChar\n const { enableReturn, enqueue } = options\n\n const isArrayData = Array.isArray(input[0])\n const format = isArrayData ? formatArray : formatObject\n\n const includeHeader = options.header !== false\n options.columns ??= Object.keys(input[0])\n\n let res = includeHeader ? formatArray(options.columns, options) : ''\n\n for (let i = 0, l = input.length; i < l; i++) {\n const data = format(input[i], options)\n enqueue(data)\n if (enableReturn) {\n res += data\n }\n }\n\n return enableReturn && res\n}\n\nexport const formatArray = (arr, options) => {\n let csv = ''\n for (let i = 0, l = arr.length; i < l; i++) {\n csv += (i ? options.delimiterChar : '') + formatField(arr[i], null, options)\n }\n return csv + options.newlineChar\n}\n\nexport const formatObject = (data, options) => {\n let csv = ''\n for (let i = 0, l = options.columns.length; i < l; i++) {\n csv +=\n (i ? 
options.delimiterChar : '') +\n formatField(data[options.columns[i]], options.quoteColumn?.[i], options)\n }\n return csv + options.newlineChar\n}\n\nexport const formatField = (\n field,\n needsQuotes,\n { quoteChar, escapeChar, delimiterChar, newlineChar }\n) => {\n if (field === undefined || field === null || field === '') {\n return ''\n }\n\n if (field.constructor === Date) {\n return field.toISOString() // JSON.stringify(str).slice(1, 25) faster??\n }\n\n field = field.toString()\n\n // Developer override using options.quotes\n if (needsQuotes === false) {\n return field\n }\n\n // Test if needs quote\n needsQuotes =\n needsQuotes ||\n hasAnyDelimiters(field, [\n delimiterChar,\n newlineChar,\n quoteChar,\n '\\ufeff'\n ]) ||\n field[0] === ' ' ||\n field[field.length - 1] === ' '\n\n return needsQuotes\n ? quoteChar +\n field.replaceAll(quoteChar, escapeChar + quoteChar) +\n quoteChar\n : field\n}\n\nconst hasAnyDelimiters = (field, delimiters) => {\n for (const delimiter of delimiters) {\n if (field.indexOf(delimiter) > -1) {\n return true\n }\n }\n}\n\nexport default format\n"], | ||
| "mappings": "AAAO,IAAMA,EAAiB,CAC5B,OAAQ,GAER,YAAa;AAAA,EACb,cAAe,IACf,UAAW,GAIb,EAEaC,EAAS,CAACC,EAAOC,EAAO,CAAC,IAAM,CAC1C,IAAMC,EAAU,CAAE,GAAGJ,EAAgB,QAAS,IAAM,CAAC,EAAG,GAAGG,CAAK,EAChEC,EAAQ,aAAeA,EAAQ,UAC/B,GAAM,CAAE,aAAAC,EAAc,QAAAC,CAAQ,EAAIF,EAG5BH,EADc,MAAM,QAAQC,EAAM,EAAE,EACbK,EAAcC,EAErCC,EAAgBL,EAAQ,SAAW,GACzCA,EAAQ,UAAY,OAAO,KAAKF,EAAM,EAAE,EAExC,IAAIQ,EAAMD,EAAgBF,EAAYH,EAAQ,QAASA,CAAO,EAAI,GAElE,QAASO,EAAI,EAAGC,EAAIV,EAAM,OAAQS,EAAIC,EAAGD,IAAK,CAC5C,IAAME,EAAOZ,EAAOC,EAAMS,GAAIP,CAAO,EACrCE,EAAQO,CAAI,EACRR,IACFK,GAAOG,EAEX,CAEA,OAAOR,GAAgBK,CACzB,EAEaH,EAAc,CAACO,EAAKV,IAAY,CAC3C,IAAIW,EAAM,GACV,QAASJ,EAAI,EAAGC,EAAIE,EAAI,OAAQH,EAAIC,EAAGD,IACrCI,IAAQJ,EAAIP,EAAQ,cAAgB,IAAMY,EAAYF,EAAIH,GAAI,KAAMP,CAAO,EAE7E,OAAOW,EAAMX,EAAQ,WACvB,EAEaI,EAAe,CAACK,EAAMT,IAAY,CAC7C,IAAIW,EAAM,GACV,QAASJ,EAAI,EAAGC,EAAIR,EAAQ,QAAQ,OAAQO,EAAIC,EAAGD,IACjDI,IACGJ,EAAIP,EAAQ,cAAgB,IAC7BY,EAAYH,EAAKT,EAAQ,QAAQO,IAAKP,EAAQ,cAAcO,GAAIP,CAAO,EAE3E,OAAOW,EAAMX,EAAQ,WACvB,EAEaY,EAAc,CACzBC,EACAC,EACA,CAAE,UAAAC,EAAW,WAAAC,EAAY,cAAAC,EAAe,YAAAC,CAAY,IAEzBL,GAAU,MAAQA,IAAU,GAC9C,GAGLA,EAAM,cAAgB,KACjBA,EAAM,YAAY,GAG3BA,EAAQA,EAAM,SAAS,EAGnBC,IAAgB,GACXD,GAITC,EACEA,GACAK,EAAiBN,EAAO,CACtBI,EACAC,EACAH,EACA,QACF,CAAC,GACDF,EAAM,KAAO,KACbA,EAAMA,EAAM,OAAS,KAAO,IAEvBC,EACHC,EACEF,EAAM,WAAWE,EAAWC,EAAaD,CAAS,EAClDA,EACFF,IAGAM,EAAmB,CAACN,EAAOO,IAAe,CAC9C,QAAWC,KAAaD,EACtB,GAAIP,EAAM,QAAQQ,CAAS,EAAI,GAC7B,MAAO,EAGb,EAEOC,EAAQzB", | ||
| "names": ["defaultOptions", "format", "input", "opts", "options", "enableReturn", "enqueue", "formatArray", "formatObject", "includeHeader", "res", "i", "l", "data", "arr", "csv", "formatField", "field", "needsQuotes", "quoteChar", "escapeChar", "delimiterChar", "newlineChar", "hasAnyDelimiters", "delimiters", "delimiter", "format_default"] | ||
| } |
+2
-2
@@ -5,4 +5,4 @@ { | ||
| "sourcesContent": ["// import {TextDecoder} from 'node:util'\n// import {defaultOptions, optionDetectNewlineValue} from './options.js'\nimport csvParse from 'csv-rex/parse'\nimport csvParseMini from 'csv-rex/parse-mini'\nimport csvFormat from 'csv-rex/format'\n\nexport const parse = csvParse\nexport const parseMini = csvParseMini\nexport const format = csvFormat\n\nexport default {\n parse: csvParse,\n parseMini: csvParseMini,\n format: csvFormat\n}\n"], | ||
| "mappings": "AAEA,6BACA,kCACA,8BAEO,KAAM,GAAQ,EACR,EAAY,EACZ,EAAS,EAEtB,GAAO,GAAQ,CACb,MAAO,EACP,UAAW,EACX,OAAQ,CACV", | ||
| "names": [] | ||
| "mappings": "AAEA,OAAOA,MAAc,gBACrB,OAAOC,MAAkB,qBACzB,OAAOC,MAAe,iBAEf,MAAMC,EAAQH,EACRI,EAAYH,EACZI,EAASH,EAEtB,IAAOI,EAAQ,CACb,MAAON,EACP,UAAWC,EACX,OAAQC,CACV", | ||
| "names": ["csvParse", "csvParseMini", "csvFormat", "parse", "parseMini", "format", "csv_rex_default"] | ||
| } |
+10
-5
| { | ||
| "name": "csv-rex", | ||
| "version": "0.4.2", | ||
| "version": "0.5.0", | ||
| "description": "A tiny and fast CSV parser for JavaScript.", | ||
@@ -50,2 +50,3 @@ "type": "module", | ||
| "pre-commit": "lint-staged", | ||
| "start": "docsify serve docs", | ||
| "lint": "prettier --write *.{js,json} && standard --fix *.js", | ||
@@ -81,12 +82,16 @@ "test": "c8 node --test", | ||
| "@commitlint/config-conventional": "^17.0.0", | ||
| "benny": "3.7.1", | ||
| "benny": "^3.7.1", | ||
| "c8": "^7.11.0", | ||
| "esbuild": "^0.14.47", | ||
| "docsify-cli": "^4.4.4", | ||
| "esbuild": "^0.15.7", | ||
| "husky": "^8.0.0", | ||
| "lint-staged": "^13.0.0", | ||
| "papaparse": "5.3.2", | ||
| "prettier": "^2.0.0", | ||
| "sinon": "^13.0.1", | ||
| "sinon": "^14.0.0", | ||
| "standard": "17.0.0" | ||
| }, | ||
| "funding": { | ||
| "type": "github", | ||
| "url": "https://github.com/sponsors/willfarrell" | ||
| } | ||
| } |
@@ -5,4 +5,4 @@ { | ||
| "sourcesContent": ["// chunkSize >> largest expected row\nconst defaultOptions = {\n header: true, // false: return array; true: detect headers and return json; [...]: use defined headers and return json\n newlineChar: '\\r\\n', // '': detect newline from chunk; '\\r\\n': Windows; '\\n': Linux/Mac\n delimiterChar: ',', // '': detect delimiter from chunk\n // quoteChar: '\"',\n // escapeChar: '\"', // default: `quoteChar`\n\n // Parse\n emptyFieldValue: '',\n coerceField: (field) => field, // TODO tests\n // commentPrefixValue: false, // falsy: disable, '//': enabled\n // errorOnComment: true,\n // errorOnEmptyLine: true,\n errorOnFieldsMismatch: true\n // errorOnFieldMalformed: true\n}\n\nconst length = (value) => value.length\n\nexport const parse = (opts = {}) => {\n const options = { ...defaultOptions, ...opts }\n options.escapeChar ??= options.quoteChar\n\n let { header, newlineChar, delimiterChar } = options\n let headerLength = length(header)\n const {\n // quoteChar,\n // escapeChar,\n // commentPrefixValue,\n emptyFieldValue,\n coerceField,\n // errorOnEmptyLine,\n // errorOnComment,\n errorOnFieldsMismatch\n // errorOnFieldMalformed\n } = options\n\n let chunk, enqueue\n let partialLine = ''\n let idx = 0\n const enqueueRow = (row) => {\n let data = row\n idx += 1\n if (headerLength) {\n const rowLength = length(row)\n\n if (headerLength !== rowLength) {\n if (errorOnFieldsMismatch) {\n enqueueError(\n 'FieldsMismatch',\n `Incorrect number of fields parsed, expected ${headerLength}.`\n )\n }\n return\n } else {\n data = {}\n for (let i = 0; i < rowLength; i++) {\n data[header[i]] = row[i]\n }\n }\n }\n enqueue({ idx, data })\n }\n\n const enqueueError = (code, message) => {\n enqueue({ idx, err: { code, message } })\n }\n\n const transformField = (field, idx) => {\n return coerceField(field || emptyFieldValue, idx)\n }\n\n const chunkParse = (string, controller) => {\n chunk = string\n enqueue = controller.enqueue\n const lines = chunk.split(newlineChar) 
// TODO use cursor pattern\n let linesLength = length(lines)\n if (linesLength > 1) {\n partialLine = lines.pop()\n linesLength -= 1\n }\n\n let i = 0\n if (header === true) {\n header = lines[i].split(delimiterChar)\n headerLength = length(header)\n i += 1\n }\n\n for (; i < linesLength; i++) {\n const line = lines[i]\n const row = []\n let cursor = 0\n while (cursor < line.length) {\n const delimiterIndex = line.indexOf(delimiterChar, cursor)\n if (delimiterIndex === -1) {\n row.push(transformField(line.substring(cursor), row.length))\n break\n }\n row.push(\n transformField(line.substring(cursor, delimiterIndex), row.length)\n )\n cursor = delimiterIndex + 1\n }\n enqueueRow(row)\n }\n }\n\n return {\n chunkParse,\n header: () => header,\n previousChunk: () => partialLine\n }\n}\n\nexport default (input, opts) => {\n const options = {\n ...defaultOptions,\n ...{\n enableReturn: true,\n chunkSize: 64 * 1024 * 1024,\n enqueue: () => {}\n },\n ...opts\n }\n const { chunkSize, enableReturn, enqueue } = options\n const { chunkParse, previousChunk } = parse(options)\n\n const res = []\n const controller = { enqueue }\n\n if (enableReturn) {\n controller.enqueue = (row) => {\n enqueue(row)\n res.push(row.data)\n }\n }\n\n let position = 0\n while (position < input.length) {\n const chunk =\n previousChunk() + input.substring(position, position + chunkSize)\n\n // Checking if you can use fastParse slows it down more than checking for quoteChar on ever field.\n chunkParse(chunk, controller)\n position += chunkSize\n }\n // flush\n const chunk = previousChunk()\n chunkParse(chunk, controller, true)\n\n return enableReturn && res\n}\n"], | ||
| "mappings": "AACA,GAAM,GAAiB,CACrB,OAAQ,GACR,YAAa;AAAA,EACb,cAAe,IAKf,gBAAiB,GACjB,YAAa,AAAC,GAAU,EAIxB,sBAAuB,EAEzB,EAEM,EAAS,AAAC,GAAU,EAAM,OAEnB,EAAQ,CAAC,EAAO,CAAC,IAAM,CAClC,GAAM,GAAU,CAAE,GAAG,EAAgB,GAAG,CAAK,EAC7C,EAAQ,aAAe,EAAQ,UAE/B,GAAI,CAAE,SAAQ,cAAa,iBAAkB,EACzC,EAAe,EAAO,CAAM,EAC1B,CAIJ,kBACA,cAGA,yBAEE,EAEA,EAAO,EACP,EAAc,GACd,EAAM,EACJ,EAAa,AAAC,GAAQ,CAC1B,GAAI,GAAO,EAEX,GADA,GAAO,EACH,EAAc,CAChB,GAAM,GAAY,EAAO,CAAG,EAE5B,GAAI,IAAiB,EAAW,CAC9B,AAAI,GACF,EACE,iBACA,+CAA+C,IACjD,EAEF,MACF,KAAO,CACL,EAAO,CAAC,EACR,OAAS,GAAI,EAAG,EAAI,EAAW,IAC7B,EAAK,EAAO,IAAM,EAAI,EAE1B,CACF,CACA,EAAQ,CAAE,MAAK,MAAK,CAAC,CACvB,EAEM,EAAe,CAAC,EAAM,IAAY,CACtC,EAAQ,CAAE,MAAK,IAAK,CAAE,OAAM,SAAQ,CAAE,CAAC,CACzC,EAEM,EAAiB,CAAC,EAAO,IACtB,EAAY,GAAS,EAAiB,CAAG,EAuClD,MAAO,CACL,WArCiB,CAAC,EAAQ,IAAe,CACzC,EAAQ,EACR,EAAU,EAAW,QACrB,GAAM,GAAQ,EAAM,MAAM,CAAW,EACjC,EAAc,EAAO,CAAK,EAC9B,AAAI,EAAc,GAChB,GAAc,EAAM,IAAI,EACxB,GAAe,GAGjB,GAAI,GAAI,EAOR,IANI,IAAW,IACb,GAAS,EAAM,GAAG,MAAM,CAAa,EACrC,EAAe,EAAO,CAAM,EAC5B,GAAK,GAGA,EAAI,EAAa,IAAK,CAC3B,GAAM,GAAO,EAAM,GACb,EAAM,CAAC,EACT,EAAS,EACb,KAAO,EAAS,EAAK,QAAQ,CAC3B,GAAM,GAAiB,EAAK,QAAQ,EAAe,CAAM,EACzD,GAAI,IAAmB,GAAI,CACzB,EAAI,KAAK,EAAe,EAAK,UAAU,CAAM,EAAG,EAAI,MAAM,CAAC,EAC3D,KACF,CACA,EAAI,KACF,EAAe,EAAK,UAAU,EAAQ,CAAc,EAAG,EAAI,MAAM,CACnE,EACA,EAAS,EAAiB,CAC5B,CACA,EAAW,CAAG,CAChB,CACF,EAIE,OAAQ,IAAM,EACd,cAAe,IAAM,CACvB,CACF,EAEO,EAAQ,CAAC,EAAO,IAAS,CAC9B,GAAM,GAAU,CACd,GAAG,EAED,aAAc,GACd,UAAW,SACX,QAAS,IAAM,CAAC,EAElB,GAAG,CACL,EACM,CAAE,YAAW,eAAc,WAAY,EACvC,CAAE,aAAY,iBAAkB,EAAM,CAAO,EAE7C,EAAM,CAAC,EACP,EAAa,CAAE,SAAQ,EAE7B,AAAI,GACF,GAAW,QAAU,AAAC,GAAQ,CAC5B,EAAQ,CAAG,EACX,EAAI,KAAK,EAAI,IAAI,CACnB,GAGF,GAAI,GAAW,EACf,KAAO,EAAW,EAAM,QAAQ,CAC9B,GAAM,GACJ,EAAc,EAAI,EAAM,UAAU,EAAU,EAAW,CAAS,EAGlE,EAAW,EAAO,CAAU,EAC5B,GAAY,CACd,CAEA,GAAM,GAAQ,EAAc,EAC5B,SAAW,EAAO,EAAY,EAAI,EAE3B,GAAgB,CACzB", | ||
| "names": [] | ||
| "mappings": "AACA,IAAMA,EAAiB,CACrB,OAAQ,GACR,YAAa;AAAA,EACb,cAAe,IAKf,gBAAiB,GACjB,YAAcC,GAAUA,EAIxB,sBAAuB,EAEzB,EAEMC,EAAUC,GAAUA,EAAM,OAEnBC,EAAQ,CAACC,EAAO,CAAC,IAAM,CAClC,IAAMC,EAAU,CAAE,GAAGN,EAAgB,GAAGK,CAAK,EAC7CC,EAAQ,aAAeA,EAAQ,UAE/B,GAAI,CAAE,OAAAC,EAAQ,YAAAC,EAAa,cAAAC,CAAc,EAAIH,EACzCI,EAAeR,EAAOK,CAAM,EAC1B,CAIJ,gBAAAI,EACA,YAAAC,EAGA,sBAAAC,CAEF,EAAIP,EAEAQ,EAAOC,EACPC,EAAc,GACdC,EAAM,EACJC,EAAcC,GAAQ,CAC1B,IAAIC,EAAOD,EAEX,GADAF,GAAO,EACHP,EAAc,CAChB,IAAMW,EAAYnB,EAAOiB,CAAG,EAE5B,GAAIT,IAAiBW,EAAW,CAC1BR,GACFS,EACE,iBACA,+CAA+CZ,IACjD,EAEF,MACF,KAAO,CACLU,EAAO,CAAC,EACR,QAASG,EAAI,EAAGA,EAAIF,EAAWE,IAC7BH,EAAKb,EAAOgB,IAAMJ,EAAII,EAE1B,CACF,CACAR,EAAQ,CAAE,IAAAE,EAAK,KAAAG,CAAK,CAAC,CACvB,EAEME,EAAe,CAACE,EAAMC,IAAY,CACtCV,EAAQ,CAAE,IAAAE,EAAK,IAAK,CAAE,KAAAO,EAAM,QAAAC,CAAQ,CAAE,CAAC,CACzC,EAEMC,EAAiB,CAACzB,EAAOgB,IACtBL,EAAYX,GAASU,EAAiBM,CAAG,EAuClD,MAAO,CACL,WArCiB,CAACU,EAAQC,IAAe,CACzCd,EAAQa,EACRZ,EAAUa,EAAW,QACrB,IAAMC,EAAQf,EAAM,MAAMN,CAAW,EACjCsB,EAAc5B,EAAO2B,CAAK,EAC1BC,EAAc,IAChBd,EAAca,EAAM,IAAI,EACxBC,GAAe,GAGjB,IAAIP,EAAI,EAOR,IANIhB,IAAW,KACbA,EAASsB,EAAMN,GAAG,MAAMd,CAAa,EACrCC,EAAeR,EAAOK,CAAM,EAC5BgB,GAAK,GAGAA,EAAIO,EAAaP,IAAK,CAC3B,IAAMQ,EAAOF,EAAMN,GACbJ,EAAM,CAAC,EACTa,EAAS,EACb,KAAOA,EAASD,EAAK,QAAQ,CAC3B,IAAME,EAAiBF,EAAK,QAAQtB,EAAeuB,CAAM,EACzD,GAAIC,IAAmB,GAAI,CACzBd,EAAI,KAAKO,EAAeK,EAAK,UAAUC,CAAM,EAAGb,EAAI,MAAM,CAAC,EAC3D,KACF,CACAA,EAAI,KACFO,EAAeK,EAAK,UAAUC,EAAQC,CAAc,EAAGd,EAAI,MAAM,CACnE,EACAa,EAASC,EAAiB,CAC5B,CACAf,EAAWC,CAAG,CAChB,CACF,EAIE,OAAQ,IAAMZ,EACd,cAAe,IAAMS,CACvB,CACF,EAEOkB,EAAQ,CAACC,EAAO9B,IAAS,CAC9B,IAAMC,EAAU,CACd,GAAGN,EAED,aAAc,GACd,UAAW,SACX,QAAS,IAAM,CAAC,EAElB,GAAGK,CACL,EACM,CAAE,UAAA+B,EAAW,aAAAC,EAAc,QAAAtB,CAAQ,EAAIT,EACvC,CAAE,WAAAgC,EAAY,cAAAC,CAAc,EAAInC,EAAME,CAAO,EAE7CkC,EAAM,CAAC,EACPZ,EAAa,CAAE,QAAAb,CAAQ,EAEzBsB,IACFT,EAAW,QAAWT,GAAQ,CAC5BJ,EAAQI,CAAG,EACXqB,EAAI,KAAKrB,EAAI,IAAI,CACnB,GAGF,IAAIsB,EAAW,EACf,KAAOA,EAAWN,EAAM,QAAQ,CAC9B,IAAMrB,EACJyB,EAAc,EAAIJ,EAAM,UAAUM,EAA
UA,EAAWL,CAAS,EAGlEE,EAAWxB,EAAOc,CAAU,EAC5Ba,GAAYL,CACd,CAEA,IAAMtB,EAAQyB,EAAc,EAC5B,OAAAD,EAAWxB,EAAOc,EAAY,EAAI,EAE3BS,GAAgBG,CACzB", | ||
| "names": ["defaultOptions", "field", "length", "value", "parse", "opts", "options", "header", "newlineChar", "delimiterChar", "headerLength", "emptyFieldValue", "coerceField", "errorOnFieldsMismatch", "chunk", "enqueue", "partialLine", "idx", "enqueueRow", "row", "data", "rowLength", "enqueueError", "i", "code", "message", "transformField", "string", "controller", "lines", "linesLength", "line", "cursor", "delimiterIndex", "parse_mini_default", "input", "chunkSize", "enableReturn", "chunkParse", "previousChunk", "res", "position"] | ||
| } |
+1
-1
@@ -1,1 +0,1 @@ | ||
| var $=Object.defineProperty;var re=Object.getOwnPropertyDescriptor;var ne=Object.getOwnPropertyNames;var oe=Object.prototype.hasOwnProperty;var se=(e,t)=>{for(var o in t)$(e,o,{get:t[o],enumerable:!0})},ae=(e,t,o,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let c of ne(t))!oe.call(e,c)&&c!==o&&$(e,c,{get:()=>t[c],enumerable:!(n=re(t,c))||n.enumerable});return e};var ue=e=>ae($({},"__esModule",{value:!0}),e);var le={};se(le,{coerceTo:()=>V,default:()=>ie,detectChar:()=>D,parse:()=>A});module.exports=ue(le);var J={header:!0,newlineChar:"",delimiterChar:"",quoteChar:'"',emptyFieldValue:"",coerceField:e=>e,commentPrefixValue:!1,errorOnComment:!0,errorOnEmptyLine:!0,errorOnFieldsMismatch:!0},b=e=>e.length,ce=e=>e.replace(/[\\^$*+?.()|[\]{}]/g,"\\$&"),A=(e={})=>{let t={...J,...e};t.escapeChar??(t.escapeChar=t.quoteChar);let{header:o,newlineChar:n,delimiterChar:c}=t,{quoteChar:r,escapeChar:h,commentPrefixValue:E,emptyFieldValue:q,coerceField:L,errorOnEmptyLine:m,errorOnComment:v,errorOnFieldsMismatch:C}=t,p=b(o),B=/,|\t|\||;|\x1E|\x1F/g,G=/\r\n|\n|\r/g,H=h+r,K=new RegExp(`${ce(H)}`,"g"),U=h===r,W=h!==r,y=b(n),X=1,M=1,Y=1,Z=2,l,k,u,F,Q,g="",x=0,_=a=>{if(x+=1,o===!0){o=a,p=b(o);return}let i=a;if(p){let f=b(a);if(p!==f){C&&(p<f?O("FieldsMismatchTooMany",`Too many fields were parsed, expected ${p}.`):f<p&&O("FieldsMismatchTooFew",`Too few fields were parsed, expected ${p}.`));return}else{i={};for(let d=0;d<f;d++)i[o[d]]=a[d]}}Q({idx:x,data:i})},O=(a,i)=>{Q({idx:x,err:{code:a,message:i}})},w=(a,i=u)=>l.indexOf(a,i),j=a=>l.substring(u,a),ee=(a,i)=>L(a||q,i),te=(a,i)=>{F.push(ee(a,i))},R=()=>{if(w(n)===u)return x+=1,u+=y,m&&O("EmptyLineExists","Empty line detected."),R();if(E&&w(E)===u)return x+=1,u=w(n)+y,v&&O("CommentExists","Comment detected."),R()};return{chunkParse:(a,i,f=!1)=>{l=a,k=b(l),Q=i.enqueue,g="",u=0,F=[],n||(n=D(l.substring(0,1024),G),y=b(n)),c||(c=D(l.substring(0,1024),B)),R();let d=0;for(;;){let 
z,s=u,T,I;if(l[u]===r)for(u+=M,z=!0,s=u;;){if(s=w(r,s),s<0){if(g=l.substring(d,k)+g,f)throw new Error("QuotedFieldMalformed",{cause:x});return}if(U&&l[s+M]===r){s+=Z;continue}if(W&&l[s-Y]===h){s+=M;continue}break}let P=w(c,s),N=w(n,s);if(N<0){if(!f){g=l.substring(d,k)+g;return}N=k}if(P>-1&&P<N?(s=P,T=X):(s=N,T=y,I=!0),s<0||!s)break;let S;if(z?S=j(s-1).replace(K,r):S=j(s),te(S,F.length),u=s+T,I&&(_(F),F=[],R(),d=u),k<=u)break}},header:()=>o,previousChunk:()=>g}},D=(e,t)=>{let o,n={};for(;o=t.exec(e);){let r=o[0];if(n[r]??(n[r]=0),n[r]+=1,n[r]>5)return r}let{key:c}=Object.keys(n).map(r=>({key:r,value:n[r]})).sort((r,h)=>r.value-h.value)[0];return c},V={string:e=>e,boolean:e=>{let t=V.true(e);return typeof t=="boolean"?t:V.false(e)},integer:e=>Number.parseInt(e,10)||e,decimal:e=>Number.parseFloat(e)||e,json:e=>{try{return JSON.parse(e)}catch{return e}},timestamp:e=>{let t=new Date(e);return t.toString()!=="Invalid Date"?t:e},true:e=>e.toLowerCase()==="true"?!0:e,false:e=>e.toLowerCase()==="false"?!1:e,null:e=>e.toLowerCase()==="null"?null:e},ie=(e,t)=>{let o={...J,enableReturn:!0,chunkSize:67108864,enqueue:()=>{},...t},{chunkSize:n,enableReturn:c,enqueue:r}=o,{chunkParse:h,previousChunk:E}=A(o),q=[],L={enqueue:r};c&&(L.enqueue=C=>{r(C),q.push(C.data)});let m=0;for(;m<e.length;){let C=E()+e.substring(m,m+n);h(C,L),m+=n}let v=E();return h(v,L,!0),c&&q};0&&(module.exports={coerceTo,detectChar,parse}); | ||
| var $=Object.defineProperty;var re=Object.getOwnPropertyDescriptor;var ne=Object.getOwnPropertyNames;var oe=Object.prototype.hasOwnProperty;var se=(e,t)=>{for(var o in t)$(e,o,{get:t[o],enumerable:!0})},ae=(e,t,o,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let u of ne(t))!oe.call(e,u)&&u!==o&&$(e,u,{get:()=>t[u],enumerable:!(r=re(t,u))||r.enumerable});return e};var ue=e=>ae($({},"__esModule",{value:!0}),e);var le={};se(le,{coerceTo:()=>q,default:()=>ie,detectChar:()=>V,parse:()=>U});module.exports=ue(le);var J={header:!0,newlineChar:"",delimiterChar:"",quoteChar:'"',emptyFieldValue:"",coerceField:e=>e,commentPrefixValue:!1,errorOnComment:!0,errorOnEmptyLine:!0,errorOnFieldsMismatch:!0},E=e=>e.length,ce=e=>e.replace(/[\\^$*+?.()|[\]{}]/g,"\\$&"),U=(e={})=>{let t={...J,...e};t.escapeChar??(t.escapeChar=t.quoteChar);let{header:o,newlineChar:r,delimiterChar:u}=t,{quoteChar:i,escapeChar:n,commentPrefixValue:p,emptyFieldValue:y,coerceField:L,errorOnEmptyLine:g,errorOnComment:M,errorOnFieldsMismatch:C}=t,m=E(o),A=/,|\t|\||;|\x1E|\x1F/g,B=/\r\n|\n|\r/g,G=n+i,H=new RegExp(`${ce(G)}`,"g"),K=n===i,W=n!==i,O=E(r),X=1,Q=1,Y=1,Z=2,h,k,c,F,T,x="",w=0,_=a=>{if(w+=1,o===!0){o=a,m=E(o);return}let l=a;if(m){let f=E(a);if(m!==f){C&&(m<f?N("FieldsMismatchTooMany",`Too many fields were parsed, expected ${m}.`):f<m&&N("FieldsMismatchTooFew",`Too few fields were parsed, expected ${m}.`));return}else{l={};for(let d=0;d<f;d++)l[o[d]]=a[d]}}T({idx:w,data:l})},N=(a,l)=>{T({idx:w,err:{code:a,message:l}})},b=(a,l=c)=>h.indexOf(a,l),I=a=>h.substring(c,a),ee=(a,l)=>L(a||y,l),te=(a,l)=>{F.push(ee(a,l))},R=()=>{if(b(r)===c)return w+=1,c+=O,g&&N("EmptyLineExists","Empty line detected."),R();if(p&&b(p)===c)return w+=1,c=b(r)+O,M&&N("CommentExists","Comment detected."),R()};return{chunkParse:(a,l,f=!1)=>{h=a,k=E(h),T=l.enqueue,x="",c=0,F=[],r||(r=V(h.substring(0,1024),B),O=E(r)),u||(u=V(h.substring(0,1024),A)),R();let d=0;for(;;){let 
j,s=c,D,z;if(h[c]===i)for(c+=Q,j=!0,s=c;;){if(s=b(i,s),s<0){if(x=h.substring(d,k)+x,f)throw new Error("QuotedFieldMalformed",{cause:w});return}if(K&&h[s+Q]===i){s+=Z;continue}if(W&&h[s-Y]===n){s+=Q;continue}break}let P=b(u,s),v=b(r,s);if(v<0){if(!f){x=h.substring(d,k)+x;return}v=k}if(P>-1&&P<v?(s=P,D=X):(s=v,D=O,z=!0),s<0||!s)break;let S;if(j?S=I(s-1).replace(H,i):S=I(s),te(S,F.length),c=s+D,z&&(_(F),F=[],R(),d=c),k<=c)break}},header:()=>o,previousChunk:()=>x}},V=(e,t)=>{var i;let o,r={};for(;o=t.exec(e);){let n=o[0];if(r[n]??(r[n]=0),r[n]+=1,r[n]>5)return n}let{key:u}=((i=Object.keys(r).map(n=>({key:n,value:r[n]})).sort((n,p)=>n.value-p.value))==null?void 0:i[0])??{};if(!u)throw new Error("UnknownDetectChar",{cause:{pattern:t,chunk:e}});return u},q={string:e=>e,boolean:e=>{let t=q.true(e);return typeof t=="boolean"?t:q.false(e)},true:e=>e.toLowerCase()==="true"?!0:e,false:e=>e.toLowerCase()==="false"?!1:e,number:e=>{let t=q.decimal(e);return Number.isInteger(t)?q.integer(e):t},integer:e=>Number.parseInt(e,10)||e,decimal:e=>Number.parseFloat(e)||e,json:e=>{try{return JSON.parse(e)}catch{return e}},timestamp:e=>{let t=new Date(e);return t.toString()!=="Invalid Date"?t:e},null:e=>e.toLowerCase()==="null"?null:e,any:e=>e},ie=(e,t)=>{let o={...J,enableReturn:!0,chunkSize:67108864,enqueue:()=>{},...t},{chunkSize:r,enableReturn:u,enqueue:i}=o,{chunkParse:n,previousChunk:p}=U(o),y=[],L={enqueue:i};u&&(L.enqueue=C=>{i(C),y.push(C.data)});let g=0;for(;g<e.length;){let C=p()+e.substring(g,g+r);n(C,L),g+=r}let M=p();return n(M,L,!0),u&&y};0&&(module.exports={coerceTo,detectChar,parse}); |
+1
-1
@@ -1,1 +0,1 @@ | ||
// ESM build of csv-rex/parse.
// Fix: `detectChar` previously destructured `sort(...)[0]` with no guard, so a
// sample containing none of the candidate characters crashed with an opaque
// `TypeError: Cannot destructure property 'key' of undefined`. It now throws a
// diagnosable Error('UnknownDetectChar', { cause: { pattern, chunk } }),
// consistent with the newer revision of this bundle.

// Defaults for both the incremental parser and the one-shot default export.
const defaultOptions = {
  header: true, // false: rows as arrays; true: first row becomes the header; [...]: predefined header
  newlineChar: '', // '': auto-detect from the first chunk ('\r\n', '\n' or '\r')
  delimiterChar: '', // '': auto-detect from the first chunk
  quoteChar: '"',
  // escapeChar defaults to quoteChar (set inside parse())
  emptyFieldValue: '',
  coerceField: (field) => field,
  commentPrefixValue: false, // falsy: disabled; e.g. '//': treat such lines as comments
  errorOnComment: true,
  errorOnEmptyLine: true,
  errorOnFieldsMismatch: true
}

const length = (value) => value.length
// Escape a literal string so it can be embedded in a RegExp source.
const escapeRegExp = (string) => string.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&')

/**
 * Create an incremental (chunk-by-chunk) CSV parser.
 *
 * @param {object} [opts] overrides for `defaultOptions`
 * @returns {{chunkParse: Function, header: Function, previousChunk: Function}}
 *   `chunkParse(string, controller, flush)` parses one chunk, emitting
 *   `{idx, data}` / `{idx, err}` records via `controller.enqueue`;
 *   `previousChunk()` returns the unparsed tail to prepend to the next chunk.
 * @throws {Error} 'QuotedFieldMalformed' when flushing with an unterminated quote.
 */
export const parse = (opts = {}) => {
  const options = { ...defaultOptions, ...opts }
  options.escapeChar ??= options.quoteChar

  let { header, newlineChar, delimiterChar } = options
  const {
    quoteChar,
    escapeChar,
    commentPrefixValue,
    emptyFieldValue,
    coerceField,
    errorOnEmptyLine,
    errorOnComment,
    errorOnFieldsMismatch
  } = options

  let headerLength = length(header)
  const delimiterDetectPattern = /,|\t|\||;|\x1E|\x1F/g // eslint-disable-line no-control-regex
  const newlineDetectPattern = /\r\n|\n|\r/g

  const escapedQuote = escapeChar + quoteChar
  const escapedQuotePattern = new RegExp(`${escapeRegExp(escapedQuote)}`, 'g')
  const escapedQuoteEqual = escapeChar === quoteChar
  const escapedQuoteNotEqual = escapeChar !== quoteChar

  let newlineCharLength = length(newlineChar)
  // Single-character quote/escape/delimiter are assumed throughout.
  const delimiterCharLength = 1
  const quoteCharLength = 1
  const escapeCharLength = 1
  const escapedQuoteLength = 2

  let chunk, chunkLength, cursor, row, enqueue
  let partialLine = ''
  let idx = 0

  // Emit a completed row: the first row may become the header; with a header,
  // rows are re-shaped into objects and length mismatches are reported.
  const enqueueRow = (fields) => {
    idx += 1
    if (header === true) {
      header = fields
      headerLength = length(header)
      return
    }
    let data = fields
    if (headerLength) {
      const rowLength = length(fields)
      if (headerLength !== rowLength) {
        if (errorOnFieldsMismatch) {
          if (headerLength < rowLength) {
            enqueueError(
              'FieldsMismatchTooMany',
              `Too many fields were parsed, expected ${headerLength}.`
            )
          } else if (rowLength < headerLength) {
            enqueueError(
              'FieldsMismatchTooFew',
              `Too few fields were parsed, expected ${headerLength}.`
            )
          }
        }
        return
      } else {
        data = {}
        for (let i = 0; i < rowLength; i++) {
          data[header[i]] = fields[i]
        }
      }
    }
    enqueue({ idx, data })
  }

  const enqueueError = (code, message) => {
    enqueue({ idx, err: { code, message } })
  }

  const findNext = (searchValue, start = cursor) => chunk.indexOf(searchValue, start)
  const parseField = (end) => chunk.substring(cursor, end)
  // Empty fields ('' is falsy) are replaced by emptyFieldValue before coercion.
  const transformField = (field, fieldIdx) => coerceField(field || emptyFieldValue, fieldIdx)
  const addFieldToRow = (field, fieldIdx) => {
    row.push(transformField(field, fieldIdx))
  }

  // Skip (and optionally report) empty lines and comment lines at the cursor.
  const checkForEmptyLine = () => {
    if (findNext(newlineChar) === cursor) {
      idx += 1
      cursor += newlineCharLength
      if (errorOnEmptyLine) {
        enqueueError('EmptyLineExists', 'Empty line detected.')
      }
      return checkForEmptyLine()
    } else if (commentPrefixValue && findNext(commentPrefixValue) === cursor) {
      idx += 1
      cursor = findNext(newlineChar) + newlineCharLength
      if (errorOnComment) {
        enqueueError('CommentExists', 'Comment detected.')
      }
      return checkForEmptyLine()
    }
  }

  const chunkParse = (string, controller, flush = false) => {
    chunk = string
    chunkLength = length(chunk)
    enqueue = controller.enqueue
    partialLine = ''
    cursor = 0
    row = []

    // Auto-detect newline/delimiter from the first 1KB when not configured.
    if (!newlineChar) {
      newlineChar = detectChar(chunk.substring(0, 1024), newlineDetectPattern)
      newlineCharLength = length(newlineChar)
    }
    delimiterChar ||= detectChar(chunk.substring(0, 1024), delimiterDetectPattern)

    checkForEmptyLine()
    let lineStart = 0
    for (;;) {
      let quoted
      let nextCursor = cursor
      let nextCursorLength
      let atNewline
      if (chunk[cursor] === quoteChar) {
        cursor += quoteCharLength
        quoted = true
        nextCursor = cursor
        // Scan for the closing quote, skipping escaped quotes.
        for (;;) {
          nextCursor = findNext(quoteChar, nextCursor)
          if (nextCursor < 0) {
            // Unterminated quote: carry the partial line into the next chunk.
            partialLine = chunk.substring(lineStart, chunkLength) + partialLine
            if (flush) {
              throw new Error('QuotedFieldMalformed', { cause: idx })
            }
            return
          }
          if (escapedQuoteEqual && chunk[nextCursor + quoteCharLength] === quoteChar) {
            nextCursor += escapedQuoteLength
            continue
          }
          if (escapedQuoteNotEqual && chunk[nextCursor - escapeCharLength] === escapeChar) {
            nextCursor += quoteCharLength
            continue
          }
          break
        }
      }

      const nextDelimiter = findNext(delimiterChar, nextCursor)
      let nextNewline = findNext(newlineChar, nextCursor)
      if (nextNewline < 0) {
        if (!flush) {
          // Row continues into the next chunk.
          partialLine = chunk.substring(lineStart, chunkLength) + partialLine
          return
        }
        nextNewline = chunkLength
      }
      if (nextDelimiter > -1 && nextDelimiter < nextNewline) {
        nextCursor = nextDelimiter
        nextCursorLength = delimiterCharLength
      } else {
        nextCursor = nextNewline
        nextCursorLength = newlineCharLength
        atNewline = true
      }

      if (nextCursor < 0 || !nextCursor) {
        break
      }

      let field
      if (quoted) {
        field = parseField(nextCursor - 1).replace(escapedQuotePattern, quoteChar)
      } else {
        field = parseField(nextCursor)
      }
      addFieldToRow(field, row.length)

      cursor = nextCursor + nextCursorLength

      if (atNewline) {
        enqueueRow(row)
        row = []
        checkForEmptyLine()
        lineStart = cursor
      }
      if (chunkLength <= cursor) {
        break
      }
    }
  }

  return {
    chunkParse,
    header: () => header,
    previousChunk: () => partialLine
  }
}

/**
 * Detect the most plausible special character (newline or delimiter) in a
 * chunk. Any candidate seen more than 5 times wins immediately.
 * NOTE(review): the fallback sorts counts ascending and takes the first entry;
 * this matches both shipped revisions, so it is preserved as-is.
 *
 * @throws {Error} 'UnknownDetectChar' when no candidate occurs at all
 *   (previously crashed with a TypeError while destructuring `undefined`).
 */
export const detectChar = (chunk, pattern) => {
  let match
  const counts = {}
  while ((match = pattern.exec(chunk))) {
    const char = match[0]
    counts[char] ??= 0
    counts[char] += 1
    if (counts[char] > 5) return char
  }
  const { key } =
    Object.keys(counts)
      .map((key) => ({ key, value: counts[key] }))
      .sort((a, b) => a.value - b.value)?.[0] ?? {}
  if (!key) {
    throw new Error('UnknownDetectChar', { cause: { pattern, chunk } })
  }
  return key
}

// Field coercion helpers usable as `coerceField` building blocks.
// Each returns the coerced value, or the original string when it doesn't match.
export const coerceTo = {
  string: (field) => field,
  boolean: (field) => {
    const boolean = coerceTo.true(field)
    return typeof boolean === 'boolean' ? boolean : coerceTo.false(field)
  },
  integer: (field) => Number.parseInt(field, 10) || field,
  decimal: (field) => Number.parseFloat(field) || field,
  json: (field) => {
    try {
      return JSON.parse(field)
    } catch {
      return field
    }
  },
  timestamp: (field) => {
    const date = new Date(field)
    return date.toString() !== 'Invalid Date' ? date : field
  },
  true: (field) => (field.toLowerCase() === 'true' ? true : field),
  false: (field) => (field.toLowerCase() === 'false' ? false : field),
  null: (field) => (field.toLowerCase() === 'null' ? null : field)
}

/**
 * One-shot parse of a whole CSV string, processed in large chunks.
 * Returns the array of parsed rows unless `enableReturn` is set false.
 */
const parseDefault = (input, opts) => {
  const options = {
    ...defaultOptions,
    enableReturn: true,
    chunkSize: 64 * 1024 * 1024, // chunkSize must exceed the largest expected row
    enqueue: () => {},
    ...opts
  }
  const { chunkSize, enableReturn, enqueue } = options
  const { chunkParse, previousChunk } = parse(options)

  const res = []
  const controller = { enqueue }
  if (enableReturn) {
    controller.enqueue = (record) => {
      enqueue(record)
      res.push(record.data)
    }
  }

  let position = 0
  while (position < input.length) {
    const chunk = previousChunk() + input.substring(position, position + chunkSize)
    chunkParse(chunk, controller)
    position += chunkSize
  }
  // Flush the remaining partial line.
  chunkParse(previousChunk(), controller, true)

  return enableReturn && res
}
export default parseDefault
// ESM build of csv-rex/parse: streaming-friendly CSV parsing with
// newline/delimiter auto-detection, quoted-field handling, and optional
// field coercion. Behaviorally identical to the minified bundle it replaces.

// Defaults for both the incremental parser and the one-shot default export.
const defaultOptions = {
  header: true, // false: rows as arrays; true: first row becomes the header; [...]: predefined header
  newlineChar: '', // '': auto-detect from the first chunk ('\r\n', '\n' or '\r')
  delimiterChar: '', // '': auto-detect from the first chunk
  quoteChar: '"',
  // escapeChar defaults to quoteChar (set inside parse())
  emptyFieldValue: '',
  coerceField: (field) => field,
  commentPrefixValue: false, // falsy: disabled; e.g. '//': treat such lines as comments
  errorOnComment: true,
  errorOnEmptyLine: true,
  errorOnFieldsMismatch: true
}

const length = (value) => value.length
// Escape a literal string so it can be embedded in a RegExp source.
const escapeRegExp = (string) => string.replace(/[\\^$*+?.()|[\]{}]/g, '\\$&')

/**
 * Create an incremental (chunk-by-chunk) CSV parser.
 *
 * @param {object} [opts] overrides for `defaultOptions`
 * @returns {{chunkParse: Function, header: Function, previousChunk: Function}}
 *   `chunkParse(string, controller, flush)` parses one chunk, emitting
 *   `{idx, data}` / `{idx, err}` records via `controller.enqueue`;
 *   `previousChunk()` returns the unparsed tail to prepend to the next chunk.
 * @throws {Error} 'QuotedFieldMalformed' when flushing with an unterminated quote.
 */
export const parse = (opts = {}) => {
  const options = { ...defaultOptions, ...opts }
  options.escapeChar ??= options.quoteChar

  let { header, newlineChar, delimiterChar } = options
  const {
    quoteChar,
    escapeChar,
    commentPrefixValue,
    emptyFieldValue,
    coerceField,
    errorOnEmptyLine,
    errorOnComment,
    errorOnFieldsMismatch
  } = options

  let headerLength = length(header)
  const delimiterDetectPattern = /,|\t|\||;|\x1E|\x1F/g // eslint-disable-line no-control-regex
  const newlineDetectPattern = /\r\n|\n|\r/g

  const escapedQuote = escapeChar + quoteChar
  const escapedQuotePattern = new RegExp(`${escapeRegExp(escapedQuote)}`, 'g')
  const escapedQuoteEqual = escapeChar === quoteChar
  const escapedQuoteNotEqual = escapeChar !== quoteChar

  let newlineCharLength = length(newlineChar)
  // Single-character quote/escape/delimiter are assumed throughout.
  const delimiterCharLength = 1
  const quoteCharLength = 1
  const escapeCharLength = 1
  const escapedQuoteLength = 2

  let chunk, chunkLength, cursor, row, enqueue
  let partialLine = ''
  let idx = 0

  // Emit a completed row: the first row may become the header; with a header,
  // rows are re-shaped into objects and length mismatches are reported.
  const enqueueRow = (fields) => {
    idx += 1
    if (header === true) {
      header = fields
      headerLength = length(header)
      return
    }
    let data = fields
    if (headerLength) {
      const rowLength = length(fields)
      if (headerLength !== rowLength) {
        if (errorOnFieldsMismatch) {
          if (headerLength < rowLength) {
            enqueueError(
              'FieldsMismatchTooMany',
              `Too many fields were parsed, expected ${headerLength}.`
            )
          } else if (rowLength < headerLength) {
            enqueueError(
              'FieldsMismatchTooFew',
              `Too few fields were parsed, expected ${headerLength}.`
            )
          }
        }
        return
      } else {
        data = {}
        for (let i = 0; i < rowLength; i++) {
          data[header[i]] = fields[i]
        }
      }
    }
    enqueue({ idx, data })
  }

  const enqueueError = (code, message) => {
    enqueue({ idx, err: { code, message } })
  }

  const findNext = (searchValue, start = cursor) => chunk.indexOf(searchValue, start)
  const parseField = (end) => chunk.substring(cursor, end)
  // Empty fields ('' is falsy) are replaced by emptyFieldValue before coercion.
  const transformField = (field, fieldIdx) => coerceField(field || emptyFieldValue, fieldIdx)
  const addFieldToRow = (field, fieldIdx) => {
    row.push(transformField(field, fieldIdx))
  }

  // Skip (and optionally report) empty lines and comment lines at the cursor.
  const checkForEmptyLine = () => {
    if (findNext(newlineChar) === cursor) {
      idx += 1
      cursor += newlineCharLength
      if (errorOnEmptyLine) {
        enqueueError('EmptyLineExists', 'Empty line detected.')
      }
      return checkForEmptyLine()
    } else if (commentPrefixValue && findNext(commentPrefixValue) === cursor) {
      idx += 1
      cursor = findNext(newlineChar) + newlineCharLength
      if (errorOnComment) {
        enqueueError('CommentExists', 'Comment detected.')
      }
      return checkForEmptyLine()
    }
  }

  const chunkParse = (string, controller, flush = false) => {
    chunk = string
    chunkLength = length(chunk)
    enqueue = controller.enqueue
    partialLine = ''
    cursor = 0
    row = []

    // Auto-detect newline/delimiter from the first 1KB when not configured.
    if (!newlineChar) {
      newlineChar = detectChar(chunk.substring(0, 1024), newlineDetectPattern)
      newlineCharLength = length(newlineChar)
    }
    delimiterChar ||= detectChar(chunk.substring(0, 1024), delimiterDetectPattern)

    checkForEmptyLine()
    let lineStart = 0
    for (;;) {
      let quoted
      let nextCursor = cursor
      let nextCursorLength
      let atNewline
      if (chunk[cursor] === quoteChar) {
        cursor += quoteCharLength
        quoted = true
        nextCursor = cursor
        // Scan for the closing quote, skipping escaped quotes.
        for (;;) {
          nextCursor = findNext(quoteChar, nextCursor)
          if (nextCursor < 0) {
            // Unterminated quote: carry the partial line into the next chunk.
            partialLine = chunk.substring(lineStart, chunkLength) + partialLine
            if (flush) {
              throw new Error('QuotedFieldMalformed', { cause: idx })
            }
            return
          }
          if (escapedQuoteEqual && chunk[nextCursor + quoteCharLength] === quoteChar) {
            nextCursor += escapedQuoteLength
            continue
          }
          if (escapedQuoteNotEqual && chunk[nextCursor - escapeCharLength] === escapeChar) {
            nextCursor += quoteCharLength
            continue
          }
          break
        }
      }

      const nextDelimiter = findNext(delimiterChar, nextCursor)
      let nextNewline = findNext(newlineChar, nextCursor)
      if (nextNewline < 0) {
        if (!flush) {
          // Row continues into the next chunk.
          partialLine = chunk.substring(lineStart, chunkLength) + partialLine
          return
        }
        nextNewline = chunkLength
      }
      if (nextDelimiter > -1 && nextDelimiter < nextNewline) {
        nextCursor = nextDelimiter
        nextCursorLength = delimiterCharLength
      } else {
        nextCursor = nextNewline
        nextCursorLength = newlineCharLength
        atNewline = true
      }

      if (nextCursor < 0 || !nextCursor) {
        break
      }

      let field
      if (quoted) {
        field = parseField(nextCursor - 1).replace(escapedQuotePattern, quoteChar)
      } else {
        field = parseField(nextCursor)
      }
      addFieldToRow(field, row.length)

      cursor = nextCursor + nextCursorLength

      if (atNewline) {
        enqueueRow(row)
        row = []
        checkForEmptyLine()
        lineStart = cursor
      }
      if (chunkLength <= cursor) {
        break
      }
    }
  }

  return {
    chunkParse,
    header: () => header,
    previousChunk: () => partialLine
  }
}

/**
 * Detect the most plausible special character (newline or delimiter) in a
 * chunk. Any candidate seen more than 5 times wins immediately.
 * NOTE(review): the fallback sorts counts ascending and takes the first entry;
 * this matches the shipped bundle, so it is preserved as-is.
 *
 * @throws {Error} 'UnknownDetectChar' when no candidate occurs at all.
 */
export const detectChar = (chunk, pattern) => {
  let match
  const counts = {}
  while ((match = pattern.exec(chunk))) {
    const char = match[0]
    counts[char] ??= 0
    counts[char] += 1
    if (counts[char] > 5) return char
  }
  const { key } =
    Object.keys(counts)
      .map((key) => ({ key, value: counts[key] }))
      .sort((a, b) => a.value - b.value)?.[0] ?? {}
  if (!key) {
    throw new Error('UnknownDetectChar', { cause: { pattern, chunk } })
  }
  return key
}

// Field coercion helpers usable as `coerceField` building blocks.
// Each returns the coerced value, or the original string when it doesn't match.
export const coerceTo = {
  string: (field) => field,
  boolean: (field) => {
    const boolean = coerceTo.true(field)
    return typeof boolean === 'boolean' ? boolean : coerceTo.false(field)
  },
  true: (field) => (field.toLowerCase() === 'true' ? true : field),
  false: (field) => (field.toLowerCase() === 'false' ? false : field),
  number: (field) => {
    const decimal = coerceTo.decimal(field)
    return Number.isInteger(decimal) ? coerceTo.integer(field) : decimal
  },
  integer: (field) => Number.parseInt(field, 10) || field,
  decimal: (field) => Number.parseFloat(field) || field,
  json: (field) => {
    try {
      return JSON.parse(field)
    } catch {
      return field
    }
  },
  timestamp: (field) => {
    const date = new Date(field)
    return date.toString() !== 'Invalid Date' ? date : field
  },
  null: (field) => (field.toLowerCase() === 'null' ? null : field),
  any: (field) => field
}

/**
 * One-shot parse of a whole CSV string, processed in large chunks.
 * Returns the array of parsed rows unless `enableReturn` is set false.
 */
const parseDefault = (input, opts) => {
  const options = {
    ...defaultOptions,
    enableReturn: true,
    chunkSize: 64 * 1024 * 1024, // chunkSize must exceed the largest expected row
    enqueue: () => {},
    ...opts
  }
  const { chunkSize, enableReturn, enqueue } = options
  const { chunkParse, previousChunk } = parse(options)

  const res = []
  const controller = { enqueue }
  if (enableReturn) {
    controller.enqueue = (record) => {
      enqueue(record)
      res.push(record.data)
    }
  }

  let position = 0
  while (position < input.length) {
    const chunk = previousChunk() + input.substring(position, position + chunkSize)
    chunkParse(chunk, controller)
    position += chunkSize
  }
  // Flush the remaining partial line.
  chunkParse(previousChunk(), controller, true)

  return enableReturn && res
}
export default parseDefault
+3
-3
| { | ||
| "version": 3, | ||
| "sources": ["parse.js"], | ||
| "sourcesContent": ["// chunkSize >> largest expected row\nconst defaultOptions = {\n header: true, // false: return array; true: detect headers and return json; [...]: use defined headers and return json\n newlineChar: '', // '': detect newline from chunk; '\\r\\n': Windows; '\\n': Linux/Mac\n delimiterChar: '', // '': detect delimiter from chunk\n quoteChar: '\"',\n // escapeChar: '\"', // default: `quoteChar`\n\n // Parse\n emptyFieldValue: '',\n coerceField: (field) => field, // TODO tests\n commentPrefixValue: false, // falsy: disable, '//': enabled\n errorOnComment: true,\n errorOnEmptyLine: true,\n errorOnFieldsMismatch: true\n // errorOnFieldMalformed: true\n}\n\nconst length = (value) => value.length\nconst escapeRegExp = (string) => string.replace(/[\\\\^$*+?.()|[\\]{}]/g, '\\\\$&') // https://github.com/tc39/proposal-regex-escaping\n\nexport const parse = (opts = {}) => {\n const options = { ...defaultOptions, ...opts }\n options.escapeChar ??= options.quoteChar\n\n let { header, newlineChar, delimiterChar } = options\n const {\n quoteChar,\n escapeChar,\n commentPrefixValue,\n emptyFieldValue,\n coerceField,\n errorOnEmptyLine,\n errorOnComment,\n errorOnFieldsMismatch\n // errorOnFieldMalformed\n } = options\n let headerLength = length(header)\n const detectDelimiterCharRegExp = /,|\\t|\\||;|\\x1E|\\x1F/g // eslint-disable-line no-control-regex\n const detectNewlineCharRegExp = /\\r\\n|\\n|\\r/g\n\n const escapedQuoteChar = escapeChar + quoteChar\n const escapedQuoteCharRegExp = new RegExp(\n `${escapeRegExp(escapedQuoteChar)}`,\n 'g'\n )\n\n const escapedQuoteEqual = escapeChar === quoteChar\n const escapedQuoteNotEqual = escapeChar !== quoteChar\n\n let newlineCharLength = length(newlineChar)\n const delimiterCharLength = 1 // length(delimiterChar)\n const quoteCharLength = 1 // length(quoteChar)\n const escapeCharLength = 1 // length(escapeChar)\n const escapedQuoteCharLength = 2 // length(escapedQuoteChar)\n // const commentPrefixValueLength = 
length(commentPrefixValue)\n\n let chunk, chunkLength, cursor, row, enqueue\n let partialLine = ''\n let idx = 0\n const enqueueRow = (row) => {\n idx += 1\n if (header === true) {\n header = row\n headerLength = length(header)\n return\n }\n let data = row\n if (headerLength) {\n const rowLength = length(row)\n\n if (headerLength !== rowLength) {\n if (errorOnFieldsMismatch) {\n // enqueueError('FieldsMismatch', `Parsed ${rowLength} fields, expected ${headerLength}.`)\n if (headerLength < rowLength) {\n enqueueError(\n 'FieldsMismatchTooMany',\n `Too many fields were parsed, expected ${headerLength}.`\n )\n } else if (rowLength < headerLength) {\n enqueueError(\n 'FieldsMismatchTooFew',\n `Too few fields were parsed, expected ${headerLength}.`\n )\n }\n }\n return\n } else {\n data = {}\n for (let i = 0; i < rowLength; i++) {\n data[header[i]] = row[i]\n }\n }\n }\n enqueue({ idx, data })\n }\n\n const enqueueError = (code, message) => {\n enqueue({ idx, err: { code, message } })\n }\n\n const findNext = (searchValue, start = cursor) => {\n return chunk.indexOf(searchValue, start)\n }\n\n const parseField = (end) => {\n return chunk.substring(cursor, end)\n }\n const transformField = (field, idx) => {\n return coerceField(field || emptyFieldValue, idx)\n }\n\n // TODO idea: when header == true/array using a different addFieldToRow function to allow faster key:value mapping\n // const resetRow = () => {\n // row = []\n // }\n const addFieldToRow = (field, idx) => {\n row.push(transformField(field, idx))\n }\n\n const checkForEmptyLine = () => {\n if (findNext(newlineChar) === cursor) {\n idx += 1\n cursor += newlineCharLength\n if (errorOnEmptyLine) {\n enqueueError('EmptyLineExists', 'Empty line detected.')\n }\n return checkForEmptyLine()\n } else if (commentPrefixValue && findNext(commentPrefixValue) === cursor) {\n idx += 1\n cursor = findNext(newlineChar) + newlineCharLength\n if (errorOnComment) {\n enqueueError('CommentExists', 'Comment detected.')\n }\n 
return checkForEmptyLine()\n }\n }\n\n const chunkParse = (string, controller, flush = false) => {\n chunk = string\n chunkLength = length(chunk)\n enqueue = controller.enqueue\n partialLine = ''\n cursor = 0\n row = [] // resetRow()\n\n // auto-detect\n if (!newlineChar) {\n newlineChar = detectChar(\n chunk.substring(0, 1024),\n detectNewlineCharRegExp\n )\n newlineCharLength = length(newlineChar)\n }\n delimiterChar ||= detectChar(\n chunk.substring(0, 1024),\n detectDelimiterCharRegExp\n )\n\n checkForEmptyLine()\n let lineStart = 0\n for (;;) {\n let quoted\n let nextCursor = cursor\n let nextCursorLength\n let atNewline\n if (chunk[cursor] === quoteChar) {\n cursor += quoteCharLength\n quoted = true\n nextCursor = cursor\n for (;;) {\n nextCursor = findNext(quoteChar, nextCursor)\n if (nextCursor < 0) {\n partialLine = chunk.substring(lineStart, chunkLength) + partialLine\n if (flush) {\n throw new Error('QuotedFieldMalformed', { cause: idx })\n }\n return\n }\n if (\n escapedQuoteEqual &&\n chunk[nextCursor + quoteCharLength] === quoteChar\n ) {\n nextCursor += escapedQuoteCharLength\n continue\n }\n if (\n escapedQuoteNotEqual &&\n chunk[nextCursor - escapeCharLength] === escapeChar\n ) {\n nextCursor += quoteCharLength\n continue\n }\n break\n }\n }\n\n // fallback\n const nextDelimiterChar = findNext(delimiterChar, nextCursor)\n let nextNewlineChar = findNext(newlineChar, nextCursor)\n if (nextNewlineChar < 0) {\n if (!flush) {\n partialLine = chunk.substring(lineStart, chunkLength) + partialLine\n return\n }\n nextNewlineChar = chunkLength\n }\n if (nextDelimiterChar > -1 && nextDelimiterChar < nextNewlineChar) {\n nextCursor = nextDelimiterChar\n nextCursorLength = delimiterCharLength\n } else {\n nextCursor = nextNewlineChar\n nextCursorLength = newlineCharLength\n atNewline = true\n }\n\n if (nextCursor < 0 || !nextCursor) {\n break\n }\n\n let field\n if (quoted) {\n field = parseField(nextCursor - 1).replace(\n escapedQuoteCharRegExp,\n quoteChar\n 
)\n } else {\n field = parseField(nextCursor)\n }\n addFieldToRow(field, row.length)\n\n cursor = nextCursor + nextCursorLength\n\n if (atNewline) {\n enqueueRow(row)\n row = [] // resetRow()\n checkForEmptyLine()\n lineStart = cursor\n }\n if (chunkLength <= cursor) {\n break\n }\n }\n }\n\n return {\n chunkParse,\n header: () => header,\n previousChunk: () => partialLine\n }\n}\n\nexport const detectChar = (chunk, pattern) => {\n let match\n const chars = {}\n while ((match = pattern.exec(chunk))) {\n const char = match[0]\n chars[char] ??= 0\n chars[char] += 1\n if (chars[char] > 5) return char\n }\n // pattern.lastIndex = 0 // not reused again\n const { key } = Object.keys(chars)\n .map((key) => ({ key, value: chars[key] }))\n .sort((a, b) => a.value - b.value)[0]\n return key\n}\n\nexport const coerceTo = {\n string: (field) => field,\n boolean: (field) => {\n const boolean = coerceTo.true(field)\n return typeof boolean === 'boolean' ? boolean : coerceTo.false(field)\n },\n integer: (field) => Number.parseInt(field, 10) || field,\n decimal: (field) => Number.parseFloat(field) || field,\n json: (field) => {\n try {\n return JSON.parse(field)\n } catch (e) {\n return field\n }\n },\n timestamp: (field) => {\n const date = new Date(field)\n return date.toString() !== 'Invalid Date' ? date : field\n },\n true: (field) => (field.toLowerCase() === 'true' ? true : field),\n false: (field) => (field.toLowerCase() === 'false' ? false : field),\n null: (field) => (field.toLowerCase() === 'null' ? 
null : field)\n}\n\nexport default (input, opts) => {\n const options = {\n ...defaultOptions,\n ...{\n enableReturn: true,\n chunkSize: 64 * 1024 * 1024,\n enqueue: () => {}\n },\n ...opts\n }\n const { chunkSize, enableReturn, enqueue } = options\n const { chunkParse, previousChunk } = parse(options)\n\n const res = []\n const controller = { enqueue }\n\n if (enableReturn) {\n controller.enqueue = (row) => {\n enqueue(row)\n res.push(row.data)\n }\n }\n\n let position = 0\n while (position < input.length) {\n const chunk =\n previousChunk() + input.substring(position, position + chunkSize)\n\n // Checking if you can use fastParse slows it down more than checking for quoteChar on ever field.\n chunkParse(chunk, controller)\n position += chunkSize\n }\n // flush\n const chunk = previousChunk()\n chunkParse(chunk, controller, true)\n\n return enableReturn && res\n}\n"], | ||
| "mappings": "AACA,GAAM,GAAiB,CACrB,OAAQ,GACR,YAAa,GACb,cAAe,GACf,UAAW,IAIX,gBAAiB,GACjB,YAAa,AAAC,GAAU,EACxB,mBAAoB,GACpB,eAAgB,GAChB,iBAAkB,GAClB,sBAAuB,EAEzB,EAEM,EAAS,AAAC,GAAU,EAAM,OAC1B,GAAe,AAAC,GAAW,EAAO,QAAQ,sBAAuB,MAAM,EAEhE,GAAQ,CAAC,EAAO,CAAC,IAAM,CAClC,GAAM,GAAU,CAAE,GAAG,EAAgB,GAAG,CAAK,EAC7C,EAAQ,aAAe,EAAQ,UAE/B,GAAI,CAAE,SAAQ,cAAa,iBAAkB,EACvC,CACJ,YACA,aACA,qBACA,kBACA,cACA,mBACA,iBACA,yBAEE,EACA,EAAe,EAAO,CAAM,EAC1B,EAA4B,uBAC5B,EAA0B,cAE1B,EAAmB,EAAa,EAChC,EAAyB,GAAI,QACjC,GAAG,GAAa,CAAgB,IAChC,GACF,EAEM,EAAoB,IAAe,EACnC,EAAuB,IAAe,EAExC,EAAoB,EAAO,CAAW,EACpC,EAAsB,EACtB,EAAkB,EAClB,EAAmB,EACnB,EAAyB,EAG3B,EAAO,EAAa,EAAQ,EAAK,EACjC,EAAc,GACd,EAAM,EACJ,EAAa,AAAC,GAAQ,CAE1B,GADA,GAAO,EACH,IAAW,GAAM,CACnB,EAAS,EACT,EAAe,EAAO,CAAM,EAC5B,MACF,CACA,GAAI,GAAO,EACX,GAAI,EAAc,CAChB,GAAM,GAAY,EAAO,CAAG,EAE5B,GAAI,IAAiB,EAAW,CAC9B,AAAI,GAEF,CAAI,EAAe,EACjB,EACE,wBACA,yCAAyC,IAC3C,EACS,EAAY,GACrB,EACE,uBACA,wCAAwC,IAC1C,GAGJ,MACF,KAAO,CACL,EAAO,CAAC,EACR,OAAS,GAAI,EAAG,EAAI,EAAW,IAC7B,EAAK,EAAO,IAAM,EAAI,EAE1B,CACF,CACA,EAAQ,CAAE,MAAK,MAAK,CAAC,CACvB,EAEM,EAAe,CAAC,EAAM,IAAY,CACtC,EAAQ,CAAE,MAAK,IAAK,CAAE,OAAM,SAAQ,CAAE,CAAC,CACzC,EAEM,EAAW,CAAC,EAAa,EAAQ,IAC9B,EAAM,QAAQ,EAAa,CAAK,EAGnC,EAAa,AAAC,GACX,EAAM,UAAU,EAAQ,CAAG,EAE9B,EAAiB,CAAC,EAAO,IACtB,EAAY,GAAS,EAAiB,CAAG,EAO5C,EAAgB,CAAC,EAAO,IAAQ,CACpC,EAAI,KAAK,EAAe,EAAO,CAAG,CAAC,CACrC,EAEM,EAAoB,IAAM,CAC9B,GAAI,EAAS,CAAW,IAAM,EAC5B,UAAO,EACP,GAAU,EACN,GACF,EAAa,kBAAmB,sBAAsB,EAEjD,EAAkB,EACpB,GAAI,GAAsB,EAAS,CAAkB,IAAM,EAChE,UAAO,EACP,EAAS,EAAS,CAAW,EAAI,EAC7B,GACF,EAAa,gBAAiB,mBAAmB,EAE5C,EAAkB,CAE7B,EA6GA,MAAO,CACL,WA5GiB,CAAC,EAAQ,EAAY,EAAQ,KAAU,CACxD,EAAQ,EACR,EAAc,EAAO,CAAK,EAC1B,EAAU,EAAW,QACrB,EAAc,GACd,EAAS,EACT,EAAM,CAAC,EAGF,GACH,GAAc,EACZ,EAAM,UAAU,EAAG,IAAI,EACvB,CACF,EACA,EAAoB,EAAO,CAAW,GAExC,IAAkB,EAChB,EAAM,UAAU,EAAG,IAAI,EACvB,CACF,EAEA,EAAkB,EAClB,GAAI,GAAY,EAChB,OAAS,CACP,GAAI,GACA,EAAa,EACb,EACA,EACJ,GAAI,EAAM,KAAY,EAIpB,IAHA,GAAU,EACV,EAAS,GACT,EAAa,IACJ,CAEP,GADA,EAAa,EAAS,EAAW,CAAU,E
ACvC,EAAa,EAAG,CAElB,GADA,EAAc,EAAM,UAAU,EAAW,CAAW,EAAI,EACpD,EACF,KAAM,IAAI,OAAM,uBAAwB,CAAE,MAAO,CAAI,CAAC,EAExD,MACF,CACA,GACE,GACA,EAAM,EAAa,KAAqB,EACxC,CACA,GAAc,EACd,QACF,CACA,GACE,GACA,EAAM,EAAa,KAAsB,EACzC,CACA,GAAc,EACd,QACF,CACA,KACF,CAIF,GAAM,GAAoB,EAAS,EAAe,CAAU,EACxD,EAAkB,EAAS,EAAa,CAAU,EACtD,GAAI,EAAkB,EAAG,CACvB,GAAI,CAAC,EAAO,CACV,EAAc,EAAM,UAAU,EAAW,CAAW,EAAI,EACxD,MACF,CACA,EAAkB,CACpB,CAUA,GATA,AAAI,EAAoB,IAAM,EAAoB,EAChD,GAAa,EACb,EAAmB,GAEnB,GAAa,EACb,EAAmB,EACnB,EAAY,IAGV,EAAa,GAAK,CAAC,EACrB,MAGF,GAAI,GAmBJ,GAlBA,AAAI,EACF,EAAQ,EAAW,EAAa,CAAC,EAAE,QACjC,EACA,CACF,EAEA,EAAQ,EAAW,CAAU,EAE/B,EAAc,EAAO,EAAI,MAAM,EAE/B,EAAS,EAAa,EAElB,GACF,GAAW,CAAG,EACd,EAAM,CAAC,EACP,EAAkB,EAClB,EAAY,GAEV,GAAe,EACjB,KAEJ,CACF,EAIE,OAAQ,IAAM,EACd,cAAe,IAAM,CACvB,CACF,EAEa,EAAa,CAAC,EAAO,IAAY,CAC5C,GAAI,GACE,EAAQ,CAAC,EACf,KAAQ,EAAQ,EAAQ,KAAK,CAAK,GAAI,CACpC,GAAM,GAAO,EAAM,GAGnB,GAFA,EAAM,KAAU,EAChB,EAAM,IAAS,EACX,EAAM,GAAQ,EAAG,MAAO,EAC9B,CAEA,GAAM,CAAE,OAAQ,OAAO,KAAK,CAAK,EAC9B,IAAI,AAAC,GAAS,EAAE,MAAK,MAAO,EAAM,EAAK,EAAE,EACzC,KAAK,CAAC,EAAG,IAAM,EAAE,MAAQ,EAAE,KAAK,EAAE,GACrC,MAAO,EACT,EAEa,EAAW,CACtB,OAAQ,AAAC,GAAU,EACnB,QAAS,AAAC,GAAU,CAClB,GAAM,GAAU,EAAS,KAAK,CAAK,EACnC,MAAO,OAAO,IAAY,UAAY,EAAU,EAAS,MAAM,CAAK,CACtE,EACA,QAAS,AAAC,GAAU,OAAO,SAAS,EAAO,EAAE,GAAK,EAClD,QAAS,AAAC,GAAU,OAAO,WAAW,CAAK,GAAK,EAChD,KAAM,AAAC,GAAU,CACf,GAAI,CACF,MAAO,MAAK,MAAM,CAAK,CACzB,MAAE,CACA,MAAO,EACT,CACF,EACA,UAAW,AAAC,GAAU,CACpB,GAAM,GAAO,GAAI,MAAK,CAAK,EAC3B,MAAO,GAAK,SAAS,IAAM,eAAiB,EAAO,CACrD,EACA,KAAM,AAAC,GAAW,EAAM,YAAY,IAAM,OAAS,GAAO,EAC1D,MAAO,AAAC,GAAW,EAAM,YAAY,IAAM,QAAU,GAAQ,EAC7D,KAAM,AAAC,GAAW,EAAM,YAAY,IAAM,OAAS,KAAO,CAC5D,EAEO,GAAQ,CAAC,EAAO,IAAS,CAC9B,GAAM,GAAU,CACd,GAAG,EAED,aAAc,GACd,UAAW,SACX,QAAS,IAAM,CAAC,EAElB,GAAG,CACL,EACM,CAAE,YAAW,eAAc,WAAY,EACvC,CAAE,aAAY,iBAAkB,GAAM,CAAO,EAE7C,EAAM,CAAC,EACP,EAAa,CAAE,SAAQ,EAE7B,AAAI,GACF,GAAW,QAAU,AAAC,GAAQ,CAC5B,EAAQ,CAAG,EACX,EAAI,KAAK,EAAI,IAAI,CACnB,GAGF,GAAI,GAAW,EACf,KAAO,EAAW,EAAM,QAAQ,CAC9B,GAAM,GACJ,EAAc,E
AAI,EAAM,UAAU,EAAU,EAAW,CAAS,EAGlE,EAAW,EAAO,CAAU,EAC5B,GAAY,CACd,CAEA,GAAM,GAAQ,EAAc,EAC5B,SAAW,EAAO,EAAY,EAAI,EAE3B,GAAgB,CACzB", | ||
| "names": [] | ||
| "sourcesContent": ["// chunkSize >> largest expected row\nconst defaultOptions = {\n header: true, // false: return array; true: detect headers and return json; [...]: use defined headers and return json\n // TODO add in columns\n // TODO add in output format array/object\n // output: 'array', // 'array' / 'object'\n newlineChar: '', // '': detect newline from chunk; '\\r\\n': Windows; '\\n': Linux/Mac\n delimiterChar: '', // '': detect delimiter from chunk\n quoteChar: '\"',\n // escapeChar: '\"', // default: `quoteChar`\n\n // Parse\n emptyFieldValue: '',\n // TODO option to remove empty fields from object\n coerceField: (field) => field, // TODO tests\n commentPrefixValue: false, // falsy: disable, '//': enabled\n errorOnComment: true,\n errorOnEmptyLine: true,\n errorOnFieldsMismatch: true\n // errorOnFieldMalformed: true\n}\n\nconst length = (value) => value.length\nconst escapeRegExp = (string) => string.replace(/[\\\\^$*+?.()|[\\]{}]/g, '\\\\$&') // https://github.com/tc39/proposal-regex-escaping\n\nexport const parse = (opts = {}) => {\n const options = { ...defaultOptions, ...opts }\n options.escapeChar ??= options.quoteChar\n\n let { header, newlineChar, delimiterChar } = options\n const {\n quoteChar,\n escapeChar,\n commentPrefixValue,\n emptyFieldValue,\n coerceField,\n errorOnEmptyLine,\n errorOnComment,\n errorOnFieldsMismatch\n // errorOnFieldMalformed\n } = options\n let headerLength = length(header)\n const detectDelimiterCharRegExp = /,|\\t|\\||;|\\x1E|\\x1F/g // eslint-disable-line no-control-regex\n const detectNewlineCharRegExp = /\\r\\n|\\n|\\r/g\n\n const escapedQuoteChar = escapeChar + quoteChar\n const escapedQuoteCharRegExp = new RegExp(\n `${escapeRegExp(escapedQuoteChar)}`,\n 'g'\n )\n\n const escapedQuoteEqual = escapeChar === quoteChar\n const escapedQuoteNotEqual = escapeChar !== quoteChar\n\n let newlineCharLength = length(newlineChar)\n const delimiterCharLength = 1 // length(delimiterChar)\n const quoteCharLength = 1 // 
length(quoteChar)\n const escapeCharLength = 1 // length(escapeChar)\n const escapedQuoteCharLength = 2 // length(escapedQuoteChar)\n // const commentPrefixValueLength = length(commentPrefixValue)\n\n let chunk, chunkLength, cursor, row, enqueue\n let partialLine = ''\n let idx = 0\n const enqueueRow = (row) => {\n idx += 1\n if (header === true) {\n header = row\n headerLength = length(header)\n return\n }\n let data = row\n if (headerLength) {\n const rowLength = length(row)\n\n if (headerLength !== rowLength) {\n if (errorOnFieldsMismatch) {\n // enqueueError('FieldsMismatch', `Parsed ${rowLength} fields, expected ${headerLength}.`)\n if (headerLength < rowLength) {\n enqueueError(\n 'FieldsMismatchTooMany',\n `Too many fields were parsed, expected ${headerLength}.`\n )\n } else if (rowLength < headerLength) {\n enqueueError(\n 'FieldsMismatchTooFew',\n `Too few fields were parsed, expected ${headerLength}.`\n )\n }\n }\n return\n } else {\n data = {}\n for (let i = 0; i < rowLength; i++) {\n data[header[i]] = row[i]\n }\n }\n }\n enqueue({ idx, data })\n }\n\n const enqueueError = (code, message) => {\n enqueue({ idx, err: { code, message } })\n }\n\n const findNext = (searchValue, start = cursor) => {\n return chunk.indexOf(searchValue, start)\n }\n\n const parseField = (end) => {\n return chunk.substring(cursor, end)\n }\n const transformField = (field, idx) => {\n return coerceField(field || emptyFieldValue, idx)\n }\n\n // TODO idea: when header == true/array using a different addFieldToRow function to allow faster key:value mapping\n // const resetRow = () => {\n // row = []\n // }\n const addFieldToRow = (field, idx) => {\n row.push(transformField(field, idx))\n }\n\n const checkForEmptyLine = () => {\n if (findNext(newlineChar) === cursor) {\n idx += 1\n cursor += newlineCharLength\n if (errorOnEmptyLine) {\n enqueueError('EmptyLineExists', 'Empty line detected.')\n }\n return checkForEmptyLine()\n } else if (commentPrefixValue && 
findNext(commentPrefixValue) === cursor) {\n idx += 1\n cursor = findNext(newlineChar) + newlineCharLength\n if (errorOnComment) {\n enqueueError('CommentExists', 'Comment detected.')\n }\n return checkForEmptyLine()\n }\n }\n\n const chunkParse = (string, controller, flush = false) => {\n chunk = string\n chunkLength = length(chunk)\n enqueue = controller.enqueue\n partialLine = ''\n cursor = 0\n row = [] // resetRow()\n\n // auto-detect\n if (!newlineChar) {\n newlineChar = detectChar(\n chunk.substring(0, 1024),\n detectNewlineCharRegExp\n )\n newlineCharLength = length(newlineChar)\n }\n delimiterChar ||= detectChar(\n chunk.substring(0, 1024),\n detectDelimiterCharRegExp\n )\n\n checkForEmptyLine()\n let lineStart = 0\n for (;;) {\n let quoted\n let nextCursor = cursor\n let nextCursorLength\n let atNewline\n if (chunk[cursor] === quoteChar) {\n cursor += quoteCharLength\n quoted = true\n nextCursor = cursor\n for (;;) {\n nextCursor = findNext(quoteChar, nextCursor)\n if (nextCursor < 0) {\n partialLine = chunk.substring(lineStart, chunkLength) + partialLine\n if (flush) {\n throw new Error('QuotedFieldMalformed', { cause: idx })\n }\n return\n }\n if (\n escapedQuoteEqual &&\n chunk[nextCursor + quoteCharLength] === quoteChar\n ) {\n nextCursor += escapedQuoteCharLength\n continue\n }\n if (\n escapedQuoteNotEqual &&\n chunk[nextCursor - escapeCharLength] === escapeChar\n ) {\n nextCursor += quoteCharLength\n continue\n }\n break\n }\n }\n\n // fallback\n const nextDelimiterChar = findNext(delimiterChar, nextCursor)\n let nextNewlineChar = findNext(newlineChar, nextCursor)\n if (nextNewlineChar < 0) {\n if (!flush) {\n partialLine = chunk.substring(lineStart, chunkLength) + partialLine\n return\n }\n nextNewlineChar = chunkLength\n }\n if (nextDelimiterChar > -1 && nextDelimiterChar < nextNewlineChar) {\n nextCursor = nextDelimiterChar\n nextCursorLength = delimiterCharLength\n } else {\n nextCursor = nextNewlineChar\n nextCursorLength = newlineCharLength\n 
atNewline = true\n }\n\n if (nextCursor < 0 || !nextCursor) {\n break\n }\n\n let field\n if (quoted) {\n field = parseField(nextCursor - 1).replace(\n escapedQuoteCharRegExp,\n quoteChar\n )\n } else {\n field = parseField(nextCursor)\n }\n addFieldToRow(field, row.length)\n\n cursor = nextCursor + nextCursorLength\n\n if (atNewline) {\n enqueueRow(row)\n row = [] // resetRow()\n checkForEmptyLine()\n lineStart = cursor\n }\n if (chunkLength <= cursor) {\n break\n }\n }\n }\n\n return {\n chunkParse,\n header: () => header,\n previousChunk: () => partialLine\n }\n}\n\nexport const detectChar = (chunk, pattern) => {\n let match\n const chars = {}\n while ((match = pattern.exec(chunk))) {\n const char = match[0]\n chars[char] ??= 0\n chars[char] += 1\n if (chars[char] > 5) return char\n }\n // pattern.lastIndex = 0 // not reused again\n const { key } =\n Object.keys(chars)\n .map((key) => ({ key, value: chars[key] }))\n .sort((a, b) => a.value - b.value)?.[0] ?? {}\n if (!key) {\n throw new Error('UnknownDetectChar', {\n cause: {\n pattern,\n chunk\n }\n })\n }\n return key\n}\n\nexport const coerceTo = {\n string: (field) => field,\n boolean: (field) => {\n const boolean = coerceTo.true(field)\n return typeof boolean === 'boolean' ? boolean : coerceTo.false(field)\n },\n true: (field) => (field.toLowerCase() === 'true' ? true : field),\n false: (field) => (field.toLowerCase() === 'false' ? false : field),\n number: (field) => {\n const decimal = coerceTo.decimal(field)\n return Number.isInteger(decimal) ? coerceTo.integer(field) : decimal\n },\n integer: (field) => Number.parseInt(field, 10) || field,\n decimal: (field) => Number.parseFloat(field) || field,\n json: (field) => {\n try {\n return JSON.parse(field)\n } catch (e) {\n return field\n }\n },\n timestamp: (field) => {\n const date = new Date(field)\n return date.toString() !== 'Invalid Date' ? date : field\n },\n null: (field) => (field.toLowerCase() === 'null' ? 
null : field),\n any: (field) => {\n // TODO\n return field\n }\n}\n\nexport default (input, opts) => {\n const options = {\n ...defaultOptions,\n ...{\n enableReturn: true,\n chunkSize: 64 * 1024 * 1024,\n enqueue: () => {}\n },\n ...opts\n }\n const { chunkSize, enableReturn, enqueue } = options\n const { chunkParse, previousChunk } = parse(options)\n\n const res = []\n const controller = { enqueue }\n\n if (enableReturn) {\n controller.enqueue = (row) => {\n enqueue(row)\n res.push(row.data)\n }\n }\n\n let position = 0\n while (position < input.length) {\n const chunk =\n previousChunk() + input.substring(position, position + chunkSize)\n\n // Checking if you can use fastParse slows it down more than checking for quoteChar on ever field.\n chunkParse(chunk, controller)\n position += chunkSize\n }\n // flush\n const chunk = previousChunk()\n chunkParse(chunk, controller, true)\n\n return enableReturn && res\n}\n"], | ||
| "mappings": "AACA,IAAMA,EAAiB,CACrB,OAAQ,GAIR,YAAa,GACb,cAAe,GACf,UAAW,IAIX,gBAAiB,GAEjB,YAAcC,GAAUA,EACxB,mBAAoB,GACpB,eAAgB,GAChB,iBAAkB,GAClB,sBAAuB,EAEzB,EAEMC,EAAUC,GAAUA,EAAM,OAC1BC,GAAgBC,GAAWA,EAAO,QAAQ,sBAAuB,MAAM,EAEhEC,GAAQ,CAACC,EAAO,CAAC,IAAM,CAClC,IAAMC,EAAU,CAAE,GAAGR,EAAgB,GAAGO,CAAK,EAC7CC,EAAQ,aAAeA,EAAQ,UAE/B,GAAI,CAAE,OAAAC,EAAQ,YAAAC,EAAa,cAAAC,CAAc,EAAIH,EACvC,CACJ,UAAAI,EACA,WAAAC,EACA,mBAAAC,EACA,gBAAAC,EACA,YAAAC,EACA,iBAAAC,EACA,eAAAC,EACA,sBAAAC,CAEF,EAAIX,EACAY,EAAelB,EAAOO,CAAM,EAC1BY,EAA4B,uBAC5BC,EAA0B,cAE1BC,EAAmBV,EAAaD,EAChCY,EAAyB,IAAI,OACjC,GAAGpB,GAAamB,CAAgB,IAChC,GACF,EAEME,EAAoBZ,IAAeD,EACnCc,EAAuBb,IAAeD,EAExCe,EAAoBzB,EAAOQ,CAAW,EACpCkB,EAAsB,EACtBC,EAAkB,EAClBC,EAAmB,EACnBC,EAAyB,EAG3BC,EAAOC,EAAaC,EAAQC,EAAKC,EACjCC,EAAc,GACdC,EAAM,EACJC,EAAcJ,GAAQ,CAE1B,GADAG,GAAO,EACH7B,IAAW,GAAM,CACnBA,EAAS0B,EACTf,EAAelB,EAAOO,CAAM,EAC5B,MACF,CACA,IAAI+B,EAAOL,EACX,GAAIf,EAAc,CAChB,IAAMqB,EAAYvC,EAAOiC,CAAG,EAE5B,GAAIf,IAAiBqB,EAAW,CAC1BtB,IAEEC,EAAeqB,EACjBC,EACE,wBACA,yCAAyCtB,IAC3C,EACSqB,EAAYrB,GACrBsB,EACE,uBACA,wCAAwCtB,IAC1C,GAGJ,MACF,KAAO,CACLoB,EAAO,CAAC,EACR,QAASG,EAAI,EAAGA,EAAIF,EAAWE,IAC7BH,EAAK/B,EAAOkC,IAAMR,EAAIQ,EAE1B,CACF,CACAP,EAAQ,CAAE,IAAAE,EAAK,KAAAE,CAAK,CAAC,CACvB,EAEME,EAAe,CAACE,EAAMC,IAAY,CACtCT,EAAQ,CAAE,IAAAE,EAAK,IAAK,CAAE,KAAAM,EAAM,QAAAC,CAAQ,CAAE,CAAC,CACzC,EAEMC,EAAW,CAACC,EAAaC,EAAQd,IAC9BF,EAAM,QAAQe,EAAaC,CAAK,EAGnCC,EAAcC,GACXlB,EAAM,UAAUE,EAAQgB,CAAG,EAE9BC,EAAiB,CAAClD,EAAOqC,IACtBtB,EAAYf,GAASc,EAAiBuB,CAAG,EAO5Cc,EAAgB,CAACnD,EAAOqC,IAAQ,CACpCH,EAAI,KAAKgB,EAAelD,EAAOqC,CAAG,CAAC,CACrC,EAEMe,EAAoB,IAAM,CAC9B,GAAIP,EAASpC,CAAW,IAAMwB,EAC5B,OAAAI,GAAO,EACPJ,GAAUP,EACNV,GACFyB,EAAa,kBAAmB,sBAAsB,EAEjDW,EAAkB,EACpB,GAAIvC,GAAsBgC,EAAShC,CAAkB,IAAMoB,EAChE,OAAAI,GAAO,EACPJ,EAASY,EAASpC,CAAW,EAAIiB,EAC7BT,GACFwB,EAAa,gBAAiB,mBAAmB,EAE5CW,EAAkB,CAE7B,EA6GA,MAAO,CACL,WA5GiB,CAAChD,EAAQiD,EAAYC,EAAQ,KAAU,CACxDvB,EAAQ3B,EACR4B,EAAc/B,EAAO8B,CAAK,EAC1BI,EAAUkB,EAAW,QACrBjB,EAAc,GACdH,EAAS,EACTC,EAAM,CAA
C,EAGFzB,IACHA,EAAc8C,EACZxB,EAAM,UAAU,EAAG,IAAI,EACvBV,CACF,EACAK,EAAoBzB,EAAOQ,CAAW,GAExCC,IAAkB6C,EAChBxB,EAAM,UAAU,EAAG,IAAI,EACvBX,CACF,EAEAgC,EAAkB,EAClB,IAAII,EAAY,EAChB,OAAS,CACP,IAAIC,EACAC,EAAazB,EACb0B,EACAC,EACJ,GAAI7B,EAAME,KAAYtB,EAIpB,IAHAsB,GAAUL,EACV6B,EAAS,GACTC,EAAazB,IACJ,CAEP,GADAyB,EAAab,EAASlC,EAAW+C,CAAU,EACvCA,EAAa,EAAG,CAElB,GADAtB,EAAcL,EAAM,UAAUyB,EAAWxB,CAAW,EAAII,EACpDkB,EACF,MAAM,IAAI,MAAM,uBAAwB,CAAE,MAAOjB,CAAI,CAAC,EAExD,MACF,CACA,GACEb,GACAO,EAAM2B,EAAa9B,KAAqBjB,EACxC,CACA+C,GAAc5B,EACd,QACF,CACA,GACEL,GACAM,EAAM2B,EAAa7B,KAAsBjB,EACzC,CACA8C,GAAc9B,EACd,QACF,CACA,KACF,CAIF,IAAMiC,EAAoBhB,EAASnC,EAAegD,CAAU,EACxDI,EAAkBjB,EAASpC,EAAaiD,CAAU,EACtD,GAAII,EAAkB,EAAG,CACvB,GAAI,CAACR,EAAO,CACVlB,EAAcL,EAAM,UAAUyB,EAAWxB,CAAW,EAAII,EACxD,MACF,CACA0B,EAAkB9B,CACpB,CAUA,GATI6B,EAAoB,IAAMA,EAAoBC,GAChDJ,EAAaG,EACbF,EAAmBhC,IAEnB+B,EAAaI,EACbH,EAAmBjC,EACnBkC,EAAY,IAGVF,EAAa,GAAK,CAACA,EACrB,MAGF,IAAI1D,EAmBJ,GAlBIyD,EACFzD,EAAQgD,EAAWU,EAAa,CAAC,EAAE,QACjCnC,EACAZ,CACF,EAEAX,EAAQgD,EAAWU,CAAU,EAE/BP,EAAcnD,EAAOkC,EAAI,MAAM,EAE/BD,EAASyB,EAAaC,EAElBC,IACFtB,EAAWJ,CAAG,EACdA,EAAM,CAAC,EACPkB,EAAkB,EAClBI,EAAYvB,GAEVD,GAAeC,EACjB,KAEJ,CACF,EAIE,OAAQ,IAAMzB,EACd,cAAe,IAAM4B,CACvB,CACF,EAEamB,EAAa,CAACxB,EAAOgC,IAAY,CAC5C,IAAIC,EACEC,EAAQ,CAAC,EACf,KAAQD,EAAQD,EAAQ,KAAKhC,CAAK,GAAI,CACpC,IAAMmC,EAAOF,EAAM,GAGnB,GAFAC,EAAMC,KAAU,EAChBD,EAAMC,IAAS,EACXD,EAAMC,GAAQ,EAAG,OAAOA,CAC9B,CAEA,GAAM,CAAE,IAAAC,CAAI,EACV,OAAO,KAAKF,CAAK,EACd,IAAKE,IAAS,CAAE,IAAAA,EAAK,MAAOF,EAAME,EAAK,EAAE,EACzC,KAAK,CAACC,EAAGC,IAAMD,EAAE,MAAQC,EAAE,KAAK,IAAI,IAAM,CAAC,EAChD,GAAI,CAACF,EACH,MAAM,IAAI,MAAM,oBAAqB,CACnC,MAAO,CACL,QAAAJ,EACA,MAAAhC,CACF,CACF,CAAC,EAEH,OAAOoC,CACT,EAEaG,EAAW,CACtB,OAAStE,GAAUA,EACnB,QAAUA,GAAU,CAClB,IAAMuE,EAAUD,EAAS,KAAKtE,CAAK,EACnC,OAAO,OAAOuE,GAAY,UAAYA,EAAUD,EAAS,MAAMtE,CAAK,CACtE,EACA,KAAOA,GAAWA,EAAM,YAAY,IAAM,OAAS,GAAOA,EAC1D,MAAQA,GAAWA,EAAM,YAAY,IAAM,QAAU,GAAQA,EAC7D,OAASA,GAAU,CACjB,IAAMwE,EAAUF,EAAS,QAAQtE,CAAK,EACtC,OAAO,OAAO,UA
AUwE,CAAO,EAAIF,EAAS,QAAQtE,CAAK,EAAIwE,CAC/D,EACA,QAAUxE,GAAU,OAAO,SAASA,EAAO,EAAE,GAAKA,EAClD,QAAUA,GAAU,OAAO,WAAWA,CAAK,GAAKA,EAChD,KAAOA,GAAU,CACf,GAAI,CACF,OAAO,KAAK,MAAMA,CAAK,CACzB,MAAE,CACA,OAAOA,CACT,CACF,EACA,UAAYA,GAAU,CACpB,IAAMyE,EAAO,IAAI,KAAKzE,CAAK,EAC3B,OAAOyE,EAAK,SAAS,IAAM,eAAiBA,EAAOzE,CACrD,EACA,KAAOA,GAAWA,EAAM,YAAY,IAAM,OAAS,KAAOA,EAC1D,IAAMA,GAEGA,CAEX,EAEO0E,GAAQ,CAACC,EAAOrE,IAAS,CAC9B,IAAMC,EAAU,CACd,GAAGR,EAED,aAAc,GACd,UAAW,SACX,QAAS,IAAM,CAAC,EAElB,GAAGO,CACL,EACM,CAAE,UAAAsE,EAAW,aAAAC,EAAc,QAAA1C,CAAQ,EAAI5B,EACvC,CAAE,WAAAuE,EAAY,cAAAC,CAAc,EAAI1E,GAAME,CAAO,EAE7CyE,EAAM,CAAC,EACP3B,EAAa,CAAE,QAAAlB,CAAQ,EAEzB0C,IACFxB,EAAW,QAAWnB,GAAQ,CAC5BC,EAAQD,CAAG,EACX8C,EAAI,KAAK9C,EAAI,IAAI,CACnB,GAGF,IAAI+C,EAAW,EACf,KAAOA,EAAWN,EAAM,QAAQ,CAC9B,IAAM5C,EACJgD,EAAc,EAAIJ,EAAM,UAAUM,EAAUA,EAAWL,CAAS,EAGlEE,EAAW/C,EAAOsB,CAAU,EAC5B4B,GAAYL,CACd,CAEA,IAAM7C,EAAQgD,EAAc,EAC5B,OAAAD,EAAW/C,EAAOsB,EAAY,EAAI,EAE3BwB,GAAgBG,CACzB", | ||
| "names": ["defaultOptions", "field", "length", "value", "escapeRegExp", "string", "parse", "opts", "options", "header", "newlineChar", "delimiterChar", "quoteChar", "escapeChar", "commentPrefixValue", "emptyFieldValue", "coerceField", "errorOnEmptyLine", "errorOnComment", "errorOnFieldsMismatch", "headerLength", "detectDelimiterCharRegExp", "detectNewlineCharRegExp", "escapedQuoteChar", "escapedQuoteCharRegExp", "escapedQuoteEqual", "escapedQuoteNotEqual", "newlineCharLength", "delimiterCharLength", "quoteCharLength", "escapeCharLength", "escapedQuoteCharLength", "chunk", "chunkLength", "cursor", "row", "enqueue", "partialLine", "idx", "enqueueRow", "data", "rowLength", "enqueueError", "i", "code", "message", "findNext", "searchValue", "start", "parseField", "end", "transformField", "addFieldToRow", "checkForEmptyLine", "controller", "flush", "detectChar", "lineStart", "quoted", "nextCursor", "nextCursorLength", "atNewline", "nextDelimiterChar", "nextNewlineChar", "pattern", "match", "chars", "char", "key", "a", "b", "coerceTo", "boolean", "decimal", "date", "parse_default", "input", "chunkSize", "enableReturn", "chunkParse", "previousChunk", "res", "position"] | ||
| } |
+3
-165
@@ -32,167 +32,5 @@ <div align="center"> | ||
| </p> | ||
| <p> | ||
| See full documentation at <a href="https://csv-rex.js.org">https://csv-rex.js.org</a> | ||
| </p> | ||
| </div> | ||
| ## Features | ||
| - Free to use under MIT licence | ||
| - Comma-Separated Values (CSV) Files specification compliant ([RFC-4180](https://tools.ietf.org/html/rfc4180)) | ||
| - Small bundle size (~1KB compressed = esbuild + minify + br) | ||
| - Zero dependencies | ||
| - ESM & CJS modules with `.map` files | ||
| - NodeJS and WebStream API support via [@datastream/csv](https://github.com/willfarrell/datastream) | ||
| - It's just fast. See the [benchmarks](https://github.com/willfarrell/csv-benchmarks). | ||
| ## Why not use `papaparse` or `csv-parse`? | ||
| Both are great libraries; we've used them both in many projects over the years. | ||
| - [`csv-parse`](https://csv.js.org/parse/): Built on top of NodeJS native APIs, giving it great stream support. If you want to run it in the browser, however, you're going to have to ship a very large polyfill. | ||
| - [`papaparse`](https://www.papaparse.com/): Built to be more friendly for the browser, with an option to run in node as well. Faster than `csv-parse`, but its dadbod and lack of native stream support leave room for improvement. | ||
| The goal with `csv-rex` is a CSV parser and formatter that is as fast as the others, has a smaller bundle size, and offers cross-environment stream support. We think we've achieved our goal and hope you enjoy. | ||
| ## Setup | ||
| ```bash | ||
| npm install csv-rex | ||
| ``` | ||
| ```javascript | ||
| import {parse, format} from 'csv-rex' | ||
| const linesArray = parse(inputString, {}) | ||
| const csv = format(linesArray, {}) | ||
| ``` | ||
| ## Options | ||
| ### Parse | ||
| - `header` (`true`): Keys to be used in JSON object for the parsed row | ||
| - `true`: First row of the `input` is the headers and will need to be pulled out | ||
| - `[...]`: Pre-assign headers because `input` contains no headers. | ||
| - `false`: Don't map to JSON, return array of values instead. | ||
| - `newlineChar` (`''`): What `newline` character(s) to be used. By default will guess from `\r\n`, `\n`, `\r` | ||
| - `delimiterChar` (`''`): Characters used to separate fields. Must be length of 1. By default will guess from `,`, `\t`, `|`, `;`, `\x1E`, `\x1F` | ||
| - `quoteChar` (`"`): Character used to wrap fields that need to have special characters within them. Must be length of 1 | ||
| - `escapeChar` (`${quoteChar}`): Character used to escape the `quoteChar`. Must be length of 1 | ||
| - `enqueue` (`({data, idx, err}) => {}`): Function to run on parsed row data. | ||
| - `emptyFieldValue` (`''`): Value to be used instead of an empty string. Can be set to `undefined` to have empty fields not be included. | ||
| - `coerceField` (`(field, idx) => field`): Function to apply type/value coercion. | ||
| - `commentPrefixValue` (`false`): Lines starting with this value will be ignored (i.e. `#`, `//`). Can be set to `false` if files will never have comments. | ||
| - `errorOnEmptyLine` (`true`): Controls what happens when an empty line is encountered. A row with an error is pushed when `true`; the line is ignored otherwise. | ||
| - `errorOnComment` (`true`): Controls what happens when a comment is encountered. A row with an error is pushed when `true`; the line is ignored otherwise. | ||
| - `errorOnFieldsMismatch` (`true`): Controls what happens when the number of headers does not match the number of fields in a row. A row with an error is pushed when `true`; the row is ignored otherwise. | ||
| - `errorOnFieldMalformed` (`true`): When no closing `quoteChar` is found. Throws parsing error. | ||
| - `chunkSize` (`64MB`): Size of chunks to process at once. | ||
| - `enableReturn` (`true`): Will concat rows into a single array. Set to `false` if handling data within `enqueue` for performance improvements. | ||
| ### Format | ||
| - `header` (`true`): Keys to be used in JSON object for the parsed row | ||
| - `true`: Will create header from `Object.keys()` | ||
| - `[...]`: Pre-assign headers to be included from object. | ||
| - `false`: will not include a header line. Will use `Object.values()` for rows | ||
| - `newlineChar` (`\r\n`): What `newline` character(s) to be used. | ||
| - `delimiterChar` (`,`): Characters used to separate fields. | ||
| - `quoteChar` (`"`): Character used to wrap fields that need to have special characters within them. | ||
| - `escapeChar` (`${quoteChar}`): Character used to escape the `quoteChar`. | ||
| - `quoteColumn`: (`undefined`): Array that maps to the headers to indicate what columns need to have quotes. Used to improve performance. | ||
| - `true`: Always quote column | ||
| - `false`: Never quote column | ||
| - `undefined`/`null`/``: Detect if quotes are needed based on contents | ||
| - `enqueue` (`(string) => {}`): Function to run on formatted row data. | ||
| - `enableReturn` (`true`): Will concat rows into a single string. Set to `false` if handling data within `enqueue` for performance improvements. | ||
| ## Examples | ||
| ### Parsing a CSV formatted string to JSON (`[{...},{...},...]`) | ||
| ```javascript | ||
| import { parse } from 'csv-rex' | ||
| const enqueue = ({idx, data, err}) => { | ||
| if (err) { | ||
| // handle err | ||
| return | ||
| } | ||
| // modify and/or handle data | ||
| } | ||
| export default (csvString) => parse(csvString, { enqueue }) | ||
| ``` | ||
| ### Formatting an array of objects to CSV string | ||
| ```javascript | ||
| import { format } from 'csv-rex' | ||
| export default (arrayOrObjects) => format(arrayOrObjects, { newlineChar: '\n' }) | ||
| ``` | ||
| ### NodeJS Stream | ||
| ```javascript | ||
| import { createReadStream } from 'node:fs' | ||
| import { pipeline } from '@datastream/core' | ||
| import { csvParseStream } from '@datastream/csv' | ||
| export default async (filePath, opts = {}) => { | ||
| const streams = [ | ||
| createReadStream(filePath), | ||
| csvParseStream() | ||
| // ... | ||
| ] | ||
| const result = await pipeline(streams) | ||
| console.log(result.csvErrors) | ||
| } | ||
| ``` | ||
| ### Web Stream API | ||
| Requires: Chrome v71, Edge v79, Firefox ([not supported yet](https://bugzilla.mozilla.org/show_bug.cgi?id=1493537)), Safari v14.5, NodeJS v18 (v16 with import). If you want to use WebStreams with node you need to pass `--conditions=webstream` in the cli to force its use. | ||
| ```javascript | ||
| import { pipeline } from '@datastream/core' | ||
| import { stringReadableStream } from '@datastream/string' | ||
| import { csvParseStream } from '@datastream/csv' | ||
| export default async (blob, opts = {}) => { | ||
| const streams = [ | ||
| stringReadableStream(blob), | ||
| csvParseStream() | ||
| // ... | ||
| ] | ||
| const result = await pipeline(streams) | ||
| console.log(result.csvErrors) | ||
| } | ||
| ``` | ||
| ### WebWorker using a file | ||
| To prevent blocking of the main thread it is recommended that csv parsing is done in a WebWorker, SharedWebWorker, or ServiceWorker instead of the main thread. This example doesn't use streams due to the lack of Firefox stream support mentioned above. | ||
| ```javascript | ||
| /* eslint-env worker */ | ||
| import parse from 'csv-rex/parse' | ||
| const enqueue = ({data, idx, err}) => { | ||
| if (err) { | ||
| // handle err | ||
| return | ||
| } | ||
| // handle data | ||
| } | ||
| onmessage = async (event) => { | ||
| const { file } = event.data | ||
| const options = {enqueue} | ||
| file.length = file.size // polyfill length | ||
| await parse(file, options) | ||
| // ... | ||
| postMessageEncode() | ||
| } | ||
| const postMessageEncode = (str) => { | ||
| if (typeof str !== 'string') str = JSON.stringify(str) | ||
| const buffer = new TextEncoder().encode(str).buffer | ||
| postMessage(buffer, [buffer]) | ||
| } | ||
| ``` |
59
3.51%46783
-4.95%36
-81.82%