csv42
Advanced tools
Comparing version 3.0.4 to 4.0.0
@@ -15,3 +15,3 @@ "use strict"; | ||
var parse = (options === null || options === void 0 ? void 0 : options.parseValue) || _value.parseValue; | ||
var json = []; | ||
var items = []; | ||
var i = 0; | ||
@@ -21,8 +21,10 @@ var fieldNames = parseHeader(); | ||
var _loop = function _loop() { | ||
var object = {}; | ||
// Note that item starts as a generic, empty object, and will be populated | ||
// with all fields one by one, after which it should be of type T | ||
var item = {}; | ||
parseRecord(function (value, index) { | ||
var _fields$index; | ||
(_fields$index = fields[index]) === null || _fields$index === void 0 ? void 0 : _fields$index.setValue(object, value); | ||
(_fields$index = fields[index]) === null || _fields$index === void 0 ? void 0 : _fields$index.setValue(item, value); | ||
}, parse); | ||
json.push(object); | ||
items.push(item); | ||
}; | ||
@@ -32,3 +34,3 @@ while (i < csv.length) { | ||
} | ||
return json; | ||
return items; | ||
function parseHeader() { | ||
@@ -35,0 +37,0 @@ var names = []; |
@@ -55,7 +55,6 @@ "use strict"; | ||
// an indexOf inside a for loop is inefficient, but it's ok since we're not dealing with a large array | ||
// const index = typeof field.index === 'number' ? field.index : fieldNames.indexOf(field.name) | ||
// @ts-ignore | ||
var index = field.index !== undefined ? field.index : fieldNames.indexOf(field.name); | ||
if (index === -1) { | ||
// @ts-ignores | ||
// @ts-ignore | ||
throw new Error("Field \"".concat(field.name, "\" not found in the csv data")); | ||
@@ -93,5 +92,5 @@ } | ||
function _mergeObject(object, merged, recurse) { | ||
for (var key in object) { | ||
var value = object[key]; | ||
var valueMerged = merged[key] || (merged[key] = Array.isArray(value) ? [] : {}); | ||
for (var _key in object) { | ||
var value = object[_key]; | ||
var valueMerged = merged[_key] || (merged[_key] = Array.isArray(value) ? [] : {}); | ||
if (recurse(value)) { | ||
@@ -115,12 +114,12 @@ _mergeObject(value, valueMerged, recurse); | ||
// mutates the argument `paths` | ||
function _collectPaths(object, parentPath, paths) { | ||
if (object[leaf] === true || object[leaf] === null && isEmpty(object)) { | ||
function _collectPaths(merged, parentPath, paths) { | ||
if (merged[leaf] === true || merged[leaf] === null && isEmpty(merged)) { | ||
paths.push(parentPath); | ||
} else if (Array.isArray(object)) { | ||
object.forEach(function (item, index) { | ||
} else if (Array.isArray(merged)) { | ||
merged.forEach(function (item, index) { | ||
return _collectPaths(item, parentPath.concat(index), paths); | ||
}); | ||
} else if ((0, _object.isObject)(object)) { | ||
for (var key in object) { | ||
_collectPaths(object[key], parentPath.concat(key), paths); | ||
} else if ((0, _object.isObject)(merged)) { | ||
for (var _key2 in merged) { | ||
_collectPaths(merged[_key2], parentPath.concat(_key2), paths); | ||
} | ||
@@ -131,5 +130,5 @@ } | ||
if (path.length === 1) { | ||
var key = path[0]; | ||
var _key3 = path[0]; | ||
return function (item) { | ||
return item[key]; | ||
return item[_key3]; | ||
}; | ||
@@ -136,0 +135,0 @@ } |
@@ -9,3 +9,3 @@ import { parseValue, unescapeValue } from './value.js'; | ||
const parse = options?.parseValue || parseValue; | ||
const json = []; | ||
const items = []; | ||
let i = 0; | ||
@@ -15,9 +15,11 @@ const fieldNames = parseHeader(); | ||
while (i < csv.length) { | ||
const object = {}; | ||
// Note that item starts as a generic, empty object, and will be populated | ||
// with all fields one by one, after which it should be of type T | ||
const item = {}; | ||
parseRecord((value, index) => { | ||
fields[index]?.setValue(object, value); | ||
fields[index]?.setValue(item, value); | ||
}, parse); | ||
json.push(object); | ||
items.push(item); | ||
} | ||
return json; | ||
return items; | ||
function parseHeader() { | ||
@@ -24,0 +26,0 @@ const names = []; |
@@ -31,7 +31,6 @@ import { getIn, isObject, setIn } from './object.js'; | ||
// an indexOf inside a for loop is inefficient, but it's ok since we're not dealing with a large array | ||
// const index = typeof field.index === 'number' ? field.index : fieldNames.indexOf(field.name) | ||
// @ts-ignore | ||
const index = field.index !== undefined ? field.index : fieldNames.indexOf(field.name); | ||
if (index === -1) { | ||
// @ts-ignores | ||
// @ts-ignore | ||
throw new Error(`Field "${field.name}" not found in the csv data`); | ||
@@ -85,10 +84,10 @@ } | ||
// mutates the argument `paths` | ||
function _collectPaths(object, parentPath, paths) { | ||
if (object[leaf] === true || object[leaf] === null && isEmpty(object)) { | ||
function _collectPaths(merged, parentPath, paths) { | ||
if (merged[leaf] === true || merged[leaf] === null && isEmpty(merged)) { | ||
paths.push(parentPath); | ||
} else if (Array.isArray(object)) { | ||
object.forEach((item, index) => _collectPaths(item, parentPath.concat(index), paths)); | ||
} else if (isObject(object)) { | ||
for (const key in object) { | ||
_collectPaths(object[key], parentPath.concat(key), paths); | ||
} else if (Array.isArray(merged)) { | ||
merged.forEach((item, index) => _collectPaths(item, parentPath.concat(index), paths)); | ||
} else if (isObject(merged)) { | ||
for (const key in merged) { | ||
_collectPaths(merged[key], parentPath.concat(key), paths); | ||
} | ||
@@ -95,0 +94,0 @@ } |
@@ -1,3 +0,3 @@ | ||
import { JsonOptions, NestedObject } from './types.js'; | ||
export declare function csv2json(csv: string, options?: JsonOptions): NestedObject[]; | ||
import { JsonOptions } from './types.js'; | ||
export declare function csv2json<T>(csv: string, options?: JsonOptions): T[]; | ||
//# sourceMappingURL=csv2json.d.ts.map |
@@ -1,3 +0,3 @@ | ||
import { CsvField, FlattenCallback, JsonField, NestedObject, Path } from './types.js'; | ||
export declare function collectFields(records: NestedObject[], flatten: FlattenCallback): CsvField[]; | ||
import { CsvField, FlattenCallback, JsonField, Path } from './types.js'; | ||
export declare function collectFields<T>(records: T[], flatten: FlattenCallback): CsvField<T>[]; | ||
export declare function toFields(names: string[], nested: boolean): JsonField[]; | ||
@@ -12,3 +12,3 @@ /** | ||
export declare function mapFields(fieldNames: string[], fields: JsonField[]): (JsonField | undefined)[]; | ||
export declare function collectNestedPaths(array: NestedObject[], recurse: FlattenCallback): Path[]; | ||
export declare function collectNestedPaths<T>(array: T[], recurse: FlattenCallback): Path[]; | ||
//# sourceMappingURL=fields.d.ts.map |
@@ -1,3 +0,3 @@ | ||
import { CsvOptions, NestedObject } from './types.js'; | ||
export declare function json2csv(json: NestedObject[], options?: CsvOptions): string; | ||
import { CsvOptions } from './types.js'; | ||
export declare function json2csv<T>(json: T[], options?: CsvOptions<T>): string; | ||
//# sourceMappingURL=json2csv.d.ts.map |
export type Path = (string | number)[]; | ||
/** @ts-ignore **/ | ||
export type NestedObject = Record<string, NestedObject>; | ||
export type ValueGetter = (object: NestedObject) => unknown; | ||
export type ValueSetter = (object: NestedObject, value: unknown) => void; | ||
export type NestedObject = { | ||
[key: string]: NestedObject | unknown; | ||
}; | ||
export type ValueGetter<T> = (item: T) => unknown; | ||
export type ValueSetter = (item: NestedObject, value: unknown) => void; | ||
export type ValueFormatter = (value: unknown) => string; | ||
export type ValueParser = (value: string) => unknown; | ||
export type FlattenCallback = (value: unknown) => boolean; | ||
export interface CsvField { | ||
export interface CsvField<T> { | ||
name: string; | ||
getValue: ValueGetter; | ||
getValue: ValueGetter<T>; | ||
} | ||
@@ -22,5 +23,5 @@ export interface JsonFieldName { | ||
export type JsonField = JsonFieldName | JsonFieldIndex; | ||
export type CsvFieldsParser = (json: NestedObject[]) => CsvField[]; | ||
export type CsvFieldsParser<T> = (json: T[]) => CsvField<T>[]; | ||
export type JsonFieldsParser = (fieldNames: string[]) => JsonField[]; | ||
export interface CsvOptions { | ||
export interface CsvOptions<T> { | ||
header?: boolean; | ||
@@ -30,3 +31,3 @@ delimiter?: string; | ||
flatten?: boolean | FlattenCallback; | ||
fields?: CsvField[] | CsvFieldsParser; | ||
fields?: CsvField<T>[] | CsvFieldsParser<T>; | ||
formatValue?: ValueFormatter; | ||
@@ -33,0 +34,0 @@ } |
@@ -140,7 +140,6 @@ (function (global, factory) { | ||
// an indexOf inside a for loop is inefficient, but it's ok since we're not dealing with a large array | ||
// const index = typeof field.index === 'number' ? field.index : fieldNames.indexOf(field.name) | ||
// @ts-ignore | ||
const index = field.index !== undefined ? field.index : fieldNames.indexOf(field.name); | ||
if (index === -1) { | ||
// @ts-ignores | ||
// @ts-ignore | ||
throw new Error(`Field "${field.name}" not found in the csv data`); | ||
@@ -194,10 +193,10 @@ } | ||
// mutates the argument `paths` | ||
function _collectPaths(object, parentPath, paths) { | ||
if (object[leaf] === true || object[leaf] === null && isEmpty(object)) { | ||
function _collectPaths(merged, parentPath, paths) { | ||
if (merged[leaf] === true || merged[leaf] === null && isEmpty(merged)) { | ||
paths.push(parentPath); | ||
} else if (Array.isArray(object)) { | ||
object.forEach((item, index) => _collectPaths(item, parentPath.concat(index), paths)); | ||
} else if (isObject(object)) { | ||
for (const key in object) { | ||
_collectPaths(object[key], parentPath.concat(key), paths); | ||
} else if (Array.isArray(merged)) { | ||
merged.forEach((item, index) => _collectPaths(item, parentPath.concat(index), paths)); | ||
} else if (isObject(merged)) { | ||
for (const key in merged) { | ||
_collectPaths(merged[key], parentPath.concat(key), paths); | ||
} | ||
@@ -335,3 +334,3 @@ } | ||
const parse = options?.parseValue || parseValue; | ||
const json = []; | ||
const items = []; | ||
let i = 0; | ||
@@ -341,9 +340,11 @@ const fieldNames = parseHeader(); | ||
while (i < csv.length) { | ||
const object = {}; | ||
// Note that item starts as a generic, empty object, and will be populated | ||
// with all fields one by one, after which it should be of type T | ||
const item = {}; | ||
parseRecord((value, index) => { | ||
fields[index]?.setValue(object, value); | ||
fields[index]?.setValue(item, value); | ||
}, parse); | ||
json.push(object); | ||
items.push(item); | ||
} | ||
return json; | ||
return items; | ||
function parseHeader() { | ||
@@ -350,0 +351,0 @@ const names = []; |
{ | ||
"name": "csv42", | ||
"version": "3.0.4", | ||
"version": "4.0.0", | ||
"description": "A small and fast CSV parser with support for nested JSON", | ||
@@ -5,0 +5,0 @@ "repository": { |
@@ -18,3 +18,3 @@ # csv42 | ||
- **Lightweight**: 2KB gzipped with everything included, 1KB gzipped when only using `json2csv`. | ||
- **Fast**: faster than the popular CSV libraries out there. See [benchmark](/benchmark). | ||
- **Fast**: faster than the popular CSV libraries out there. See [benchmark](https://jsoneditoronline.org/indepth/parse/csv-parser-javascript/). | ||
- **Modular**: only load what you use, thanks to ES5 modules and a plugin architecture. | ||
@@ -74,4 +74,4 @@ - **Powerful**: | ||
fields: [ | ||
{ name: 'name', getValue: (object) => object.name }, | ||
{ name: 'address', getValue: (object) => object.address.city + ' - ' + object.address.street } | ||
{ name: 'name', getValue: (item) => item.name }, | ||
{ name: 'address', getValue: (item) => item.address.city + ' - ' + item.address.street } | ||
] | ||
@@ -113,4 +113,4 @@ }) | ||
fields: [ | ||
{ name: 'name', setValue: (object, value) => (object.name = value) }, | ||
{ name: 'address.city', setValue: (object, value) => (object.city = value) } | ||
{ name: 'name', setValue: (item, value) => (item.name = value) }, | ||
{ name: 'address.city', setValue: (item, value) => (item.city = value) } | ||
] | ||
@@ -127,3 +127,3 @@ }) | ||
### `json2csv(json: NestedObject[], options?: CsvOptions) : string` | ||
### `json2csv<T>(json: T[], options?: CsvOptions<T>) : string` | ||
@@ -138,3 +138,3 @@ Where `options` is an object with the following properties: | ||
| `flatten` | `boolean` or `(value: unknown) => boolean` | If `true` (default), plain, nested objects will be flattened in multiple CSV columns, and arrays and classes will be serialized in a single field. When `false`, nested objects will be serialized as JSON in a single CSV field. This behavior can be customized by providing your own callback function for `flatten`. For example, to flatten objects and arrays, you can use `json2csv(json, { flatten: isObjectOrArray })`, and to flatten a specific class, you can use `json2csv(json, { flatten: value => isObject(value) \|\| isCustomClass(value) })`. The option `flatten`is not applicable when`fields` is defined. | | ||
| `fields` | `CsvField[]` or `CsvFieldsParser` | A list with fields to be put into the CSV file. This allows specifying the order of the fields and which fields to include/excluded. | | ||
| `fields` | `CsvField<T>[]` or `CsvFieldsParser<T>` | A list with fields to be put into the CSV file. This allows specifying the order of the fields and which fields to include/exclude. | | ||
| `formatValue` | `ValueFormatter` | Function used to change any type of value into a serialized string for the CSV. The build in formatter will only enclose values in quotes when necessary, and will stringify nested JSON objects. | | ||
@@ -150,3 +150,3 @@ | ||
### `csv2json(csv: string, options?: JsonOptions) : NestedObject[]` | ||
### `csv2json<T>(csv: string, options?: JsonOptions) : T[]` | ||
@@ -153,0 +153,0 @@ Where `options` is an object with the following properties: |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
190946
1481