@expo/json-file
Comparing version 5.3.0 to 6.0.0
 'use strict';
-let JsonFile = require('../JsonFile');
+const JsonFile = require('../JsonFile');
+const fs = require('mz/fs');
+const path = require('path');
+const mock = require('mock-fs');
+const lockFile = require('lockfile');
+const cp = require('child_process');
+const _ = require('lodash');
 describe('JsonFile', () => {
+  jasmine.DEFAULT_TIMEOUT_INTERVAL = 2 * 60 * 1000;
+  describe('JsonFile Basic Tests', () => {
     it(`is a class`, () => {
-      let file = new JsonFile('../package.json');
+      let file = new JsonFile(path.join(__dirname, '../package.json'));
       expect(file instanceof JsonFile).toBe(true);
@@ -16,21 +24,132 @@ });
-    pit(`reads JSON from a file`, () => {
-      let file = new JsonFile('./package.json');
-      return file.readAsync().then(object => {
-        expect(object.version).toBeDefined();
-      });
+    it(`reads JSON from a file`, async () => {
+      let file = new JsonFile(path.join(__dirname, '../package.json'));
+      let object = await file.readAsync();
+      expect(object.version).toBeDefined();
     });
-    pit(`reads JSON statically from a file`, () => {
-      return JsonFile.readAsync('./package.json').then(object => {
-        expect(object.version).toBeDefined();
-      });
+    it(`reads JSON statically from a file`, async () => {
+      let object = await JsonFile.readAsync(
+        path.join(__dirname, '../package.json')
+      );
+      expect(object.version).toBeDefined();
     });
-    pit(`reads JSON5 from a file`, () => {
-      let file = new JsonFile('./test-json5.json', {json5: true});
-      return file.readAsync().then(object => {
-        expect(object.itParsedProperly).toBe(42);
+    it(`reads JSON5 from a file`, async () => {
+      let file = new JsonFile(path.join(__dirname, 'files/test-json5.json'), {
+        json5: true,
+      });
+      let object = await file.readAsync();
+      expect(object.itParsedProperly).toBe(42);
     });
+  });
+  let obj1 = { x: 1 };
+  describe('JsonFile mockjs basic integration test', () => {
+    beforeAll(() => {
+      mock();
+    });
+    afterAll(() => {
+      mock.restore();
+    });
+    it(`writes JSON to a file`, async () => {
+      expect(fs.existsSync('./write-test.json')).toBe(false);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await file.writeAsync(obj1);
+      expect(fs.existsSync('./write-test.json')).toBe(true);
+      await expect(file.readAsync()).resolves.toEqual(obj1);
+    });
+    it(`rewrite async`, async () => {
+      expect(fs.existsSync('./write-test.json')).toBe(true);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await expect(file.rewriteAsync()).resolves;
+      expect(fs.existsSync('./write-test.json')).toBe(true);
+      await expect(file.readAsync()).resolves.toEqual(obj1);
+    });
+    it(`changes an existing key in that file`, async () => {
+      await expect(fs.existsSync('./write-test.json')).toBe(true);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await expect(file.setAsync('x', 2)).resolves;
+      await expect(file.readAsync()).resolves.toEqual({ x: 2 });
+    });
+    it(`adds a new key to the file`, async () => {
+      await expect(fs.existsSync('./write-test.json')).toBe(true);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await expect(file.setAsync('y', 3)).resolves;
+      await expect(file.readAsync()).resolves.toEqual({ x: 2, y: 3 });
+    });
+    it(`deletes that same new key from the file`, async () => {
+      await expect(fs.existsSync('./write-test.json')).toBe(true);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await expect(file.deleteKeyAsync('y')).resolves;
+      await expect(file.readAsync()).resolves.toEqual({ x: 2 });
    });
+    it(`deletes another key from the file`, async () => {
+      await expect(fs.existsSync('./write-test.json')).toBe(true);
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await expect(file.deleteKeyAsync('x')).resolves;
+      await expect(file.readAsync()).resolves.toEqual({});
+    });
+  });
+  describe('JsonFile mockjs race condition integration test', () => {
+    beforeAll(() => {
+      mock();
+    });
+    afterAll(() => {
+      mock.restore();
+    });
+    // The following test is not possible because child processes do not inherit a mocked file system
+    xit(
+      'Multiple updates to the same file from different processes are atomic',
+      async () => {
+        let file = new JsonFile('atomic-test.json', { json5: true });
+        let baseObj = {};
+        for (var i = 0; i < 20; i++) {
+          const k = i.toString();
+          const v = i.toString();
+          baseObj = _.extend(baseObj, { [k]: v });
+          cp.fork('./worker-test.js', ['./atomic-test.json', k, v]);
+        }
+        // The following worker does a setAsync
+        //cp.fork('./JsonFileWorker', [filename, key, value])
+        const json = await file.readAsync();
+        console.log(json);
+        expect(json).toEqual(baseObj);
+      }
+    );
+    // This fails when i is high, around 200. However, no realistic use case would have the user
+    // constantly update a file that often
+    it('Multiple updates to the same file have no race conditions', async () => {
+      let file = new JsonFile('./atomic-test.json', { json5: true });
+      for (var i = 0; i < 50; i++) {
+        await file.writeAsync({});
+        let baseObj = {};
+        for (var j = 0; j < 20; j++) {
+          baseObj = _.extend(baseObj, { [j]: j });
+          await file.setAsync(j, j);
+        }
+        const json = await file.readAsync();
+        expect(json).toEqual(baseObj);
+      }
+    });
+    it('Continuous updating!', async () => {
+      let file = new JsonFile('./write-test.json', { json5: true });
+      await file.writeAsync({ i: 0 });
+      for (var i = 0; i < 20; i++) {
+        file.writeAsync({ i });
+        await expect(file.readAsync()).resolves.toEqual({ i });
+      }
+    });
+  });
 });
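The integration tests above rely on `mock-fs` to swap Node's `fs` for an in-memory file system between `beforeAll` and `afterAll`, which is also why the skipped test notes that forked child processes cannot see the mock. A standalone sketch of that pattern, with an illustrative file name:

```js
const fs = require('fs');
const mock = require('mock-fs');

// mock() with no config replaces fs with an empty in-memory file system
// (the current working directory is created by default), so nothing below
// touches the real disk.
mock();
fs.writeFileSync('./scratch.json', JSON.stringify({ x: 1 }));
console.log(fs.readFileSync('./scratch.json', 'utf8')); // {"x":1}

// restore() brings the real fs back; ./scratch.json never existed on disk.
mock.restore();
console.log(fs.existsSync('./scratch.json')); // false
```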
 'use strict';
-let JsonFileError = require('../JsonFileError');
+const JsonFileError = require('../JsonFileError');
@@ -5,0 +5,0 @@ describe('JsonFileError', () => {
 module.exports = {
-  extends: 'exponent',
+  extends: 'expo',
 };
JsonFile.js
 'use strict';
-let fsp = require('mz/fs');
-let _ = require('lodash');
-let util = require('util');
-let JSON5 = require('json5');
-let JsonFileError = require('./JsonFileError');
+const fsp = require('mz/fs');
+const _ = require('lodash');
+const path = require('path');
+const util = require('util');
+const JSON5 = require('json5');
+const writeFileAtomic = require('write-file-atomic');
+const lockFile = require('lockfile');
+const promisify = require('util.promisify');
+const JsonFileError = require('./JsonFileError');
+
+const lockAsync = promisify(lockFile.lock);
@@ -18,2 +23,29 @@ const DEFAULT_OPTIONS = {
+// A promisified writeFileAtomic
+const writeFileAtomicAsync = (file, data) =>
+  new Promise((resolve, reject) => {
+    writeFileAtomic(file, data, err => {
+      if (err) reject(err);
+      else resolve();
+    });
+  });
+
+const callWithLock = async (file, fn) => {
+  let result;
+  const lockFileName = file + '.lock';
+  // These options are fairly arbitrary
+  await lockAsync(lockFileName, {
+    wait: 5000,
+    retries: 500,
+    pollPeriod: 50,
+    retryWait: 50,
+  });
+  try {
+    result = await fn();
+  } finally {
+    lockFile.unlockSync(lockFileName);
+  }
+  return result;
+};
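The `callWithLock` helper added above is the core of the new locking behaviour: every public operation takes `<file>.lock` via `lockfile` before touching the file and releases it in the `finally` block, so overlapping calls are serialized rather than interleaved. A small sketch of the intended effect, assuming the 6.0.0 API shown in this diff; the file name and keys are made up:

```js
const JsonFile = require('@expo/json-file');

async function demo() {
  const settings = new JsonFile('./settings.json');
  await settings.writeAsync({});
  // Both calls go through callWithLock internally; the second setAsync polls
  // ./settings.json.lock until the first releases it, so neither write is lost.
  await Promise.all([
    settings.setAsync('theme', 'dark'),
    settings.setAsync('locale', 'en-US'),
  ]);
  console.log(await settings.readAsync()); // { theme: 'dark', locale: 'en-US' }
}

demo().catch(console.error);
```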
 class JsonFile {
@@ -26,35 +58,53 @@ constructor(file, options) {
   readAsync(options) {
-    return readAsync(this.file, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      readAsync(this.file, this._getOptions(options))
+    );
   }
   writeAsync(object, options) {
-    return writeAsync(this.file, object, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      writeAsync(this.file, object, this._getOptions(options))
+    );
   }
   getAsync(key, defaultValue, options) {
-    return getAsync(this.file, key, defaultValue, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      getAsync(this.file, key, defaultValue, this._getOptions(options))
+    );
   }
   setAsync(key, value, options) {
-    return setAsync(this.file, key, value, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      setAsync(this.file, key, value, this._getOptions(options))
+    );
   }
   updateAsync(key, value, options) {
-    return updateAsync(this.file, key, value, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      updateAsync(this.file, key, value, this._getOptions(options))
+    );
   }
   mergeAsync(sources, options) {
-    return mergeAsync(this.file, sources, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      mergeAsync(this.file, sources, this._getOptions(options))
+    );
   }
   deleteKeyAsync(key, options) {
-    return deleteKeyAsync(this.file, key, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      deleteKeyAsync(this.file, key, this._getOptions(options))
+    );
   }
   deleteKeysAsync(keys, options) {
-    return deleteKeysAsync(this.file, keys, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      deleteKeysAsync(this.file, keys, this._getOptions(options))
+    );
   }
   rewriteAsync(options) {
-    return rewriteAsync(this.file, this._getOptions(options));
+    return callWithLock(this.file, () =>
+      rewriteAsync(this.file, this._getOptions(options))
+    );
   }
@@ -69,13 +119,23 @@
   var json5 = _getOption(options, 'json5');
-  return fsp.readFile(file, 'utf8').then(json => {
-    try {
-      if (json5) {
-        return JSON5.parse(json);
-      } else {
-        return JSON.parse(json);
-      }
-    } catch (e) {
-      let defaultValue = jsonParseErrorDefault(options);
+  return fsp.readFile(file, 'utf8').then(
+    json => {
+      try {
+        if (json5) {
+          return JSON5.parse(json);
+        } else {
+          return JSON.parse(json);
+        }
+      } catch (e) {
+        let defaultValue = jsonParseErrorDefault(options);
+        if (defaultValue === undefined) {
+          throw new JsonFileError(`Error parsing JSON file: ${file}`, e);
+        } else {
+          return defaultValue;
+        }
+      }
+    },
+    error => {
+      let defaultValue = cantReadFileDefault(options);
       if (defaultValue === undefined) {
-        throw new JsonFileError(`Error parsing JSON file: ${file}`, e);
+        throw new JsonFileError(`Can't read JSON file: ${file}`, error);
       } else {
@@ -85,10 +145,3 @@ return defaultValue;
       }
-  }, error => {
-    let defaultValue = cantReadFileDefault(options);
-    if (defaultValue === undefined) {
-      throw new JsonFileError(`Can't read JSON file: ${file}`, error);
-    } else {
-      return defaultValue;
-    }
-  });
+  );
 }
@@ -118,5 +171,8 @@
   } catch (e) {
-    throw new JsonFileError(`Couldn't JSON.stringify object for file: ${file}`, e);
+    throw new JsonFileError(
+      `Couldn't JSON.stringify object for file: ${file}`,
+      e
+    );
   }
-  return fsp.writeFile(file, json, 'utf8').then(() => object);
+  return writeFileAtomicAsync(file, json).then(() => object);
 }
@@ -197,3 +253,3 @@
-Object.assign(JsonFile, {
+const fns = {
   readAsync,
@@ -208,4 +264,10 @@ writeAsync,
   rewriteAsync,
-});
+};
+
+const lockedFns = _.mapValues(fns, fn => (file, ...args) =>
+  callWithLock(file, () => fn(file, ...args))
+);
+Object.assign(JsonFile, lockedFns);
 module.exports = JsonFile;
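Where 5.3.0 attached the bare helper functions to `JsonFile`, 6.0.0 wraps each one with `callWithLock` via `_.mapValues`, so the module-level API takes the same lock as the instance methods. A hedged usage sketch; the file name and keys are illustrative only:

```js
const JsonFile = require('@expo/json-file');

// The static helpers mirror the instance methods but take the file path as
// their first argument; each call below holds ./app.json.lock while it runs.
async function bumpVersion() {
  await JsonFile.writeAsync('./app.json', { version: '0.0.0' });
  const previous = await JsonFile.getAsync('./app.json', 'version', '0.0.0');
  await JsonFile.setAsync('./app.json', 'version', '1.0.0');
  await JsonFile.mergeAsync('./app.json', { name: 'demo-app' });
  return previous;
}

bumpVersion().then(v => console.log(`previous version: ${v}`), console.error);
```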
@@ -8,5 +8,5 @@ 'use strict';
   constructor(message, cause) {
-    let fullMessage = cause ?
-      `${message}\nāā Cause: ${cause.name}: ${cause.message}` :
-      message;
+    let fullMessage = cause
+      ? `${message}\nāā Cause: ${cause.name}: ${cause.message}`
+      : message;
     super(fullMessage);
@@ -13,0 +13,0 @@ this.name = this.constructor.name;
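For reference, the `cause` argument formatted above is what produces the `Cause:` line seen in errors such as `Error parsing JSON file: …`. A minimal illustration using the in-repo require path; the config file name is made up:

```js
const JsonFileError = require('./JsonFileError');

try {
  JSON.parse('{ not valid json');
} catch (cause) {
  const err = new JsonFileError('Error parsing JSON file: ./config.json', cause);
  console.log(err.name); // 'JsonFileError'
  console.log(err.message); // includes "Cause: SyntaxError: ..." on a second line
}
```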
 {
   "name": "@expo/json-file",
-  "version": "5.3.0",
+  "version": "6.0.0",
   "description": "A module for reading, writing, and manipulating JSON files",
   "main": "JsonFile.js",
   "scripts": {
-    "test": "jest --no-cache"
+    "lint": "eslint .",
+    "test": "jest"
   },
@@ -22,9 +23,9 @@ "repository": {
   "homepage": "https://github.com/expo/json-file#readme",
-  "jest": {
-    "automock": false
-  },
   "dependencies": {
     "json5": "^0.5.0",
+    "lockfile": "^1.0.3",
     "lodash": "^4.6.1",
-    "mz": "^2.6.0"
+    "mz": "^2.6.0",
+    "util.promisify": "^1.0.0",
+    "write-file-atomic": "^2.1.0"
   },
@@ -34,6 +35,10 @@ "devDependencies": {
     "eslint": "^2.5.3",
-    "eslint-config-expo": "^1.0.7",
+    "eslint-config-expo": "^5.1.3",
+    "eslint-plugin-babel": "^4.1.2",
+    "eslint-plugin-flowtype": "^2.35.0",
+    "eslint-plugin-import": "^2.7.0",
+    "eslint-plugin-react": "^4.2.3",
-    "jest-cli": "^12.1.1"
+    "jest": "^20.0.4",
+    "mock-fs": "^4.4.1"
   }
 }
@@ -1,78 +0,1 @@
-# json-file
-
-A module for reading, writing, and manipulating JSON files
-
-## Importing the package
-
-```js
-import JsonFile from '@exponent/json-file';
-```
-
-## Promise-based async API
-
-Everything returns `Promise`s. If you are using ES7 (or Babel), you can write code like this:
-
-```js
-let config = await JsonFile.readAsync('config.json', {cantReadFileDefault: {}});
-```
-
-If you are using ES6, you can just use the return values the way you normally would use Promises.
-
-```js
-JsonFile.readAsync('config.json', {cantReadFileDefault: {}}).then(config => {
-  ...
-});
-```
-
-## Used as an object
-
-```js
-var file = new JsonFile('config.json', {cantReadFileDefault: {}});
-var somethingSaved = await file.getAsync('somethingSaved', null);
-```
-
-## Used as functions
-
-```js
-var pkg = await JsonFile.readAsync('package.json');
-var main = await JsonFile.getAsync('package.json', 'main', 'index.js');
-...
-```
-
-## Options you can set, and their default values
-
-|Option | Description | Default Value|
-|-------|-------------|--------------|
-|`space`|How many spaces to use when pretty-printing, (0 for no pretty-printing)|`2`|
-|`default`|Catch-all default value for missing values, bad JSON, and files that can't be read|`undefined`|
-|`jsonParseErrorDefault`|The default value for when a file is read but it doesn't contain valid JSON|`undefined`|
-|`cantReadFileDefault`|The default value for when a file can't be read|`undefined`|
-
-* Note that if defaults are `undefined`, then an `Error` will be thrown instead of `undefined` being returned
-
-## Methods
-
-#### .readAsync([options])
-
-Returns the parse of the whole file as an object
-
-#### .getAsync(key, [default-value], [options])
-
-Returns a single value from a JSON file, using lodash's `_.get` to query the whole object.
-See https://lodash.com/docs#get
-
-#### .writeAsync(data, [options])
-
-Writes out the given data to the file
-
-#### .setAsync(key, val, [options])
-
-Updates the file, inserting or updating the value for `<key>` with `<val>`
-
-#### .mergeAsync(sources, [options])
-
-Merges the values in `<sources>` into the object currently encoded in the file.
-
-#### .deleteKeyAsync(key, [options])
-
-Deletes a single key from the top level of the file.
-
-## Functions
-
-The functions available all mirror the methods above but take `file` (filename as a string) as their first argument.
+# json-file [](https://circleci.com/gh/expo/json-file)
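The options documented in the removed README still describe the defaults used by `readAsync`; a short sketch of how `cantReadFileDefault` and `jsonParseErrorDefault` behave, with illustrative file names:

```js
const JsonFile = require('@expo/json-file');

async function loadConfig() {
  // Missing file: resolves to {} instead of rejecting with a JsonFileError.
  const config = await JsonFile.readAsync('./no-such-file.json', {
    cantReadFileDefault: {},
  });

  // File exists but holds invalid JSON: resolves to null instead of throwing.
  const broken = await JsonFile.readAsync('./corrupt.json', {
    jsonParseErrorDefault: null,
  });

  return { config, broken };
}

loadConfig().then(console.log, console.error);
```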
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package.

Shell access
Supply chain risk: This module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package.
Infinity%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
+ Added