New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

jsoner

Package Overview
Dependencies
Maintainers
1
Versions
12
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

jsoner - npm Package Compare versions

Comparing version 0.1.6 to 0.2.0

npm-debug.log.e65d9b19889f27a607d10db27adb8018

6

gulpfile.js

@@ -49,6 +49,6 @@ var coveralls = require('gulp-coveralls');

global: {
statements: 100,
branches: 100,
statements: 98,
branches: 95,
functions: 100,
lines: 100
lines: 98
}

@@ -55,0 +55,0 @@ }

var _ = require('lodash');
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));

/**
 * Find the byte offset of the last occurrence of `character` in the file at
 * `path`, scanning backwards from the end of the file in fixed-size chunks.
 * Whitespace is ignored; any other non-matching character aborts the search.
 *
 * @param {String} path      file to scan
 * @param {String} character single character to look for
 * @param {Number} [from]    optional offset: skip everything at or after this
 *                           byte position before starting the scan
 * @returns {Promise<Number>} resolves to:
 *   >= 0 : byte offset of `character`
 *    -1  : a non-whitespace, non-matching character was found first
 *    -2  : the file is empty or contains only whitespace
 */
function findLast(path, character, from) {
    // Recursive helper: examine one chunk ending `totalBytesRead` bytes
    // before EOF, then recurse toward the beginning of the file.
    function searchFor(fd, stats, totalBytesRead, character) {
        return Promise.try(function() {
            var chunkSize = 1024;
            // NOTE(review): `skip` is recomputed for every chunk; for files
            // larger than one chunk with `from` set, the skip count does not
            // carry across chunks — confirm this is intended.
            var skip = (stats.size - from) || 0;
            var buffer = new Buffer(chunkSize);
            var index = 0;
            if (totalBytesRead < stats.size) {
                var position = stats.size - totalBytesRead - chunkSize;
                return fs.readAsync(fd, buffer, 0, chunkSize, position)
                .then(function(bytesRead) {
                    var chunk = buffer.slice(0, bytesRead).toString();
                    // Scan the chunk backwards looking for the desired
                    // character, while ignoring all whitespace characters and
                    // immediately exiting if any other non whitespace is
                    // found.
                    for (index = bytesRead - 1; index >= 0; index--) {
                        if (skip > 0) {
                            skip--;
                            continue;
                        }
                        if (chunk[index].match(/\s/)) {
                            continue;
                        }
                        if (chunk[index] === character) {
                            return Promise.resolve(stats.size -
                                (totalBytesRead + (bytesRead - index)));
                        }
                        return Promise.resolve(-1);
                    }
                    // Chunk was all whitespace/skipped: keep scanning the
                    // previous chunk.
                    return searchFor(fd, stats, totalBytesRead + bytesRead, character);
                });
            }
            // If the whole file is empty or comprised of simply whitespace
            return Promise.resolve(-2);
        });
    }
    return fs.openAsync(path, 'r')
    .then(function(fd) {
        return fs.fstatAsync(fd)
        .then(function(stats) {
            return searchFor(fd, stats, 0, character);
        })
        .finally(function() {
            // Always release the descriptor, success or failure.
            return fs.closeAsync(fd);
        });
    });
}
module.exports = {
appendFileSync: function(filename, element, options) {
appendFile: function(filename, element, options) {
/**
* Append a JSON object to the filename specified by appending to the
* existing JSON Array.
*
* options: {
* * options: {
* replacer: null, // replacer argument passed to

@@ -78,69 +87,77 @@ * // JSON.stringify(value, replacer, space)

* }
*
* returns: Promise
*/
var fd;
return Promise.try(function() {
try {
fs.statSync(filename);
} catch (err) {
if (err.toString().match(/ENOENT/)) {
fs.writeFileSync(filename, '[]');
} else {
return Promise.reject(err);
}
}
try {
fs.accessSync(filename, fs.R_OK | fs.W_OK);
} catch (err) {
if (err.toString().match(/ENOENT/)) {
fs.writeFileSync(filename, '[]');
} else {
throw err;
if (element.length === 0) {
// nothing to write
return Promise.resolve();
}
}
if (element.length === 0) {
// nothing to write
return;
}
options = _.extend({
replacer: null,
space: 4
}, options);
options = _.extend({
replacer: null,
space: 4
}, options);
function stringify(object) {
return JSON.stringify(object, options.replacer, options.space);
}
function stringify(object) {
return JSON.stringify(object, options.replacer, options.space);
}
fd = fs.openSync(filename, 'r+');
var fd = fs.openSync(filename, 'r+');
return findLast(filename, ']')
.then(function(lastSquareBracket) {
if (lastSquareBracket === -1) {
return Promise.reject(new Error(filename + ' not a valid JSON format'));
}
try {
var lastSquareBracket = findLast(filename, ']');
var elementString;
if (lastSquareBracket === -1) {
throw Error(filename + ' not a valid JSON format');
}
if (element instanceof Array) {
elementString = stringify(element);
elementString = elementString.slice(1,
elementString.length - 1);
} else {
elementString = stringify(element);
}
var elementString;
if (element instanceof Array) {
elementString = stringify(element);
elementString = elementString.slice(1,
elementString.length - 1);
} else {
elementString = stringify(element);
}
var output;
if (lastSquareBracket === -2) {
output = '[';
output += elementString + ']';
fs.writeSync(fd, output);
} else {
// If we can't find a } then this is a file with simply [] in it
var lastCurlyBracket = findLast(filename,
'}',
lastSquareBracket);
if (lastCurlyBracket > 0) {
output = ',\n';
var output;
if (lastSquareBracket === -2) {
output = '[';
output += elementString + ']';
return fs.writeAsync(fd, output);
} else {
output = '';
// If we can't find a } then this is a file with simply [] in it
return findLast(filename, '}', lastSquareBracket)
.then(function(lastCurlyBracket) {
if (lastCurlyBracket > 0) {
output = ',\n';
} else {
output = '';
}
output += elementString + ']';
return fs.writeAsync(fd, output, lastSquareBracket);
});
}
output += elementString + ']';
fs.writeSync(fd, output, lastSquareBracket);
}
} finally {
fs.closeSync(fd);
}
})
.finally(function() {
return Promise.try(function() {
if (fd) {
fs.closeSync(fd);
}
});
});
});
}
};
module.exports = {
parse: require('./parse.js'),
appendFileSync: require('./append.js').appendFileSync
appendFileSync: require('./append.js').appendFileSync,
appendFile: require('./append.js').appendFile
};
{
"name": "jsoner",
"version": "0.2.0",
"description": "Simple, fast, minimalist JSON library for node",

@@ -35,3 +35,5 @@ "keywords": [

},
"dependencies": {
"bluebird": "^3.1.4"
}
}

@@ -46,5 +46,11 @@ # jsoner

jsoner.appendFile('users.json', {
firstName: "John",
lastName:"Doe"
})
.then(function() {
console.log('all done');
})
.catch(function(err) {
console.error('there was an issue', err);
});

@@ -55,2 +61,8 @@

lastName:"Doe"
})
.then(function() {
console.log('all done');
})
.catch(function(err) {
console.error('there was an issue', err);
});

@@ -57,0 +69,0 @@ ```

@@ -23,15 +23,25 @@ var _ = require('lodash');

describe('.appendFileSync', function() {
describe('.appendFile', function() {
it('fails to append to a file in an inexistent path', function() {
    // The chain is returned so mocha waits for the rejection to be checked.
    return jsoner.appendFile('/no/mans/land/foo.juttle')
    .then(function() {
        throw Error('previous statement should have failed');
    })
    .catch(function(err) {
        expect(err.toString()).to.match(/ENOENT. no such file or directory/);
    });
});
it('fails to append an object to an incomplete JSON array', function() {
    // Seed the file with an unterminated array, then expect appendFile to
    // reject with a format error.
    return fs.writeFileAsync(tmpFilename, '[ ')
    .then(function() {
        return jsoner.appendFile(tmpFilename, {});
    })
    .then(function() {
        throw Error('previous statement should have failed');
    })
    .catch(function(err) {
        expect(err.toString()).to.contain('not a valid JSON format');
    });
});

@@ -42,6 +52,12 @@

var object = { foo: 'bar' };
jsoner.appendFileSync(newFilename, object);
var data = fs.readFileSync(newFilename);
expect(JSON.parse(data.toString())).to.deep.equal([object]);
fs.unlinkSync(newFilename);
jsoner.appendFile(newFilename, object)
.then(function() {
return fs.readFileAsync(newFilename)
})
.then(function(data) {
expect(JSON.parse(data.toString())).to.deep.equal([object]);
})
.finally(function() {
return fs.unlinkAsync(newFilename);
});
});

@@ -51,6 +67,12 @@

var object = { foo: 'bar' };
fs.writeFileSync(tmpFilename, '');
jsoner.appendFileSync(tmpFilename, object);
var data = fs.readFileSync(tmpFilename);
expect(JSON.parse(data.toString())).to.deep.equal([object]);
return fs.writeFileAsync(tmpFilename, '')
.then(function() {
return jsoner.appendFile(tmpFilename, object);
})
.then(function() {
return fs.readFileSync(tmpFilename);
})
.then(function(data) {
expect(JSON.parse(data.toString())).to.deep.equal([object]);
});
});

@@ -60,6 +82,14 @@

var object = { foo: 'bar' };
fs.writeFileSync(tmpFilename, _.pad('', 4096, ' '));
jsoner.appendFileSync(tmpFilename, object);
var data = fs.readFileSync(tmpFilename);
expect(JSON.parse(data.toString())).to.deep.equal([object]);
// XXX: 4096 is 4x the default read chunkSize in the append code this
// should be configurable.
return fs.writeFileAsync(tmpFilename, _.pad('', 4096, ' '))
.then(function() {
return jsoner.appendFile(tmpFilename, object);
})
.then(function() {
return fs.readFileAsync(tmpFilename);
})
.then(function(data) {
expect(JSON.parse(data.toString())).to.deep.equal([object]);
});
});

@@ -69,9 +99,15 @@

var object = { foo: 'bar' };
fs.writeFileSync(tmpFilename, '[]');
jsoner.appendFileSync(tmpFilename, object);
var data = fs.readFileSync(tmpFilename);
expect(JSON.parse(data.toString())).to.deep.equal([object]);
return fs.writeFileAsync(tmpFilename, '[]')
.then(function() {
return jsoner.appendFile(tmpFilename, object);
})
.then(function() {
return fs.readFileAsync(tmpFilename);
})
.then(function(data) {
expect(JSON.parse(data.toString())).to.deep.equal([object]);
});
});
it('appends multiple JSON objects at once', function(done) {
it('appends multiple JSON objects at once', function() {
var objects = [];

@@ -84,21 +120,27 @@ for (var index = 0; index < 1024; index++) {

}
fs.writeFileSync(tmpFilename, '[]');
jsoner.appendFileSync(tmpFilename, objects);
var stream = fs.createReadStream(tmpFilename);
var processed = 0;
jsoner.parse(stream)
.on('object', function(object) {
expect(object).to.deep.equal({ foo: 'bar', index: processed });
processed++;
return fs.writeFileAsync(tmpFilename, '[]')
.then(function() {
return jsoner.appendFile(tmpFilename, objects);
})
.on('end', function() {
expect(processed).to.equal(1024);
done();
})
.on('error', function(err) {
done(err);
.then(function() {
return new Promise(function(resolve, reject) {
var stream = fs.createReadStream(tmpFilename);
var processed = 0;
jsoner.parse(stream)
.on('object', function(object) {
expect(object).to.deep.equal({ foo: 'bar', index: processed });
processed++;
})
.on('end', function() {
expect(processed).to.equal(1024);
resolve();
})
.on('error', function(err) {
reject(err);
});
});
});
});
it('appends multiple JSON objects one by one', function(done) {
it('appends multiple JSON objects one by one', function() {
var objects = [];

@@ -111,23 +153,35 @@ for (var index = 0; index < 1024; index++) {

}
fs.writeFileSync(tmpFilename, '[]');
_.each(objects, function(object) {
jsoner.appendFileSync(tmpFilename, object);
});
var stream = fs.createReadStream(tmpFilename);
var processed = 0;
jsoner.parse(stream)
.on('object', function(object) {
expect(object).to.deep.equal({ foo: 'bar', index: processed });
processed++;
return fs.writeFileAsync(tmpFilename, '[]')
.then(function() {
var base = Promise.resolve();
_.each(objects, function(object) {
base = base.then(function() {
return jsoner.appendFile(tmpFilename, object);
});
});
return base;
})
.on('end', function() {
expect(processed).to.equal(1024);
done();
})
.on('error', function(err) {
done(err);
.then(function() {
return new Promise(function(resolve, reject) {
var stream = fs.createReadStream(tmpFilename);
var processed = 0;
jsoner.parse(stream)
.on('object', function(object) {
expect(object).to.deep.equal({ foo: 'bar', index: processed });
processed++;
})
.on('end', function() {
expect(processed).to.equal(1024);
resolve();
})
.on('error', function(err) {
reject(err);
});
});
});
});
it('appends multiple JSON arrays', function(done) {
it('appends multiple JSON arrays', function() {
var array1 = [

@@ -142,18 +196,25 @@ { user: 'foo' },

fs.writeFileSync(tmpFilename, '[]');
jsoner.appendFileSync(tmpFilename, array1);
jsoner.appendFileSync(tmpFilename, array2);
var stream = fs.createReadStream(tmpFilename);
var results = [];
jsoner.parse(stream)
.on('object', function(object) {
results.push(object);
return fs.writeFileAsync(tmpFilename, '[]')
.then(function() {
return jsoner.appendFile(tmpFilename, array1);
})
.on('end', function() {
expect(results).to.deep.equal(array1.concat(array2));
done();
.then(function() {
return jsoner.appendFile(tmpFilename, array2);
})
.on('error', function(err) {
done(err);
.then(function() {
return new Promise(function(resolve, reject) {
var stream = fs.createReadStream(tmpFilename);
var results = [];
jsoner.parse(stream)
.on('object', function(object) {
results.push(object);
})
.on('end', function() {
expect(results).to.deep.equal(array1.concat(array2));
resolve();
})
.on('error', function(err) {
reject(err);
});
});
});

@@ -169,19 +230,31 @@ });

fs.writeFileSync(tmpFilename, '[]');
jsoner.appendFileSync(tmpFilename, array1);
jsoner.appendFileSync(tmpFilename, array2);
var jsonString = fs.readFileSync(tmpFilename).toString();
var results = JSON.parse(jsonString);
expect(results).to.deep.equal(array1.concat(array2));
return fs.writeFileAsync(tmpFilename, '[]')
.then(function() {
return jsoner.appendFile(tmpFilename, array1);
})
.then(function() {
return jsoner.appendFile(tmpFilename, array2);
})
.then(function() {
return fs.readFileSync(tmpFilename).toString();
})
.then(function(data) {
var results = JSON.parse(data);
expect(results).to.deep.equal(array1.concat(array2));
});
});
it('always creates the file even on an empty append', function() {
    return jsoner.appendFile(tmpFilename, [])
    .then(function() {
        // async read for consistency with the other promise-based tests
        // (the diffed version called fs.readFileSync inside the chain)
        return fs.readFileAsync(tmpFilename);
    })
    .then(function(data) {
        var results = JSON.parse(data.toString());
        expect(results).to.deep.equal([]);
    });
});
});
});

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc