json-colorizer
Advanced tools
Comparing version 1.1.1 to 2.0.0
@@ -1,3 +0,3 @@ | ||
var colorize = require('./src/lib'); | ||
const colorize = require('./src/lib'); | ||
console.log(colorize( {"foo": "bar" })); | ||
{ | ||
"name": "json-colorizer", | ||
"version": "1.1.1", | ||
"version": "2.0.0", | ||
"description": "A library to format JSON with colors for display in the console", | ||
"main": "src/lib/index.js", | ||
"scripts": { | ||
"test": "mocha src/test" | ||
"test": "mocha src/test", | ||
"test:watch": "mocha -w src/test", | ||
"prettify": "prettier --write src/**/*.js", | ||
"lint": "eslint src" | ||
}, | ||
@@ -24,8 +27,25 @@ "repository": { | ||
"dependencies": { | ||
"chalk": "^1.1.3" | ||
"chalk": "^2.4.1" | ||
}, | ||
"devDependencies": { | ||
"chai": "^3.5.0", | ||
"mocha": "^3.1.2" | ||
"chai": "^4.2.0", | ||
"eslint": "^5.9.0", | ||
"eslint-plugin-prettier": "^3.0.0", | ||
"husky": "^1.1.4", | ||
"lint-staged": "^8.0.5", | ||
"lodash": "^4.17.11", | ||
"mocha": "^5.2.0", | ||
"prettier": "^1.15.2" | ||
}, | ||
"husky": { | ||
"hooks": { | ||
"pre-commit": "lint-staged" | ||
} | ||
}, | ||
"lint-staged": { | ||
"src/**/*.js": [ | ||
"prettier --write", | ||
"git add" | ||
] | ||
} | ||
} |
@@ -14,3 +14,3 @@ # json-colorizer | ||
```js | ||
var colorize = require('json-colorizer'); | ||
const colorize = require('json-colorizer'); | ||
console.log(colorize({ "foo": "bar" })); | ||
@@ -22,4 +22,4 @@ ``` | ||
```js | ||
var colorize = require('json-colorizer'); | ||
var json = JSON.stringify({"foo": "bar"}, null, 2); | ||
const colorize = require('json-colorizer'); | ||
const json = JSON.stringify({"foo": "bar"}, null, 2); | ||
console.log(colorize(json)); | ||
@@ -31,10 +31,12 @@ ``` | ||
You can specify a function to use for coloring individual tokens by providing a `colors` object: | ||
__NOTE__: Prior to version 2.x, the colors were specified by referencing `chalk` color functions directly. This required requiring `chalk` into the file. Starting with version 2.x, the colors are specified as a string which is the name (or property path) to the desired color function. | ||
You can specify a color to use for coloring individual tokens by providing a `colors` object. This should map token types to the names of color functions (see the [chalk styles reference](https://www.npmjs.com/package/chalk#styles)). | ||
```js | ||
var colorize = require('json-colorizer'); | ||
var chalk = require('chalk'); | ||
const colorize = require('json-colorizer'); | ||
console.log(colorize({ "foo": "bar" }, { | ||
colors: { | ||
STRING_KEY: chalk.green | ||
STRING_KEY: 'green', | ||
STRING_LITERAL: 'magenta.bold' | ||
} | ||
@@ -41,0 +43,0 @@ })); |
@@ -1,27 +0,24 @@ | ||
var chalk = require('chalk'); | ||
const chalk = require('chalk'); | ||
const { get } = require('lodash'); | ||
var defaultColors = { | ||
BRACE: chalk.gray, | ||
BRACKET: chalk.gray, | ||
COLON: chalk.gray, | ||
COMMA: chalk.gray, | ||
STRING_KEY: chalk.magenta, | ||
STRING_LITERAL: chalk.yellow, | ||
NUMBER_LITERAL: chalk.green, | ||
BOOLEAN_LITERAL: chalk.cyan, | ||
NULL_LITERAL: chalk.white | ||
const defaultColors = { | ||
BRACE: 'gray', | ||
BRACKET: 'gray', | ||
COLON: 'gray', | ||
COMMA: 'gray', | ||
STRING_KEY: 'magenta', | ||
STRING_LITERAL: 'yellow', | ||
NUMBER_LITERAL: 'green', | ||
BOOLEAN_LITERAL: 'cyan', | ||
NULL_LITERAL: 'white' | ||
}; | ||
exports.colorize = function colorize(tokens, options) { | ||
var opts = options || {}; | ||
var colors = opts.colors || {}; | ||
var str = ''; | ||
var colorFn; | ||
const opts = options || {}; | ||
const colors = opts.colors || {}; | ||
tokens.forEach(function (token) { | ||
colorFn = colors[token.type] || defaultColors[token.type]; | ||
str += colorFn ? colorFn(token.value) : token.value; | ||
}); | ||
return str; | ||
return tokens.reduce((acc, token) => { | ||
const colorFn = get(chalk, colors[token.type] || defaultColors[token.type]); | ||
return acc + (colorFn ? colorFn(token.value) : token.value); | ||
}, ''); | ||
}; |
const tokenTypes = [ | ||
{ regex: /^\s+/, tokenType: 'WHITESPACE' }, | ||
{ regex: /^[{}]/, tokenType: 'BRACE' }, | ||
{ regex: /^[\[\]]/, tokenType: 'BRACKET' }, | ||
{ regex: /^[[\]]/, tokenType: 'BRACKET' }, | ||
{ regex: /^:/, tokenType: 'COLON' }, | ||
{ regex: /^,/, tokenType: 'COMMA' }, | ||
{ regex: /^-?\d+(?:\.\d+)?(?:e[+\-]?\d+)?/i, tokenType: 'NUMBER_LITERAL' }, | ||
{ regex: /^"(?:\\.|[^"\\])*"(?=\s*:)/, tokenType: 'STRING_KEY'}, | ||
{ regex: /^"(?:\\.|[^"\\])*"/, tokenType: 'STRING_LITERAL'}, | ||
{ regex: /^-?\d+(?:\.\d+)?(?:e[+-]?\d+)?/i, tokenType: 'NUMBER_LITERAL' }, | ||
{ regex: /^"(?:\\.|[^"\\])*"(?=\s*:)/, tokenType: 'STRING_KEY' }, | ||
{ regex: /^"(?:\\.|[^"\\])*"/, tokenType: 'STRING_LITERAL' }, | ||
{ regex: /^true|false/, tokenType: 'BOOLEAN_LITERAL' }, | ||
@@ -15,15 +15,11 @@ { regex: /^null/, tokenType: 'NULL_LITERAL' } | ||
exports.getTokens = function getTokens(json) { | ||
var input = typeof json === 'string' ? json : JSON.stringify(json); | ||
let input = typeof json === 'string' ? json : JSON.stringify(json); | ||
var tokens = []; | ||
var foundToken; | ||
let tokens = []; | ||
let foundToken; | ||
var match; | ||
var i; | ||
var numTokenTypes = tokenTypes.length; | ||
do { | ||
foundToken = false; | ||
for (i = 0; i < numTokenTypes; i++) { | ||
match = tokenTypes[i].regex.exec(input); | ||
for (let i = 0; i < tokenTypes.length; i++) { | ||
const match = tokenTypes[i].regex.exec(input); | ||
if (match) { | ||
@@ -34,3 +30,3 @@ tokens.push({ type: tokenTypes[i].tokenType, value: match[0] }); | ||
break; | ||
} | ||
} | ||
} | ||
@@ -40,3 +36,2 @@ } while (input.length > 0 && foundToken); | ||
return tokens; | ||
} | ||
}; |
@@ -1,22 +0,20 @@ | ||
var chai = require('chai'); | ||
var chalk = require('chalk'); | ||
var expect = chai.expect; | ||
const { expect } = require('chai'); | ||
var lexer = require('../lib/lexer'); | ||
var colorizer = require('../lib/colorizer'); | ||
var customColors = { | ||
BRACE: chalk.white, | ||
BRACKET: chalk.white, | ||
COLON: chalk.white, | ||
COMMA: chalk.white, | ||
STRING_KEY: chalk.yellow, | ||
NULL_LITERAL: chalk.red, | ||
STRING_LITERAL: chalk.green, | ||
NUMBER_LITERAL: chalk.magenta.bold, | ||
BOOLEAN_LITERAL: chalk.cyan | ||
const { getTokens } = require('../lib/lexer'); | ||
const { colorize } = require('../lib/colorizer'); | ||
const customColors = { | ||
BRACE: 'white', | ||
BRACKET: 'white', | ||
COLON: 'white', | ||
COMMA: 'white', | ||
STRING_KEY: 'yellow', | ||
NULL_LITERAL: 'red', | ||
STRING_LITERAL: 'green', | ||
NUMBER_LITERAL: 'magenta.bold', | ||
BOOLEAN_LITERAL: 'cyan' | ||
}; | ||
var fixture = { | ||
const fixture = { | ||
foo: null, | ||
bar: {baz: true}, | ||
bar: { baz: true }, | ||
number: 13, | ||
@@ -26,48 +24,175 @@ array: ['values'] | ||
describe('Colorizer', function() { | ||
it('colorizes with default options', function() { | ||
const tokens = getTokens(fixture); | ||
const result = colorize(tokens); | ||
describe('Colorizer', function () { | ||
it('colorizes with default options', function () { | ||
var tokens = lexer.getTokens(fixture); | ||
var result = colorizer.colorize(tokens); | ||
expect(result).to.equal([ | ||
'', | ||
'[90m{', '[39m', '[35m\"foo\"', '[39m', '[90m:', '[39m', '[37mnull', | ||
'[39m', '[90m,', '[39m', '[35m\"bar\"', '[39m', '[90m:', '[39m', '[90m{', | ||
'[39m', '[35m\"baz\"', '[39m', '[90m:', '[39m', '[36mtrue', '[39m', '[90m}', | ||
'[39m', '[90m,', '[39m', '[35m\"number\"', '[39m', '[90m:', '[39m', '[32m13', | ||
'[39m', '[90m,', '[39m', '[35m\"array\"', '[39m', '[90m:', '[39m', '[90m[', | ||
'[39m', '[33m\"values\"', '[39m', '[90m]', '[39m', '[90m}', '[39m' | ||
].join('\u001b')); | ||
expect(result).to.equal( | ||
[ | ||
'', | ||
'[90m{', | ||
'[39m', | ||
'[35m"foo"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[37mnull', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"bar"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[90m{', | ||
'[39m', | ||
'[35m"baz"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[36mtrue', | ||
'[39m', | ||
'[90m}', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"number"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[32m13', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"array"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[90m[', | ||
'[39m', | ||
'[33m"values"', | ||
'[39m', | ||
'[90m]', | ||
'[39m', | ||
'[90m}', | ||
'[39m' | ||
].join('\u001b') | ||
); | ||
}); | ||
it('colorizes with custom colors', function () { | ||
var tokens = lexer.getTokens(fixture); | ||
var result = colorizer.colorize(tokens, {colors: customColors}); | ||
it('colorizes with custom colors', function() { | ||
const tokens = getTokens(fixture); | ||
const result = colorize(tokens, { colors: customColors }); | ||
expect(result).to.equal([ | ||
'', | ||
'[37m{', '[39m', '[33m\"foo\"', '[39m', '[37m:', '[39m', '[31mnull', | ||
'[39m', '[37m,', '[39m', '[33m\"bar\"', '[39m', '[37m:', '[39m', '[37m{', | ||
'[39m', '[33m\"baz\"', '[39m', '[37m:', '[39m', '[36mtrue', '[39m', '[37m}', | ||
'[39m', '[37m,', '[39m', '[33m\"number\"', '[39m', '[37m:', '[39m', '[35m', '[1m13', | ||
'[22m', '[39m', '[37m,', '[39m', '[33m\"array\"', '[39m', '[37m:', '[39m', '[37m[', | ||
'[39m', '[32m\"values\"', '[39m', '[37m]', '[39m', '[37m}', '[39m', | ||
].join('\u001b')); | ||
expect(result).to.equal( | ||
[ | ||
'', | ||
'[37m{', | ||
'[39m', | ||
'[33m"foo"', | ||
'[39m', | ||
'[37m:', | ||
'[39m', | ||
'[31mnull', | ||
'[39m', | ||
'[37m,', | ||
'[39m', | ||
'[33m"bar"', | ||
'[39m', | ||
'[37m:', | ||
'[39m', | ||
'[37m{', | ||
'[39m', | ||
'[33m"baz"', | ||
'[39m', | ||
'[37m:', | ||
'[39m', | ||
'[36mtrue', | ||
'[39m', | ||
'[37m}', | ||
'[39m', | ||
'[37m,', | ||
'[39m', | ||
'[33m"number"', | ||
'[39m', | ||
'[37m:', | ||
'[39m', | ||
'[35m', | ||
'[1m13', | ||
'[22m', | ||
'[39m', | ||
'[37m,', | ||
'[39m', | ||
'[33m"array"', | ||
'[39m', | ||
'[37m:', | ||
'[39m', | ||
'[37m[', | ||
'[39m', | ||
'[32m"values"', | ||
'[39m', | ||
'[37m]', | ||
'[39m', | ||
'[37m}', | ||
'[39m' | ||
].join('\u001b') | ||
); | ||
}); | ||
it('colorizes with only specific overrides for colors', function () { | ||
var tokens = lexer.getTokens(fixture); | ||
var result = colorizer.colorize(tokens, {colors: {NUMBER_LITERAL: chalk.red}}); | ||
it('colorizes with only specific overrides for colors', function() { | ||
const tokens = getTokens(fixture); | ||
const result = colorize(tokens, { colors: { NUMBER_LITERAL: 'red' } }); | ||
expect(result).to.equal([ | ||
'', | ||
'[90m{', '[39m', '[35m\"foo\"', '[39m', '[90m:', '[39m', '[37mnull', | ||
'[39m', '[90m,', '[39m', '[35m\"bar\"', '[39m', '[90m:', '[39m', '[90m{', | ||
'[39m', '[35m\"baz\"', '[39m', '[90m:', '[39m', '[36mtrue', '[39m', '[90m}', | ||
'[39m', '[90m,', '[39m', '[35m\"number\"', '[39m', '[90m:', '[39m', '[31m13', | ||
'[39m', '[90m,', '[39m', '[35m\"array\"', '[39m', '[90m:', '[39m', '[90m[', | ||
'[39m', '[33m\"values\"', '[39m', '[90m]', '[39m', '[90m}', '[39m' | ||
].join('\u001b')); | ||
expect(result).to.equal( | ||
[ | ||
'', | ||
'[90m{', | ||
'[39m', | ||
'[35m"foo"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[37mnull', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"bar"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[90m{', | ||
'[39m', | ||
'[35m"baz"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[36mtrue', | ||
'[39m', | ||
'[90m}', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"number"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[31m13', | ||
'[39m', | ||
'[90m,', | ||
'[39m', | ||
'[35m"array"', | ||
'[39m', | ||
'[90m:', | ||
'[39m', | ||
'[90m[', | ||
'[39m', | ||
'[33m"values"', | ||
'[39m', | ||
'[90m]', | ||
'[39m', | ||
'[90m}', | ||
'[39m' | ||
].join('\u001b') | ||
); | ||
}); | ||
}); |
@@ -1,10 +0,8 @@ | ||
var chai = require('chai'); | ||
var expect = chai.expect; | ||
const { expect } = require('chai'); | ||
const { getTokens } = require('../lib/lexer'); | ||
var lexer = require('../lib/lexer'); | ||
describe('Lexer', function () { | ||
it('tokenizes a basic JSON object', function () { | ||
var result = lexer.getTokens({ | ||
foo: 'bar' | ||
describe('Lexer', () => { | ||
it('tokenizes a basic JSON object', () => { | ||
const result = getTokens({ | ||
foo: 'bar' | ||
}); | ||
@@ -19,6 +17,6 @@ | ||
]); | ||
}); | ||
}); | ||
it('tokenizes a basic JSON string', function () { | ||
var result = lexer.getTokens('{"foo":"bar"}'); | ||
it('tokenizes a basic JSON string', () => { | ||
const result = getTokens('{"foo":"bar"}'); | ||
@@ -34,4 +32,4 @@ expect(result).to.deep.equal([ | ||
it('includes whitespace', function () { | ||
var result = lexer.getTokens('{\n "foo": "bar"\n}'); | ||
it('includes whitespace', () => { | ||
const result = getTokens('{\n "foo": "bar"\n}'); | ||
@@ -50,47 +48,46 @@ expect(result).to.deep.equal([ | ||
it('tokenizes boolean values', function () { | ||
var result = lexer.getTokens('true'); | ||
it('tokenizes boolean values', () => { | ||
let result = getTokens('true'); | ||
expect(result).to.deep.equal([{ type: 'BOOLEAN_LITERAL', value: 'true' }]); | ||
result = lexer.getTokens('false'); | ||
result = getTokens('false'); | ||
expect(result).to.deep.equal([{ type: 'BOOLEAN_LITERAL', value: 'false' }]); | ||
}); | ||
it('tokenizes integer values', function () { | ||
var result = lexer.getTokens('123'); | ||
it('tokenizes integer values', () => { | ||
let result = getTokens('123'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '123' }]); | ||
result = lexer.getTokens('-10'); | ||
result = getTokens('-10'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '-10' }]); | ||
}); | ||
it('tokenizes a decimal number', function () { | ||
var result = lexer.getTokens('1.234'); | ||
it('tokenizes a decimal number', () => { | ||
const result = getTokens('1.234'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '1.234' }]); | ||
}); | ||
it('tokenizes a scientific notation number', function () { | ||
var result = lexer.getTokens('12e5'); | ||
it('tokenizes a scientific notation number', () => { | ||
let result = getTokens('12e5'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '12e5' }]); | ||
result = lexer.getTokens('12e+5'); | ||
result = getTokens('12e+5'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '12e+5' }]); | ||
result = lexer.getTokens('12E-5'); | ||
result = getTokens('12E-5'); | ||
expect(result).to.deep.equal([{ type: 'NUMBER_LITERAL', value: '12E-5' }]); | ||
}); | ||
it('tokenizes null', function () { | ||
var result = lexer.getTokens('null'); | ||
it('tokenizes null', () => { | ||
const result = getTokens('null'); | ||
expect(result).to.deep.equal([{ type: 'NULL_LITERAL', value: 'null' }]); | ||
}); | ||
it('tokenizes a string literal with brace characters', function () { | ||
var result = lexer.getTokens('"{hello}"'); | ||
it('tokenizes a string literal with brace characters', () => { | ||
const result = getTokens('"{hello}"'); | ||
expect(result).to.deep.equal([{ type: 'STRING_LITERAL', value: '"{hello}"' }]); | ||
}); | ||
it('tokenizes a key-value pair with whitespace between the :', function () { | ||
var result = lexer.getTokens('"foo" : "bar"'); | ||
it('tokenizes a key-value pair with whitespace between the :', () => { | ||
const result = getTokens('"foo" : "bar"'); | ||
expect(result).to.deep.equal([ | ||
@@ -104,3 +101,2 @@ { type: 'STRING_KEY', value: '"foo"' }, | ||
}); | ||
}); |
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
31483
13
342
54
8
1
+ Added ansi-styles@3.2.1 (transitive)
+ Added chalk@2.4.2 (transitive)
+ Added color-convert@1.9.3 (transitive)
+ Added color-name@1.1.3 (transitive)
+ Added has-flag@3.0.0 (transitive)
+ Added supports-color@5.5.0 (transitive)
- Removed ansi-regex@2.1.1 (transitive)
- Removed ansi-styles@2.2.1 (transitive)
- Removed chalk@1.1.3 (transitive)
- Removed has-ansi@2.0.0 (transitive)
- Removed strip-ansi@3.0.1 (transitive)
- Removed supports-color@2.0.0 (transitive)
Updated chalk@^2.4.1