ackee-node-logger
Comparing version 0.2.5 to 0.2.6
CHANGELOG.md
@@ -0,1 +1,11 @@
+## [0.2.6] - 2018-10-04
+### Added
+- log version of `ackee-node-logger` package being used to `pkgVersion` field
+### Changed
+- change default serializer for `process` object
+- do not log undefined and empty objects in default serializers
+- remove some lodash packages
 ## [0.2.5] - 2018-09-10
@@ -2,0 +12,0 @@
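The `pkgVersion` entry means every log record now carries the version of the logger package itself. A minimal sketch of the idea, assuming the version is read from the package's own package.json and injected via pino's `base` option (the actual wiring inside ackee-node-logger may differ):

const pino = require('pino');
// Read the logger package's own version once at startup.
const { version } = require('./package.json');

// pino merges `base` fields into every emitted log record.
const logger = pino({ base: { pkgVersion: version } });

logger.info('hello');
// => {"level":30,...,"pkgVersion":"0.2.6","msg":"hello"}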
index.js
@@ -1,2 +0,3 @@
-const _ = require('lodash');
+const isString = require('lodash.isstring');
+const isObject = require('lodash.isobject');
 const pino = require('pino');
@@ -8,2 +9,3 @@ const multistream = require('pino-multi-stream').multistream;
 const { StackDriverFormatStream } = require('./stackdriver');
+const { decorateStreams, DefaultTransformStream } = require('./streams');
@@ -73,14 +75,8 @@ // This is a custom slightly edited version of pino-multistream's write method, which adds support for maximum log level
 }
-if (!_.get(options, 'disableStackdriverFormat', false)) {
-    streams = streams.map(stream => {
-        const newStream = new StackDriverFormatStream();
-        newStream.pipe(stream.stream);
-        return {
-            level: stream.level,
-            maxLevel: stream.maxLevel,
-            stream: newStream,
-        };
-    });
+if (!options.disableStackdriverFormat) {
+    streams = decorateStreams(streams, StackDriverFormatStream);
 }
+streams = decorateStreams(streams, DefaultTransformStream);
 if (!options.ignoredHttpMethods) {
@@ -91,3 +87,5 @@ options.ignoredHttpMethods = ['OPTIONS'];
 const logger = pino(
-    _.merge(
+    // no deep-merging needed, so assign is OK
+    Object.assign(
+        {},
         {
@@ -123,5 +121,5 @@ level: defaultLevel,
 if (data) {
-    if (_.isString(data)) {
+    if (isString(data)) {
         moduleName = data;
-    } else if (_.isObject(data)) {
+    } else if (isObject(data)) {
         options = data;
@@ -128,0 +126,0 @@ } else {
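The new `./streams` module is not included in this diff, but the inline block it replaces shows what `decorateStreams` has to do: wrap each configured stream in an instance of the given Transform class while preserving its `level` and `maxLevel`. A sketch reconstructed from that removed code (the real module may differ in details):

// Hypothetical reconstruction of decorateStreams, based on the inline
// mapping it replaced in index.js above.
const decorateStreams = (streams, TransformStreamClass) =>
    streams.map(stream => {
        const newStream = new TransformStreamClass();
        // Pipe the transform's output into the original destination stream.
        newStream.pipe(stream.stream);
        return {
            level: stream.level,
            maxLevel: stream.maxLevel,
            stream: newStream,
        };
    });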
package.json
 {
     "name": "ackee-node-logger",
-    "version": "0.2.5",
+    "version": "0.2.6",
     "description": "Ackee Node Logger",
@@ -17,4 +17,13 @@ "main": "index.js",
     "license": "ISC",
     "engines": {
         "node": "8.6.0"
     },
     "dependencies": {
-        "lodash": "^4.17.5",
+        "lodash.difference": "^4.5.0",
+        "lodash.foreach": "^4.5.0",
+        "lodash.isempty": "^4.4.0",
+        "lodash.isobject": "^3.0.2",
+        "lodash.isstring": "^4.0.1",
+        "lodash.pick": "^4.4.0",
+        "omit-deep": "^0.3.0",
         "on-finished": "^2.3.0",
@@ -21,0 +30,0 @@ "on-headers": "^1.0.1",
README.md
@@ -103,3 +103,3 @@ # Simple pino-based logger setup for Ackee purposes
 - `pretty` - if set to `true`, logger will use [pino pretty human-readable logs](https://github.com/pinojs/pino/blob/master/docs/API.md#pretty). This option can be overridden by `streams`
-- `disableStackdriverFormat` - if set to `true`, logger will add `severity` field to all log objects, so that log levels in Google Stackdriver work as expected
+- `disableStackdriverFormat` - unless set to `true`, logger will add `severity` field to all log objects, so that log levels in Google Stackdriver work as expected. Defaults to `false`
 - `config` - object, which will be passed to underlying logger object. Right now, underlying logger is [pino](https://github.com/pinojs/pino), so for available options see [pino API docs](https://github.com/pinojs/pino/blob/master/docs/API.md#pinooptions-stream)
@@ -106,0 +106,0 @@
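Taken together, the documented options might be used like this (a usage sketch; `loggerFactory` mirrors the name used in the tests below, and all option values are illustrative):

const loggerFactory = require('ackee-node-logger');

const logger = loggerFactory({
    pretty: true,                    // human-readable output; overridden if `streams` is set
    disableStackdriverFormat: false, // keep adding the `severity` field for Stackdriver
    config: { level: 'debug' },      // passed straight through to pino
});

logger.info('Server started');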
@@ -1,2 +0,3 @@
-const _ = require('lodash');
+const forEach = require('lodash.foreach');
+const pick = require('lodash.pick');
+const { removeEmpty, shallowOmit } = require('./utils');
@@ -6,29 +8,39 @@ const serializers = {
         return {
-            message: _.get(obj, 'message'),
-            code: _.get(obj, 'code'),
-            stack: _.get(obj, 'stack'),
-            data: _.get(obj, 'data'),
+            message: obj.message,
+            code: obj.code,
+            stack: obj.stack,
+            data: obj.data,
         };
     },
-    processEnv(obj) {
-        return {
-            nodePath: _.get(obj, 'NODE_PATH'),
-            nodeEnv: _.get(obj, 'NODE_ENV'),
-        };
+    process(obj) {
+        if (!obj.env) {
+            return obj;
+        }
+        const nodePath = obj.env.NODE_PATH;
+        const nodeEnv = obj.env.NODE_ENV;
+        const filteredEnv = { env: removeEmpty({ nodePath, nodeEnv }) };
+        const { env, ...rest } = obj;
+        return removeEmpty(Object.assign({}, filteredEnv, rest));
     },
     req(obj) {
         const omitFields = ['password', 'passwordCheck'];
-        const [body, query] = ['body', 'query'].map(name => _.omit(_.get(obj, name), omitFields));
+        const [body, query] = ['body', 'query'].map(name => {
+            const source = obj[name];
+            if (source) {
+                const { password, passwordCheck, ...rest } = source;
+                return rest;
+            }
+            return source;
+        });
-        return {
+        return removeEmpty({
             body,
             query,
             url: obj.originalUrl || obj.url,
-            method: _.get(obj, 'method'),
-        };
+            method: obj.method,
+        });
     },
     res(obj) {
         return {
-            out: _.get(obj, 'out'),
-            time: _.get(obj, 'time'),
+            out: obj.out,
+            time: obj.time,
         };
@@ -42,3 +54,3 @@ },
 }
-_.forEach(serializers, (value, key) => {
+forEach(serializers, (value, key) => {
     const matcher = new RegExp(`^${key}.(.*)`);
@@ -54,3 +66,3 @@ const affectedFields = [];
     const newSerializer = obj => {
-        return _.omit(value(obj), affectedFields);
+        return shallowOmit(value(obj), affectedFields);
     };
@@ -66,3 +78,3 @@ serializers[key] = newSerializer;
 }
-_.forEach(serializers, (value, key) => {
+forEach(serializers, (value, key) => {
     const matcher = new RegExp(`^${key}.(.*)`);
@@ -78,5 +90,5 @@ const affectedFields = [];
     const newSerializer = obj => {
-        const newFields = _.pick(obj, affectedFields);
+        const newFields = pick(obj, affectedFields);
         const originalResult = value(obj);
-        return _.assign(originalResult, newFields);
+        return Object.assign({}, originalResult, newFields);
     };
@@ -83,0 +95,0 @@ serializers[key] = newSerializer;
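`removeEmpty` and `shallowOmit` come from a `./utils` module that is not part of this diff. Judging from their call sites above and the changelog note about not logging undefined and empty objects, they plausibly behave like the sketches below (inferred, not the actual source; the exact emptiness rules may differ):

const isEmpty = require('lodash.isempty');

// Drop keys whose values are undefined or empty objects/arrays (inferred behaviour).
const removeEmpty = obj =>
    Object.keys(obj).reduce((result, key) => {
        const value = obj[key];
        const emptyObject = typeof value === 'object' && value !== null && isEmpty(value);
        if (value !== undefined && !emptyObject) {
            result[key] = value;
        }
        return result;
    }, {});

// Omit the listed keys from the top level only, replacing lodash's path-aware _.omit.
const shallowOmit = (obj, fields) => {
    const result = Object.assign({}, obj);
    fields.forEach(field => delete result[field]);
    return result;
};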
@@ -187,1 +187,22 @@ const express = require('express');
 });
+test('logger version is logged', () => {
+    const loggerWrites = jest.fn();
+    const logger = loggerFactory({
+        streams: [
+            {
+                stream: new stream.Writable({
+                    write: (chunk, encoding, next) => {
+                        const json = JSON.parse(chunk);
+                        expect(json.pkgVersion).not.toBe(undefined);
+                        loggerWrites();
+                        next();
+                    },
+                }),
+            },
+        ],
+    });
+    logger.fatal('Hello');
+    expect(loggerWrites).toBeCalled();
+});
@@ -1,2 +0,3 @@
-const _ = require('lodash');
+const pick = require('lodash.pick');
+const omitDeep = require('omit-deep');
 const stream = require('stream');
@@ -12,2 +13,3 @@
 test('Default serializers', () => {
+    const loggerWrites = jest.fn();
     const error = {
@@ -21,7 +23,9 @@ message: 'Bad error',
     };
-    const processEnv = {
-        NODE_PATH: 'app:config',
-        NODE_ENV: 'local',
-        PATH: '.',
-        USER: 'root',
+    const process = {
+        env: {
+            NODE_PATH: 'app:config',
+            NODE_ENV: 'local',
+            PATH: '.',
+            USER: 'root',
+        },
     };
@@ -34,7 +38,2 @@ const res = {
     const req = {
-        query: {
-            password: '1234',
-            passwordCheck: '1234',
-            search: 'my cat',
-        },
         body: {
@@ -56,19 +55,16 @@ password: '1234',
     const json = JSON.parse(chunk);
-    expect(json.error).toEqual(_.pick(error, ['message', 'code', 'stack', 'data']));
-    expect(json.processEnv).toEqual({
-        nodePath: processEnv.NODE_PATH,
-        nodeEnv: processEnv.NODE_ENV,
+    expect(json.error).toEqual(pick(error, ['message', 'code', 'stack', 'data']));
+    expect(json.process.env).toEqual({
+        nodePath: process.env.NODE_PATH,
+        nodeEnv: process.env.NODE_ENV,
     });
-    expect(json.req).toEqual(
-        _.pick(
-            _.omit(req, [
-                'body.password',
-                'body.passwordCheck',
-                'query.password',
-                'query.passwordCheck',
-            ]),
-            ['url', 'body', 'query', 'method']
-        )
-    );
-    expect(json.res).toEqual(_.pick(res, ['out', 'time']));
+    const filteredReq = omitDeep(req, [
+        'body.password',
+        'body.passwordCheck',
+        'query.password',
+        'query.passwordCheck',
+    ]);
+    expect(json.req).toEqual(pick(filteredReq, ['url', 'body', 'query', 'method']));
+    expect(json.res).toEqual(pick(res, ['out', 'time']));
     loggerWrites();
     next();
@@ -81,9 +77,52 @@ },
-    logger.info({ error, processEnv, req, res });
+    logger.info({ error, process, req, res });
     expect(loggerWrites).toBeCalled();
 });
+test('No extra fields are added in default serializers', () => {
+    const loggerWrites = jest.fn();
+    const error = {
+        devInfo: 'Lorem',
+        userInfo: 'Ipsum',
+    };
+    const process = {
+        stuff: {
+            NODE_PATH: 'app:config',
+            NODE_ENV: 'local',
+            PATH: '.',
+            USER: 'root',
+        },
+    };
+    const res = {
+        noteToSelf: 'Send to user',
+    };
+    const req = {
+        extraData: 'Some server data',
+    };
+    const logger = loggerFactory({
+        streams: [
+            {
+                stream: new stream.Writable({
+                    write: (chunk, encoding, next) => {
+                        const json = JSON.parse(chunk);
+                        expect(json.error).toEqual({});
+                        expect(json.res).toEqual({});
+                        expect(json.req).toEqual({});
+                        expect(json.process).toEqual(process);
+                        loggerWrites();
+                        next();
+                    },
+                }),
+            },
+        ],
+    });
+    logger.info({ error, process, req, res });
+    expect(loggerWrites).toBeCalled();
+});
 test('Disable custom path', () => {
     const loggerWrites = jest.fn();
     const req = {
         body: {},
         query: {},
         url: 'www.example.com',
@@ -101,3 +140,4 @@ method: 'GET',
     const json = JSON.parse(chunk);
-    expect(json.req).toEqual(_.pick(req, ['body', 'query', 'method']));
+    expect(json.req).toEqual(pick(req, ['body', 'query', 'method']));
     loggerWrites();
     next();
@@ -111,8 +151,8 @@ },
     logger.info({ req });
     expect(loggerWrites).toBeCalled();
 });
 test('Enable custom path', () => {
     const loggerWrites = jest.fn();
     const req = {
         body: {},
         query: {},
         url: 'www.example.com',
@@ -130,3 +170,4 @@ method: 'GET',
     const json = JSON.parse(chunk);
-    expect(json.req).toEqual(_.pick(req, ['body', 'query', 'method', 'url', 'extraData']));
+    expect(json.req).toEqual(pick(req, ['body', 'query', 'method', 'url', 'extraData']));
     loggerWrites();
     next();
@@ -140,2 +181,3 @@ },
     logger.info({ req });
     expect(loggerWrites).toBeCalled();
 });
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
+ Added lodash.difference@^4.5.0
+ Added lodash.foreach@^4.5.0
+ Added lodash.isempty@^4.4.0
+ Added lodash.isobject@^3.0.2
+ Added lodash.isstring@^4.0.1
+ Added lodash.pick@^4.4.0
+ Added omit-deep@^0.3.0
+ Added get-value@2.0.6 (transitive)
+ Added has-value@0.3.1 (transitive)
+ Added has-values@0.1.4 (transitive)
+ Added is-plain-object@2.0.4 (transitive)
+ Added isobject@2.1.0, 3.0.1 (transitive)
+ Added lodash.difference@4.5.0 (transitive)
+ Added lodash.foreach@4.5.0 (transitive)
+ Added lodash.isempty@4.4.0 (transitive)
+ Added lodash.isobject@3.0.2 (transitive)
+ Added lodash.isstring@4.0.1 (transitive)
+ Added lodash.pick@4.4.0 (transitive)
+ Added omit-deep@0.3.0 (transitive)
+ Added unset-value@0.1.2 (transitive)
- Removed lodash@^4.17.5
- Removed lodash@4.17.21 (transitive)