@jsreport/jsreport-core - npm package version comparison

Comparing version 3.6.1 to 3.7.0

Changed files include lib/main/createNormalizeMetaLoggerFormat.js and index.js.

index.js

@@ -28,3 +28,3 @@ const path = require('path')

   blobStorage: () => require('./test/blobStorage/common.js'),
-  listeners: () => require('./test/extensions/validExtensions/listeners/jsreport.config')
+  listeners: () => require('./test/extensions/validExtensions/listeners/jsreport.dontdiscover.config')
 }

 const { MESSAGE } = require('triple-beam')
+const colors = require('@colors/colors/safe')
 const winston = require('winston')

@@ -7,3 +8,3 @@

   const { level, message, ...meta } = info
-  info[MESSAGE] = `${options.timestamp === true ? `${new Date().toISOString()} - ` : ''}${level}: ${message}`
+  info[MESSAGE] = `${options.timestamp === true ? `${new Date().toISOString()} - ` : ''}${level}: ${info.userLevel === true ? colors.cyan(message) : message}`

@@ -16,4 +17,8 @@ const metaKeys = Object.keys(meta)

+  if (info.userLevel === true) {
+    info.level = colors.cyan(info.level)
+  }
   return info
   })
 }
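
The `userLevel` flag drives the new formatting: any log record carrying `userLevel: true` gets its level (and its message, on the timestamped stdout format) rendered in cyan. A minimal sketch of the same winston pattern; the logger and transport setup here are illustrative and not part of this package:

```js
const winston = require('winston')
const colors = require('@colors/colors/safe')

// pass-through format: colorize the level of user-originated records
const userLevelColor = winston.format((info) => {
  if (info.userLevel === true) {
    info.level = colors.cyan(info.level)
  }
  return info
})

const logger = winston.createLogger({
  format: winston.format.combine(userLevelColor(), winston.format.simple()),
  transports: [new winston.transports.Console()]
})

logger.info('render finished')                                // normal level
logger.info('console.log from a helper', { userLevel: true }) // cyan level
```

The flag itself is set where sandbox logs enter the logger (see the `onLog` hunk near the end of this diff).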

@@ -34,24 +34,5 @@ const omit = require('lodash.omit')

 module.exports = (reporter) => async ({ source, target, shouldCopy, shouldReplace }, req) => {
-  const sourceCol = reporter.documentStore.collection(source.entitySet)
-  if (!sourceCol) {
-    throw reporter.createError(`Invalid entity set "${source.entitySet}" for source`, {
-      statusCode: 400
-    })
-  }
-  const sourceEntity = await sourceCol.findOne({ _id: source.id }, req)
-  if (!sourceEntity) {
-    throw reporter.createError('Source entity with specified id does not exists', {
-      statusCode: 400
-    })
-  }
-  const onlyChildren = source.onlyChildren === true
-  if (onlyChildren && source.entitySet !== 'folders') {
-    throw reporter.createError('onlyChildren option can only be enabled when source is a folder')
-  }
+  const isSingleSource = Array.isArray(source) ? source.length === 1 : true
+  const sourceList = isSingleSource ? [source] : source
   if (target.shortid === undefined) {

@@ -69,250 +50,298 @@ throw reporter.createError('target should specify ".shortid"', {

-  let entitiesInHierarchy = []
-  await collectEntitiesInHierarchy(
-    reporter,
-    entitiesInHierarchy,
-    Object.assign(sourceEntity, { __entitySet: source.entitySet }),
-    onlyChildren,
-    req
-  )
-  let rootChildren
-  if (onlyChildren) {
-    rootChildren = entitiesInHierarchy.filter((e) => {
-      return e.folder.shortid === sourceEntity.shortid
-    })
-  }
-  // ignore if we are doing a move at the same level of hierarchy between source and target
-  if (
-    (sourceEntity.folder == null && target.shortid === null) ||
-    (sourceEntity.folder != null &&
-    target.shortid != null &&
-    sourceEntity.folder.shortid === target.shortid)
-  ) {
-    return []
-  }
-  if (!shouldCopy) {
-    // validates that we can't move entities from higher level
-    // into lower level of the same hierarchy
-    if (entitiesInHierarchy.some((e) => e.shortid === target.shortid)) {
-      return []
-    }
-    let updateQ
-    if (target.shortid === null) {
-      updateQ = {
-        $set: {
-          folder: null
-        }
-      }
-    } else {
-      updateQ = {
-        $set: {
-          folder: {
-            shortid: target.shortid
-          }
-        }
-      }
-    }
-    let sourceEntities
-    if (!onlyChildren) {
-      sourceEntities = [sourceEntity]
-    } else {
-      sourceEntities = rootChildren
-    }
-    for (const entity of sourceEntities) {
-      try {
-        await reporter.documentStore.collection(entity.__entitySet).update({
-          _id: entity._id
-        }, updateQ, req)
-      } catch (e) {
-        if (e.code === 'DUPLICATED_ENTITY' && shouldReplace) {
-          // replacing is not supported when it generates a conflict with folder
-          if (e.existingEntityEntitySet === 'folders') {
-            throw e
-          }
-          const removeFolderQ = target.shortid === null ? { folder: null } : { folder: { shortid: target.shortid } }
-          await reporter.documentStore.collection(e.existingEntityEntitySet).remove({
-            _id: e.existingEntity._id,
-            ...removeFolderQ
-          }, req)
-          await reporter.documentStore.collection(entity.__entitySet).update({
-            _id: entity._id
-          }, updateQ, req)
-        } else {
-          throw e
-        }
-      }
-    }
-    const sourceEntityItems = entitiesInHierarchy.filter((e) => {
-      return sourceEntities.find((childE) => childE._id === e._id) != null
-    })
-    for (const sourceEntityItem of sourceEntityItems) {
-      if (target.shortid === null) {
-        sourceEntityItem.folder = null
-      } else {
-        sourceEntityItem.folder = {
-          shortid: target.shortid
-        }
-      }
-    }
-  } else {
-    const entitiesInHierarchyByCollection = entitiesInHierarchy.reduce((acu, entity) => {
-      acu[entity.__entitySet] = acu[entity.__entitySet] || []
-      acu[entity.__entitySet].push(entity)
-      return acu
-    }, {})
-    const entityReferencesMap = new WeakMap()
-    const originalEntitiesNewMap = new WeakMap()
-    const entityRecordNewValueMap = new WeakMap()
-    const records = []
-    // eslint-disable-next-line
-    function createUpdateReferences (record) {
-      return async (newEntity) => {
-        const { entitySet, entity, originalEntity } = record
-        const linkedEntities = entityReferencesMap.get(entity)
-        if (linkedEntities.length === 0) {
-          return
-        }
-        for (const { properties: linkedProperties, entity: originalLinkedEntity } of linkedEntities) {
-          const currentNewLinkedEntity = originalEntitiesNewMap.get(originalLinkedEntity)
-          if (entityRecordNewValueMap.has(currentNewLinkedEntity)) {
-            const currentEntityProcessedNew = entityRecordNewValueMap.get(currentNewLinkedEntity)
-            const currentEntityUpdate = {}
-            // if we get here it means that the entity was already processed, so we need to
-            // execute an update directly to the store
-            for (const prop of linkedProperties) {
-              // here we care to use the new object result because we want to preserve other values
-              // in case the property is array with objects
-              reporter.documentStore.updateReference(originalLinkedEntity.__entitySet, currentEntityProcessedNew, entitySet, { referenceProp: prop, referenceValue: originalEntity.shortid }, newEntity.shortid)
-              const rootProp = prop.split('.')[0]
-              currentEntityUpdate[rootProp] = currentEntityProcessedNew[rootProp]
-            }
-            await reporter.documentStore.collection(originalLinkedEntity.__entitySet).update({
-              _id: currentEntityProcessedNew._id
-            }, { $set: currentEntityUpdate }, req)
-          } else {
-            // here we care to update all properties to point to old reference value
-            reporter.documentStore.updateReference(originalLinkedEntity.__entitySet, currentNewLinkedEntity, entitySet, { referenceValue: originalEntity.shortid }, newEntity.shortid)
-          }
-        }
-      }
-    }
-    if (targetUpdateReferences) {
-      const targetEntity = await reporter.documentStore.collection('folders').findOne({
-        shortid: target.shortid
-      }, req)
-      targetEntity.__entitySet = 'folders'
-      entitiesInHierarchyByCollection.folders = entitiesInHierarchyByCollection.folders || []
-      entitiesInHierarchyByCollection.folders.push(targetEntity)
-      originalEntitiesNewMap.set(targetEntity, targetEntity)
-      entityRecordNewValueMap.set(targetEntity, targetEntity)
-    }
-    for (const entity of entitiesInHierarchy) {
-      const newEntity = {
-        ...omit(entity, ['_id', 'shortid', '__entitySet'])
-      }
-      let isSourceEntityItem
-      if (!onlyChildren) {
-        isSourceEntityItem = source.id === entity._id
-      } else {
-        isSourceEntityItem = rootChildren.find((e) => e._id === entity._id) != null
-      }
-      if (isSourceEntityItem) {
-        if (target.shortid === null) {
-          newEntity.folder = null
-        } else {
-          newEntity.folder = {
-            shortid: target.shortid
-          }
-        }
-      }
-      const entitySet = entity.__entitySet
-      newEntity.__entitySet = entitySet
-      const linkedEntities = reporter.documentStore.findLinkedEntitiesForReference(
-        entitiesInHierarchyByCollection,
-        entitySet,
-        entity.shortid
-      )
-      const record = {
-        entitySet,
-        originalEntity: entity,
-        entity: newEntity
-      }
-      record.updateReferences = createUpdateReferences(record)
-      originalEntitiesNewMap.set(entity, newEntity)
-      entityReferencesMap.set(newEntity, linkedEntities)
-      records.push(record)
-    }
-    const processNewEntity = async (entitySet, entity) => {
-      const newEntityFromStore = await reporter.documentStore.collection(entitySet).insert({
-        ...omit(entity, ['__entitySet'])
-      }, req)
-      entityRecordNewValueMap.set(entity, newEntityFromStore)
-    }
-    for (const record of records) {
-      try {
-        await processNewEntity(record.entitySet, record.entity)
-      } catch (e) {
-        if (e.code === 'DUPLICATED_ENTITY' && shouldReplace) {
-          // replacing is not supported when it generates a conflict with folder
-          if (e.existingEntityEntitySet === 'folders') {
-            throw e
-          }
-          const removeFolderQ = target.shortid === null ? { folder: null } : { folder: { shortid: target.shortid } }
-          await reporter.documentStore.collection(e.existingEntityEntitySet).remove({
-            _id: e.existingEntity._id,
-            ...removeFolderQ
-          }, req)
-          await processNewEntity(record.entitySet, record.entity)
-        } else {
-          throw e
-        }
-      }
-      await record.updateReferences(entityRecordNewValueMap.get(record.entity))
-    }
-    entitiesInHierarchy = records.map((record) => ({
-      ...omit(entityRecordNewValueMap.get(record.entity), ['$entitySet', '__entitySet']),
-      __entitySet: record.entitySet
-    }))
-  }
+  const allEntitiesInvolved = []
+  for (const sourceInfo of sourceList) {
+    const sourceCol = reporter.documentStore.collection(sourceInfo.entitySet)
+    if (!sourceCol) {
+      throw reporter.createError(`Invalid entity set "${sourceInfo.entitySet}" for source`, {
+        statusCode: 400
+      })
+    }
+    const sourceEntity = await sourceCol.findOne({ _id: sourceInfo.id }, req)
+    if (!sourceEntity) {
+      throw reporter.createError('Source entity with specified id does not exists', {
+        statusCode: 400
+      })
+    }
+    const onlyChildren = sourceInfo.onlyChildren === true
+    if (onlyChildren && sourceInfo.entitySet !== 'folders') {
+      throw reporter.createError('onlyChildren option can only be enabled when source is a folder')
+    }
+    let entitiesInHierarchy = []
+    await collectEntitiesInHierarchy(
+      reporter,
+      entitiesInHierarchy,
+      Object.assign(sourceEntity, { __entitySet: sourceInfo.entitySet }),
+      onlyChildren,
+      req
+    )
+    let rootChildren
+    if (onlyChildren) {
+      rootChildren = entitiesInHierarchy.filter((e) => {
+        return e.folder.shortid === sourceEntity.shortid
+      })
+    }
+    // ignore if we are doing a copy/move at the same level of hierarchy between source and target
+    if (
+      (sourceEntity.folder == null && target.shortid === null) ||
+      (sourceEntity.folder != null &&
+      target.shortid != null &&
+      sourceEntity.folder.shortid === target.shortid)
+    ) {
+      continue
+    }
+    if (!shouldCopy) {
+      // validates that we can't move entities from higher level
+      // into lower level of the same hierarchy
+      if (entitiesInHierarchy.some((e) => e.shortid === target.shortid)) {
+        continue
+      }
+      let updateQ
+      if (target.shortid === null) {
+        updateQ = {
+          $set: {
+            folder: null
+          }
+        }
+      } else {
+        updateQ = {
+          $set: {
+            folder: {
+              shortid: target.shortid
+            }
+          }
+        }
+      }
+      let sourceEntities
+      if (!onlyChildren) {
+        sourceEntities = [sourceEntity]
+      } else {
+        sourceEntities = rootChildren
+      }
+      for (const entity of sourceEntities) {
+        try {
+          await reporter.documentStore.collection(entity.__entitySet).update({
+            _id: entity._id
+          }, updateQ, req)
+        } catch (e) {
+          if (e.code === 'DUPLICATED_ENTITY' && shouldReplace) {
+            // replacing is not supported when it generates a conflict with folder
+            if (e.existingEntityEntitySet === 'folders') {
+              throw e
+            }
+            const removeFolderQ = target.shortid === null ? { folder: null } : { folder: { shortid: target.shortid } }
+            await reporter.documentStore.collection(e.existingEntityEntitySet).remove({
+              _id: e.existingEntity._id,
+              ...removeFolderQ
+            }, req)
+            await reporter.documentStore.collection(entity.__entitySet).update({
+              _id: entity._id
+            }, updateQ, req)
+          } else {
+            throw e
+          }
+        }
+      }
+      const sourceEntityItems = entitiesInHierarchy.filter((e) => {
+        return sourceEntities.find((childE) => childE._id === e._id) != null
+      })
+      for (const sourceEntityItem of sourceEntityItems) {
+        if (target.shortid === null) {
+          sourceEntityItem.folder = null
+        } else {
+          sourceEntityItem.folder = {
+            shortid: target.shortid
+          }
+        }
+      }
+    } else {
+      const entitiesInHierarchyByCollection = entitiesInHierarchy.reduce((acu, entity) => {
+        acu[entity.__entitySet] = acu[entity.__entitySet] || []
+        acu[entity.__entitySet].push(entity)
+        return acu
+      }, {})
+      const entityReferencesMap = new WeakMap()
+      const originalEntitiesNewMap = new WeakMap()
+      const entityRecordNewValueMap = new WeakMap()
+      const records = []
+      // eslint-disable-next-line
+      function createUpdateReferences (record) {
+        return async (newEntity) => {
+          const { entitySet, entity, originalEntity } = record
+          const linkedEntities = entityReferencesMap.get(entity)
+          if (linkedEntities.length === 0) {
+            return
+          }
+          for (const { properties: linkedProperties, entity: originalLinkedEntity } of linkedEntities) {
+            const currentNewLinkedEntity = originalEntitiesNewMap.get(originalLinkedEntity)
+            if (entityRecordNewValueMap.has(currentNewLinkedEntity)) {
+              const currentEntityProcessedNew = entityRecordNewValueMap.get(currentNewLinkedEntity)
+              const currentEntityUpdate = {}
+              // if we get here it means that the entity was already processed, so we need to
+              // execute an update directly to the store
+              for (const prop of linkedProperties) {
+                // here we care to use the new object result because we want to preserve other values
+                // in case the property is array with objects
+                reporter.documentStore.updateReference(originalLinkedEntity.__entitySet, currentEntityProcessedNew, entitySet, { referenceProp: prop, referenceValue: originalEntity.shortid }, newEntity.shortid)
+                const rootProp = prop.split('.')[0]
+                currentEntityUpdate[rootProp] = currentEntityProcessedNew[rootProp]
+              }
+              await reporter.documentStore.collection(originalLinkedEntity.__entitySet).update({
+                _id: currentEntityProcessedNew._id
+              }, { $set: currentEntityUpdate }, req)
+            } else {
+              // here we care to update all properties to point to old reference value
+              reporter.documentStore.updateReference(originalLinkedEntity.__entitySet, currentNewLinkedEntity, entitySet, { referenceValue: originalEntity.shortid }, newEntity.shortid)
+            }
+          }
+        }
+      }
+      if (targetUpdateReferences) {
+        const targetEntity = await reporter.documentStore.collection('folders').findOne({
+          shortid: target.shortid
+        }, req)
+        targetEntity.__entitySet = 'folders'
+        entitiesInHierarchyByCollection.folders = entitiesInHierarchyByCollection.folders || []
+        entitiesInHierarchyByCollection.folders.push(targetEntity)
+        originalEntitiesNewMap.set(targetEntity, targetEntity)
+        entityRecordNewValueMap.set(targetEntity, targetEntity)
+      }
+      for (const entity of entitiesInHierarchy) {
+        const newEntity = {
+          ...omit(entity, ['_id', 'shortid', '__entitySet'])
+        }
+        let isSourceEntityItem
+        if (!onlyChildren) {
+          isSourceEntityItem = sourceInfo.id === entity._id
+        } else {
+          isSourceEntityItem = rootChildren.find((e) => e._id === entity._id) != null
+        }
+        if (isSourceEntityItem) {
+          if (target.shortid === null) {
+            newEntity.folder = null
+          } else {
+            newEntity.folder = {
+              shortid: target.shortid
+            }
+          }
+          // when we are copying with multi selection we want to normalize names
+          // so in case of duplicates we just add "(copy)" suffix, for single source
+          // we want the replace dialog
+          if (!isSingleSource) {
+            let copyAttempt = 0
+            let existsAtTarget
+            do {
+              existsAtTarget = await reporter.documentStore.collection(entity.__entitySet).findOne({
+                name: newEntity.name,
+                folder: newEntity.folder
+              })
+              if (existsAtTarget != null) {
+                copyAttempt++
+                newEntity.name = `${entity.name}(copy${copyAttempt > 1 ? copyAttempt : ''})`
+              }
+            } while (existsAtTarget != null)
+          }
+        }
+        const entitySet = entity.__entitySet
+        newEntity.__entitySet = entitySet
+        const linkedEntities = reporter.documentStore.findLinkedEntitiesForReference(
+          entitiesInHierarchyByCollection,
+          entitySet,
+          entity.shortid
+        )
+        const record = {
+          entitySet,
+          originalEntity: entity,
+          entity: newEntity
+        }
+        record.updateReferences = createUpdateReferences(record)
+        originalEntitiesNewMap.set(entity, newEntity)
+        entityReferencesMap.set(newEntity, linkedEntities)
+        records.push(record)
+      }
+      const processNewEntity = async (entitySet, entity) => {
+        const newEntityFromStore = await reporter.documentStore.collection(entitySet).insert({
+          ...omit(entity, ['__entitySet'])
+        }, req)
+        entityRecordNewValueMap.set(entity, newEntityFromStore)
+      }
+      for (const record of records) {
+        try {
+          await processNewEntity(record.entitySet, record.entity)
+        } catch (e) {
+          if (e.code === 'DUPLICATED_ENTITY' && shouldReplace) {
+            // replacing is not supported when it generates a conflict with folder
+            if (e.existingEntityEntitySet === 'folders') {
+              throw e
+            }
+            const removeFolderQ = target.shortid === null ? { folder: null } : { folder: { shortid: target.shortid } }
+            await reporter.documentStore.collection(e.existingEntityEntitySet).remove({
+              _id: e.existingEntity._id,
+              ...removeFolderQ
+            }, req)
+            await processNewEntity(record.entitySet, record.entity)
+          } else {
+            throw e
+          }
+        }
+        await record.updateReferences(entityRecordNewValueMap.get(record.entity))
+      }
+      entitiesInHierarchy = records.map((record) => ({
+        ...omit(entityRecordNewValueMap.get(record.entity), ['$entitySet', '__entitySet']),
+        __entitySet: record.entitySet
+      }))
+    }
+    allEntitiesInvolved.push(...entitiesInHierarchy)
+  }
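
Taken together, these two hunks change the copy/move entry point from a single `source` object to an optional array of sources: the validation that previously ran once now runs per `sourceInfo`, early exits become `continue`, and the results accumulate in `allEntitiesInvolved`. A hedged sketch of the call shape implied by the new signature; how this function is exposed (for example via `reporter.folders`) and the identifier values are assumptions, not confirmed by this diff:

```js
// single source keeps the old shape
await move({
  source: { entitySet: 'templates', id: templateId },
  target: { shortid: folderShortid }
}, req)

// 3.7.0 also accepts an array of sources; when copying with multi selection,
// name conflicts at the target get a "(copy)" suffix instead of the replace dialog
await move({
  source: [
    { entitySet: 'templates', id: templateId },
    { entitySet: 'assets', id: assetId, onlyChildren: false }
  ],
  target: { shortid: folderShortid },
  shouldCopy: true,
  shouldReplace: false
}, req)
```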

@@ -322,3 +351,3 @@

 // this helps with concurrent validation on studio
-await Promise.all(entitiesInHierarchy.map(async (entity) => {
+await Promise.all(allEntitiesInvolved.map(async (entity) => {
 const entitySet = reporter.documentStore.model.entitySets[entity.__entitySet]

@@ -356,3 +385,3 @@ const entityType = entitySet.entityTypeDef

-  return entitiesInHierarchy
+  return allEntitiesInvolved
 }

@@ -8,6 +8,8 @@ const path = require('path')

 const createDefaultLoggerFormat = require('./createDefaultLoggerFormat')
-const normalizeMetaFromLogs = require('../shared/normalizeMetaFromLogs')
+const createNormalizeMetaLoggerFormat = require('./createNormalizeMetaLoggerFormat')
+const Request = require('./request')
 const defaultLoggerFormat = createDefaultLoggerFormat()
 const defaultLoggerFormatWithTimestamp = createDefaultLoggerFormat({ timestamp: true })
+const normalizeMetaLoggerFormat = createNormalizeMetaLoggerFormat()

@@ -184,21 +186,26 @@ function createLogger () {

+  const originalLog = logger.log
+  // we want to normalize the req has httpIncomingRequest early
+  // otherwise we will get serialization issues when trying to
+  // log http.IncomingRequest
+  logger.log = function (level, msg, ...splat) {
+    const [meta] = splat
+    if (
+      typeof meta === 'object' &&
+      meta !== null &&
+      meta.context != null &&
+      meta.socket != null
+    ) {
+      splat[0] = Request(meta)
+    }
+    return originalLog.call(this, level, msg, ...splat)
+  }
   logger.__configured__ = true
 }
 function getConfigurationOptions () {
-  const normalizeMeta = winston.format((info) => {
-    const { level, message, ...meta } = info
-    const newMeta = normalizeMetaFromLogs(level, message, meta)
-    if (newMeta != null) {
-      return {
-        level,
-        message,
-        ...newMeta
-      }
-    }
-    return info
-  })
   return {

@@ -212,3 +219,3 @@ levels: {

 format: winston.format.combine(
-  normalizeMeta(),
+  normalizeMetaLoggerFormat(),
   defaultLoggerFormatWithTimestamp()

@@ -237,2 +244,3 @@ ),

 winston.format.colorize(),
+normalizeMetaLoggerFormat(),
 defaultLoggerFormat()
 )

 const EventEmitter = require('events')
+const winston = require('winston')
 const extend = require('node.extend.without.arrays')

@@ -23,5 +24,7 @@ const generateRequestId = require('../shared/generateRequestId')

 const profilersMap = new Map()
+const profilerRequestMap = new Map()
+const profilerLogRequestMap = new Map()
 const profilerOperationsChainsMap = new Map()
-function runInProfilerChain (fn, req) {
+function runInProfilerChain (fnOrOptions, req) {
   if (req.context.profiling.mode === 'disabled') {

@@ -31,3 +34,23 @@ return

+  let fn
+  let cleanFn
+  if (typeof fnOrOptions === 'function') {
+    fn = fnOrOptions
+  } else {
+    fn = fnOrOptions.fn
+    cleanFn = fnOrOptions.cleanFn
+  }
+  // this only happens when rendering remote delegated requests on docker workers
+  // there won't be operations chain because the request started from another server
+  if (!profilerOperationsChainsMap.has(req.context.rootId)) {
+    return
+  }
   profilerOperationsChainsMap.set(req.context.rootId, profilerOperationsChainsMap.get(req.context.rootId).then(async () => {
+    if (cleanFn) {
+      cleanFn()
+    }
     if (req.context.profiling.chainFailed) {
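
`runInProfilerChain` now accepts either a bare function or an options object with `fn` and `cleanFn`, where `cleanFn` runs as soon as the chain reaches this entry, even when the work itself is skipped. A minimal sketch of this promise-chain-per-key pattern, with illustrative names not taken from the package:

```js
const chains = new Map()

// queue work per key; cleanFn always runs, fn only when provided
function enqueue (key, fnOrOptions) {
  const { fn, cleanFn } = typeof fnOrOptions === 'function'
    ? { fn: fnOrOptions }
    : fnOrOptions
  const prev = chains.get(key) || Promise.resolve()
  const next = prev.then(async () => {
    if (cleanFn) cleanFn()
    if (fn) await fn()
  })
  chains.set(key, next)
  return next
}

enqueue('req-1', () => console.log('plain fn, old call style'))
enqueue('req-1', { cleanFn: () => chains.delete('req-1') }) // cleanup-only entry
```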

@@ -38,3 +61,5 @@ return

 try {
-  await fn()
+  if (fn) {
+    await fn()
+  }
 } catch (e) {

@@ -60,3 +85,3 @@ reporter.logger.warn('Failed persist profile', e)

-function emitProfiles (events, req) {
+function emitProfiles ({ events, log = true }, req) {
   if (events.length === 0) {

@@ -70,3 +95,5 @@ return

 if (m.type === 'log') {
-  reporter.logger[m.level](m.message, { ...req, ...m.meta, timestamp: m.timestamp })
+  if (log) {
+    reporter.logger[m.level](m.message, { ...req, ...m.meta, timestamp: m.timestamp, fromEmitProfile: true })
+  }
 } else {

@@ -86,15 +113,33 @@ lastOperation = m

 runInProfilerChain(() => {
-  if (req.context.profiling.logFilePath) {
-    return fs.appendFile(req.context.profiling.logFilePath, Buffer.from(events.map(m => JSON.stringify(m)).join('\n') + '\n'))
-  }
-  return reporter.blobStorage.append(
-    req.context.profiling.entity.blobName,
-    Buffer.from(events.map(m => JSON.stringify(m)).join('\n') + '\n'), req
-  )
+  return fs.appendFile(req.context.profiling.logFilePath, Buffer.from(events.map(m => JSON.stringify(m)).join('\n') + '\n'))
 }, req)
 }
-reporter.registerMainAction('profile', async (events, req) => {
-  return emitProfiles(events, req)
+reporter.registerMainAction('profile', async (eventsOrOptions, _req) => {
+  let req = _req
+  // if there is request stored here then take it, this is needed
+  // for docker workers remote requests, so the emitProfile can work
+  // with the real render request object
+  if (profilerRequestMap.has(req.context.rootId) && req.__isJsreportRequest__ == null) {
+    req = profilerRequestMap.get(req.context.rootId)
+  }
+  let events
+  let log
+  if (Array.isArray(eventsOrOptions)) {
+    events = eventsOrOptions
+  } else {
+    events = eventsOrOptions.events
+    log = eventsOrOptions.log
+  }
+  const params = { events }
+  if (log != null) {
+    params.log = log
+  }
+  return emitProfiles(params, req)
 })

@@ -127,4 +172,2 @@

-const blobName = `profiles/${req.context.rootId}.log`
 const profile = {

@@ -134,10 +177,7 @@ _id: reporter.documentStore.generateId(),

 state: 'queued',
-mode: req.context.profiling.mode,
-blobName
+mode: req.context.profiling.mode
 }
-if (!reporter.blobStorage.supportsAppend) {
-  const { pathToFile } = await reporter.writeTempFile((uuid) => `${uuid}.log`, '')
-  req.context.profiling.logFilePath = pathToFile
-}
+const { pathToFile } = await reporter.writeTempFile((uuid) => `${uuid}.log`, '')
+req.context.profiling.logFilePath = pathToFile

@@ -160,10 +200,12 @@ runInProfilerChain(async () => {

-emitProfiles([profileStartOperation], req)
-emitProfiles([createProfileMessage({
-  type: 'log',
-  level: 'info',
-  message: `Render request ${req.context.reportCounter} queued for execution and waiting for availible worker`,
-  previousOperationId: profileStartOperation.operationId
-}, req)], req)
+emitProfiles({ events: [profileStartOperation] }, req)
+emitProfiles({
+  events: [createProfileMessage({
+    type: 'log',
+    level: 'info',
+    message: `Render request ${req.context.reportCounter} queued for execution and waiting for available worker`,
+    previousOperationId: profileStartOperation.operationId
+  }, req)]
+}, req)
 })

@@ -176,20 +218,11 @@

+// we set the request here because this listener will container the req which
+// the .render() starts
+profilerRequestMap.set(req.context.rootId, req)
 const template = await reporter.templates.resolveTemplate(req)
 if (template && template._id) {
   req.context.resolvedTemplate = extend(true, {}, template)
-  const templatePath = await reporter.folders.resolveEntityPath(template, 'templates', req)
-  const blobName = `profiles/${templatePath.substring(1)}/${req.context.rootId}.log`
   update.templateShortid = template.shortid
-  const originalBlobName = req.context.profiling.entity.blobName
-  // we want to store the profile into blobName path reflecting the template path so we need to copy the blob to new path now
-  runInProfilerChain(async () => {
-    if (req.context.profiling.logFilePath == null) {
-      const content = await reporter.blobStorage.read(originalBlobName, req)
-      await reporter.blobStorage.write(blobName, content, req)
-      return reporter.blobStorage.remove(originalBlobName, req)
-    }
-  }, req)
-  update.blobName = blobName
 }

@@ -199,2 +232,3 @@

+req.context.skipValidationFor = update
 return reporter.documentStore.collection('profiles').update({

@@ -211,9 +245,11 @@ _id: req.context.profiling.entity._id

 reporter.afterRenderListeners.add('profiler', async (req, res) => {
-  emitProfiles([createProfileMessage({
-    type: 'operationEnd',
-    doDiffs: false,
-    previousEventId: req.context.profiling.lastEventId,
-    previousOperationId: req.context.profiling.lastOperationId,
-    operationId: req.context.profiling.profileStartOperationId
-  }, req)], req)
+  emitProfiles({
+    events: [createProfileMessage({
+      type: 'operationEnd',
+      doDiffs: false,
+      previousEventId: req.context.profiling.lastEventId,
+      previousOperationId: req.context.profiling.lastOperationId,
+      operationId: req.context.profiling.profileStartOperationId
+    }, req)]
+  }, req)

@@ -223,13 +259,21 @@ res.meta.profileId = req.context.profiling?.entity?._id

 runInProfilerChain(async () => {
-  if (req.context.profiling.logFilePath != null) {
-    const content = await fs.readFile(req.context.profiling.logFilePath)
-    await reporter.blobStorage.write(req.context.profiling.entity.blobName, content, req)
-    await fs.unlink(req.context.profiling.logFilePath)
-  }
+  let blobName = `profiles/${req.context.rootId}.log`
+  if (req.context.resolvedTemplate) {
+    const templatePath = await reporter.folders.resolveEntityPath(req.context.resolvedTemplate, 'templates', req)
+    blobName = `profiles/${templatePath.substring(1)}/${req.context.rootId}.log`
+  }
+  const content = await fs.readFile(req.context.profiling.logFilePath)
+  blobName = await reporter.blobStorage.write(blobName, content, req)
+  await fs.unlink(req.context.profiling.logFilePath)
   const update = {
     state: 'success',
-    finishedOn: new Date()
+    finishedOn: new Date(),
+    blobName
   }
   req.context.skipValidationFor = update
   await reporter.documentStore.collection('profiles').update({
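
With direct blob appends gone from the hot path (see the earlier hunks), the profile is always buffered to a temp file and written to blob storage once, under a name that mirrors the template path when one was resolved. For example, with illustrative values:

```js
// assuming resolveEntityPath returned '/invoices/main' for the rendered template
const templatePath = '/invoices/main'
const rootId = 'd5k2p1a'
const blobName = `profiles/${templatePath.substring(1)}/${rootId}.log`
// -> 'profiles/invoices/main/d5k2p1a.log'; without a resolved template
// it stays 'profiles/d5k2p1a.log'
```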

@@ -242,11 +286,12 @@ _id: req.context.profiling.entity._id

-// we don't remove from profiler requests map, because the renderErrorListeners are invoked if the afterRenderListener fails
+// we don't clean the profiler maps here, we do it later in main reporter .render,
+// because the renderErrorListeners can be invoked if the afterRenderListener fails
 })
 reporter.renderErrorListeners.add('profiler', async (req, res, e) => {
   try {
     res.meta.profileId = req.context.profiling?.entity?._id
     if (req.context.profiling?.entity != null) {
-      emitProfiles([{
+      emitProfiles({
+        events: [{
         type: 'error',

@@ -258,30 +303,89 @@ timestamp: new Date().getTime(),

   message: e.message
-  }], req)
+  }]
+}, req)
 runInProfilerChain(async () => {
-  if (req.context.profiling.logFilePath != null) {
-    const content = await fs.readFile(req.context.profiling.logFilePath, 'utf8')
-    await reporter.blobStorage.write(req.context.profiling.entity.blobName, content, req)
-    await fs.unlink(req.context.profiling.logFilePath)
-  }
   const update = {
     state: 'error',
     finishedOn: new Date(),
     error: e.toString()
   }
+  if (req.context.profiling.logFilePath != null) {
+    let blobName = `profiles/${req.context.rootId}.log`
+    if (req.context.resolvedTemplate) {
+      const templatePath = await reporter.folders.resolveEntityPath(req.context.resolvedTemplate, 'templates', req)
+      blobName = `profiles/${templatePath.substring(1)}/${req.context.rootId}.log`
+    }
+    const content = await fs.readFile(req.context.profiling.logFilePath)
+    blobName = await reporter.blobStorage.write(blobName, content, req)
+    await fs.unlink(req.context.profiling.logFilePath)
+    update.blobName = blobName
+  }
   req.context.skipValidationFor = update
   await reporter.documentStore.collection('profiles').update({
     _id: req.context.profiling.entity._id
   }, {
     $set: update
   }, req)
 }, req)
 }
 } finally {
-  profilersMap.delete(req.context.rootId)
-  profilerOperationsChainsMap.delete(req.context.rootId)
+  // we don't clean the profiler maps here, we do it later in main reporter .render,
+  // we do this to ensure a single and clear order
 }
 })
+const configuredPreviously = reporter.logger.__profilerConfigured__ === true
+if (!configuredPreviously) {
+  const originalLog = reporter.logger.log
+  // we want to catch the original request
+  reporter.logger.log = function (level, msg, ...splat) {
+    const [meta] = splat
+    if (typeof meta === 'object' && meta !== null && meta.context?.rootId != null) {
+      profilerLogRequestMap.set(meta.context.rootId, meta)
+    }
+    return originalLog.call(this, level, msg, ...splat)
+  }
+  const mainLogsToProfile = winston.format((info) => {
+    // propagate the request logs occurring on main to the profile
+    if (info.rootId != null && info.fromEmitProfile == null && profilerLogRequestMap.has(info.rootId)) {
+      const req = profilerLogRequestMap.get(info.rootId)
+      emitProfiles({
+        events: [createProfileMessage({
+          type: 'log',
+          level: info.level,
+          message: info.message,
+          previousOperationId: req.context.profiling.lastOperationId
+        }, req)],
+        log: false
+      }, req)
+    }
+    if (info.fromEmitProfile != null) {
+      delete info.fromEmitProfile
+    }
+    return info
+  })
+  reporter.logger.format = winston.format.combine(
+    reporter.logger.format,
+    mainLogsToProfile()
+  )
+  reporter.logger.__profilerConfigured__ = true
+}
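
The main-process log forwarding works by appending one more winston format to the already configured logger; because `reporter.logger.format` is just a value, it can be re-wrapped with `winston.format.combine`. A minimal sketch of extending an existing logger this way, with an illustrative observer body:

```js
const winston = require('winston')

const logger = winston.createLogger({
  format: winston.format.simple(),
  transports: [new winston.transports.Console()]
})

// a pass-through format that can observe every record flowing through
const observe = winston.format((info) => {
  // side effects (e.g. forwarding the record to a profile) would go here
  return info
})

// extend the already configured format, as the profiler does above
logger.format = winston.format.combine(logger.format, observe())
logger.info('still logs normally, now observed')
```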
 let profilesCleanupInterval
 reporter.initializeListeners.add('profiler', async () => {

@@ -292,9 +396,16 @@ reporter.documentStore.collection('profiles').beforeRemoveListeners.add('profiles', async (query, req) => {

 for (const profile of profiles) {
-  await reporter.blobStorage.remove(profile.blobName)
+  if (profile.blobName != null) {
+    await reporter.blobStorage.remove(profile.blobName)
+  }
 }
 })
-profilesCleanupInterval = setInterval(profilesCleanup, reporter.options.profiler.cleanupInterval)
+function profilesCleanupExec () {
+  return reporter._profilesCleanup()
+}
+profilesCleanupInterval = setInterval(profilesCleanupExec, reporter.options.profiler.cleanupInterval)
 profilesCleanupInterval.unref()
-await profilesCleanup()
+await reporter._profilesCleanup()
 })

@@ -313,11 +424,20 @@

 }
 profilersMap.clear()
 profilerOperationsChainsMap.clear()
+profilerRequestMap.clear()
+profilerLogRequestMap.clear()
 })
 let profilesCleanupRunning = false
-async function profilesCleanup () {
+reporter._profilesCleanup = async function profilesCleanup () {
   if (profilesCleanupRunning) {
     return
   }
   profilesCleanupRunning = true
+  let lastRemoveError
   try {

@@ -350,2 +470,26 @@ const profiles = await reporter.documentStore.collection('profiles').find({}).sort({ timestamp: -1 })

 }
+return function cleanProfileInRequest (req) {
+  // - req.context.profiling is empty only on an early error
+  //   that happens before setting the profiler.
+  // - when profiling.mode is "disabled" there is no profiler chain to append
+  //   in both cases we want the clean code to happen immediately
+  if (req.context.profiling?.entity == null || req.context.profiling?.mode === 'disabled') {
+    profilersMap.delete(req.context.rootId)
+    profilerOperationsChainsMap.delete(req.context.rootId)
+    profilerRequestMap.delete(req.context.rootId)
+    profilerLogRequestMap.delete(req.context.rootId)
+    return
+  }
+  // this will get executed always even if some fn in the chain fails
+  runInProfilerChain({
+    cleanFn: () => {
+      profilersMap.delete(req.context.rootId)
+      profilerOperationsChainsMap.delete(req.context.rootId)
+      profilerRequestMap.delete(req.context.rootId)
+      profilerLogRequestMap.delete(req.context.rootId)
+    }
+  }, req)
+}
 }

@@ -199,4 +199,5 @@ /*!

 Templates(this)
-Profiler(this)
+this._cleanProfileInRequest = Profiler(this)
 this.folders = Object.assign(this.folders, Folders(this))

@@ -476,7 +477,13 @@

 Object.assign(res, responseResult)
 await this.afterRenderListeners.fire(req, res)
 res.stream = Readable.from(res.content)
+this._cleanProfileInRequest(req)
 return res
 } catch (err) {
   await this._handleRenderError(req, res, err)
+  this._cleanProfileInRequest(req)
   throw err
 } finally {

@@ -96,2 +96,8 @@ /*!

 }
 },
+createAsyncHelperResult: (v) => {
+  const asyncResultMap = executionAsyncResultsMap.get(context.__executionId)
+  const asyncResultId = nanoid(7)
+  asyncResultMap.set(asyncResultId, v)
+  return `{#asyncHelperResult ${asyncResultId}}`
+}
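
`createAsyncHelperResult` lets a helper hand back a promise-backed value: the real value is parked in a per-execution map and a `{#asyncHelperResult <id>}` marker is returned into the rendered output, to be substituted once the promise resolves. A simplified standalone sketch of the marker technique; this is not the package's actual resolution code:

```js
const { nanoid } = require('nanoid')

const asyncResultMap = new Map()

// store the (possibly async) value, emit a stable placeholder string
function createAsyncHelperResult (v) {
  const asyncResultId = nanoid(7)
  asyncResultMap.set(asyncResultId, v)
  return `{#asyncHelperResult ${asyncResultId}}`
}

// after the template engine finished, swap placeholders for resolved values
async function resolveAsyncHelperResults (content) {
  let output = content
  for (const [id, value] of asyncResultMap) {
    output = output.replace(`{#asyncHelperResult ${id}}`, await value)
  }
  return output
}
```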

@@ -201,3 +207,7 @@ }

 for (const h of Object.keys(topLevelFunctions)) {
-  wrappedTopLevelFunctions[h] = wrapHelperForAsyncSupport(topLevelFunctions[h], asyncResultMap)
+  if (engine.getWrappingHelpersEnabled && engine.getWrappingHelpersEnabled(req) === false) {
+    wrappedTopLevelFunctions[h] = engine.wrapHelper(topLevelFunctions[h], { context })
+  } else {
+    wrappedTopLevelFunctions[h] = wrapHelperForAsyncSupport(topLevelFunctions[h], asyncResultMap)
+  }
 }
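
The wrapping of top-level helpers for async support is now opt-out: an engine can implement `getWrappingHelpersEnabled(req)` and return `false` to receive the lighter `engine.wrapHelper` treatment instead. A hedged sketch of what such an engine hook could look like; the method names come from this diff, everything else is assumed:

```js
const engine = {
  // opt out of async-support wrapping for requests that don't need it
  getWrappingHelpersEnabled (req) {
    return req.context.asyncHelpers === true
  },
  // minimal wrapper used when wrapping is disabled
  wrapHelper (helperFn, { context }) {
    return function (...args) {
      return helperFn.apply(this, args)
    }
  },
  // createContext now receives the request too (see the hunk below)
  createContext (req) {
    return {}
  }
}
```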

@@ -254,3 +264,3 @@

 context: {
-  ...(engine.createContext ? engine.createContext() : {})
+  ...(engine.createContext ? engine.createContext(req) : {})
 },
 userCode: normalizedHelpers,

@@ -39,3 +39,6 @@ const extend = require('node.extend.without.arrays')

 profilingInfo.batch = []
-await this.reporter.executeMainAction('profile', batch, profilingInfo.req).catch((e) => this.reporter.logger.error(e, profilingInfo.req))
+if (batch.length > 0) {
+  await this.reporter.executeMainAction('profile', batch, profilingInfo.req).catch((e) => this.reporter.logger.error(e, profilingInfo.req))
+}
 }

@@ -162,3 +165,6 @@ }

 this.profiledRequestsMap.delete(req.context.rootId)
-await this.reporter.executeMainAction('profile', profilingInfo.batch, req)
+if (profilingInfo.batch.length > 0) {
+  await this.reporter.executeMainAction('profile', profilingInfo.batch, req)
+}
 }

@@ -133,3 +133,3 @@ /*!

-reporter.logger.info(`Starting rendering request ${request.context.reportCounter} (user: ${(request.context.user ? request.context.user.username : 'null')})`, request)
+reporter.logger.info(`Starting rendering request ${request.context.reportCounter} (user: ${(request.context.user ? request.context.user.name : 'null')})`, request)
 // TODO

@@ -37,3 +37,5 @@ const LRU = require('lru-cache')

 onLog: (log) => {
-  reporter.logger[log.level](log.message, { ...req, timestamp: log.timestamp })
+  // we mark any log done in sandbox as userLevel: true, this allows us to detect which logs belongs to user
+  // and can potentially contain sensitive information
+  reporter.logger[log.level](log.message, { ...req, timestamp: log.timestamp, userLevel: true })
 },
 formatError: (error, moduleName) => {

 {
   "name": "@jsreport/jsreport-core",
-  "version": "3.6.1",
+  "version": "3.7.0",
   "description": "javascript based business reporting",

@@ -20,2 +20,8 @@ "keywords": [

 },
+"maintainers": [
+  {
+    "name": "pofider",
+    "email": "jan.blaha@hotmail.com"
+  }
+],
 "main": "index.js",

@@ -26,3 +32,4 @@ "files": [

 "test/store/common.js",
-"test/blobStorage/common.js"
+"test/blobStorage/common.js",
+"test/extensions/validExtensions/listeners"
 ],

@@ -37,4 +44,6 @@ "scripts": {

 "@babel/traverse": "7.12.9",
+"@colors/colors": "1.5.0",
 "@jsreport/advanced-workers": "1.2.3",
 "@jsreport/mingo": "2.4.1",
+"@jsreport/reap": "0.1.0",
 "ajv": "6.12.6",

@@ -62,3 +71,2 @@ "app-root-path": "3.0.0",

 "node.extend.without.arrays": "1.1.6",
-"@jsreport/reap": "0.1.0",
 "semver": "7.3.5",

@@ -71,4 +79,4 @@ "serializator": "1.0.2",

 "vm2": "3.9.9",
-"winston": "3.3.3",
-"winston-transport": "4.4.0"
+"winston": "3.8.1",
+"winston-transport": "4.5.0"
 },

@@ -85,8 +93,2 @@ "devDependencies": {

 },
-"maintainers": [
-  {
-    "name": "pofider",
-    "email": "jan.blaha@hotmail.com"
-  }
-],
 "standard": {

@@ -285,2 +285,16 @@ # @jsreport/jsreport-core

+### 3.7.0
+- add support for multiple source entities when copying and moving
+- fix some issues with blobs and profiles
+- format user logs differently on stdout
+- fix logging of req as http.IncomingRequest
+- fix profile compatibility with jsreport container based execution
+- fix memory leak in profiling
+- add support for logs of main reporter to show in profile
 ### 3.6.1
 - update @jsreport/advanced-workers to fix bug with cloning req.data when `trustUserCode` is true
 ### 3.6.0

@@ -231,3 +231,3 @@ const should = require('should')

 await getCollection(colName).insert({ name: '1', engine: 'none', recipe: 'a' })
-await getCollection(colName).insert({ name: '2', engine: 'none', recipe: 'a' })
+await getCollection(colName).insert({ name: '2', engine: 'test2', recipe: 'a' })
 const res = await getCollection(colName).update({ recipe: 'a' }, { $set: { engine: 'test2' } })
 res.should.be.eql(2)
