🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every Appsec Team. Learn more
Socket
DemoInstallSign in
Socket

dd-trace

Package Overview
Dependencies
Maintainers
1
Versions
635
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

dd-trace - npm Package Compare versions

Comparing version

to
6.0.0-pre-d6f5ded

initialize.mjs

14

ci/init.js

@@ -6,2 +6,4 @@ /* eslint-disable no-console */

const isJestWorker = !!process.env.JEST_WORKER_ID
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
const isMochaWorker = !!process.env.MOCHA_WORKER_ID

@@ -41,2 +43,14 @@ const options = {

if (isCucumberWorker) {
options.experimental = {
exporter: 'cucumber_worker'
}
}
if (isMochaWorker) {
options.experimental = {
exporter: 'mocha_worker'
}
}
if (shouldInit) {

@@ -43,0 +57,0 @@ tracer.init(options)

4

ext/exporters.d.ts

@@ -6,5 +6,7 @@ declare const exporters: {

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker',
MOCHA_WORKER: 'mocha_worker'
}
export = exporters

@@ -7,3 +7,5 @@ 'use strict'

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker',
MOCHA_WORKER: 'mocha_worker'
}

@@ -8,4 +8,5 @@ import * as opentracing from 'opentracing'

LOG: 'log'
TEXT_MAP_DSM: 'text_map_dsm'
}
export = formats

@@ -7,3 +7,4 @@ 'use strict'

BINARY: 'binary',
LOG: 'log'
LOG: 'log',
TEXT_MAP_DSM: 'text_map_dsm'
}

@@ -13,2 +13,3 @@ declare const tags: {

BASE_SERVICE: '_dd.base_service'
DD_PARENT_ID: '_dd.parent_id'
HTTP_URL: 'http.url'

@@ -15,0 +16,0 @@ HTTP_METHOD: 'http.method'

@@ -16,2 +16,3 @@ 'use strict'

BASE_SERVICE: '_dd.base_service',
DD_PARENT_ID: '_dd.parent_id',

@@ -18,0 +19,0 @@ // HTTP

'use strict'
const tracer = require('.')
const path = require('path')
const Module = require('module')
const semver = require('semver')
const log = require('./packages/dd-trace/src/log')
const { isTrue } = require('./packages/dd-trace/src/util')
const telemetry = require('./packages/dd-trace/src/telemetry/init-telemetry')
tracer.init()
let initBailout = false
let clobberBailout = false
const forced = isTrue(process.env.DD_INJECT_FORCE)
module.exports = tracer
if (process.env.DD_INJECTION_ENABLED) {
// If we're running via single-step install, and we're not in the app's
// node_modules, then we should not initialize the tracer. This prevents
// single-step-installed tracer from clobbering the manually-installed tracer.
let resolvedInApp
const entrypoint = process.argv[1]
try {
resolvedInApp = Module.createRequire(entrypoint).resolve('dd-trace')
} catch (e) {
// Ignore. If we can't resolve the module, we assume it's not in the app.
}
if (resolvedInApp) {
const ourselves = path.join(__dirname, 'index.js')
if (ourselves !== resolvedInApp) {
clobberBailout = true
}
}
// If we're running via single-step install, and the runtime doesn't match
// the engines field in package.json, then we should not initialize the tracer.
if (!clobberBailout) {
const { engines } = require('./package.json')
const version = process.versions.node
if (!semver.satisfies(version, engines.node)) {
initBailout = true
telemetry([
{ name: 'abort', tags: ['reason:incompatible_runtime'] },
{ name: 'abort.runtime', tags: [] }
])
log.info('Aborting application instrumentation due to incompatible_runtime.')
log.info(`Found incompatible runtime nodejs ${version}, Supported runtimes: nodejs ${engines.node}.`)
if (forced) {
log.info('DD_INJECT_FORCE enabled, allowing unsupported runtimes and continuing.')
}
}
}
}
if (!clobberBailout && (!initBailout || forced)) {
const tracer = require('.')
tracer.init()
module.exports = tracer
telemetry('complete', [`injection_forced:${forced && initBailout ? 'true' : 'false'}`])
log.info('Application instrumentation bootstrapping complete')
}
{
"name": "dd-trace",
"version": "6.0.0-pre-ce51217",
"version": "6.0.0-pre-d6f5ded",
"description": "Datadog APM tracing client for JavaScript",

@@ -16,17 +16,18 @@ "main": "index.js",

"lint": "node scripts/check_licenses.js && eslint . && yarn audit --groups dependencies",
"lint-fix": "node scripts/check_licenses.js && eslint . --fix && yarn audit --groups dependencies",
"services": "node ./scripts/install_plugin_modules && node packages/dd-trace/test/setup/services",
"test": "SERVICES=* yarn services && mocha --colors --exit --expose-gc 'packages/dd-trace/test/setup/node.js' 'packages/*/test/**/*.spec.js'",
"test:appsec": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" \"packages/dd-trace/test/appsec/**/*.spec.js\"",
"test": "SERVICES=* yarn services && mocha --expose-gc 'packages/dd-trace/test/setup/node.js' 'packages/*/test/**/*.spec.js'",
"test:appsec": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" \"packages/dd-trace/test/appsec/**/*.spec.js\"",
"test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec",
"test:appsec:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"",
"test:appsec:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"",
"test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins",
"test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"",
"test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"",
"test:instrumentations": "mocha --colors -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'",
"test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'",
"test:instrumentations:ci": "nyc --no-clean --include 'packages/datadog-instrumentations/src/**/*.js' -- npm run test:instrumentations",
"test:core": "tap \"packages/datadog-core/test/**/*.spec.js\"",
"test:core:ci": "npm run test:core -- --coverage --nyc-arg=--include=\"packages/datadog-core/src/**/*.js\"",
"test:lambda": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/lambda/**/*.spec.js\"",
"test:lambda": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/lambda/**/*.spec.js\"",
"test:lambda:ci": "nyc --no-clean --include \"packages/dd-trace/src/lambda/**/*.js\" -- npm run test:lambda",
"test:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\"",
"test:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\"",
"test:plugins:ci": "yarn services && nyc --no-clean --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS)).js\" --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS))/**/*.js\" --include \"packages/datadog-plugin-@($(echo $PLUGINS))/src/**/*.js\" -- npm run test:plugins",

@@ -36,15 +37,17 @@ "test:plugins:upstream": "node ./packages/dd-trace/test/plugins/suite.js",

"test:profiler:ci": "npm run test:profiler -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/profiling/**/*.js\"",
"test:integration": "mocha --colors --timeout 30000 \"integration-tests/*.spec.js\"",
"test:integration:cucumber": "mocha --colors --timeout 30000 \"integration-tests/cucumber/*.spec.js\"",
"test:integration:cypress": "mocha --colors --timeout 30000 \"integration-tests/cypress/*.spec.js\"",
"test:integration:playwright": "mocha --colors --timeout 30000 \"integration-tests/playwright/*.spec.js\"",
"test:integration:selenium": "mocha --colors --timeout 30000 \"integration-tests/selenium/*.spec.js\"",
"test:integration:profiler": "mocha --colors --timeout 90000 \"integration-tests/profiler/*.spec.js\"",
"test:integration:serverless": "mocha --colors --timeout 30000 \"integration-tests/serverless/*.spec.js\"",
"test:integration:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:unit:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:shimmer": "mocha --colors 'packages/datadog-shimmer/test/**/*.spec.js'",
"test:shimmer:ci": "nyc --no-clean --include 'packages/datadog-shimmer/src/**/*.js' -- npm run test:shimmer",
"leak:core": "node ./scripts/install_plugin_modules && (cd packages/memwatch && yarn) && NODE_PATH=./packages/memwatch/node_modules node --no-warnings ./node_modules/.bin/tape 'packages/dd-trace/test/leak/**/*.js'",
"leak:plugins": "yarn services && (cd packages/memwatch && yarn) && NODE_PATH=./packages/memwatch/node_modules node --no-warnings ./node_modules/.bin/tape \"packages/datadog-plugin-@($(echo $PLUGINS))/test/leak.js\""
"test:integration": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/*.spec.js\"",
"test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"",
"test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"",
"test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"",
"test:integration:jest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/jest/*.spec.js\"",
"test:integration:mocha": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/mocha/*.spec.js\"",
"test:integration:playwright": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/playwright/*.spec.js\"",
"test:integration:selenium": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/selenium/*.spec.js\"",
"test:integration:vitest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/vitest/*.spec.js\"",
"test:integration:profiler": "mocha --timeout 180000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/profiler/*.spec.js\"",
"test:integration:serverless": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/serverless/*.spec.js\"",
"test:integration:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:unit:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:shimmer": "mocha 'packages/datadog-shimmer/test/**/*.spec.js'",
"test:shimmer:ci": "nyc --no-clean --include 'packages/datadog-shimmer/src/**/*.js' -- npm run test:shimmer"
},

@@ -75,9 +78,9 @@ "repository": {

"dependencies": {
"@datadog/native-appsec": "7.1.1",
"@datadog/native-iast-rewriter": "2.3.0",
"@datadog/native-iast-taint-tracking": "1.7.0",
"@datadog/native-appsec": "8.1.1",
"@datadog/native-iast-rewriter": "2.4.1",
"@datadog/native-iast-taint-tracking": "3.1.0",
"@datadog/native-metrics": "^2.0.0",
"@datadog/pprof": "5.2.0",
"@datadog/pprof": "5.3.0",
"@datadog/sketches-js": "^2.1.0",
"@opentelemetry/api": "^1.0.0",
"@opentelemetry/api": ">=1.0.0 <1.9.0",
"@opentelemetry/core": "^1.14.0",

@@ -87,7 +90,7 @@ "crypto-randomuuid": "^1.0.0",

"ignore": "^5.2.4",
"import-in-the-middle": "^1.7.3",
"import-in-the-middle": "^1.8.1",
"int64-buffer": "^0.1.9",
"ipaddr.js": "^2.1.0",
"istanbul-lib-coverage": "3.2.0",
"jest-docblock": "^29.7.0",
"jsonpath-plus": "^9.0.0",
"koalas": "^1.0.2",

@@ -97,11 +100,10 @@ "limiter": "1.1.5",

"lru-cache": "^7.14.0",
"methods": "^1.1.2",
"module-details-from-path": "^1.0.3",
"msgpack-lite": "^0.1.26",
"node-abort-controller": "^3.1.1",
"opentracing": ">=0.12.1",
"path-to-regexp": "^0.1.2",
"path-to-regexp": "^0.1.10",
"pprof-format": "^2.1.0",
"protobufjs": "^7.2.5",
"retry": "^0.13.1",
"rfdc": "^1.3.1",
"semver": "^7.5.4",

@@ -112,6 +114,6 @@ "shell-quote": "^1.8.1",

"devDependencies": {
"@types/node": ">=18",
"@types/node": "^16.18.103",
"autocannon": "^4.5.2",
"aws-sdk": "^2.1446.0",
"axios": "^1.6.7",
"axios": "^1.7.4",
"benchmark": "^2.1.4",

@@ -125,9 +127,8 @@ "body-parser": "^1.20.2",

"esbuild": "0.16.12",
"eslint": "^8.23.0",
"eslint": "^8.57.0",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-mocha": "^10.4.3",
"eslint-plugin-n": "^16.6.2",
"eslint-plugin-promise": "^6.4.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-mocha": "^10.1.0",
"eslint-plugin-n": "^15.7.0",
"eslint-plugin-promise": "^3.6.0",
"eslint-plugin-standard": "^3.0.1",
"express": "^4.18.2",

@@ -149,4 +150,4 @@ "get-port": "^3.2.0",

"tap": "^16.3.7",
"tape": "^5.6.5"
"tiktoken": "^1.0.15"
}
}

@@ -5,11 +5,2 @@ 'use strict'

const semver = require('semver')
// https://github.com/nodejs/node/pull/33801
const hasJavaScriptAsyncHooks = semver.satisfies(process.versions.node, '>=14.5')
if (hasJavaScriptAsyncHooks) {
module.exports = require('./async_resource')
} else {
module.exports = require('./async_hooks')
}
module.exports = require('./async_resource')

@@ -7,5 +7,10 @@ 'use strict'

const hooks = require('../datadog-instrumentations/src/helpers/hooks.js')
const extractPackageAndModulePath = require('../datadog-instrumentations/src/utils/src/extract-package-and-module-path')
for (const hook of Object.values(hooks)) {
hook()
if (typeof hook === 'object') {
hook.fn()
} else {
hook()
}
}

@@ -25,3 +30,2 @@

const NM = 'node_modules/'
const INSTRUMENTED = Object.keys(instrumentations)

@@ -80,3 +84,6 @@ const RAW_BUILTINS = require('module').builtinModules

} catch (err) {
console.warn(`MISSING: Unable to find "${args.path}". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${args.path}".` +
"Unless it's dead code this could cause a problem at runtime.")
}
return

@@ -99,3 +106,6 @@ }

if (!internal) {
console.warn(`MISSING: Unable to find "${extracted.pkg}/package.json". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${extracted.pkg}/package.json".` +
"Unless it's dead code this could cause a problem at runtime.")
}
}

@@ -182,31 +192,1 @@ return

}
/**
* For a given full path to a module,
* return the package name it belongs to and the local path to the module
* input: '/foo/node_modules/@co/stuff/foo/bar/baz.js'
* output: { pkg: '@co/stuff', path: 'foo/bar/baz.js' }
*/
function extractPackageAndModulePath (fullPath) {
const nm = fullPath.lastIndexOf(NM)
if (nm < 0) {
return { pkg: null, path: null }
}
const subPath = fullPath.substring(nm + NM.length)
const firstSlash = subPath.indexOf('/')
if (subPath[0] === '@') {
const secondSlash = subPath.substring(firstSlash + 1).indexOf('/')
return {
pkg: subPath.substring(0, firstSlash + 1 + secondSlash),
path: subPath.substring(firstSlash + 1 + secondSlash + 1)
}
}
return {
pkg: subPath.substring(0, firstSlash),
path: subPath.substring(firstSlash + 1)
}
}

@@ -46,3 +46,3 @@ 'use strict'

commandFactory => {
return shimmer.wrap(commandFactory, wrapCreateCommand(commandFactory))
return shimmer.wrapFunction(commandFactory, f => wrapCreateCommand(f))
})

@@ -11,2 +11,5 @@ 'use strict'

const { NODE_MAJOR, NODE_MINOR } = require('../../../version')
const MIN_VERSION = ((NODE_MAJOR > 22) || (NODE_MAJOR === 22 && NODE_MINOR >= 2)) ? '>=0.5.3' : '>=0.5.0'
const startCh = channel('apm:amqplib:command:start')

@@ -18,3 +21,3 @@ const finishCh = channel('apm:amqplib:command:finish')

addHook({ name: 'amqplib', file: 'lib/defs.js', versions: ['>=0.5'] }, defs => {
addHook({ name: 'amqplib', file: 'lib/defs.js', versions: [MIN_VERSION] }, defs => {
methods = Object.keys(defs)

@@ -27,3 +30,3 @@ .filter(key => Number.isInteger(defs[key]))

addHook({ name: 'amqplib', file: 'lib/channel.js', versions: ['>=0.5'] }, channel => {
addHook({ name: 'amqplib', file: 'lib/channel.js', versions: [MIN_VERSION] }, channel => {
shimmer.wrap(channel.Channel.prototype, 'sendImmediately', sendImmediately => function (method, fields) {

@@ -30,0 +33,0 @@ return instrument(sendImmediately, this, arguments, methods[method], fields)

'use strict'
const { AbortController } = require('node-abort-controller')
const { addHook } = require('./helpers/instrument')

@@ -5,0 +4,0 @@ const shimmer = require('../../datadog-shimmer')

'use strict'
const { AbortController } = require('node-abort-controller')
const dc = require('dc-polyfill')

@@ -59,3 +58,3 @@

return shimmer.wrap(originalMiddleware, function (req, res, next) {
return shimmer.wrapFunction(originalMiddleware, originalMiddleware => function (req, res, next) {
if (!graphqlMiddlewareChannel.start.hasSubscribers) {

@@ -62,0 +61,0 @@ return originalMiddleware.apply(this, arguments)

@@ -23,3 +23,4 @@ 'use strict'

this.on('complete', innerAr.bind(response => {
channel(`apm:aws:request:complete:${channelSuffix}`).publish({ response })
const cbExists = typeof cb === 'function'
channel(`apm:aws:request:complete:${channelSuffix}`).publish({ response, cbExists })
}))

@@ -78,3 +79,3 @@

if (typeof cb === 'function') {
args[args.length - 1] = function (err, result) {
args[args.length - 1] = shimmer.wrapFunction(cb, cb => function (err, result) {
const message = getMessage(request, err, result)

@@ -93,3 +94,3 @@

})
}
})
} else { // always a promise

@@ -118,3 +119,3 @@ return send.call(this, command, ...args)

// eslint-disable-next-line n/handle-callback-err
return function wrappedCb (err, response) {
return shimmer.wrapFunction(cb, cb => function wrappedCb (err, response) {
const obj = { request, response }

@@ -147,3 +148,3 @@ return ar.runInAsyncScope(() => {

})
}
})
}

@@ -170,4 +171,7 @@

's3',
'sfn',
'sns',
'sqs'
'sqs',
'states',
'stepfunctions'
].includes(name)

@@ -174,0 +178,0 @@ ? name

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const shimmer = require('../../datadog-shimmer')
const { channel, addHook } = require('./helpers/instrument')
const { channel, addHook, AsyncResource } = require('./helpers/instrument')

@@ -10,3 +9,3 @@ const bodyParserReadCh = channel('datadog:body-parser:read:finish')

function publishRequestBodyAndNext (req, res, next) {
return function () {
return shimmer.wrapFunction(next, next => function () {
if (bodyParserReadCh.hasSubscribers && req) {

@@ -22,3 +21,3 @@ const abortController = new AbortController()

return next.apply(this, arguments)
}
})
}

@@ -29,5 +28,17 @@

file: 'lib/read.js',
versions: ['>=1.4.0']
versions: ['>=1.4.0 <1.20.0']
}, read => {
return shimmer.wrap(read, function (req, res, next) {
return shimmer.wrapFunction(read, read => function (req, res, next) {
const nextResource = new AsyncResource('bound-anonymous-fn')
arguments[2] = nextResource.bind(publishRequestBodyAndNext(req, res, next))
return read.apply(this, arguments)
})
})
addHook({
name: 'body-parser',
file: 'lib/read.js',
versions: ['>=1.20.0']
}, read => {
return shimmer.wrapFunction(read, read => function (req, res, next) {
arguments[2] = publishRequestBodyAndNext(req, res, next)

@@ -34,0 +45,0 @@ return read.apply(this, arguments)

@@ -183,3 +183,3 @@ 'use strict'

function wrapCallback (finishCh, errorCh, asyncResource, callback) {
return asyncResource.bind(function (err) {
return shimmer.wrapFunction(callback, callback => asyncResource.bind(function (err) {
finish(finishCh, errorCh, err)

@@ -189,3 +189,3 @@ if (callback) {

}
})
}))
}

@@ -192,0 +192,0 @@

@@ -42,6 +42,6 @@ 'use strict'

childProcessInfo.command = childProcessInfo.command + ' ' + args[1].join(' ')
if (args[2] != null && typeof args[2] === 'object') {
if (args[2] !== null && typeof args[2] === 'object') {
childProcessInfo.options = args[2]
}
} else if (args[1] != null && typeof args[1] === 'object') {
} else if (args[1] !== null && typeof args[1] === 'object') {
childProcessInfo.options = args[1]

@@ -137,4 +137,4 @@ }

const wrapedChildProcessCustomPromisifyMethod =
shimmer.wrap(childProcessMethod[util.promisify.custom],
wrapChildProcessCustomPromisifyMethod(childProcessMethod[util.promisify.custom]), shell)
shimmer.wrapFunction(childProcessMethod[util.promisify.custom],
promisify => wrapChildProcessCustomPromisifyMethod(promisify, shell))

@@ -141,0 +141,0 @@ // should do it in this way because the original property is readonly

@@ -62,3 +62,3 @@ 'use strict'

return shimmer.wrap(original, function () {
return shimmer.wrapFunction(original, original => function () {
if (!enterChannel.hasSubscribers) return original.apply(this, arguments)

@@ -94,3 +94,3 @@

function wrapNext (req, next) {
return function (error) {
return shimmer.wrapFunction(next, next => function (error) {
if (error) {

@@ -104,7 +104,7 @@ errorChannel.publish({ req, error })

next.apply(this, arguments)
}
})
}
addHook({ name: 'connect', versions: ['>=3'] }, connect => {
return shimmer.wrap(connect, wrapConnect(connect))
return shimmer.wrapFunction(connect, connect => wrapConnect(connect))
})

@@ -111,0 +111,0 @@

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const shimmer = require('../../datadog-shimmer')

@@ -10,3 +9,3 @@ const { channel, addHook } = require('./helpers/instrument')

function publishRequestCookieAndNext (req, res, next) {
return function cookieParserWrapper () {
return shimmer.wrapFunction(next, next => function cookieParserWrapper () {
if (cookieParserReadCh.hasSubscribers && req) {

@@ -23,3 +22,3 @@ const abortController = new AbortController()

return next.apply(this, arguments)
}
})
}

@@ -31,6 +30,6 @@

}, cookieParser => {
return shimmer.wrap(cookieParser, function () {
return shimmer.wrapFunction(cookieParser, cookieParser => function () {
const cookieMiddleware = cookieParser.apply(this, arguments)
return shimmer.wrap(cookieMiddleware, function (req, res, next) {
return shimmer.wrapFunction(cookieMiddleware, cookieMiddleware => function (req, res, next) {
arguments[2] = publishRequestCookieAndNext(req, res, next)

@@ -37,0 +36,0 @@ return cookieMiddleware.apply(this, arguments)

@@ -40,3 +40,3 @@ 'use strict'

}
return shimmer.wrap(_maybeInvoke, wrapped)
return wrapped
}

@@ -55,3 +55,3 @@

}
return shimmer.wrap(query, wrapped)
return wrapped
}

@@ -81,3 +81,3 @@

arguments[callbackIndex] = asyncResource.bind(function (error, result) {
arguments[callbackIndex] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error, result) {
if (error) {

@@ -88,3 +88,3 @@ errorCh.publish(error)

return cb.apply(this, arguments)
})
}))

@@ -101,3 +101,3 @@ try {

}
return shimmer.wrap(fn, wrapped)
return wrapped
}

@@ -126,3 +126,3 @@

const cb = callbackResource.bind(args[cbIndex])
args[cbIndex] = asyncResource.bind(function (error, result) {
args[cbIndex] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error, result) {
if (error) {

@@ -133,3 +133,3 @@ errorCh.publish(error)

return cb.apply(thisArg, arguments)
})
}))
}

@@ -176,4 +176,4 @@ const res = fn.apply(thisArg, args)

Bucket.prototype._maybeInvoke = wrapMaybeInvoke(Bucket.prototype._maybeInvoke)
Bucket.prototype.query = wrapQuery(Bucket.prototype.query)
shimmer.wrap(Bucket.prototype, '_maybeInvoke', maybeInvoke => wrapMaybeInvoke(maybeInvoke))
shimmer.wrap(Bucket.prototype, 'query', query => wrapQuery(query))

@@ -214,3 +214,3 @@ shimmer.wrap(Bucket.prototype, '_n1qlReq', _n1qlReq => function (host, q, adhoc, emitter) {

wrapAllNames(['upsert', 'insert', 'replace', 'append', 'prepend'], name => {
Bucket.prototype[name] = wrap(`apm:couchbase:${name}`, Bucket.prototype[name])
shimmer.wrap(Bucket.prototype, name, fn => wrap(`apm:couchbase:${name}`, fn))
})

@@ -222,4 +222,4 @@

addHook({ name: 'couchbase', file: 'lib/cluster.js', versions: ['^2.6.12'] }, Cluster => {
Cluster.prototype._maybeInvoke = wrapMaybeInvoke(Cluster.prototype._maybeInvoke)
Cluster.prototype.query = wrapQuery(Cluster.prototype.query)
shimmer.wrap(Cluster.prototype, '_maybeInvoke', maybeInvoke => wrapMaybeInvoke(maybeInvoke))
shimmer.wrap(Cluster.prototype, 'query', query => wrapQuery(query))

@@ -226,0 +226,0 @@ shimmer.wrap(Cluster.prototype, 'openBucket', openBucket => {

@@ -9,2 +9,3 @@ 'use strict'

const testStartCh = channel('ci:cucumber:test:start')
const testRetryCh = channel('ci:cucumber:test:retry')
const testFinishCh = channel('ci:cucumber:test:finish') // used for test steps too

@@ -26,4 +27,8 @@

const workerReportTraceCh = channel('ci:cucumber:worker-report:trace')
const itrSkippedSuitesCh = channel('ci:cucumber:itr:skipped-suites')
const getCodeCoverageCh = channel('ci:nyc:get-coverage')
const {

@@ -34,3 +39,4 @@ getCoveredFilenamesFromCoverage,

fromCoverageMapToCoverage,
getTestSuitePath
getTestSuitePath,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE
} = require('../../dd-trace/src/plugins/util/test')

@@ -50,5 +56,9 @@

const numRetriesByPickleId = new Map()
const numAttemptToAsyncResource = new Map()
let pickleByFile = {}
const pickleResultByFile = {}
const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
let skippableSuites = []

@@ -58,5 +68,10 @@ let itrCorrelationId = ''

let isUnskippable = false
let isSuitesSkippingEnabled = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let isFlakyTestRetriesEnabled = false
let numTestRetries = 0
let knownTests = []
let skippedSuites = []
let isSuitesSkipped = false

@@ -114,2 +129,39 @@ function getSuiteStatusFromTestStatuses (testStatuses) {

function getChannelPromise (channelToPublishTo) {
return new Promise(resolve => {
sessionAsyncResource.runInAsyncScope(() => {
channelToPublishTo.publish({ onDone: resolve })
})
})
}
function getFilteredPickles (runtime, suitesToSkip) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
const testSuitePath = getTestSuitePath(test.uri, process.cwd())
const isUnskippable = isMarkedAsUnskippable(test)
const isSkipped = suitesToSkip.includes(testSuitePath)
if (isSkipped && !isUnskippable) {
acc.skippedSuites.add(testSuitePath)
} else {
acc.picklesToRun.push(pickleId)
}
return acc
}, { skippedSuites: new Set(), picklesToRun: [] })
}
function getPickleByFile (runtime) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
if (acc[test.uri]) {
acc[test.uri].push(test)
} else {
acc[test.uri] = [test]
}
return acc
}, {})
}
function wrapRun (pl, isLatestVersion) {

@@ -125,42 +177,76 @@ if (patched.has(pl)) return

let numAttempt = 0
const asyncResource = new AsyncResource('bound-anonymous-fn')
return asyncResource.runInAsyncScope(() => {
const testFileAbsolutePath = this.pickle.uri
const testSourceLine = this.gherkinDocument?.feature?.location?.line
numAttemptToAsyncResource.set(numAttempt, asyncResource)
testStartCh.publish({
testName: this.pickle.name,
testFileAbsolutePath,
testSourceLine
})
try {
const promise = run.apply(this, arguments)
promise.finally(() => {
const result = this.getWorstStepResult()
const { status, skipReason, errorMessage } = isLatestVersion
? getStatusFromResultLatest(result)
: getStatusFromResult(result)
const testFileAbsolutePath = this.pickle.uri
if (lastStatusByPickleId.has(this.pickle.id)) {
lastStatusByPickleId.get(this.pickle.id).push(status)
} else {
lastStatusByPickleId.set(this.pickle.id, [status])
const testSourceLine = this.gherkinDocument?.feature?.location?.line
const testStartPayload = {
testName: this.pickle.name,
testFileAbsolutePath,
testSourceLine,
isParallel: !!process.env.CUCUMBER_WORKER_ID
}
asyncResource.runInAsyncScope(() => {
testStartCh.publish(testStartPayload)
})
try {
this.eventBroadcaster.on('envelope', shimmer.wrapFunction(null, () => (testCase) => {
// Only supported from >=8.0.0
if (testCase?.testCaseFinished) {
const { testCaseFinished: { willBeRetried } } = testCase
if (willBeRetried) { // test case failed and will be retried
const failedAttemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
failedAttemptAsyncResource.runInAsyncScope(() => {
testRetryCh.publish(numAttempt++ > 0) // the current span will be finished and a new one will be created
})
const newAsyncResource = new AsyncResource('bound-anonymous-fn')
numAttemptToAsyncResource.set(numAttempt, newAsyncResource)
newAsyncResource.runInAsyncScope(() => {
testStartCh.publish(testStartPayload) // a new span will be created
})
}
let isNew = false
let isEfdRetry = false
if (isEarlyFlakeDetectionEnabled && status !== 'skip') {
const numRetries = numRetriesByPickleId.get(this.pickle.id)
}
}))
let promise
isNew = numRetries !== undefined
isEfdRetry = numRetries > 0
}
testFinishCh.publish({ status, skipReason, errorMessage, isNew, isEfdRetry })
asyncResource.runInAsyncScope(() => {
promise = run.apply(this, arguments)
})
promise.finally(() => {
const result = this.getWorstStepResult()
const { status, skipReason, errorMessage } = isLatestVersion
? getStatusFromResultLatest(result)
: getStatusFromResult(result)
if (lastStatusByPickleId.has(this.pickle.id)) {
lastStatusByPickleId.get(this.pickle.id).push(status)
} else {
lastStatusByPickleId.set(this.pickle.id, [status])
}
let isNew = false
let isEfdRetry = false
if (isEarlyFlakeDetectionEnabled && status !== 'skip') {
const numRetries = numRetriesByPickleId.get(this.pickle.id)
isNew = numRetries !== undefined
isEfdRetry = numRetries > 0
}
const attemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
attemptAsyncResource.runInAsyncScope(() => {
testFinishCh.publish({ status, skipReason, errorMessage, isNew, isEfdRetry, isFlakyRetry: numAttempt > 0 })
})
return promise
} catch (err) {
errorCh.publish(err)
throw err
}
})
})
return promise
} catch (err) {
errorCh.publish(err)
throw err
}
})

@@ -203,8 +289,2 @@ shimmer.wrap(pl.prototype, 'runStep', runStep => function () {

function pickleHook (PickleRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return PickleRunner
}
const pl = PickleRunner.default

@@ -218,8 +298,2 @@

function testCaseHook (TestCaseRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return TestCaseRunner
}
const pl = TestCaseRunner.default

@@ -232,44 +306,3 @@

addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
/**
 * Partitions the runtime's pickles into those that will run and the suites
 * that are skipped because ITR marked them skippable.
 * A suite marked as unskippable always runs, even if it appears in
 * `suitesToSkip`.
 *
 * @param {Object} runtime cucumber runtime exposing `pickleIds` and `eventDataCollector`
 * @param {string[]} suitesToSkip relative suite paths that ITR says can be skipped
 * @returns {{ skippedSuites: Set<string>, picklesToRun: string[] }}
 */
function getFilteredPickles (runtime, suitesToSkip) {
  const skippedSuites = new Set()
  const picklesToRun = []

  for (const pickleId of runtime.pickleIds) {
    const test = runtime.eventDataCollector.getPickle(pickleId)
    const testSuitePath = getTestSuitePath(test.uri, process.cwd())
    const shouldSkip = suitesToSkip.includes(testSuitePath) && !isMarkedAsUnskippable(test)

    if (shouldSkip) {
      skippedSuites.add(testSuitePath)
    } else {
      picklesToRun.push(pickleId)
    }
  }

  return { skippedSuites, picklesToRun }
}
/**
 * Groups the runtime's pickles by the feature file (`uri`) they belong to.
 *
 * @param {Object} runtime cucumber runtime exposing `pickleIds` and `eventDataCollector`
 * @returns {Object<string, Object[]>} map from file uri to the pickles it contains
 */
function getPickleByFile (runtime) {
  const pickleByFile = {}

  for (const pickleId of runtime.pickleIds) {
    const test = runtime.eventDataCollector.getPickle(pickleId)
    const existing = pickleByFile[test.uri]
    if (existing) {
      existing.push(test)
    } else {
      pickleByFile[test.uri] = [test]
    }
  }

  return pickleByFile
}
function getWrappedStart (start, frameworkVersion) {
function getWrappedStart (start, frameworkVersion, isParallel = false) {
return async function () {

@@ -279,26 +312,14 @@ if (!libraryConfigurationCh.hasSubscribers) {

}
const asyncResource = new AsyncResource('bound-anonymous-fn')
let onDone
let errorSkippableRequest
const configPromise = new Promise(resolve => {
onDone = resolve
})
const configurationResponse = await getChannelPromise(libraryConfigurationCh)
asyncResource.runInAsyncScope(() => {
libraryConfigurationCh.publish({ onDone })
})
const configurationResponse = await configPromise
isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled
isFlakyTestRetriesEnabled = configurationResponse.libraryConfig?.isFlakyTestRetriesEnabled
numTestRetries = configurationResponse.libraryConfig?.flakyTestRetriesCount
if (isEarlyFlakeDetectionEnabled) {
const knownTestsPromise = new Promise(resolve => {
onDone = resolve
})
asyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
const knownTestsResponse = await knownTestsPromise
const knownTestsResponse = await getChannelPromise(knownTestsCh)
if (!knownTestsResponse.err) {

@@ -311,31 +332,22 @@ knownTests = knownTestsResponse.knownTests

const skippableSuitesPromise = new Promise(resolve => {
onDone = resolve
})
if (isSuitesSkippingEnabled) {
const skippableResponse = await getChannelPromise(skippableSuitesCh)
asyncResource.runInAsyncScope(() => {
skippableSuitesCh.publish({ onDone })
})
errorSkippableRequest = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
const skippableResponse = await skippableSuitesPromise
if (!errorSkippableRequest) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
const err = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
let skippedSuites = []
let isSuitesSkipped = false
this.pickleIds = picklesToRun
if (!err) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
this.pickleIds = picklesToRun
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
}
}

@@ -348,7 +360,11 @@

asyncResource.runInAsyncScope(() => {
if (isFlakyTestRetriesEnabled && !this.options.retry && numTestRetries > 0) {
this.options.retry = numTestRetries
}
sessionAsyncResource.runInAsyncScope(() => {
sessionStartCh.publish({ command, frameworkVersion })
})
if (!err && skippedSuites.length) {
if (!errorSkippableRequest && skippedSuites.length) {
itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })

@@ -359,2 +375,7 @@ }

let untestedCoverage
if (getCodeCoverageCh.hasSubscribers) {
untestedCoverage = await getChannelPromise(getCodeCoverageCh)
}
let testCodeCoverageLinesTotal

@@ -364,2 +385,5 @@

try {
if (untestedCoverage) {
originalCoverageMap.merge(fromCoverageMapToCoverage(untestedCoverage))
}
testCodeCoverageLinesTotal = originalCoverageMap.getCoverageSummary().lines.pct

@@ -373,3 +397,3 @@ } catch (e) {

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionFinishCh.publish({

@@ -382,3 +406,4 @@ status: success ? 'pass' : 'fail',

hasForcedToRunSuites: isForcedToRun,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
})

@@ -452,3 +477,4 @@ })

coverageFiles,
suiteFile: testFileAbsolutePath
suiteFile: testFileAbsolutePath,
testSuitePath
})

@@ -461,3 +487,3 @@ // We need to reset coverage to get a code coverage per suite

testSuiteFinishCh.publish(testSuiteStatus)
testSuiteFinishCh.publish({ status: testSuiteStatus, testSuitePath })
}

@@ -469,6 +495,89 @@

// From 7.3.0 onwards, runPickle becomes runTestCase
// Wraps the parallel coordinator's `parseWorkerMessage` to (1) intercept
// dd-trace payloads sent from cucumber workers and (2) publish suite
// start/finish events as worker envelopes are observed.
// NOTE(review): relies on module-level state (`pickleResultByFile`,
// `pickleByFile`, `sessionAsyncResource`) defined elsewhere in this file.
function getWrappedParseWorkerMessage (parseWorkerMessageFunction) {
return function (worker, message) {
// If the message is an array, it's a dd-trace message, so we need to stop cucumber processing,
// or cucumber will throw an error
// TODO: identify the message better
if (Array.isArray(message)) {
const [messageCode, payload] = message
if (messageCode === CUCUMBER_WORKER_TRACE_PAYLOAD_CODE) {
sessionAsyncResource.runInAsyncScope(() => {
workerReportTraceCh.publish(payload)
})
return
}
}
// Regular cucumber worker messages carry the envelope under `jsonEnvelope`;
// anything else is passed straight through to cucumber.
const { jsonEnvelope } = message
if (!jsonEnvelope) {
return parseWorkerMessageFunction.apply(this, arguments)
}
// The envelope may arrive pre-parsed or as a JSON string depending on version.
let parsed = jsonEnvelope
if (typeof parsed === 'string') {
try {
parsed = JSON.parse(jsonEnvelope)
} catch (e) {
// ignore errors and continue
return parseWorkerMessageFunction.apply(this, arguments)
}
}
if (parsed.testCaseStarted) {
const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId]
const pickle = this.eventDataCollector.getPickle(pickleId)
const testFileAbsolutePath = pickle.uri
// First test in suite
if (!pickleResultByFile[testFileAbsolutePath]) {
pickleResultByFile[testFileAbsolutePath] = []
testSuiteStartCh.publish({
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
const parseWorkerResponse = parseWorkerMessageFunction.apply(this, arguments)
// after calling `parseWorkerMessageFunction`, the test status can already be read
if (parsed.testCaseFinished) {
const { pickle, worstTestStepResult } =
this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId)
const { status } = getStatusFromResultLatest(worstTestStepResult)
const testFileAbsolutePath = pickle.uri
const finished = pickleResultByFile[testFileAbsolutePath]
finished.push(status)
// Once every pickle known for this file has reported, the suite is done.
if (finished.length === pickleByFile[testFileAbsolutePath].length) {
testSuiteFinishCh.publish({
status: getSuiteStatusFromTestStatuses(finished),
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
return parseWorkerResponse
}
}
// Test start / finish for older versions. The only hook executed in workers when in parallel mode.
// NOTE: a stale duplicate `versions: ['>=7.3.0']` entry (diff residue) was removed — the
// pickle-runner file only exists in 7.0.0 - 7.2.1, matching the comment above.
addHook({
  name: '@cucumber/cucumber',
  versions: ['7.0.0 - 7.2.1'],
  file: 'lib/runtime/pickle_runner.js'
}, pickleHook)

// Test start / finish for newer versions. The only hook executed in workers when in parallel mode.
addHook({
  name: '@cucumber/cucumber',
  versions: ['>=7.3.0 <11.0.0'],
  file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
// From 7.3.0 onwards, runPickle becomes runTestCase. Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0 <11.0.0'],
file: 'lib/runtime/index.js'

@@ -482,2 +591,5 @@ }, (runtimePackage, frameworkVersion) => {

// Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({

@@ -493,1 +605,19 @@ name: '@cucumber/cucumber',

})
// Only executed in parallel mode.
// `getWrappedStart` (with `isParallel = true`) generates session start and finish events.
// `getWrappedParseWorkerMessage` generates suite finish events from worker envelopes.
addHook({
  name: '@cucumber/cucumber',
  versions: ['>=8.0.0 <11.0.0'],
  file: 'lib/runtime/parallel/coordinator.js'
}, (coordinatorPackage, frameworkVersion) => {
  const coordinatorProto = coordinatorPackage.default.prototype
  shimmer.wrap(coordinatorProto, 'start', start => getWrappedStart(start, frameworkVersion, true))
  shimmer.wrap(coordinatorProto, 'parseWorkerMessage', getWrappedParseWorkerMessage)
  return coordinatorPackage
})

@@ -24,6 +24,6 @@ 'use strict'

addHook({ name: names }, dns => {
dns.lookup = wrap('apm:dns:lookup', dns.lookup, 2)
dns.lookupService = wrap('apm:dns:lookup_service', dns.lookupService, 3)
dns.resolve = wrap('apm:dns:resolve', dns.resolve, 2)
dns.reverse = wrap('apm:dns:reverse', dns.reverse, 2)
shimmer.wrap(dns, 'lookup', fn => wrap('apm:dns:lookup', fn, 2))
shimmer.wrap(dns, 'lookupService', fn => wrap('apm:dns:lookup_service', fn, 2))
shimmer.wrap(dns, 'resolve', fn => wrap('apm:dns:resolve', fn, 2))
shimmer.wrap(dns, 'reverse', fn => wrap('apm:dns:reverse', fn, 2))

@@ -33,4 +33,4 @@ patchResolveShorthands(dns)

if (dns.Resolver) {
dns.Resolver.prototype.resolve = wrap('apm:dns:resolve', dns.Resolver.prototype.resolve, 2)
dns.Resolver.prototype.reverse = wrap('apm:dns:reverse', dns.Resolver.prototype.reverse, 2)
shimmer.wrap(dns.Resolver.prototype, 'resolve', fn => wrap('apm:dns:resolve', fn, 2))
shimmer.wrap(dns.Resolver.prototype, 'reverse', fn => wrap('apm:dns:reverse', fn, 2))

@@ -48,3 +48,3 @@ patchResolveShorthands(dns.Resolver.prototype)

rrtypeMap.set(prototype[method], rrtypes[method])
prototype[method] = wrap('apm:dns:resolve', prototype[method], 2, rrtypes[method])
shimmer.wrap(prototype, method, fn => wrap('apm:dns:resolve', fn, 2, rrtypes[method]))
})

@@ -78,3 +78,3 @@ }

arguments[arguments.length - 1] = asyncResource.bind(function (error, result) {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error, result) {
if (error) {

@@ -85,3 +85,3 @@ errorCh.publish(error)

cb.apply(this, arguments)
})
}))

@@ -100,3 +100,3 @@ try {

return shimmer.wrap(fn, wrapped)
return wrapped
}

@@ -51,3 +51,3 @@ 'use strict'

const cb = arguments[0]
arguments[0] = function (err, connection) {
arguments[0] = shimmer.wrapFunction(cb, cb => function (err, connection) {
if (connectCh.hasSubscribers && connection && connection.host) {

@@ -57,3 +57,3 @@ connectCh.publish({ hostname: connection.host.host, port: connection.host.port })

cb(err, connection)
}
})
}

@@ -91,6 +91,6 @@ return request.apply(this, arguments)

arguments[lastIndex] = asyncResource.bind(function (error) {
arguments[lastIndex] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error) {
finish(params, error)
return cb.apply(null, arguments)
})
}))
return request.apply(this, arguments)

@@ -97,0 +97,0 @@ } else {

@@ -25,6 +25,6 @@ 'use strict'

return shimmer.wrap(expressMongoSanitize, function () {
return shimmer.wrapFunction(expressMongoSanitize, expressMongoSanitize => function () {
const middleware = expressMongoSanitize.apply(this, arguments)
return shimmer.wrap(middleware, function (req, res, next) {
return shimmer.wrapFunction(middleware, middleware => function (req, res, next) {
if (!sanitizeMiddlewareFinished.hasSubscribers) {

@@ -34,3 +34,3 @@ return middleware.apply(this, arguments)

const wrappedNext = shimmer.wrap(next, function () {
const wrappedNext = shimmer.wrapFunction(next, next => function () {
sanitizeMiddlewareFinished.publish({

@@ -37,0 +37,0 @@ sanitizedProperties: propertiesToSanitize,

@@ -6,3 +6,2 @@ 'use strict'

const { addHook, channel } = require('./helpers/instrument')
const { AbortController } = require('node-abort-controller')

@@ -54,3 +53,3 @@ const handleChannel = channel('apm:express:request:handle')

function publishQueryParsedAndNext (req, res, next) {
return function () {
return shimmer.wrapFunction(next, next => function () {
if (queryParserReadCh.hasSubscribers && req) {

@@ -66,3 +65,3 @@ const abortController = new AbortController()

return next.apply(this, arguments)
}
})
}

@@ -75,6 +74,6 @@

}, query => {
return shimmer.wrap(query, function () {
return shimmer.wrapFunction(query, query => function () {
const queryMiddleware = query.apply(this, arguments)
return shimmer.wrap(queryMiddleware, function (req, res, next) {
return shimmer.wrapFunction(queryMiddleware, queryMiddleware => function (req, res, next) {
arguments[2] = publishQueryParsedAndNext(req, res, next)

@@ -81,0 +80,0 @@ return queryMiddleware.apply(this, arguments)

@@ -37,3 +37,3 @@ 'use strict'

function wrapAddHook (addHook) {
return function addHookWithTrace (name, fn) {
return shimmer.wrapFunction(addHook, addHook => function addHookWithTrace (name, fn) {
fn = arguments[arguments.length - 1]

@@ -43,3 +43,3 @@

arguments[arguments.length - 1] = shimmer.wrap(fn, function (request, reply, done) {
arguments[arguments.length - 1] = shimmer.wrapFunction(fn, fn => function (request, reply, done) {
const req = getReq(request)

@@ -83,3 +83,3 @@

return addHook.apply(this, arguments)
}
})
}

@@ -157,3 +157,3 @@

addHook({ name: 'fastify', versions: ['>=3'] }, fastify => {
const wrapped = shimmer.wrap(fastify, wrapFastify(fastify, true))
const wrapped = shimmer.wrapFunction(fastify, fastify => wrapFastify(fastify, true))

@@ -167,7 +167,7 @@ wrapped.fastify = wrapped

addHook({ name: 'fastify', versions: ['2'] }, fastify => {
return shimmer.wrap(fastify, wrapFastify(fastify, true))
return shimmer.wrapFunction(fastify, fastify => wrapFastify(fastify, true))
})
addHook({ name: 'fastify', versions: ['1'] }, fastify => {
return shimmer.wrap(fastify, wrapFastify(fastify, false))
return shimmer.wrapFunction(fastify, fastify => wrapFastify(fastify, false))
})

@@ -11,3 +11,3 @@ 'use strict'

globalThis.fetch = shimmer.wrap(fetch, wrapFetch(fetch))
globalThis.fetch = shimmer.wrapFunction(fetch, fetch => wrapFetch(fetch))
}

@@ -12,7 +12,7 @@ 'use strict'

const handler = arguments[index]
const wrapper = function (req) {
const wrapper = shimmer.wrapFunction(handler, handler => function (req) {
routeChannel.publish({ req, route: path })
return handler.apply(this, arguments)
}
})

@@ -19,0 +19,0 @@ if (typeof handler === 'function') {

@@ -274,4 +274,4 @@ 'use strict'

arguments[lastIndex] = innerResource.bind(function (e) {
if (typeof e === 'object') { // fs.exists receives a boolean
arguments[lastIndex] = shimmer.wrapFunction(cb, cb => innerResource.bind(function (e) {
if (e !== null && typeof e === 'object') { // fs.exists receives a boolean
errorChannel.publish(e)

@@ -283,3 +283,3 @@ }

return outerResource.runInAsyncScope(() => cb.apply(this, arguments))
})
}))
}

@@ -286,0 +286,0 @@

@@ -79,3 +79,3 @@ 'use strict'

arguments[arguments.length - 1] = innerAsyncResource.bind(function (error) {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => innerAsyncResource.bind(function (error) {
if (error) {

@@ -88,3 +88,3 @@ requestErrorCh.publish(error)

return outerAsyncResource.runInAsyncScope(() => cb.apply(this, arguments))
})
}))

@@ -91,0 +91,0 @@ return method.apply(this, arguments)

'use strict'
const { AbortController } = require('node-abort-controller')
const {

@@ -6,0 +4,0 @@ addHook,

@@ -91,9 +91,7 @@ 'use strict'

const wrapped = function () {
const wrapped = shimmer.wrapFunction(method, method => function () {
const args = ensureMetadata(this, arguments, 1)
return callMethod(this, method, args, path, args[1], type, hasPeer)
}
})
Object.assign(wrapped, method)
patched.add(wrapped)

@@ -105,3 +103,3 @@

function wrapCallback (ctx, callback = () => { }) {
return function (err) {
return shimmer.wrapFunction(callback, callback => function (err) {
if (err) {

@@ -116,3 +114,3 @@ ctx.error = err

})
}
})
}

@@ -119,0 +117,0 @@

@@ -122,3 +122,3 @@ 'use strict'

function wrapCallback (callback = () => {}, call, ctx, onCancel) {
return function (err, value, trailer, flags) {
return shimmer.wrapFunction(callback, callback => function (err, value, trailer, flags) {
if (err) {

@@ -140,3 +140,3 @@ ctx.error = err

})
}
})
}

@@ -143,0 +143,0 @@

'use strict'
const tracingChannel = require('dc-polyfill').tracingChannel
const shimmer = require('../../datadog-shimmer')
const { addHook, channel, AsyncResource } = require('./helpers/instrument')
const { addHook, channel } = require('./helpers/instrument')

@@ -9,3 +10,3 @@ const handleChannel = channel('apm:hapi:request:handle')

const errorChannel = channel('apm:hapi:request:error')
const enterChannel = channel('apm:hapi:extension:enter')
const hapiTracingChannel = tracingChannel('apm:hapi:extension')

@@ -31,3 +32,3 @@ function wrapServer (server) {

function wrapStart (start) {
return function () {
return shimmer.wrapFunction(start, start => function () {
if (this && typeof this.ext === 'function') {

@@ -38,8 +39,8 @@ this.ext('onPreResponse', onPreResponse)

return start.apply(this, arguments)
}
})
}
function wrapExt (ext) {
return function (events, method, options) {
if (typeof events === 'object') {
return shimmer.wrapFunction(ext, ext => function (events, method, options) {
if (events !== null && typeof events === 'object') {
arguments[0] = wrapEvents(events)

@@ -51,3 +52,3 @@ } else {

return ext.apply(this, arguments)
}
})
}

@@ -98,3 +99,3 @@

return function (request, h) {
return shimmer.wrapFunction(handler, handler => function (request, h) {
const req = request && request.raw && request.raw.req

@@ -104,10 +105,6 @@

const asyncResource = new AsyncResource('bound-anonymous-fn')
return asyncResource.runInAsyncScope(() => {
enterChannel.publish({ req })
return hapiTracingChannel.traceSync(() => {
return handler.apply(this, arguments)
})
}
})
}

@@ -114,0 +111,0 @@

@@ -10,9 +10,14 @@ 'use strict'

const req = new Request(input, init)
const headers = req.headers
const ctx = { req, headers }
if (input instanceof Request) {
const ctx = { req: input }
return ch.tracePromise(() => fetch.call(this, req, { headers: ctx.headers }), ctx)
return ch.tracePromise(() => fetch.call(this, input, init), ctx)
} else {
const req = new Request(input, init)
const ctx = { req }
return ch.tracePromise(() => fetch.call(this, req), ctx)
}
}
}
}

@@ -14,5 +14,10 @@ 'use strict'

*/
function Hook (modules, onrequire) {
if (!(this instanceof Hook)) return new Hook(modules, onrequire)
function Hook (modules, hookOptions, onrequire) {
if (!(this instanceof Hook)) return new Hook(modules, hookOptions, onrequire)
if (typeof hookOptions === 'function') {
onrequire = hookOptions
hookOptions = {}
}
this._patched = Object.create(null)

@@ -32,3 +37,3 @@

this._ritmHook = ritm(modules, {}, safeHook)
this._iitmHook = iitm(modules, {}, (moduleExports, moduleName, moduleBaseDir) => {
this._iitmHook = iitm(modules, hookOptions, (moduleExports, moduleName, moduleBaseDir) => {
// TODO: Move this logic to import-in-the-middle and only do it for CommonJS

@@ -35,0 +40,0 @@ // modules and not ESM. In the meantime, all the modules we instrument are

@@ -26,2 +26,3 @@ 'use strict'

'@smithy/smithy-client': () => require('../aws-sdk'),
'@vitest/runner': { esmFirst: true, fn: () => require('../vitest') },
aerospike: () => require('../aerospike'),

@@ -61,3 +62,2 @@ amqp10: () => require('../amqp10'),

'jest-environment-jsdom': () => require('../jest'),
'jest-jasmine2': () => require('../jest'),
'jest-runtime': () => require('../jest'),

@@ -71,2 +71,3 @@ 'jest-worker': () => require('../jest'),

'limitd-client': () => require('../limitd-client'),
lodash: () => require('../lodash'),
mariadb: () => require('../mariadb'),

@@ -93,2 +94,3 @@ memcached: () => require('../memcached'),

'node:net': () => require('../net'),
nyc: () => require('../nyc'),
oracledb: () => require('../oracledb'),

@@ -115,4 +117,7 @@ openai: () => require('../openai'),

tedious: () => require('../tedious'),
undici: () => require('../undici'),
vitest: { esmFirst: true, fn: () => require('../vitest') },
when: () => require('../when'),
winston: () => require('../winston')
winston: () => require('../winston'),
workerpool: () => require('../mocha')
}

@@ -20,6 +20,7 @@ 'use strict'

* @param {string[]} args.versions array of semver range strings
* @param {string} args.file path to file within package to instrument?
* @param {string} args.file path to file within package to instrument
* @param {string} args.filePattern pattern to match files within package to instrument
* @param Function hook
*/
exports.addHook = function addHook ({ name, versions, file }, hook) {
exports.addHook = function addHook ({ name, versions, file, filePattern }, hook) {
if (typeof name === 'string') {

@@ -33,3 +34,3 @@ name = [name]

}
instrumentations[val].push({ name: val, versions, file, hook })
instrumentations[val].push({ name: val, versions, file, filePattern, hook })
}

@@ -36,0 +37,0 @@ }

@@ -9,4 +9,9 @@ 'use strict'

const log = require('../../../dd-trace/src/log')
const checkRequireCache = require('../check_require_cache')
const telemetry = require('../../../dd-trace/src/telemetry/init-telemetry')
const { DD_TRACE_DISABLED_INSTRUMENTATIONS = '' } = process.env
const {
DD_TRACE_DISABLED_INSTRUMENTATIONS = '',
DD_TRACE_DEBUG = ''
} = process.env

@@ -28,13 +33,33 @@ const hooks = require('./hooks')

if (!disabledInstrumentations.has('process')) {
require('../process')
}
const HOOK_SYMBOL = Symbol('hookExportsMap')
if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') {
checkRequireCache.checkForRequiredModules()
setImmediate(checkRequireCache.checkForPotentialConflicts)
}
const seenCombo = new Set()
// TODO: make this more efficient
for (const packageName of names) {
if (disabledInstrumentations.has(packageName)) continue
Hook([packageName], (moduleExports, moduleName, moduleBaseDir, moduleVersion) => {
const hookOptions = {}
let hook = hooks[packageName]
if (typeof hook === 'object') {
hookOptions.internals = hook.esmFirst
hook = hook.fn
}
Hook([packageName], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion) => {
moduleName = moduleName.replace(pathSepExpr, '/')
// This executes the integration file thus adding its entries to `instrumentations`
hooks[packageName]()
hook()

@@ -45,4 +70,9 @@ if (!instrumentations[packageName]) {

for (const { name, file, versions, hook } of instrumentations[packageName]) {
const namesAndSuccesses = {}
for (const { name, file, versions, hook, filePattern } of instrumentations[packageName]) {
let fullFilePattern = filePattern
const fullFilename = filename(name, file)
if (fullFilePattern) {
fullFilePattern = filename(name, fullFilePattern)
}

@@ -55,9 +85,29 @@ // Create a WeakMap associated with the hook function so that patches on the same moduleExport only happens once

}
let matchesFile = false
if (moduleName === fullFilename) {
const version = moduleVersion || getVersion(moduleBaseDir)
matchesFile = moduleName === fullFilename
if (fullFilePattern) {
// Some libraries include a hash in their filenames when installed,
// so our instrumentation has to include a '.*' to match them for more than a single version.
matchesFile = matchesFile || new RegExp(fullFilePattern).test(moduleName)
}
if (matchesFile) {
let version = moduleVersion
try {
version = version || getVersion(moduleBaseDir)
} catch (e) {
log.error(`Error getting version for "${name}": ${e.message}`)
log.error(e)
continue
}
if (typeof namesAndSuccesses[`${name}@${version}`] === 'undefined') {
namesAndSuccesses[`${name}@${version}`] = false
}
if (matchVersion(version, versions)) {
// Check if the hook already has a set moduleExport
if (hook[HOOK_SYMBOL].has(moduleExports)) {
namesAndSuccesses[`${name}@${version}`] = true
return moduleExports

@@ -74,7 +124,26 @@ }

} catch (e) {
log.error(e)
log.info('Error during ddtrace instrumentation of application, aborting.')
log.info(e)
telemetry('error', [
`error_type:${e.constructor.name}`,
`integration:${name}`,
`integration_version:${version}`
])
}
namesAndSuccesses[`${name}@${version}`] = true
}
}
}
for (const nameVersion of Object.keys(namesAndSuccesses)) {
const [name, version] = nameVersion.split('@')
const success = namesAndSuccesses[nameVersion]
if (!success && !seenCombo.has(nameVersion)) {
telemetry('abort.integration', [
`integration:${name}`,
`integration_version:${version}`
])
log.info(`Found incompatible integration version: ${nameVersion}`)
seenCombo.add(nameVersion)
}
}

@@ -81,0 +150,0 @@ return moduleExports

@@ -46,4 +46,6 @@ 'use strict'

const ctx = { args, http }
const abortController = new AbortController()
const ctx = { args, http, abortController }
return startChannel.runStores(ctx, () => {

@@ -54,7 +56,7 @@ let finished = false

if (callback) {
callback = function () {
callback = shimmer.wrapFunction(args.callback, cb => function () {
return asyncStartChannel.runStores(ctx, () => {
return args.callback.apply(this, arguments)
return cb.apply(this, arguments)
})
}
})
}

@@ -112,2 +114,6 @@

if (abortController.signal.aborted) {
req.destroy(abortController.signal.reason || new Error('Aborted'))
}
return req

@@ -138,3 +144,3 @@ } catch (e) {

function combineOptions (inputURL, inputOptions) {
if (typeof inputOptions === 'object') {
if (inputOptions !== null && typeof inputOptions === 'object') {
return Object.assign(inputURL || {}, inputOptions)

@@ -141,0 +147,0 @@ } else {

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const {

@@ -14,3 +13,5 @@ channel,

const finishServerCh = channel('apm:http:server:request:finish')
const startWriteHeadCh = channel('apm:http:server:response:writeHead:start')
const finishSetHeaderCh = channel('datadog:http:server:response:set-header:finish')
const startSetHeaderCh = channel('datadog:http:server:response:set-header:start')

@@ -25,2 +26,11 @@ const requestFinishedSet = new WeakSet()

shimmer.wrap(http.Server.prototype, 'emit', wrapEmit)
shimmer.wrap(http.ServerResponse.prototype, 'writeHead', wrapWriteHead)
shimmer.wrap(http.ServerResponse.prototype, 'write', wrapWrite)
shimmer.wrap(http.ServerResponse.prototype, 'end', wrapEnd)
shimmer.wrap(http.ServerResponse.prototype, 'setHeader', wrapSetHeader)
shimmer.wrap(http.ServerResponse.prototype, 'removeHeader', wrapAppendOrRemoveHeader)
// Added in node v16.17.0
if (http.ServerResponse.prototype.appendHeader) {
shimmer.wrap(http.ServerResponse.prototype, 'appendHeader', wrapAppendOrRemoveHeader)
}
return http

@@ -67,5 +77,3 @@ })

}
if (finishSetHeaderCh.hasSubscribers) {
wrapSetHeader(res)
}
return emit.apply(this, arguments)

@@ -84,10 +92,136 @@ } catch (err) {

// Wraps `http.ServerResponse.prototype.writeHead` to publish the response
// status and the headers it is about to commit, letting subscribers veto the
// write via an AbortController.
// NOTE: this span contained interleaved diff residue (remnants of a removed
// per-response `wrapSetHeader(res)` implementation and a stray `})`), which
// made it syntactically invalid; the reconstructed function is below.
function wrapWriteHead (writeHead) {
  return function wrappedWriteHead (statusCode, reason, obj) {
    if (!startWriteHeadCh.hasSubscribers) {
      return writeHead.apply(this, arguments)
    }

    const abortController = new AbortController()

    // writeHead(statusCode[, reasonPhrase][, headers]) — when no reason phrase
    // is given, the second argument is actually the headers object/array.
    if (typeof reason !== 'string') {
      obj ??= reason
    }

    // support writeHead(200, ['key1', 'val1', 'key2', 'val2'])
    if (Array.isArray(obj)) {
      const headers = {}
      for (let i = 0; i < obj.length; i += 2) {
        headers[obj[i]] = obj[i + 1]
      }
      obj = headers
    }

    // this doesn't support explicit duplicate headers, but it's an edge case
    const responseHeaders = Object.assign(this.getHeaders(), obj)

    startWriteHeadCh.publish({
      req: this.req,
      res: this,
      abortController,
      statusCode,
      responseHeaders
    })

    // A subscriber aborted: skip the underlying writeHead and keep chaining.
    if (abortController.signal.aborted) {
      return this
    }

    return writeHead.apply(this, arguments)
  }
}
// Wraps `ServerResponse.prototype.write` so subscribers can inspect the
// response state (status + headers) before the first body chunk is written,
// and optionally abort the write.
function wrapWrite (write) {
  return function wrappedWrite () {
    if (!startWriteHeadCh.hasSubscribers) {
      return write.apply(this, arguments)
    }

    const controller = new AbortController()
    const payload = {
      req: this.req,
      res: this,
      abortController: controller,
      statusCode: this.statusCode,
      responseHeaders: this.getHeaders()
    }
    startWriteHeadCh.publish(payload)

    // When aborted, pretend the chunk was flushed without calling through.
    return controller.signal.aborted ? true : write.apply(this, arguments)
  }
}
// Wraps `ServerResponse.prototype.setHeader`, publishing a start event
// (abortable — subscribers can block the header) and a finish event with the
// header name/value once it has been set.
function wrapSetHeader (setHeader) {
  return function wrappedSetHeader (name, value) {
    if (!startSetHeaderCh.hasSubscribers && !finishSetHeaderCh.hasSubscribers) {
      return setHeader.apply(this, arguments)
    }

    if (startSetHeaderCh.hasSubscribers) {
      const controller = new AbortController()
      startSetHeaderCh.publish({ res: this, abortController: controller })
      // A subscriber vetoed setting this header.
      if (controller.signal.aborted) return
    }

    const result = setHeader.apply(this, arguments)

    if (finishSetHeaderCh.hasSubscribers) {
      finishSetHeaderCh.publish({ name, value, res: this })
    }

    return result
  }
}
// Wraps `appendHeader`/`removeHeader`, publishing an abortable start event so
// subscribers can block the header mutation.
function wrapAppendOrRemoveHeader (originalMethod) {
  return function wrappedAppendOrRemoveHeader () {
    if (!startSetHeaderCh.hasSubscribers) {
      return originalMethod.apply(this, arguments)
    }

    const controller = new AbortController()
    startSetHeaderCh.publish({ res: this, abortController: controller })

    // When aborted, skip the mutation but keep the chainable `this` return.
    return controller.signal.aborted ? this : originalMethod.apply(this, arguments)
  }
}
// Wraps `ServerResponse.prototype.end`, publishing the final response state
// (status + headers) and letting subscribers abort the end call.
function wrapEnd (end) {
  return function wrappedEnd () {
    if (!startWriteHeadCh.hasSubscribers) {
      return end.apply(this, arguments)
    }

    const controller = new AbortController()
    const payload = {
      req: this.req,
      res: this,
      abortController: controller,
      statusCode: this.statusCode,
      responseHeaders: this.getHeaders()
    }
    startWriteHeadCh.publish(payload)

    // When aborted, skip the underlying end() and keep the chainable return.
    return controller.signal.aborted ? this : end.apply(this, arguments)
  }
}

@@ -22,3 +22,2 @@ 'use strict'

} = require('../../datadog-plugin-jest/src/util')
const { DD_MAJOR } = require('../../../version')

@@ -54,2 +53,5 @@ const testSessionStartCh = channel('ci:jest:session:start')

const FLUSH_TIMEOUT = 10000
// eslint-disable-next-line
// https://github.com/jestjs/jest/blob/41f842a46bb2691f828c3a5f27fc1d6290495b82/packages/jest-circus/src/types.ts#L9C8-L9C54
const RETRY_TIMES = Symbol.for('RETRY_TIMES')

@@ -73,10 +75,2 @@ let skippableSuites = []

const specStatusToTestStatus = {
pending: 'skip',
disabled: 'skip',
todo: 'skip',
passed: 'pass',
failed: 'fail'
}
const asyncResources = new WeakMap()

@@ -142,2 +136,4 @@ const originalTestFns = new WeakMap()

this.isEarlyFlakeDetectionEnabled = this.testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled
this.isFlakyTestRetriesEnabled = this.testEnvironmentOptions._ddIsFlakyTestRetriesEnabled
this.flakyTestRetriesCount = this.testEnvironmentOptions._ddFlakyTestRetriesCount

@@ -156,2 +152,9 @@ if (this.isEarlyFlakeDetectionEnabled) {

}
if (this.isFlakyTestRetriesEnabled) {
const currentNumRetries = this.global[RETRY_TIMES]
if (!currentNumRetries) {
this.global[RETRY_TIMES] = this.flakyTestRetriesCount
}
}
}

@@ -165,3 +168,3 @@

try {
const { _snapshotData } = this.context.expect.getState().snapshotState
const { _snapshotData } = this.getVmContext().expect.getState().snapshotState
hasSnapshotTests = Object.keys(_snapshotData).length > 0

@@ -236,2 +239,3 @@ } catch (e) {

}
const isJestRetry = event.test?.invocations > 1
asyncResource.runInAsyncScope(() => {

@@ -247,3 +251,4 @@ testStartCh.publish({

isNew: isNewTest,
isEfdRetry: numEfdRetry > 0
isEfdRetry: numEfdRetry > 0,
isJestRetry
})

@@ -609,6 +614,9 @@ originalTestFns.set(event.test, event.test.fn)

* If ITR is active, we're running fewer tests, so of course the total code coverage is reduced.
* This calculation adds no value, so we'll skip it.
* This calculation adds no value, so we'll skip it, as long as the user has not manually opted in to code coverage,
* in which case we'll leave it.
*/
shimmer.wrap(CoverageReporter.prototype, '_addUntestedFiles', addUntestedFiles => async function () {
if (isSuitesSkippingEnabled) {
// If the user has added coverage manually, they're willing to pay the price of this execution, so
// we will not skip it.
if (isSuitesSkippingEnabled && !isUserCodeCoverageEnabled) {
return Promise.resolve()

@@ -642,3 +650,3 @@ }

const adapter = jestAdapter.default ? jestAdapter.default : jestAdapter
const newAdapter = shimmer.wrap(adapter, function () {
const newAdapter = shimmer.wrapFunction(adapter, adapter => function () {
const environment = arguments[2]

@@ -770,2 +778,3 @@ if (!environment) {

_ddTestCommand,
_ddTestSessionName,
_ddForcedToRun,

@@ -778,2 +787,4 @@ _ddUnskippable,

_ddRepositoryRoot,
_ddIsFlakyTestRetriesEnabled,
_ddFlakyTestRetriesCount,
...restOfTestEnvironmentOptions

@@ -852,41 +863,2 @@ } = testEnvironmentOptions

/**
 * Wraps jest-jasmine2's `jasmineAsyncInstall` export so every jasmine Spec
 * execution publishes test start / error / finish events to the CI Visibility
 * channels. Kept only for dd-trace majors < 4 (see warning below).
 * @param {Function} jasmineAsyncInstallExport - original module export (default export is called through)
 * @param {string} jestVersion - version of the hooked jest package
 */
function jasmineAsyncInstallWraper (jasmineAsyncInstallExport, jestVersion) {
  log.warn('jest-jasmine2 support is removed from dd-trace@v4. Consider changing to jest-circus as `testRunner`.')
  return function (globalConfig, globalInput) {
    globalInput._ddtrace = global._ddtrace
    shimmer.wrap(globalInput.jasmine.Spec.prototype, 'execute', execute => function (onComplete) {
      const asyncResource = new AsyncResource('bound-anonymous-fn')
      asyncResource.runInAsyncScope(() => {
        const testSuite = getTestSuitePath(this.result.testPath, globalConfig.rootDir)
        testStartCh.publish({
          name: this.getFullName(),
          suite: testSuite,
          runner: 'jest-jasmine2',
          frameworkVersion: jestVersion
        })
        const spec = this
        // Replace jasmine's completion callback with a bound one so that the
        // error/finish events publish in the same async context as the start.
        const callback = asyncResource.bind(function () {
          if (spec.result.failedExpectations && spec.result.failedExpectations.length) {
            const formattedError = formatJestError(spec.result.failedExpectations[0].error)
            testErrCh.publish(formattedError)
          }
          testRunFinishCh.publish({ status: specStatusToTestStatus[spec.result.status] })
          onComplete.apply(this, arguments)
        })
        arguments[0] = callback
        execute.apply(this, arguments)
      })
    })
    return jasmineAsyncInstallExport.default(globalConfig, globalInput)
  }
}
// jest-jasmine2 instrumentation is only installed on dd-trace majors < 4;
// v4 dropped support for the jasmine2 runner (the wrapper logs a warning).
if (DD_MAJOR < 4) {
  addHook({
    name: 'jest-jasmine2',
    versions: ['>=24.8.0'],
    file: 'build/jasmineAsyncInstall.js'
  }, jasmineAsyncInstallWraper)
}
const LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE = [

@@ -893,0 +865,0 @@ 'selenium-webdriver'

@@ -20,2 +20,6 @@ 'use strict'

const batchConsumerStartCh = channel('apm:kafkajs:consume-batch:start')
const batchConsumerFinishCh = channel('apm:kafkajs:consume-batch:finish')
const batchConsumerErrorCh = channel('apm:kafkajs:consume-batch:error')
function commitsFromEvent (event) {

@@ -63,3 +67,3 @@ const { payload: { groupId, topics } } = event

for (const message of messages) {
if (typeof message === 'object') {
if (message !== null && typeof message === 'object') {
message.headers = message.headers || {}

@@ -101,2 +105,13 @@ }

const eachMessageExtractor = (args) => {
const { topic, partition, message } = args[0]
return { topic, partition, message, groupId }
}
const eachBatchExtractor = (args) => {
const { batch } = args[0]
const { topic, partition, messages } = batch
return { topic, partition, messages, groupId }
}
const consumer = createConsumer.apply(this, arguments)

@@ -109,38 +124,26 @@

const groupId = arguments[0].groupId
consumer.run = function ({ eachMessage, ...runArgs }) {
if (typeof eachMessage !== 'function') return run({ eachMessage, ...runArgs })
consumer.run = function ({ eachMessage, eachBatch, ...runArgs }) {
eachMessage = wrapFunction(
eachMessage,
consumerStartCh,
consumerFinishCh,
consumerErrorCh,
eachMessageExtractor
)
eachBatch = wrapFunction(
eachBatch,
batchConsumerStartCh,
batchConsumerFinishCh,
batchConsumerErrorCh,
eachBatchExtractor
)
return run({
eachMessage: function (...eachMessageArgs) {
const innerAsyncResource = new AsyncResource('bound-anonymous-fn')
return innerAsyncResource.runInAsyncScope(() => {
const { topic, partition, message } = eachMessageArgs[0]
consumerStartCh.publish({ topic, partition, message, groupId })
try {
const result = eachMessage.apply(this, eachMessageArgs)
if (result && typeof result.then === 'function') {
result.then(
innerAsyncResource.bind(() => consumerFinishCh.publish(undefined)),
innerAsyncResource.bind(err => {
if (err) {
consumerErrorCh.publish(err)
}
consumerFinishCh.publish(undefined)
})
)
} else {
consumerFinishCh.publish(undefined)
}
return result
} catch (e) {
consumerErrorCh.publish(e)
consumerFinishCh.publish(undefined)
throw e
}
})
},
eachMessage,
eachBatch,
...runArgs
})
}
return consumer

@@ -150,1 +153,34 @@ })

})
/**
 * Wraps a kafkajs consumer handler (eachMessage / eachBatch) so that a start
 * event is published before it runs and finish/error events after, all within
 * a dedicated AsyncResource scope. Non-function inputs pass through untouched.
 * @param {Function|*} fn - user handler, or anything else (returned as-is)
 * @param {Object} startCh - channel published with the extracted args before the call
 * @param {Object} finishCh - channel published when the handler settles
 * @param {Object} errorCh - channel published with the error on (truthy) failure
 * @param {Function} extractArgs - maps the handler's args array to the start payload
 * @returns {Function|*} the instrumented handler, or `fn` unchanged
 */
const wrapFunction = (fn, startCh, finishCh, errorCh, extractArgs) => {
  if (typeof fn !== 'function') return fn

  return function (...args) {
    const boundResource = new AsyncResource('bound-anonymous-fn')
    return boundResource.runInAsyncScope(() => {
      startCh.publish(extractArgs(args))
      try {
        const result = fn.apply(this, args)
        if (result && typeof result.then === 'function') {
          // Async handler: defer finish/error until the promise settles.
          const onResolve = boundResource.bind(() => finishCh.publish(undefined))
          const onReject = boundResource.bind(err => {
            if (err) {
              errorCh.publish(err)
            }
            finishCh.publish(undefined)
          })
          result.then(onResolve, onReject)
        } else {
          // Sync handler: it already completed.
          finishCh.publish(undefined)
        }
        return result
      } catch (e) {
        errorCh.publish(e)
        finishCh.publish(undefined)
        throw e
      }
    })
  }
}

@@ -84,6 +84,6 @@ 'use strict'

return function () {
return shimmer.wrapFunction(callback, callback => function () {
finish()
callback.apply(this, arguments)
}
})
}

@@ -74,3 +74,3 @@ 'use strict'

const handler = shimmer.wrap(middleware, wrapMiddleware(middleware, layer))
const handler = shimmer.wrapFunction(middleware, middleware => wrapMiddleware(middleware, layer))

@@ -88,3 +88,3 @@ originals.set(handler, middleware)

return function (ctx, next) {
return shimmer.wrapFunction(fn, fn => function (ctx, next) {
if (!ctx || !enterChannel.hasSubscribers) return fn.apply(this, arguments)

@@ -127,3 +127,3 @@

}
}
})
}

@@ -148,7 +148,7 @@

function wrapNext (req, next) {
return function () {
return shimmer.wrapFunction(next, next => function () {
nextChannel.publish({ req })
return next.apply(this, arguments)
}
})
}

@@ -155,0 +155,0 @@

@@ -64,3 +64,3 @@ 'use strict'

filter = options
} else if (typeof options === 'object' && options.filter) {
} else if (options !== null && typeof options === 'object' && options.filter) {
if (isString(options.filter)) {

@@ -81,4 +81,4 @@ filter = options.filter

// eslint-disable-next-line n/handle-callback-err
arguments[callbackIndex] = shimmer.wrap(callback, function (err, corkedEmitter) {
if (typeof corkedEmitter === 'object' && typeof corkedEmitter.on === 'function') {
arguments[callbackIndex] = shimmer.wrapFunction(callback, callback => function (err, corkedEmitter) {
if (corkedEmitter !== null && typeof corkedEmitter === 'object' && typeof corkedEmitter.on === 'function') {
wrapEmitter(corkedEmitter)

@@ -85,0 +85,0 @@ }

@@ -14,3 +14,3 @@ 'use strict'

function wrapCommandStart (start, callbackResource) {
return function () {
return shimmer.wrapFunction(start, start => function () {
if (!startCh.hasSubscribers) return start.apply(this, arguments)

@@ -48,3 +48,3 @@

})
}
})
}

@@ -103,3 +103,3 @@

arguments[arguments.length - 1] = asyncResource.bind(function (err) {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (err) {
if (err) {

@@ -114,3 +114,3 @@ errorCh.publish(err)

}
})
}))

@@ -126,3 +126,3 @@ return asyncResource.runInAsyncScope(() => {

function wrapConnection (Connection, promiseMethod) {
function wrapConnection (promiseMethod, Connection) {
return function (options) {

@@ -178,11 +178,11 @@ Connection.apply(this, arguments)

addHook({ name, file: 'lib/connection.js', versions: ['>=2.5.2 <3'] }, (Connection) => {
return shimmer.wrap(Connection, wrapConnection(Connection, '_queryPromise'))
return shimmer.wrapFunction(Connection, wrapConnection.bind(null, '_queryPromise'))
})
addHook({ name, file: 'lib/connection.js', versions: ['>=2.0.4 <=2.5.1'] }, (Connection) => {
return shimmer.wrap(Connection, wrapConnection(Connection, 'query'))
return shimmer.wrapFunction(Connection, wrapConnection.bind(null, 'query'))
})
addHook({ name, file: 'lib/pool-base.js', versions: ['>=2.0.4 <3'] }, (PoolBase) => {
return shimmer.wrap(PoolBase, wrapPoolBase(PoolBase))
return shimmer.wrapFunction(PoolBase, wrapPoolBase)
})

@@ -29,3 +29,3 @@ 'use strict'

query.callback = asyncResource.bind(function (err) {
query.callback = shimmer.wrapFunction(callback, callback => asyncResource.bind(function (err) {
if (err) {

@@ -37,3 +37,3 @@ errorCh.publish(err)

return callback.apply(this, arguments)
})
}))
startCh.publish({ client, server, query })

@@ -40,0 +40,0 @@

@@ -11,3 +11,5 @@ 'use strict'

const name = 'microgateway-core'
const versions = ['>=2.1']
// TODO Remove " <=3.0.0" when "volos-util-apigee" module is fixed
const versions = ['>=2.1 <=3.0.0']
const requestResources = new WeakMap()

@@ -44,3 +46,3 @@

function wrapNext (req, res, next) {
return function nextWithTrace (err) {
return shimmer.wrapFunction(next, next => function nextWithTrace (err) {
const requestResource = requestResources.get(req)

@@ -59,11 +61,11 @@

return next.apply(this, arguments)
}
})
}
addHook({ name, versions, file: 'lib/config-proxy-middleware.js' }, configProxyFactory => {
return shimmer.wrap(configProxyFactory, wrapConfigProxyFactory(configProxyFactory))
return shimmer.wrapFunction(configProxyFactory, wrapConfigProxyFactory)
})
addHook({ name, versions, file: 'lib/plugins-middleware.js' }, pluginsFactory => {
return shimmer.wrap(pluginsFactory, wrapPluginsFactory(pluginsFactory))
return shimmer.wrapFunction(pluginsFactory, wrapPluginsFactory)
})

@@ -1,674 +0,9 @@

const { createCoverageMap } = require('istanbul-lib-coverage')
const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util')
const { addHook, channel, AsyncResource } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const log = require('../../dd-trace/src/log')
const {
getCoveredFilenamesFromCoverage,
resetCoverage,
mergeCoverage,
getTestSuitePath,
fromCoverageMapToCoverage,
getCallSites,
addEfdStringToTestName,
removeEfdStringFromTestName
} = require('../../dd-trace/src/plugins/util/test')
// Diagnostics channels publishing mocha lifecycle events to the
// CI Visibility plugin (subscribers live elsewhere in dd-trace).
const testStartCh = channel('ci:mocha:test:start')
const errorCh = channel('ci:mocha:test:error')
const skipCh = channel('ci:mocha:test:skip')
const testFinishCh = channel('ci:mocha:test:finish')
const parameterizedTestCh = channel('ci:mocha:test:parameterize')
const libraryConfigurationCh = channel('ci:mocha:library-configuration')
const knownTestsCh = channel('ci:mocha:known-tests')
const skippableSuitesCh = channel('ci:mocha:test-suite:skippable')
const testSessionStartCh = channel('ci:mocha:session:start')
const testSessionFinishCh = channel('ci:mocha:session:finish')
const testSuiteStartCh = channel('ci:mocha:test-suite:start')
const testSuiteFinishCh = channel('ci:mocha:test-suite:finish')
const testSuiteErrorCh = channel('ci:mocha:test-suite:error')
const testSuiteCodeCoverageCh = channel('ci:mocha:test-suite:code-coverage')
const itrSkippedSuitesCh = channel('ci:mocha:itr:skipped-suites')

// TODO: remove when root hooks and fixtures are implemented
const patched = new WeakSet()

const testToAr = new WeakMap() // test (or test.fn) -> AsyncResource the test runs in
const originalFns = new WeakMap() // bound fn -> original user-defined fn
const testFileToSuiteAr = new Map() // test file path -> suite-level AsyncResource
const testToStartLine = new WeakMap() // test -> line number where it was defined

// Test objects considered "new" by Early Flake Detection, keyed by full test name.
const newTests = {}

// `isWorker` is true if it's a Mocha worker
let isWorker = false

// We'll preserve the original coverage here
const originalCoverageMap = createCoverageMap()

// ITR / EFD session state, populated from the library-configuration and
// skippable-suites responses before the run starts.
let suitesToSkip = []
let frameworkVersion
let isSuitesSkipped = false
let skippedSuites = []
const unskippableSuites = []
let isForcedToRun = false
let itrCorrelationId = ''
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let isSuitesSkippingEnabled = false
let knownTests = []
function getSuitesByTestFile (root) {
const suitesByTestFile = {}
function getSuites (suite) {
if (suite.file) {
if (suitesByTestFile[suite.file]) {
suitesByTestFile[suite.file].push(suite)
} else {
suitesByTestFile[suite.file] = [suite]
}
}
suite.suites.forEach(suite => {
getSuites(suite)
})
}
getSuites(root)
const numSuitesByTestFile = Object.keys(suitesByTestFile).reduce((acc, testFile) => {
acc[testFile] = suitesByTestFile[testFile].length
return acc
}, {})
return { suitesByTestFile, numSuitesByTestFile }
if (process.env.MOCHA_WORKER_ID) {
require('./mocha/worker')
} else {
require('./mocha/main')
}
/**
 * Derives the CI Visibility status of a finished mocha test.
 * Pending wins over everything else; a timeout counts as a failure.
 * @returns {'skip'|'fail'|'pass'}
 */
function getTestStatus (test) {
  if (test.isPending()) {
    return 'skip'
  }
  const hasFailed = test.isFailed() || test.timedOut
  return hasFailed ? 'fail' : 'pass'
}
// A test is a retry once mocha has bumped its retry counter past the first run.
function isRetry (test) {
  const { _currentRetry } = test
  return _currentRetry !== undefined && _currentRetry !== 0
}
// Builds the canonical identifier used as the `newTests` key:
// "mocha.<suite path relative to cwd>.<title without EFD retry suffix>".
function getTestFullName (test) {
  const suitePath = getTestSuitePath(test.file, process.cwd())
  const cleanTitle = removeEfdStringFromTestName(test.fullTitle())
  return `mocha.${suitePath}.${cleanTitle}`
}
// A test is "new" (for Early Flake Detection) when it is absent from the
// known-tests list returned by the backend for its suite.
function isNewTest (test) {
  const suitePath = getTestSuitePath(test.file, process.cwd())
  const cleanTitle = removeEfdStringFromTestName(test.fullTitle())
  const knownForSuite = knownTests.mocha?.[suitePath] || []
  return !knownForSuite.includes(cleanTitle)
}
// Early Flake Detection: schedules extra executions of a new test by cloning
// it into its parent suite, tagging each clone with the retry attempt number.
function retryTest (test) {
  const { title, parent } = test
  for (let attempt = 1; attempt <= earlyFlakeDetectionNumRetries; attempt++) {
    const retriedTest = test.clone()
    retriedTest.title = addEfdStringToTestName(title, attempt)
    parent.addTest(retriedTest)
    retriedTest._ddIsNew = true
    retriedTest._ddIsEfdRetry = true
  }
}
// Looks up the AsyncResource a test runs in. Skipped tests are keyed by the
// test object itself; running tests by their fn; tests whose fn was re-bound
// by the Runnable#run wrapper are resolved through the original fn.
function getTestAsyncResource (test) {
  const { fn } = test
  if (!fn) return testToAr.get(test)
  if (!fn.asyncResource) return testToAr.get(fn)
  return testToAr.get(originalFns.get(fn))
}
/**
 * Partitions suites into those ITR skips and those that will run.
 * A suite marked unskippable always runs, even when listed as skippable.
 * @returns {{suitesToRun: Array, skippedSuites: Set<string>}} skipped entries
 *   are suite paths relative to cwd.
 */
function getFilteredSuites (originalSuites) {
  const suitesToRun = []
  const skippedSuites = new Set()
  for (const suite of originalSuites) {
    const relativePath = getTestSuitePath(suite.file, process.cwd())
    const skippable = suitesToSkip.includes(relativePath) && !unskippableSuites.includes(suite.file)
    if (skippable) {
      skippedSuites.add(relativePath)
    } else {
      suitesToRun.push(suite)
    }
  }
  return { suitesToRun, skippedSuites }
}
/**
 * Instruments mocha's Runner (lib/runner.js): adds EFD retries before a suite's
 * tests run, and hooks Runner events ('start', 'end', 'suite', 'suite end',
 * 'test', 'test end', 'hook end', 'fail', 'pending') to publish session,
 * suite and test events to the CI Visibility channels.
 * Returns the same Runner constructor, patched at most once.
 */
function mochaHook (Runner) {
  if (patched.has(Runner)) return Runner
  patched.add(Runner)

  shimmer.wrap(Runner.prototype, 'runTests', runTests => function (suite, fn) {
    if (isEarlyFlakeDetectionEnabled) {
      // by the time we reach `this.on('test')`, it is too late. We need to add retries here
      suite.tests.forEach(test => {
        if (!test.isPending() && isNewTest(test)) {
          test._ddIsNew = true
          retryTest(test)
        }
      })
    }
    return runTests.apply(this, arguments)
  })

  shimmer.wrap(Runner.prototype, 'run', run => function () {
    // Only instrument in the main process and when someone is listening.
    if (!testStartCh.hasSubscribers || isWorker) {
      return run.apply(this, arguments)
    }

    const { suitesByTestFile, numSuitesByTestFile } = getSuitesByTestFile(this.suite)

    const testRunAsyncResource = new AsyncResource('bound-anonymous-fn')

    // 'end': compute the session status and publish the session-finish event.
    this.once('end', testRunAsyncResource.bind(function () {
      let status = 'pass'
      let error
      if (this.stats) {
        status = this.stats.failures === 0 ? 'pass' : 'fail'
        if (this.stats.tests === 0) {
          status = 'skip'
        }
      } else if (this.failures !== 0) {
        status = 'fail'
      }

      if (isEarlyFlakeDetectionEnabled) {
        /**
         * If Early Flake Detection (EFD) is enabled the logic is as follows:
         * - If all attempts for a test are failing, the test has failed and we will let the test process fail.
         * - If just a single attempt passes, we will prevent the test process from failing.
         * The rationale behind is the following: you may still be able to block your CI pipeline by gating
         * on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
         */
        for (const tests of Object.values(newTests)) {
          const failingNewTests = tests.filter(test => test.isFailed())
          const areAllNewTestsFailing = failingNewTests.length === tests.length
          if (failingNewTests.length && !areAllNewTestsFailing) {
            this.stats.failures -= failingNewTests.length
            this.failures -= failingNewTests.length
          }
        }
      }

      if (status === 'fail') {
        error = new Error(`Failed tests: ${this.failures}.`)
      }

      testFileToSuiteAr.clear()

      let testCodeCoverageLinesTotal
      if (global.__coverage__) {
        try {
          testCodeCoverageLinesTotal = originalCoverageMap.getCoverageSummary().lines.pct
        } catch (e) {
          // ignore errors
        }
        // restore the original coverage
        global.__coverage__ = fromCoverageMapToCoverage(originalCoverageMap)
      }

      testSessionFinishCh.publish({
        status,
        isSuitesSkipped,
        testCodeCoverageLinesTotal,
        numSkippedSuites: skippedSuites.length,
        hasForcedToRunSuites: isForcedToRun,
        hasUnskippableSuites: !!unskippableSuites.length,
        error,
        isEarlyFlakeDetectionEnabled
      })
    }))

    // 'start': publish the session-start event (and ITR-skipped suites, if any).
    this.once('start', testRunAsyncResource.bind(function () {
      const processArgv = process.argv.slice(2).join(' ')
      const command = `mocha ${processArgv}`
      testSessionStartCh.publish({ command, frameworkVersion })
      if (skippedSuites.length) {
        itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })
      }
    }))

    // 'suite': publish a suite-start event once per test file, creating the
    // per-file AsyncResource the rest of the suite's events will run in.
    this.on('suite', function (suite) {
      if (suite.root || !suite.tests.length) {
        return
      }
      let asyncResource = testFileToSuiteAr.get(suite.file)
      if (!asyncResource) {
        asyncResource = new AsyncResource('bound-anonymous-fn')
        testFileToSuiteAr.set(suite.file, asyncResource)
        const isUnskippable = unskippableSuites.includes(suite.file)
        isForcedToRun = isUnskippable && suitesToSkip.includes(getTestSuitePath(suite.file, process.cwd()))
        asyncResource.runInAsyncScope(() => {
          testSuiteStartCh.publish({
            testSuite: suite.file,
            isUnskippable,
            isForcedToRun,
            itrCorrelationId
          })
        })
      }
    })

    // 'suite end': when the last suite of a file finishes, compute the file's
    // status, publish per-suite code coverage, and finish the suite event.
    this.on('suite end', function (suite) {
      if (suite.root) {
        return
      }
      const suitesInTestFile = suitesByTestFile[suite.file]

      const isLastSuite = --numSuitesByTestFile[suite.file] === 0
      if (!isLastSuite) {
        return
      }

      let status = 'pass'
      if (suitesInTestFile.every(suite => suite.pending)) {
        status = 'skip'
      } else {
        // has to check every test in the test file
        suitesInTestFile.forEach(suite => {
          suite.eachTest(test => {
            if (test.state === 'failed' || test.timedOut) {
              status = 'fail'
            }
          })
        })
      }

      if (global.__coverage__) {
        const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)

        testSuiteCodeCoverageCh.publish({
          coverageFiles,
          suiteFile: suite.file
        })
        // We need to reset coverage to get a code coverage per suite
        // Before that, we preserve the original coverage
        mergeCoverage(global.__coverage__, originalCoverageMap)
        resetCoverage(global.__coverage__)
      }

      const asyncResource = testFileToSuiteAr.get(suite.file)
      asyncResource.runInAsyncScope(() => {
        testSuiteFinishCh.publish(status)
      })
    })

    // 'test': publish a test-start event in a fresh AsyncResource (retries of
    // the same test reuse the resource created on the first attempt).
    this.on('test', (test) => {
      if (isRetry(test)) {
        return
      }
      const testStartLine = testToStartLine.get(test)
      const asyncResource = new AsyncResource('bound-anonymous-fn')
      testToAr.set(test.fn, asyncResource)

      const {
        file: testSuiteAbsolutePath,
        title,
        _ddIsNew: isNew,
        _ddIsEfdRetry: isEfdRetry
      } = test

      const testInfo = {
        testName: test.fullTitle(),
        testSuiteAbsolutePath,
        title,
        isNew,
        isEfdRetry,
        testStartLine
      }

      // We want to store the result of the new tests
      if (isNew) {
        const testFullName = getTestFullName(test)
        if (newTests[testFullName]) {
          newTests[testFullName].push(test)
        } else {
          newTests[testFullName] = [test]
        }
      }

      asyncResource.runInAsyncScope(() => {
        testStartCh.publish(testInfo)
      })
    })

    // 'test end': finish the test, unless afterEach hooks still have to run
    // (then 'hook end' below finishes it).
    this.on('test end', (test) => {
      const asyncResource = getTestAsyncResource(test)
      const status = getTestStatus(test)

      // if there are afterEach to be run, we don't finish the test yet
      if (asyncResource && !test.parent._afterEach.length) {
        asyncResource.runInAsyncScope(() => {
          testFinishCh.publish(status)
        })
      }
    })

    // If the hook passes, 'hook end' will be emitted. Otherwise, 'fail' will be emitted
    this.on('hook end', (hook) => {
      const test = hook.ctx.currentTest
      if (test && hook.parent._afterEach.includes(hook)) { // only if it's an afterEach
        const isLastAfterEach = hook.parent._afterEach.indexOf(hook) === hook.parent._afterEach.length - 1
        if (isLastAfterEach) {
          const status = getTestStatus(test)
          const asyncResource = getTestAsyncResource(test)
          asyncResource.runInAsyncScope(() => {
            testFinishCh.publish(status)
          })
        }
      }
    })

    // 'fail': publish the test error and propagate the failure to its suite.
    this.on('fail', (testOrHook, err) => {
      const testFile = testOrHook.file
      let test = testOrHook
      const isHook = testOrHook.type === 'hook'
      if (isHook && testOrHook.ctx) {
        test = testOrHook.ctx.currentTest
      }
      let testAsyncResource
      if (test) {
        testAsyncResource = getTestAsyncResource(test)
      }
      if (testAsyncResource) {
        testAsyncResource.runInAsyncScope(() => {
          if (isHook) {
            err.message = `${testOrHook.fullTitle()}: ${err.message}`
            errorCh.publish(err)
            // if it's a hook and it has failed, 'test end' will not be called
            testFinishCh.publish('fail')
          } else {
            errorCh.publish(err)
          }
        })
      }

      const testSuiteAsyncResource = testFileToSuiteAr.get(testFile)

      if (testSuiteAsyncResource) {
        // we propagate the error to the suite
        const testSuiteError = new Error(
          `"${testOrHook.parent.fullTitle()}" failed with message "${err.message}"`
        )
        testSuiteError.stack = err.stack
        testSuiteAsyncResource.runInAsyncScope(() => {
          testSuiteErrorCh.publish(testSuiteError)
        })
      }
    })

    // 'pending': publish a skip event, creating an AsyncResource when the test
    // never got one (skipped via `test.skip` or a skipped parent suite).
    this.on('pending', (test) => {
      const testStartLine = testToStartLine.get(test)
      const {
        file: testSuiteAbsolutePath,
        title
      } = test

      const testInfo = {
        testName: test.fullTitle(),
        testSuiteAbsolutePath,
        title,
        testStartLine
      }

      const asyncResource = getTestAsyncResource(test)
      if (asyncResource) {
        asyncResource.runInAsyncScope(() => {
          skipCh.publish(testInfo)
        })
      } else {
        // if there is no async resource, the test has been skipped through `test.skip`
        // or the parent suite is skipped
        const skippedTestAsyncResource = new AsyncResource('bound-anonymous-fn')
        if (test.fn) {
          testToAr.set(test.fn, skippedTestAsyncResource)
        } else {
          testToAr.set(test, skippedTestAsyncResource)
        }
        skippedTestAsyncResource.runInAsyncScope(() => {
          skipCh.publish(testInfo)
        })
      }
    })

    return run.apply(this, arguments)
  })

  return Runner
}
// Instruments mocha-each: wraps the returned `it` so each parameterized test
// publishes its title and parameters before being registered.
function mochaEachHook (mochaEach) {
  if (patched.has(mochaEach)) return mochaEach
  patched.add(mochaEach)

  return shimmer.wrap(mochaEach, function () {
    const [params] = arguments
    const { it: originalIt, ...rest } = mochaEach.apply(this, arguments)
    return {
      ...rest,
      it: function (title) {
        parameterizedTestCh.publish({ title, params })
        originalIt.apply(this, arguments)
      }
    }
  })
}
// Instruments Mocha#run (lib/mocha.js): delays the run, fetches the library
// configuration (and, depending on it, known tests and/or skippable suites),
// then resumes via `global.run()` (set up by the run-helpers hook below).
addHook({
  name: 'mocha',
  versions: ['>=5.2.0'],
  file: 'lib/mocha.js'
}, (Mocha, mochaVersion) => {
  frameworkVersion = mochaVersion
  const mochaRunAsyncResource = new AsyncResource('bound-anonymous-fn')
  /**
   * Get ITR configuration and skippable suites
   * If ITR is disabled, `onDone` is called immediately on the subscriber
   */
  shimmer.wrap(Mocha.prototype, 'run', run => function () {
    if (this.options.parallel) {
      log.warn('Unable to initialize CI Visibility because Mocha is running in parallel mode.')
      return run.apply(this, arguments)
    }

    if (!libraryConfigurationCh.hasSubscribers || this.isWorker) {
      if (this.isWorker) {
        isWorker = true
      }
      return run.apply(this, arguments)
    }
    // Delay execution until our backend requests complete (resumed by global.run()).
    this.options.delay = true

    const runner = run.apply(this, arguments)

    this.files.forEach(path => {
      const isUnskippable = isMarkedAsUnskippable({ path })
      if (isUnskippable) {
        unskippableSuites.push(path)
      }
    })

    // Applies the ITR skippable-suites response, then resumes the run.
    const onReceivedSkippableSuites = ({ err, skippableSuites, itrCorrelationId: responseItrCorrelationId }) => {
      if (err) {
        suitesToSkip = []
      } else {
        suitesToSkip = skippableSuites
        itrCorrelationId = responseItrCorrelationId
      }
      // We remove the suites that we skip through ITR
      const filteredSuites = getFilteredSuites(runner.suite.suites)
      const { suitesToRun } = filteredSuites

      isSuitesSkipped = suitesToRun.length !== runner.suite.suites.length

      log.debug(
        () => `${suitesToRun.length} out of ${runner.suite.suites.length} suites are going to run.`
      )

      runner.suite.suites = suitesToRun

      skippedSuites = Array.from(filteredSuites.skippedSuites)

      global.run()
    }

    // Stores the EFD known-tests response; may chain into the skippable request.
    const onReceivedKnownTests = ({ err, knownTests: receivedKnownTests }) => {
      if (err) {
        knownTests = []
        isEarlyFlakeDetectionEnabled = false
      } else {
        knownTests = receivedKnownTests
      }

      if (isSuitesSkippingEnabled) {
        skippableSuitesCh.publish({
          onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
        })
      } else {
        global.run()
      }
    }

    // Applies the library configuration and kicks off the follow-up request
    // (known tests for EFD, skippable suites for ITR) or resumes immediately.
    const onReceivedConfiguration = ({ err, libraryConfig }) => {
      if (err || !skippableSuitesCh.hasSubscribers || !knownTestsCh.hasSubscribers) {
        return global.run()
      }

      isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
      isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
      earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries

      if (isEarlyFlakeDetectionEnabled) {
        knownTestsCh.publish({
          onDone: mochaRunAsyncResource.bind(onReceivedKnownTests)
        })
      } else if (isSuitesSkippingEnabled) {
        skippableSuitesCh.publish({
          onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
        })
      } else {
        global.run()
      }
    }

    mochaRunAsyncResource.runInAsyncScope(() => {
      libraryConfigurationCh.publish({
        onDone: mochaRunAsyncResource.bind(onReceivedConfiguration)
      })
    })

    return runner
  })

  return Mocha
})
// Instruments Suite#addTest (lib/suite.js) to record the source line where
// each test is defined, consumed later when publishing test events.
addHook({
  name: 'mocha',
  versions: ['>=5.2.0'],
  file: 'lib/suite.js'
}, (Suite) => {
  shimmer.wrap(Suite.prototype, 'addTest', addTest => function (test) {
    const definitionSite = getCallSites().find(site => site.getFileName() === test.file)
    if (definitionSite) {
      testToStartLine.set(test, definitionSite.getLineNumber())
    }
    return addTest.apply(this, arguments)
  })
  return Suite
})
// Main Runner instrumentation (EFD retries + lifecycle event publishing).
addHook({
  name: 'mocha',
  versions: ['>=5.2.0'],
  file: 'lib/runner.js'
}, mochaHook)
// Instruments runMocha (lib/cli/run-helpers.js): forces delayed-root-suite
// mode so the run can wait for our backend requests before starting.
addHook({
  name: 'mocha',
  versions: ['>=5.2.0'],
  file: 'lib/cli/run-helpers.js'
}, (run) => {
  shimmer.wrap(run, 'runMocha', runMocha => async function () {
    if (!testStartCh.hasSubscribers) {
      return runMocha.apply(this, arguments)
    }
    const [mocha] = arguments
    /**
     * This attaches `run` to the global context, which we'll call after
     * our configuration and skippable suites requests
     */
    if (!mocha.options.parallel) {
      mocha.options.delay = true
    }
    return runMocha.apply(this, arguments)
  })
  return run
})
// Instruments Runnable#run (lib/runnable.js): binds each test/hook function
// to the test's AsyncResource so channel events land in the right context,
// while keeping the original user function recoverable via `originalFns`.
addHook({
  name: 'mocha',
  versions: ['>=5.2.0'],
  file: 'lib/runnable.js'
}, (Runnable) => {
  shimmer.wrap(Runnable.prototype, 'run', run => function () {
    if (!testStartCh.hasSubscribers) {
      return run.apply(this, arguments)
    }
    const isBeforeEach = this.parent._beforeEach.includes(this)
    const isAfterEach = this.parent._afterEach.includes(this)

    const isTestHook = isBeforeEach || isAfterEach

    // we restore the original user defined function
    if (this.fn.asyncResource) {
      const originalFn = originalFns.get(this.fn)
      this.fn = originalFn
    }

    if (isTestHook || this.type === 'test') {
      const test = isTestHook ? this.ctx.currentTest : this
      const asyncResource = getTestAsyncResource(test)

      if (asyncResource) {
        // we bind the test fn to the correct async resource
        const newFn = asyncResource.bind(this.fn)

        // we store the original function, not to lose it
        originalFns.set(newFn, this.fn)
        this.fn = newFn

        // Temporarily keep functionality when .asyncResource is removed from node
        // in https://github.com/nodejs/node/pull/46432
        if (!this.fn.asyncResource) {
          this.fn.asyncResource = asyncResource
        }
      }
    }

    return run.apply(this, arguments)
  })
  return Runnable
})
// mocha-each registers parameterized tests; hook it to report each
// title/params pair via the parameterize channel.
addHook({
  name: 'mocha-each',
  versions: ['>=2.0.1']
}, mochaEachHook)
// TODO add appropriate calls to wrapFunction whenever we're adding a callback
// wrapper. Right now this is less of an issue since that only has effect in
// SSI, where CI Vis isn't supported.

@@ -27,3 +27,3 @@ 'use strict'

return function datadogMiddleware (ctx) {
return shimmer.wrapFunction(next, next => function datadogMiddleware (ctx) {
const actionResource = new AsyncResource('bound-anonymous-fn')

@@ -51,3 +51,3 @@

})
}
})
}

@@ -54,0 +54,0 @@ }

@@ -95,3 +95,3 @@ 'use strict'

}
return shimmer.wrap(command, wrapped)
return wrapped
}

@@ -113,3 +113,3 @@

}
return shimmer.wrap(command, wrapped)
return wrapped
}

@@ -128,3 +128,3 @@

return shimmer.wrap(query, wrapped)
return wrapped
}

@@ -141,3 +141,3 @@

}
return shimmer.wrap(cursor, wrapped)
return wrapped
}

@@ -152,3 +152,3 @@

}
return shimmer.wrap(command, wrapped)
return wrapped
}

@@ -172,3 +172,3 @@

args[index] = asyncResource.bind(function (err, res) {
args[index] = shimmer.wrapFunction(callback, callback => asyncResource.bind(function (err, res) {
if (err) {

@@ -183,3 +183,3 @@ errorCh.publish(err)

}
})
}))

@@ -186,0 +186,0 @@ try {

@@ -131,4 +131,3 @@ 'use strict'

// not using shimmer here because resolve/reject could be empty
arguments[0] = function wrappedResolve () {
arguments[0] = shimmer.wrapFunction(resolve, resolve => function wrappedResolve () {
finish()

@@ -139,5 +138,5 @@

}
}
})
arguments[1] = function wrappedReject () {
arguments[1] = shimmer.wrapFunction(reject, reject => function wrappedReject () {
finish()

@@ -148,3 +147,3 @@

}
}
})

@@ -175,3 +174,3 @@ return originalThen.apply(this, arguments)

}, sanitizeFilter => {
return shimmer.wrap(sanitizeFilter, function wrappedSanitizeFilter () {
return shimmer.wrapFunction(sanitizeFilter, sanitizeFilter => function wrappedSanitizeFilter () {
const sanitizedObject = sanitizeFilter.apply(this, arguments)

@@ -178,0 +177,0 @@

@@ -28,5 +28,5 @@ 'use strict'

const filters = arg0 && typeof arg0 === 'object' ? [arg0] : []
const filters = arg0 !== null && typeof arg0 === 'object' ? [arg0] : []
if (arg1 && typeof arg1 === 'object' && methodsOptionalArgs.includes(methodName)) {
if (arg1 !== null && typeof arg1 === 'object' && methodsOptionalArgs.includes(methodName)) {
filters.push(arg1)

@@ -33,0 +33,0 @@ }

@@ -40,3 +40,3 @@ 'use strict'

const cb = callbackResource.bind(res._callback)
res._callback = asyncResource.bind(function (error, result) {
res._callback = shimmer.wrapFunction(cb, cb => asyncResource.bind(function (error, result) {
if (error) {

@@ -48,3 +48,3 @@ errorCh.publish(error)

return cb.apply(this, arguments)
})
}))
} else {

@@ -97,3 +97,3 @@ const cb = asyncResource.bind(function () {

if (typeof cb === 'function') {
arguments[arguments.length - 1] = shimmer.wrap(cb, function () {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function () {
finish()

@@ -100,0 +100,0 @@ return cb.apply(this, arguments)

@@ -34,3 +34,3 @@ 'use strict'

function bindExecute (cmd, execute, asyncResource) {
return asyncResource.bind(function executeWithTrace (packet, connection) {
return shimmer.wrapFunction(execute, execute => asyncResource.bind(function executeWithTrace (packet, connection) {
if (this.onResult) {

@@ -41,3 +41,3 @@ this.onResult = asyncResource.bind(this.onResult)

return execute.apply(this, arguments)
}, cmd)
}, cmd))
}

@@ -48,3 +48,3 @@

return asyncResource.bind(function executeWithTrace (packet, connection) {
return shimmer.wrapFunction(execute, execute => asyncResource.bind(function executeWithTrace (packet, connection) {
const sql = cmd.statement ? cmd.statement.query : cmd.sql

@@ -63,3 +63,3 @@ const payload = { sql, conf: config }

this.onResult = asyncResource.bind(function (error) {
this.onResult = shimmer.wrapFunction(onResult, onResult => asyncResource.bind(function (error) {
if (error) {

@@ -70,3 +70,3 @@ errorCh.publish(error)

onResult.apply(this, arguments)
}, 'bound-anonymous-fn', this)
}, 'bound-anonymous-fn', this))
} else {

@@ -84,4 +84,4 @@ this.on('error', asyncResource.bind(error => errorCh.publish(error)))

}
}, cmd)
}, cmd))
}
})

@@ -61,3 +61,3 @@ 'use strict'

const emit = this.emit
this.emit = function (eventName) {
this.emit = shimmer.wrapFunction(emit, emit => function (eventName) {
switch (eventName) {

@@ -72,3 +72,3 @@ case 'ready':

}
}
})

@@ -75,0 +75,0 @@ try {

@@ -49,3 +49,3 @@ 'use strict'

return instrument(req, res, () => {
const page = (typeof match === 'object' && typeof match.definition === 'object')
const page = (match !== null && typeof match === 'object' && typeof match.definition === 'object')
? match.definition.pathname

@@ -192,3 +192,3 @@ : undefined

name: 'next',
versions: ['>=13.3.0'],
versions: ['>=13.3.0 <14.2.7'],
file: 'dist/server/web/spec-extension/adapters/next-request.js'

@@ -208,3 +208,3 @@ }, NextRequestAdapter => {

name: 'next',
versions: ['>=11.1'],
versions: ['>=11.1 <14.2.7'],
file: 'dist/server/serve-static.js'

@@ -219,3 +219,3 @@ }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic))

addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }, nextServer => {
addHook({ name: 'next', versions: ['>=11.1 <14.2.7'], file: 'dist/server/next-server.js' }, nextServer => {
const Server = nextServer.default

@@ -237,3 +237,3 @@

// `handleApiRequest` changes parameters/implementation at 13.2.0
addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => {
addHook({ name: 'next', versions: ['>=13.2 <14.2.7'], file: 'dist/server/next-server.js' }, nextServer => {
const Server = nextServer.default

@@ -272,3 +272,3 @@ shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch)

name: 'next',
versions: ['>=13'],
versions: ['>=13 <14.2.7'],
file: 'dist/server/web/spec-extension/request.js'

@@ -275,0 +275,0 @@ }, request => {

'use strict'
const {
channel,
addHook
} = require('./helpers/instrument')
const { addHook } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const startCh = channel('apm:openai:request:start')
const finishCh = channel('apm:openai:request:finish')
const errorCh = channel('apm:openai:request:error')
const tracingChannel = require('dc-polyfill').tracingChannel
const ch = tracingChannel('apm:openai:request')
// Table of OpenAI v4+ SDK surfaces to instrument. Each entry describes:
//   file             - module path inside the `openai` package to hook
//   targetClass      - exported class on that module whose prototype is wrapped
//   baseResource     - dotted prefix used to build the span's method name
//   methods          - method names on the class to wrap
//   versions         - (optional) package version range the file exists in;
//                      defaults to '>=4' when omitted
//   streamedResponse - (optional) whether the method may return a streamed
//                      response requiring chunk accumulation
const V4_PACKAGE_SHIMS = [
  {
    file: 'resources/chat/completions.js',
    targetClass: 'Completions',
    baseResource: 'chat.completions',
    methods: ['create'],
    streamedResponse: true
  },
  {
    file: 'resources/completions.js',
    targetClass: 'Completions',
    baseResource: 'completions',
    methods: ['create'],
    streamedResponse: true
  },
  {
    file: 'resources/embeddings.js',
    targetClass: 'Embeddings',
    baseResource: 'embeddings',
    methods: ['create']
  },
  {
    file: 'resources/files.js',
    targetClass: 'Files',
    baseResource: 'files',
    methods: ['create', 'del', 'list', 'retrieve']
  },
  {
    file: 'resources/files.js',
    targetClass: 'Files',
    baseResource: 'files',
    methods: ['retrieveContent'],
    versions: ['>=4.0.0 <4.17.1']
  },
  {
    file: 'resources/files.js',
    targetClass: 'Files',
    baseResource: 'files',
    methods: ['content'], // replaced `retrieveContent` in v4.17.1
    versions: ['>=4.17.1']
  },
  {
    file: 'resources/images.js',
    targetClass: 'Images',
    baseResource: 'images',
    methods: ['createVariation', 'edit', 'generate']
  },
  {
    file: 'resources/fine-tuning/jobs/jobs.js',
    targetClass: 'Jobs',
    baseResource: 'fine_tuning.jobs',
    methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
    versions: ['>=4.34.0'] // file location changed in 4.34.0
  },
  {
    file: 'resources/fine-tuning/jobs.js',
    targetClass: 'Jobs',
    baseResource: 'fine_tuning.jobs',
    methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
    versions: ['>=4.1.0 <4.34.0']
  },
  {
    file: 'resources/fine-tunes.js', // deprecated after 4.1.0
    targetClass: 'FineTunes',
    baseResource: 'fine-tune',
    methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
    versions: ['>=4.0.0 <4.1.0']
  },
  {
    file: 'resources/models.js',
    targetClass: 'Models',
    baseResource: 'models',
    methods: ['del', 'list', 'retrieve']
  },
  {
    file: 'resources/moderations.js',
    targetClass: 'Moderations',
    baseResource: 'moderations',
    methods: ['create']
  },
  {
    file: 'resources/audio/transcriptions.js',
    targetClass: 'Transcriptions',
    baseResource: 'audio.transcriptions',
    methods: ['create']
  },
  {
    file: 'resources/audio/translations.js',
    targetClass: 'Translations',
    baseResource: 'audio.translations',
    methods: ['create']
  }
]
addHook({ name: 'openai', file: 'dist/api.js', versions: ['>=3.0.0 <4'] }, exports => {

@@ -19,7 +109,7 @@ const methodNames = Object.getOwnPropertyNames(exports.OpenAIApi.prototype)

shimmer.wrap(exports.OpenAIApi.prototype, methodName, fn => function () {
if (!startCh.hasSubscribers) {
if (!ch.start.hasSubscribers) {
return fn.apply(this, arguments)
}
startCh.publish({
const ctx = {
methodName,

@@ -29,24 +119,245 @@ args: arguments,

apiKey: this.configuration.apiKey
}
return ch.tracePromise(fn, ctx, this, ...arguments)
})
}
return exports
})
/**
 * Merge one streamed chunk into the accumulated response body so that, once
 * the stream ends, the body looks the same as a non-streamed response.
 * Mutates `content` in place.
 * @param {Object} content - accumulator body ({ ...firstChunk, choices: [] })
 * @param {Object} chunk - a single streamed chunk with a `choices` array
 */
function addStreamedChunk (content, chunk) {
  // Usage is only present on the chunk that carries it (when requested);
  // keep the last seen value instead of clobbering it with undefined.
  content.usage = chunk.usage ?? content.usage
  for (const chunkChoice of chunk.choices) {
    const choiceIdx = chunkChoice.index
    const oldChoice = content.choices.find(c => c?.index === choiceIdx)
    if (!oldChoice) {
      // we don't know which choices arrive in which order
      content.choices[choiceIdx] = chunkChoice
      continue
    }

    if (!oldChoice.finish_reason) {
      oldChoice.finish_reason = chunkChoice.finish_reason
    }

    // `delta` exists on chat completions; plain completions use `text`
    const delta = chunkChoice.delta
    if (delta) {
      const deltaContent = delta.content
      if (deltaContent) {
        if (oldChoice.delta.content) { // we don't want to append to undefined
          oldChoice.delta.content += deltaContent
        } else {
          oldChoice.delta.content = deltaContent
        }
      }
    } else {
      const text = chunkChoice.text
      if (text) {
        if (oldChoice.text) {
          oldChoice.text += text
        } else {
          oldChoice.text = text
        }
      }
    }

    // tool calls only exist on chat completions
    const tools = delta && delta.tool_calls
    if (tools) {
      oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
        const oldTool = oldChoice.delta.tool_calls?.[toolIdx]
        if (!oldTool) {
          return newTool
        }
        // streamed tool arguments arrive as string fragments to concatenate
        oldTool.function.arguments += newTool.function.arguments
        return oldTool
      })
    }
  }
}
/**
 * Decode an array of raw SSE buffers into the JSON objects they carry.
 * The combined payload looks like `data: {...}\n\ndata: [DONE]\n\n`; the
 * trailing `[DONE]` sentinel is dropped before parsing.
 * @param {Buffer[]} [chunks] - raw buffers collected from the stream
 * @returns {Object[]} parsed event payloads
 */
function convertBufferstoObjects (chunks = []) {
  const combined = Buffer.concat(chunks).toString()
  const events = combined.split(/(?=data:)/) // one entry per "data:" event
  const payloads = events.map(event => event.split('\n').join('').slice(6)) // strip newlines + "data: "
  payloads.pop() // discard the final [DONE] sentinel
  return payloads.map(payload => JSON.parse(payload))
}
/**
* For streamed responses, we need to accumulate all of the content in
* the chunks, and let the combined content be the final response.
* This way, spans look the same as when not streamed.
*/
/**
 * Wrap the async iterator of a streamed OpenAI response so every chunk is
 * buffered; once the stream is exhausted the chunks are merged (via
 * addStreamedChunk) into a single body and the span is finished, so streamed
 * spans look the same as non-streamed ones.
 * @param {Object} response - raw HTTP response (headers/url used for tagging)
 * @param {Object} options - request options (only `method` is read here)
 * @param {number} n - number of choice slots to pre-allocate in the merged body
 * @param {Object} ctx - tracing-channel context finished on done/error
 */
function wrapStreamIterator (response, options, n, ctx) {
  let processChunksAsBuffers = false
  let chunks = []
  return function (itr) {
    return function () {
      const iterator = itr.apply(this, arguments)
      // intercept each `next()` call so every streamed chunk can be observed
      shimmer.wrap(iterator, 'next', next => function () {
        return next.apply(this, arguments)
          .then(res => {
            const { done, value: chunk } = res
            if (chunk) {
              chunks.push(chunk)
              if (chunk instanceof Buffer) {
                // this operation should be safe
                // if one chunk is a buffer (versus a plain object), the rest should be as well
                processChunksAsBuffers = true
              }
            }

            if (done) {
              let body = {}
              chunks = chunks.filter(chunk => chunk != null) // filter null or undefined values

              if (chunks) {
                if (processChunksAsBuffers) {
                  chunks = convertBufferstoObjects(chunks)
                }

                if (chunks.length) {
                  // define the initial body having all the content outside of choices from the first chunk
                  // this will include important data like created, id, model, etc.
                  body = { ...chunks[0], choices: Array.from({ length: n }) }
                  // start from the first chunk, and add its choices into the body
                  for (let i = 0; i < chunks.length; i++) {
                    addStreamedChunk(body, chunks[i])
                  }
                }
              }

              finish(ctx, {
                headers: response.headers,
                data: body,
                request: {
                  path: response.url,
                  method: options.method
                }
              })
            }

            return res
          })
          .catch(err => {
            finish(ctx, undefined, err)
            throw err
          })
      })
      return iterator
    }
  }
}
// Instrument every v4+ SDK surface described in V4_PACKAGE_SHIMS.
// (This span previously contained interleaved residue from the removed v3-era
// promise chain — `return response`, a duplicate `.catch` publishing to the
// deleted `errorCh`, dangling `throw err` — which made it syntactically
// invalid; only the v4 hook loop belongs here.)
for (const shim of V4_PACKAGE_SHIMS) {
  const { file, targetClass, baseResource, methods, versions, streamedResponse } = shim
  addHook({ name: 'openai', file, versions: versions || ['>=4'] }, exports => {
    const targetPrototype = exports[targetClass].prototype

    for (const methodName of methods) {
      shimmer.wrap(targetPrototype, methodName, methodFn => function () {
        if (!ch.start.hasSubscribers) {
          return methodFn.apply(this, arguments)
        }

        // The OpenAI library lets you set `stream: true` on the options arg to any method
        // However, we only want to handle streamed responses in specific cases
        // chat.completions and completions
        const stream = streamedResponse && getOption(arguments, 'stream', false)

        // we need to compute how many prompts we are sending in streamed cases for completions
        // not applicable for chat completions
        let n
        if (stream) {
          n = getOption(arguments, 'n', 1)
          const prompt = getOption(arguments, 'prompt')
          if (Array.isArray(prompt) && typeof prompt[0] !== 'number') {
            n *= prompt.length
          }
        }

        const client = this._client || this.client

        const ctx = {
          methodName: `${baseResource}.${methodName}`,
          args: arguments,
          basePath: client.baseURL,
          apiKey: client.apiKey
        }

        return ch.start.runStores(ctx, () => {
          const apiProm = methodFn.apply(this, arguments)

          // wrapping `parse` avoids problematic wrapping of `then` when trying to call
          // `withResponse` in userland code after. This way, we can return the whole `APIPromise`
          shimmer.wrap(apiProm, 'parse', origApiPromParse => function () {
            return origApiPromParse.apply(this, arguments)
              // the original response is wrapped in a promise, so we need to unwrap it
              .then(body => Promise.all([this.responsePromise, body]))
              .then(([{ response, options }, body]) => {
                if (stream) {
                  if (body.iterator) {
                    shimmer.wrap(body, 'iterator', wrapStreamIterator(response, options, n, ctx))
                  } else {
                    shimmer.wrap(
                      body.response.body, Symbol.asyncIterator, wrapStreamIterator(response, options, n, ctx)
                    )
                  }
                } else {
                  finish(ctx, {
                    headers: response.headers,
                    data: body,
                    request: {
                      path: response.url,
                      method: options.method
                    }
                  })
                }

                return body
              })
              .catch(error => {
                finish(ctx, undefined, error)
                throw error
              })
              .finally(() => {
                // maybe we don't want to unwrap here in case the promise is re-used?
                // other hand: we want to avoid resource leakage
                shimmer.unwrap(apiProm, 'parse')
              })
          })

          return apiProm
        })
      })
    }

    return exports
  })
}
/**
 * Publish the terminal tracing-channel events for an OpenAI request.
 * On error, sets `ctx.error` and publishes to `ch.error` first; always
 * records the (possibly undefined) result and publishes `ch.asyncEnd`.
 * (Removed stray diff residue — `return exports` / `})` — that split the
 * function body and made it syntactically invalid.)
 * @param {Object} ctx - tracing-channel context for this request
 * @param {Object} [response] - normalized response payload, if any
 * @param {Error} [error] - error that aborted the request, if any
 */
function finish (ctx, response, error) {
  if (error) {
    ctx.error = error
    ch.error.publish(ctx)
  }

  ctx.result = response
  ch.asyncEnd.publish(ctx)
}
/**
 * Read a named option from the options object conventionally passed as the
 * last argument of an SDK call.
 * Uses nullish coalescing (`??`) instead of `||` so explicitly-falsy values
 * such as `0`, `false` or `''` are honored rather than silently replaced by
 * the default (e.g. `n: 0` no longer falls back to 1).
 * @param {IArguments|Array} args - the call's arguments
 * @param {string} option - option key to read
 * @param {*} [defaultValue] - value when the option is absent or nullish
 */
function getOption (args, option, defaultValue) {
  return args[args.length - 1]?.[option] ?? defaultValue
}

@@ -34,6 +34,6 @@ 'use strict'

const outerAr = new AsyncResource('apm:oracledb:outer-scope')
arguments[arguments.length - 1] = function wrappedCb (err, result) {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function wrappedCb (err, result) {
finish(err)
return outerAr.runInAsyncScope(() => cb.apply(this, arguments))
}
})
}

@@ -71,3 +71,3 @@

if (callback) {
arguments[1] = (err, connection) => {
arguments[1] = shimmer.wrapFunction(callback, callback => (err, connection) => {
if (connection) {

@@ -77,3 +77,3 @@ connectionAttributes.set(connection, connAttrs)

callback(err, connection)
}
})

@@ -92,3 +92,3 @@ getConnection.apply(this, arguments)

if (callback) {
arguments[1] = (err, pool) => {
arguments[1] = shimmer.wrapFunction(callback, callback => (err, pool) => {
if (pool) {

@@ -98,3 +98,3 @@ poolAttributes.set(pool, poolAttrs)

callback(err, pool)
}
})

@@ -117,3 +117,3 @@ createPool.apply(this, arguments)

if (callback) {
arguments[arguments.length - 1] = (err, connection) => {
arguments[arguments.length - 1] = shimmer.wrapFunction(callback, callback => (err, connection) => {
if (connection) {

@@ -123,3 +123,3 @@ connectionAttributes.set(connection, poolAttributes.get(this))

callback(err, connection)
}
})
getConnection.apply(this, arguments)

@@ -126,0 +126,0 @@ } else {

@@ -7,3 +7,8 @@ 'use strict'

if (process.env.DD_TRACE_OTEL_ENABLED) {
const otelSdkEnabled = process.env.DD_TRACE_OTEL_ENABLED ||
process.env.OTEL_SDK_DISABLED
? !process.env.OTEL_SDK_DISABLED
: undefined
if (otelSdkEnabled) {
addHook({

@@ -10,0 +15,0 @@ name: '@opentelemetry/sdk-trace-node',

@@ -12,3 +12,3 @@ 'use strict'

}, BasicStrategy => {
return shimmer.wrap(BasicStrategy, function () {
return shimmer.wrapFunction(BasicStrategy, BasicStrategy => function () {
const type = 'http'

@@ -15,0 +15,0 @@

@@ -12,3 +12,3 @@ 'use strict'

}, Strategy => {
return shimmer.wrap(Strategy, function () {
return shimmer.wrapFunction(Strategy, Strategy => function () {
const type = 'local'

@@ -15,0 +15,0 @@

@@ -14,3 +14,3 @@ 'use strict'

// eslint-disable-next-line n/handle-callback-err
return shimmer.wrap(verified, function (err, user, info) {
return shimmer.wrapFunction(verified, verified => function (err, user, info) {
const credentials = { type, username }

@@ -17,0 +17,0 @@ passportVerifyChannel.publish({ credentials, user })

@@ -38,3 +38,3 @@ 'use strict'

const pgQuery = arguments[0] && typeof arguments[0] === 'object'
const pgQuery = arguments[0] !== null && typeof arguments[0] === 'object'
? arguments[0]

@@ -113,3 +113,3 @@ : { text: arguments[0] }

const pgQuery = arguments[0] && typeof arguments[0] === 'object' ? arguments[0] : { text: arguments[0] }
const pgQuery = arguments[0] !== null && typeof arguments[0] === 'object' ? arguments[0] : { text: arguments[0] }

@@ -127,3 +127,3 @@ return asyncResource.runInAsyncScope(() => {

if (typeof cb === 'function') {
arguments[arguments.length - 1] = shimmer.wrap(cb, function () {
arguments[arguments.length - 1] = shimmer.wrapFunction(cb, cb => function () {
finish()

@@ -130,0 +130,0 @@ return cb.apply(this, arguments)

@@ -79,3 +79,3 @@ 'use strict'

return shimmer.wrap(pino, wrapPino(asJsonSym, wrapAsJson, pino))
return shimmer.wrapFunction(pino, pino => wrapPino(asJsonSym, wrapAsJson, pino))
})

@@ -86,3 +86,3 @@

return shimmer.wrap(pino, wrapPino(mixinSym, wrapMixin, pino))
return shimmer.wrapFunction(pino, pino => wrapPino(mixinSym, wrapMixin, pino))
})

@@ -93,3 +93,3 @@

const wrapped = shimmer.wrap(pino, wrapPino(mixinSym, wrapMixin, pino))
const wrapped = shimmer.wrapFunction(pino, pino => wrapPino(mixinSym, wrapMixin, pino))
wrapped.pino = wrapped

@@ -107,3 +107,3 @@ wrapped.default = wrapped

addHook({ name: 'pino-pretty', versions: ['1 - 2'] }, prettyFactory => {
return shimmer.wrap(prettyFactory, wrapPrettyFactory(prettyFactory))
return shimmer.wrapFunction(prettyFactory, wrapPrettyFactory)
})

@@ -24,2 +24,3 @@ const semver = require('semver')

const testSuiteToErrors = new Map()
const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn')

@@ -40,2 +41,4 @@ let applyRepeatEachIndex = null

let earlyFlakeDetectionNumRetries = 0
let isFlakyTestRetriesEnabled = false
let flakyTestRetriesCount = 0
let knownTests = {}

@@ -201,2 +204,27 @@ let rootDir = ''

/**
 * Resolve a Playwright test object from a dispatcher by its test id.
 * Prefers the dispatcher's `_testById` map when present; otherwise scans
 * `_allTests` (or our `_ddAllTests` backfill for Playwright >=1.44.0).
 * Returns undefined when the id cannot be resolved.
 */
function getTestByTestId (dispatcher, testId) {
  const { _testById } = dispatcher
  if (_testById) {
    const entry = _testById.get(testId)
    return entry?.test
  }
  const allTests = dispatcher._allTests || dispatcher._ddAllTests
  return allTests ? allTests.find(test => test.id === testId) : undefined
}
/**
 * Publish to a diagnostics channel inside the shared session async scope and
 * resolve once the subscriber invokes the provided `onDone` callback.
 * @param {Object} channelToPublishTo - channel expecting `{ onDone }` payloads
 * @returns {Promise<*>} resolves with whatever the subscriber passes to onDone
 */
function getChannelPromise (channelToPublishTo) {
  return new Promise((resolve) => {
    const publishWithResolver = () => channelToPublishTo.publish({ onDone: resolve })
    testSessionAsyncResource.runInAsyncScope(publishWithResolver)
  })
}
// eslint-disable-next-line
// Inspired by https://github.com/microsoft/playwright/blob/2b77ed4d7aafa85a600caa0b0d101b72c8437eeb/packages/playwright/src/reporters/base.ts#L293
// We can't use test.outcome() directly because it's set on follow up handlers:
// our `testEndHandler` is called before the outcome is set.
/**
 * Decide whether a failed test attempt will be retried by Playwright:
 * only failures retry, and only while the number of recorded results has
 * not yet exceeded the configured retry budget.
 */
function testWillRetry (test, testStatus) {
  if (testStatus !== 'fail') {
    return false
  }
  return test.results.length <= test.retries
}
function testBeginHandler (test, browserName) {

@@ -255,3 +283,4 @@ const {

testStatus,
steps: testResult.steps,
steps: testResult?.steps || [],
isRetry: testResult?.retry > 0,
error,

@@ -274,4 +303,6 @@ extraTags: annotationTags,

remainingTestsByFile[testSuiteAbsolutePath] = remainingTestsByFile[testSuiteAbsolutePath]
.filter(currentTest => currentTest !== test)
if (!testWillRetry(test, testStatus)) {
remainingTestsByFile[testSuiteAbsolutePath] = remainingTestsByFile[testSuiteAbsolutePath]
.filter(currentTest => currentTest !== test)
}

@@ -305,3 +336,8 @@ // Last test, we finish the suite

function dispatcherRunWrapperNew (run) {
return function () {
return function (testGroups) {
if (!this._allTests) {
// Removed in https://github.com/microsoft/playwright/commit/1e52c37b254a441cccf332520f60225a5acc14c7
// Not available from >=1.44.0
this._ddAllTests = testGroups.map(g => g.tests).flat()
}
remainingTestsByFile = getTestsBySuiteFromTestGroups(arguments[0])

@@ -339,11 +375,2 @@ return run.apply(this, arguments)

function getTestByTestId (dispatcher, testId) {
if (dispatcher._testById) {
return dispatcher._testById.get(testId)?.test
}
if (dispatcher._allTests) {
return dispatcher._allTests.find(({ id }) => id === testId)
}
}
function dispatcherHookNew (dispatcherExport, runWrapper) {

@@ -377,4 +404,2 @@ shimmer.wrap(dispatcherExport.Dispatcher.prototype, 'run', runWrapper)

const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn')
rootDir = getRootDir(this)

@@ -388,17 +413,12 @@

const configurationPromise = new Promise((resolve) => {
onDone = resolve
})
testSessionAsyncResource.runInAsyncScope(() => {
libraryConfigurationCh.publish({ onDone })
})
try {
const { err, libraryConfig } = await configurationPromise
const { err, libraryConfig } = await getChannelPromise(libraryConfigurationCh)
if (!err) {
isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
isFlakyTestRetriesEnabled = libraryConfig.isFlakyTestRetriesEnabled
flakyTestRetriesCount = libraryConfig.flakyTestRetriesCount
}
} catch (e) {
isEarlyFlakeDetectionEnabled = false
log.error(e)

@@ -408,11 +428,4 @@ }

if (isEarlyFlakeDetectionEnabled && semver.gte(playwrightVersion, MINIMUM_SUPPORTED_VERSION_EFD)) {
const knownTestsPromise = new Promise((resolve) => {
onDone = resolve
})
testSessionAsyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
try {
const { err, knownTests: receivedKnownTests } = await knownTestsPromise
const { err, knownTests: receivedKnownTests } = await getChannelPromise(knownTestsCh)
if (!err) {

@@ -424,2 +437,3 @@ knownTests = receivedKnownTests

} catch (err) {
isEarlyFlakeDetectionEnabled = false
log.error(err)

@@ -431,2 +445,10 @@ }

if (isFlakyTestRetriesEnabled && flakyTestRetriesCount > 0) {
projects.forEach(project => {
if (project.retries === 0) { // Only if it hasn't been set by the user
project.retries = flakyTestRetriesCount
}
})
}
const runAllTestsReturn = await runAllTests.apply(this, arguments)

@@ -433,0 +455,0 @@

@@ -159,3 +159,3 @@ 'use strict'

function wrapCallback (finishCh, errorCh, callback) {
return function (err) {
return shimmer.wrapFunction(callback, callback => function (err) {
finish(finishCh, errorCh, err)

@@ -165,3 +165,3 @@ if (callback) {

}
}
})
}

@@ -168,0 +168,0 @@

@@ -43,3 +43,3 @@ 'use strict'

return function (req, res, next) {
return shimmer.wrapFunction(fn, fn => function (req, res, next) {
if (typeof next === 'function') {

@@ -55,3 +55,3 @@ arguments[2] = wrapNext(req, next)

const result = fn.apply(this, arguments)
if (result && typeof result === 'object' && typeof result.then === 'function') {
if (result !== null && typeof result === 'object' && typeof result.then === 'function') {
return result.then(function () {

@@ -77,7 +77,7 @@ nextChannel.publish({ req })

}
}
})
}
function wrapNext (req, next) {
return function () {
return shimmer.wrapFunction(next, next => function () {
nextChannel.publish({ req })

@@ -87,3 +87,3 @@ finishChannel.publish({ req })

next.apply(this, arguments)
}
})
}

@@ -90,0 +90,0 @@

@@ -156,7 +156,7 @@ 'use strict'

const cb = asyncResource.bind(update)
return AsyncResource.bind(function wrappedUpdate (settled, stateData) {
return shimmer.wrapFunction(cb, cb => AsyncResource.bind(function wrappedUpdate (settled, stateData) {
const state = getStateFromData(stateData)
dispatchReceiveCh.publish({ state })
return cb.apply(this, arguments)
})
}))
}

@@ -182,3 +182,3 @@ return function wrappedUpdate (settled, stateData) {

shimmer.wrap(CircularBuffer.prototype, 'pop_if', popIf => function (fn) {
arguments[0] = AsyncResource.bind(function (entry) {
arguments[0] = shimmer.wrapFunction(fn, fn => AsyncResource.bind(function (entry) {
const context = contexts.get(entry)

@@ -203,3 +203,3 @@ const asyncResource = context && context.asyncResource

return shouldPop
})
}))
return popIf.apply(this, arguments)

@@ -206,0 +206,0 @@ })

'use strict'
const METHODS = require('methods').concat('all')
const METHODS = require('http').METHODS.map(v => v.toLowerCase()).concat('all')
const pathToRegExp = require('path-to-regexp')

@@ -21,3 +21,3 @@ const shimmer = require('../../datadog-shimmer')

const handle = shimmer.wrap(original, function () {
const handle = shimmer.wrapFunction(original, original => function () {
if (!enterChannel.hasSubscribers) return original.apply(this, arguments)

@@ -93,3 +93,3 @@

function wrapNext (req, next) {
return function (error) {
return shimmer.wrapFunction(next, next => function (error) {
if (error && error !== 'route' && error !== 'router') {

@@ -103,3 +103,3 @@ errorChannel.publish({ req, error })

next.apply(this, arguments)
}
})
}

@@ -157,3 +157,3 @@

function wrapMethod (original) {
return function methodWithTrace (fn) {
return shimmer.wrapFunction(original, original => function methodWithTrace (fn) {
const offset = this.stack ? [].concat(this.stack).length : 0

@@ -169,3 +169,3 @@ const router = original.apply(this, arguments)

return router
}
})
}

@@ -172,0 +172,0 @@

@@ -26,2 +26,5 @@ const { addHook, channel } = require('./helpers/instrument')

shimmer.wrap(seleniumPackage.WebDriver.prototype, 'get', get => async function () {
if (!ciSeleniumDriverGetStartCh.hasSubscribers) {
return get.apply(this, arguments)
}
let traceId

@@ -44,6 +47,8 @@ const setTraceId = (inputTraceId) => {

await this.manage().addCookie({
name: DD_CIVISIBILITY_TEST_EXECUTION_ID_COOKIE_NAME,
value: traceId
})
if (traceId && isRumActive) {
await this.manage().addCookie({
name: DD_CIVISIBILITY_TEST_EXECUTION_ID_COOKIE_NAME,
value: traceId
})
}

@@ -54,2 +59,5 @@ return getResult

shimmer.wrap(seleniumPackage.WebDriver.prototype, 'quit', quit => async function () {
if (!ciSeleniumDriverGetStartCh.hasSubscribers) {
return quit.apply(this, arguments)
}
const isRumActive = await this.executeScript(RUM_STOP_SESSION_SCRIPT)

@@ -64,6 +72,5 @@

})
await this.manage().deleteCookie(DD_CIVISIBILITY_TEST_EXECUTION_ID_COOKIE_NAME)
}
await this.manage().deleteCookie(DD_CIVISIBILITY_TEST_EXECUTION_ID_COOKIE_NAME)
return quit.apply(this, arguments)

@@ -70,0 +77,0 @@ })

@@ -51,3 +51,3 @@ 'use strict'

arguments[1] = asyncResource.bind(function (error, res) {
arguments[1] = shimmer.wrapFunction(callback, callback => asyncResource.bind(function (error, res) {
if (error) {

@@ -59,3 +59,3 @@ errorCh.publish(error)

return callback.apply(this, arguments)
})
}))

@@ -62,0 +62,0 @@ try {

@@ -45,4 +45,3 @@ 'use strict'

const log = transport.log
transport.log = function wrappedLog (level, msg, meta, callback) {
shimmer.wrap(transport, 'log', log => function wrappedLog (level, msg, meta, callback) {
const payload = { message: meta || {} }

@@ -52,3 +51,3 @@ logCh.publish(payload)

log.apply(this, arguments)
}
})
patched.add(transport)

@@ -55,0 +54,0 @@ }

@@ -7,5 +7,8 @@ 'use strict'

const { isTrue } = require('../../dd-trace/src/util')
const coalesce = require('koalas')
const { tagsFromRequest, tagsFromResponse } = require('../../dd-trace/src/payload-tagging')
class BaseAwsSdkPlugin extends ClientPlugin {
static get id () { return 'aws' }
static get isPayloadReporter () { return false }

@@ -23,2 +26,10 @@ get serviceIdentifier () {

get cloudTaggingConfig () {
return this._tracerConfig.cloudPayloadTagging
}
get payloadTaggingRules () {
return this.cloudTaggingConfig.rules.aws?.[this.constructor.id]
}
constructor (...args) {

@@ -55,2 +66,8 @@ super(...args)

if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.requestsEnabled) {
const maxDepth = this.cloudTaggingConfig.maxDepth
const requestTags = tagsFromRequest(this.payloadTaggingRules, request.params, { maxDepth })
span.addTags(requestTags)
}
const store = storage.getStore()

@@ -70,3 +87,3 @@

this.addSub(`apm:aws:request:complete:${this.serviceIdentifier}`, ({ response }) => {
this.addSub(`apm:aws:request:complete:${this.serviceIdentifier}`, ({ response, cbExists = false }) => {
const store = storage.getStore()

@@ -76,2 +93,8 @@ if (!store) return

if (!span) return
// try to extract DSM context from response if no callback exists as extraction normally happens in CB
if (!cbExists && this.serviceIdentifier === 'sqs') {
const params = response.request.params
const operation = response.request.operation
this.responseExtractDSMContext(operation, params, response.data ?? response, span)
}
this.addResponseTags(span, response)

@@ -117,2 +140,3 @@ this.finish(span, response, response.error)

const extraTags = this.generateTags(params, operation, response) || {}
const tags = Object.assign({

@@ -125,4 +149,20 @@ 'aws.response.request_id': response.requestId,

span.addTags(tags)
if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.responsesEnabled) {
const maxDepth = this.cloudTaggingConfig.maxDepth
const responseBody = this.extractResponseBody(response)
const responseTags = tagsFromResponse(this.payloadTaggingRules, responseBody, { maxDepth })
span.addTags(responseTags)
}
}
extractResponseBody (response) {
if (response.hasOwnProperty('data')) {
return response.data
}
return Object.fromEntries(
Object.entries(response).filter(([key]) => !['request', 'requestId', 'error', '$metadata'].includes(key))
)
}
generateTags () {

@@ -167,4 +207,18 @@ // implemented by subclasses, or not

// check if AWS batch propagation or AWS_[SERVICE] batch propagation is enabled via env variable
const serviceId = serviceIdentifier.toUpperCase()
const batchPropagationEnabled = isTrue(
coalesce(
specificConfig.batchPropagationEnabled,
process.env[`DD_TRACE_AWS_SDK_${serviceId}_BATCH_PROPAGATION_ENABLED`],
config.batchPropagationEnabled,
process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED,
false
)
)
// Merge the specific config back into the main config
return Object.assign({}, config, specificConfig, {
splitByAwsService: config.splitByAwsService !== false,
batchPropagationEnabled,
hooks

@@ -171,0 +225,0 @@ })

@@ -24,3 +24,3 @@ 'use strict'

// dynamoDB batch TableName
if (params.RequestItems) {
if (params.RequestItems !== null) {
if (typeof params.RequestItems === 'object') {

@@ -27,0 +27,0 @@ if (Object.keys(params.RequestItems).length === 1) {

@@ -10,4 +10,7 @@ 'use strict'

exports.s3 = require('./s3')
exports.sfn = require('./sfn')
exports.sns = require('./sns')
exports.sqs = require('./sqs')
exports.states = require('./states')
exports.stepfunctions = require('./stepfunctions')
exports.default = require('./default')

@@ -55,3 +55,3 @@ 'use strict'

this.responseExtractDSMContext(
request.operation, response, span || null, streamName
request.operation, request.params, response, span || null, { streamName }
)

@@ -104,3 +104,4 @@ }

responseExtractDSMContext (operation, response, span, streamName) {
responseExtractDSMContext (operation, params, response, span, kwargs = {}) {
const { streamName } = kwargs
if (!this.config.dsmEnabled) return

@@ -156,3 +157,8 @@ if (operation !== 'getRecords') return

for (let i = 0; i < params.Records.length; i++) {
this.injectToMessage(span, params.Records[i], stream, i === 0)
this.injectToMessage(
span,
params.Records[i],
stream,
i === 0 || (this.config.batchPropagationEnabled)
)
}

@@ -159,0 +165,0 @@ }

@@ -10,2 +10,3 @@ 'use strict'

static get peerServicePrecursors () { return ['topicname'] }
static get isPayloadReporter () { return true }

@@ -24,2 +25,3 @@ generateTags (params, operation, response) {

const topicName = arnParts[arnParts.length - 1]
return {

@@ -64,3 +66,8 @@ 'resource.name': `${operation} ${params.TopicArn || response.data.TopicArn}`,

for (let i = 0; i < params.PublishBatchRequestEntries.length; i++) {
this.injectToMessage(span, params.PublishBatchRequestEntries[i], params.TopicArn, i === 0)
this.injectToMessage(
span,
params.PublishBatchRequestEntries[i],
params.TopicArn,
i === 0 || (this.config.batchPropagationEnabled)
)
}

@@ -67,0 +74,0 @@ break

@@ -26,3 +26,3 @@ 'use strict'

let span
let parsedMessageAttributes
let parsedMessageAttributes = null
if (contextExtraction && contextExtraction.datadogContext) {

@@ -43,4 +43,5 @@ obj.needsFinish = true

// extract DSM context after as we might not have a parent-child but may have a DSM context
this.responseExtractDSMContext(
request.operation, request.params, response, span || null, parsedMessageAttributes || null
request.operation, request.params, response, span || null, { parsedMessageAttributes }
)

@@ -161,4 +162,4 @@ })

return JSON.parse(textMap)
} else if (attributes.Type === 'Binary') {
const buffer = Buffer.from(attributes.Value, 'base64')
} else if (attributes.Type === 'Binary' || attributes.DataType === 'Binary') {
const buffer = Buffer.from(attributes.Value ?? attributes.BinaryValue, 'base64')
return JSON.parse(buffer)

@@ -171,3 +172,4 @@ }

responseExtractDSMContext (operation, params, response, span, parsedAttributes) {
responseExtractDSMContext (operation, params, response, span, kwargs = {}) {
let { parsedAttributes } = kwargs
if (!this.config.dsmEnabled) return

@@ -195,3 +197,3 @@ if (operation !== 'receiveMessage') return

}
if (message.MessageAttributes && message.MessageAttributes._datadog) {
if (!parsedAttributes && message.MessageAttributes && message.MessageAttributes._datadog) {
parsedAttributes = this.parseDatadogAttributes(message.MessageAttributes._datadog)

@@ -224,5 +226,21 @@ }

for (let i = 0; i < params.Entries.length; i++) {
this.injectToMessage(span, params.Entries[i], params.QueueUrl, i === 0)
this.injectToMessage(
span,
params.Entries[i],
params.QueueUrl,
i === 0 || (this.config.batchPropagationEnabled)
)
}
break
case 'receiveMessage':
if (!params.MessageAttributeNames) {
params.MessageAttributeNames = ['_datadog']
} else if (
!params.MessageAttributeNames.includes('_datadog') &&
!params.MessageAttributeNames.includes('.*') &&
!params.MessageAttributeNames.includes('All')
) {
params.MessageAttributeNames.push('_datadog')
}
break
}

@@ -229,0 +247,0 @@ }

@@ -57,3 +57,3 @@ 'use strict'

this.startSpan('command_execution', {
service: this.config.service,
service: this.config.service || this._tracerConfig.service,
resource: (shell === true) ? 'sh' : cmdFields[0],

@@ -60,0 +60,0 @@ type: 'system',

@@ -10,6 +10,6 @@ 'use strict'

// eslint-disable-next-line max-len
const PARAM_PATTERN = '^-{0,2}(?:p(?:ass(?:w(?:or)?d)?)?|api_?key|secret|a(?:ccess|uth)_token|mysql_pwd|credentials|(?:stripe)?token)$'
const PARAM_PATTERN = '^-{0,2}(?:p(?:ass(?:w(?:or)?d)?)?|address|api[-_]?key|e?mail|secret(?:[-_]?key)?|a(?:ccess|uth)[-_]?token|mysql_pwd|credentials|(?:stripe)?token)$'
const regexParam = new RegExp(PARAM_PATTERN, 'i')
const ENV_PATTERN = '^(\\w+=\\w+;)*\\w+=\\w+;?$'
const envvarRegex = new RegExp(ENV_PATTERN)
const envVarRegex = new RegExp(ENV_PATTERN)
const REDACTED = '?'

@@ -65,3 +65,5 @@

if (typeof token === 'object') {
if (token === null) {
continue
} else if (typeof token === 'object') {
if (token.pattern) {

@@ -75,3 +77,3 @@ result.push(token.pattern)

} else if (!foundBinary) {
if (envvarRegex.test(token)) {
if (envVarRegex.test(token)) {
const envSplit = token.split('=')

@@ -78,0 +80,0 @@

@@ -21,3 +21,10 @@ 'use strict'

TEST_IS_NEW,
TEST_IS_RETRY
TEST_IS_RETRY,
TEST_SUITE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_MODULE_ID,
TEST_SUITE,
CUCUMBER_IS_PARALLEL
} = require('../../dd-trace/src/plugins/util/test')

@@ -34,5 +41,24 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
/**
 * Build the common suite-level tags derived from a test-suite span's context
 * (suite id, session id, test command, module name, and module id when the
 * suite has a parent/module span).
 * Hoists the span context into a local instead of calling
 * `testSuiteSpan.context()` five separate times.
 * @param {Object} testSuiteSpan - the cucumber test-suite span
 * @returns {Object} tag map keyed by the shared TEST_* constants
 */
function getTestSuiteTags (testSuiteSpan) {
  const context = testSuiteSpan.context()
  const suiteTags = {
    [TEST_SUITE_ID]: context.toSpanId(),
    [TEST_SESSION_ID]: context.toTraceId(),
    [TEST_COMMAND]: context._tags[TEST_COMMAND],
    [TEST_MODULE]: 'cucumber'
  }
  // `_parentId` is only present when the suite hangs off a module span
  if (context._parentId) {
    suiteTags[TEST_MODULE_ID] = context._parentId.toString(10)
  }
  return suiteTags
}
class CucumberPlugin extends CiPlugin {

@@ -48,2 +74,4 @@ static get id () {

this.testSuiteSpanByPath = {}
this.addSub('ci:cucumber:session:finish', ({

@@ -56,3 +84,4 @@ status,

hasForcedToRunSuites,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
}) => {

@@ -77,2 +106,5 @@ const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}

}
if (isParallel) {
this.testSessionSpan.setTag(CUCUMBER_IS_PARALLEL, 'true')
}

@@ -86,2 +118,3 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })

@@ -110,3 +143,3 @@ this.libraryConfig = null

}
this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
const testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
childOf: this.testModuleSpan,

@@ -119,2 +152,4 @@ tags: {

})
this.testSuiteSpanByPath[testSuitePath] = testSuiteSpan
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')

@@ -126,9 +161,10 @@ if (this.libraryConfig?.isCodeCoverageEnabled) {

this.addSub('ci:cucumber:test-suite:finish', status => {
this.testSuiteSpan.setTag(TEST_STATUS, status)
this.testSuiteSpan.finish()
this.addSub('ci:cucumber:test-suite:finish', ({ status, testSuitePath }) => {
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]
testSuiteSpan.setTag(TEST_STATUS, status)
testSuiteSpan.finish()
this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
})
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => {
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile, testSuitePath }) => {
if (!this.libraryConfig?.isCodeCoverageEnabled) {

@@ -140,2 +176,3 @@ return

}
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]

@@ -148,4 +185,4 @@ const relativeCoverageFiles = [...coverageFiles, suiteFile]

const formattedCoverage = {
sessionId: this.testSuiteSpan.context()._traceId,
suiteId: this.testSuiteSpan.context()._spanId,
sessionId: testSuiteSpan.context()._traceId,
suiteId: testSuiteSpan.context()._spanId,
files: relativeCoverageFiles

@@ -158,3 +195,3 @@ }

this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine }) => {
this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine, isParallel }) => {
const store = storage.getStore()

@@ -168,2 +205,6 @@ const testSuite = getTestSuitePath(testFileAbsolutePath, this.sourceRoot)

}
if (isParallel) {
extraTags[CUCUMBER_IS_PARALLEL] = 'true'
}
const testSpan = this.startTestSpan(testName, testSuite, extraTags)

@@ -174,2 +215,13 @@

this.addSub('ci:cucumber:test:retry', (isFlakyRetry) => {
const store = storage.getStore()
const span = store.span
if (isFlakyRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.setTag(TEST_STATUS, 'fail')
span.finish()
finishAllTraceSpans(span)
})
this.addSub('ci:cucumber:test-step:start', ({ resource }) => {

@@ -189,3 +241,41 @@ const store = storage.getStore()

this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage, isNew, isEfdRetry }) => {
this.addSub('ci:cucumber:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => ({
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}))
)
// We have to update the test session, test module and test suite ids
// before we export them in the main process
formattedTraces.forEach(trace => {
trace.forEach(span => {
if (span.name === 'cucumber.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
const testSuiteTags = getTestSuiteTags(testSuiteSpan)
span.meta = {
...span.meta,
...testSuiteTags
}
}
})
this.tracer._exporter.export(trace)
})
})
this.addSub('ci:cucumber:test:finish', ({
isStep,
status,
skipReason,
errorMessage,
isNew,
isEfdRetry,
isFlakyRetry
}) => {
const span = storage.getStore().span

@@ -211,10 +301,24 @@ const statusTag = isStep ? 'step.status' : TEST_STATUS

if (isFlakyRetry > 0) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.finish()
if (!isStep) {
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew,
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
finishAllTraceSpans(span)
// If it's a worker, flushing is cheap, as it's just sending data to the main process
if (isCucumberWorker) {
this.tracer._exporter.flush()
}
}

@@ -232,6 +336,7 @@ })

startTestSpan (testName, testSuite, extraTags) {
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
return super.startTestSpan(
testName,
testSuite,
this.testSuiteSpan,
testSuiteSpan,
extraTags

@@ -238,0 +343,0 @@ )

@@ -31,3 +31,6 @@ const {

TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED
TEST_EARLY_FLAKE_ENABLED,
getTestSessionName,
TEST_SESSION_NAME,
TEST_LEVEL_EVENT_TYPES
} = require('../../dd-trace/src/plugins/util/test')

@@ -47,3 +50,5 @@ const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util')

incrementCountMetric,
distributionMetric
distributionMetric,
TELEMETRY_ITR_SKIPPED,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')

@@ -183,3 +188,3 @@

this.repositoryRoot = repositoryRoot
this.isUnsupportedCIProvider = !ciProviderName
this.ciProviderName = ciProviderName
this.codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot)

@@ -213,2 +218,5 @@

// Init function returns a promise that resolves with the Cypress configuration
// Depending on the received configuration, the Cypress configuration can be modified:
// for example, to enable retries for failed tests.
init (tracer, cypressConfig) {

@@ -218,2 +226,29 @@ this._isInit = true

this.cypressConfig = cypressConfig
this.libraryConfigurationPromise = getLibraryConfiguration(this.tracer, this.testConfiguration)
.then((libraryConfigurationResponse) => {
if (libraryConfigurationResponse.err) {
log.error(libraryConfigurationResponse.err)
} else {
const {
libraryConfig: {
isSuitesSkippingEnabled,
isCodeCoverageEnabled,
isEarlyFlakeDetectionEnabled,
earlyFlakeDetectionNumRetries,
isFlakyTestRetriesEnabled,
flakyTestRetriesCount
}
} = libraryConfigurationResponse
this.isSuitesSkippingEnabled = isSuitesSkippingEnabled
this.isCodeCoverageEnabled = isCodeCoverageEnabled
this.isEarlyFlakeDetectionEnabled = isEarlyFlakeDetectionEnabled
this.earlyFlakeDetectionNumRetries = earlyFlakeDetectionNumRetries
if (isFlakyTestRetriesEnabled) {
this.cypressConfig.retries.runMode = flakyTestRetriesCount
}
}
return this.cypressConfig
})
return this.libraryConfigurationPromise
}

@@ -225,2 +260,3 @@

this.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
return this.tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_suite`, {

@@ -236,3 +272,3 @@ childOf: this.testModuleSpan,

getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) {
getTestSpan ({ testName, testSuite, isUnskippable, isForcedToRun, testSourceFile }) {
const testSuiteTags = {

@@ -261,4 +297,7 @@ [TEST_COMMAND]: this.command,

const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
if (testSourceFile) {
testSpanMetadata[TEST_SOURCE_FILE] = testSourceFile
}
const codeOwners = this.getTestCodeOwners({ testSuite, testSourceFile })
if (codeOwners) {

@@ -298,3 +337,3 @@ testSpanMetadata[TEST_CODE_OWNERS] = codeOwners

testFramework: 'cypress',
isUnsupportedCIProvider: this.isUnsupportedCIProvider,
isUnsupportedCIProvider: !this.ciProviderName,
...tags

@@ -309,2 +348,5 @@ })

async beforeRun (details) {
// We need to make sure that the plugin is initialized before running the tests
// This is for the case where the user has not returned the promise from the init function
await this.libraryConfigurationPromise
this.command = getCypressCommand(details)

@@ -314,21 +356,2 @@ this.frameworkVersion = getCypressVersion(details)

const libraryConfigurationResponse = await getLibraryConfiguration(this.tracer, this.testConfiguration)
if (libraryConfigurationResponse.err) {
log.error(libraryConfigurationResponse.err)
} else {
const {
libraryConfig: {
isSuitesSkippingEnabled,
isCodeCoverageEnabled,
isEarlyFlakeDetectionEnabled,
earlyFlakeDetectionNumRetries
}
} = libraryConfigurationResponse
this.isSuitesSkippingEnabled = isSuitesSkippingEnabled
this.isCodeCoverageEnabled = isCodeCoverageEnabled
this.isEarlyFlakeDetectionEnabled = isEarlyFlakeDetectionEnabled
this.earlyFlakeDetectionNumRetries = earlyFlakeDetectionNumRetries
}
if (this.isEarlyFlakeDetectionEnabled) {

@@ -359,2 +382,3 @@ const knownTestsResponse = await getKnownTests(

this.itrCorrelationId = correlationId
incrementCountMetric(TELEMETRY_ITR_SKIPPED, { testLevel: 'test' }, this.testsToSkip.length)
}

@@ -382,2 +406,14 @@ }

const testSessionName = getTestSessionName(this.tracer._tracer._config, this.command, this.testEnvironmentMetadata)
if (this.tracer._tracer._exporter?.setMetadataTags) {
const metadataTags = {}
for (const testLevel of TEST_LEVEL_EVENT_TYPES) {
metadataTags[testLevel] = {
[TEST_SESSION_NAME]: testSessionName
}
}
this.tracer._tracer._exporter.setMetadataTags(metadataTags)
}
this.testSessionSpan = this.tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, {

@@ -434,2 +470,5 @@ childOf,

this.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session')
incrementCountMetric(TELEMETRY_TEST_SESSION, {
provider: this.ciProviderName
})

@@ -482,8 +521,12 @@ finishAllTraceSpans(this.testSessionSpan)

)
const skippedTestSpan = this.getTestSpan(cypressTestName, spec.relative)
let testSourceFile
if (spec.absolute && this.repositoryRoot) {
skippedTestSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
} else {
skippedTestSpan.setTag(TEST_SOURCE_FILE, spec.relative)
testSourceFile = spec.relative
}
const skippedTestSpan = this.getTestSpan({ testName: cypressTestName, testSuite: spec.relative, testSourceFile })
skippedTestSpan.setTag(TEST_STATUS, 'skip')

@@ -503,25 +546,57 @@ if (isSkippedByItr) {

let latestError
finishedTests.forEach((finishedTest) => {
const cypressTest = cypressTests.find(test => test.title.join(' ') === finishedTest.testName)
if (!cypressTest) {
return
const finishedTestsByTestName = finishedTests.reduce((acc, finishedTest) => {
if (!acc[finishedTest.testName]) {
acc[finishedTest.testName] = []
}
if (cypressTest.displayError) {
latestError = new Error(cypressTest.displayError)
}
const cypressTestStatus = CYPRESS_STATUS_TO_TEST_STATUS[cypressTest.state]
// update test status
if (cypressTestStatus !== finishedTest.testStatus) {
finishedTest.testSpan.setTag(TEST_STATUS, cypressTestStatus)
finishedTest.testSpan.setTag('error', latestError)
}
if (this.itrCorrelationId) {
finishedTest.testSpan.setTag(ITR_CORRELATION_ID, this.itrCorrelationId)
}
if (spec.absolute && this.repositoryRoot) {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
} else {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, spec.relative)
}
finishedTest.testSpan.finish(finishedTest.finishTime)
acc[finishedTest.testName].push(finishedTest)
return acc
}, {})
Object.entries(finishedTestsByTestName).forEach(([testName, finishedTestAttempts]) => {
finishedTestAttempts.forEach((finishedTest, attemptIndex) => {
// TODO: there could be multiple if there have been retries!
// potentially we need to match the test status!
const cypressTest = cypressTests.find(test => test.title.join(' ') === testName)
if (!cypressTest) {
return
}
// finishedTests can include multiple tests with the same name if they have been retried
// by early flake detection. Cypress is unaware of this so .attempts does not necessarily have
// the same length as `finishedTestAttempts`
let cypressTestStatus = CYPRESS_STATUS_TO_TEST_STATUS[cypressTest.state]
if (cypressTest.attempts && cypressTest.attempts[attemptIndex]) {
cypressTestStatus = CYPRESS_STATUS_TO_TEST_STATUS[cypressTest.attempts[attemptIndex].state]
if (attemptIndex > 0) {
finishedTest.testSpan.setTag(TEST_IS_RETRY, 'true')
}
}
if (cypressTest.displayError) {
latestError = new Error(cypressTest.displayError)
}
// Update test status
if (cypressTestStatus !== finishedTest.testStatus) {
finishedTest.testSpan.setTag(TEST_STATUS, cypressTestStatus)
finishedTest.testSpan.setTag('error', latestError)
}
if (this.itrCorrelationId) {
finishedTest.testSpan.setTag(ITR_CORRELATION_ID, this.itrCorrelationId)
}
let testSourceFile
if (spec.absolute && this.repositoryRoot) {
testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
} else {
testSourceFile = spec.relative
}
if (testSourceFile) {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, testSourceFile)
}
const codeOwners = this.getTestCodeOwners({ testSuite: spec.relative, testSourceFile })
if (codeOwners) {
finishedTest.testSpan.setTag(TEST_CODE_OWNERS, codeOwners)
}
finishedTest.testSpan.finish(finishedTest.finishTime)
})
})

@@ -573,3 +648,8 @@

if (!this.activeTestSpan) {
this.activeTestSpan = this.getTestSpan(testName, testSuite, isUnskippable, isForcedToRun)
this.activeTestSpan = this.getTestSpan({
testName,
testSuite,
isUnskippable,
isForcedToRun
})
}

@@ -629,4 +709,10 @@

}
this.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', {
hasCodeOwners: !!this.activeTestSpan.context()._tags[TEST_CODE_OWNERS],
isNew,
isRum: isRUMActive,
browserDriver: 'cypress'
})
this.activeTestSpan = null
this.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test')
return null

@@ -642,4 +728,11 @@ },

}
getTestCodeOwners ({ testSuite, testSourceFile }) {
if (testSourceFile) {
return getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
}
return getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
}
}
module.exports = new CypressPlugin()

@@ -25,7 +25,6 @@ const NoopTracer = require('../../dd-trace/src/noop/tracer')

// We still need to register these tasks or the support file will fail
return on('task', noopTask)
on('task', noopTask)
return config
}
cypressPlugin.init(tracer, config)
on('before:run', cypressPlugin.beforeRun.bind(cypressPlugin))

@@ -35,2 +34,4 @@ on('after:spec', cypressPlugin.afterSpec.bind(cypressPlugin))

on('task', cypressPlugin.getTasks())
return cypressPlugin.init(tracer, config)
}

@@ -20,4 +20,7 @@ 'use strict'

ctx.headers = headers
ctx.req = new globalThis.Request(req, { headers })
for (const name in headers) {
if (!req.headers.has(name)) {
req.headers.set(name, headers[name])
}
}

@@ -24,0 +27,0 @@ return store

@@ -32,3 +32,3 @@ 'use strict'

meta: {
'file.descriptor': (typeof fd === 'object' || typeof fd === 'number') ? fd.toString() : '',
'file.descriptor': ((fd !== null && typeof fd === 'object') || typeof fd === 'number') ? fd.toString() : '',
'file.dest': params.dest || params.newPath || (params.target && params.path),

@@ -35,0 +35,0 @@ 'file.flag': String(flag || defaultFlag || ''),

@@ -83,2 +83,6 @@ 'use strict'

}
finish (finishTime) {
this.activeSpan.finish(finishTime)
}
}

@@ -85,0 +89,0 @@

@@ -35,4 +35,4 @@ 'use strict'

this.addSub('apm:hapi:extension:enter', ({ req }) => {
this.enter(this._requestSpans.get(req))
this.addBind('apm:hapi:extension:start', ({ req }) => {
return this._requestSpans.get(req)
})

@@ -39,0 +39,0 @@ }

@@ -23,3 +23,5 @@ const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')

TEST_EARLY_FLAKE_ABORT_REASON,
JEST_DISPLAY_NAME
JEST_DISPLAY_NAME,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER
} = require('../../dd-trace/src/plugins/util/test')

@@ -36,3 +38,4 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')

@@ -134,2 +137,4 @@

this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
this.tracer._exporter.flush(() => {

@@ -154,2 +159,4 @@ if (onDone) {

config._ddRepositoryRoot = this.repositoryRoot
config._ddIsFlakyTestRetriesEnabled = this.libraryConfig?.isFlakyTestRetriesEnabled ?? false
config._ddFlakyTestRetriesCount = this.libraryConfig?.flakyTestRetriesCount
})

@@ -293,8 +300,16 @@ })

}
span.finish()
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)

@@ -332,3 +347,4 @@ })

isNew,
isEfdRetry
isEfdRetry,
isJestRetry
} = test

@@ -358,2 +374,6 @@

if (isJestRetry) {
extraTags[TEST_IS_RETRY] = 'true'
}
return super.startTestSpan(name, suite, this.testSuiteSpan, extraTags)

@@ -360,0 +380,0 @@ }

@@ -5,2 +5,3 @@ 'use strict'

const ConsumerPlugin = require('./consumer')
const BatchConsumerPlugin = require('./batch-consumer')
const CompositePlugin = require('../../dd-trace/src/plugins/composite')

@@ -13,3 +14,4 @@

producer: ProducerPlugin,
consumer: ConsumerPlugin
consumer: ConsumerPlugin,
batchConsumer: BatchConsumerPlugin
}

@@ -16,0 +18,0 @@ }

@@ -84,3 +84,3 @@ 'use strict'

for (const message of messages) {
if (typeof message === 'object') {
if (message !== null && typeof message === 'object') {
this.tracer.inject(span, 'text_map', message.headers)

@@ -87,0 +87,0 @@ if (this.config.dsmEnabled) {

@@ -23,3 +23,12 @@ 'use strict'

TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED
TEST_EARLY_FLAKE_ENABLED,
TEST_SESSION_ID,
TEST_MODULE_ID,
TEST_MODULE,
TEST_SUITE_ID,
TEST_COMMAND,
TEST_SUITE,
MOCHA_IS_PARALLEL,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER
} = require('../../dd-trace/src/plugins/util/test')

@@ -35,5 +44,22 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const log = require('../../dd-trace/src/log')
function getTestSuiteLevelVisibilityTags (testSuiteSpan) {
const testSuiteSpanContext = testSuiteSpan.context()
const suiteTags = {
[TEST_SUITE_ID]: testSuiteSpanContext.toSpanId(),
[TEST_SESSION_ID]: testSuiteSpanContext.toTraceId(),
[TEST_COMMAND]: testSuiteSpanContext._tags[TEST_COMMAND],
[TEST_MODULE]: 'mocha'
}
if (testSuiteSpanContext._parentId) {
suiteTags[TEST_MODULE_ID] = testSuiteSpanContext._parentId.toString(10)
}
return suiteTags
}
class MochaPlugin extends CiPlugin {

@@ -55,3 +81,4 @@ static get id () {

}
const testSuiteSpan = this._testSuites.get(suiteFile)
const testSuite = getTestSuitePath(suiteFile, this.sourceRoot)
const testSuiteSpan = this._testSuites.get(testSuite)

@@ -79,3 +106,3 @@ if (!coverageFiles.length) {

this.addSub('ci:mocha:test-suite:start', ({
testSuite,
testSuiteAbsolutePath,
isUnskippable,

@@ -85,7 +112,11 @@ isForcedToRun,

}) => {
const store = storage.getStore()
// If the test module span is undefined, the plugin has not been initialized correctly and we bail out
if (!this.testModuleSpan) {
return
}
const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.sourceRoot)
const testSuiteMetadata = getTestSuiteCommonTags(
this.command,
this.frameworkVersion,
getTestSuitePath(testSuite, this.sourceRoot),
testSuite,
'mocha'

@@ -117,2 +148,3 @@ )

}
const store = storage.getStore()
this.enter(testSuiteSpan, store)

@@ -151,3 +183,7 @@ this._testSuites.set(testSuite, testSuiteSpan)

this.addSub('ci:mocha:test:finish', (status) => {
this.addSub('ci:mocha:worker:finish', () => {
this.tracer._exporter.flush()
})
this.addSub('ci:mocha:test:finish', ({ status, hasBeenRetried }) => {
const store = storage.getStore()

@@ -158,9 +194,19 @@ const span = store?.span

span.setTag(TEST_STATUS, status)
if (hasBeenRetried) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.finish()
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)

@@ -182,4 +228,4 @@ }

const store = storage.getStore()
if (err && store && store.span) {
const span = store.span
const span = store?.span
if (err && span) {
if (err.constructor.name === 'Pending' && !this.forbidPending) {

@@ -194,2 +240,28 @@ span.setTag(TEST_STATUS, 'skip')

this.addSub('ci:mocha:test:retry', (isFirstAttempt) => {
const store = storage.getStore()
const span = store?.span
if (span) {
span.setTag(TEST_STATUS, 'fail')
if (!isFirstAttempt) {
span.setTag(TEST_IS_RETRY, 'true')
}
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)
}
})
this.addSub('ci:mocha:test:parameterize', ({ title, params }) => {

@@ -207,3 +279,4 @@ this._testTitleToParams[title] = params

error,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
}) => {

@@ -220,2 +293,6 @@ if (this.testSessionSpan) {

if (isParallel) {
this.testSessionSpan.setTag(MOCHA_IS_PARALLEL, 'true')
}
addIntelligentTestRunnerSpanTags(

@@ -245,2 +322,3 @@ this.testSessionSpan,

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
}

@@ -250,2 +328,33 @@ this.libraryConfig = null

})
this.addSub('ci:mocha:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => {
const formattedSpan = {
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}
if (formattedSpan.name === 'mocha.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this._testSuites.get(testSuite)
if (!testSuiteSpan) {
log.warn(`Test suite span not found for test span with test suite ${testSuite}`)
return formattedSpan
}
const suiteTags = getTestSuiteLevelVisibilityTags(testSuiteSpan)
formattedSpan.meta = {
...formattedSpan.meta,
...suiteTags
}
}
return formattedSpan
})
)
formattedTraces.forEach(trace => {
this.tracer._exporter.export(trace)
})
})
}

@@ -259,3 +368,4 @@

isEfdRetry,
testStartLine
testStartLine,
isParallel
} = testInfo

@@ -275,4 +385,8 @@

if (isParallel) {
extraTags[MOCHA_IS_PARALLEL] = 'true'
}
const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.sourceRoot)
const testSuiteSpan = this._testSuites.get(testSuiteAbsolutePath)
const testSuiteSpan = this._testSuites.get(testSuite)

@@ -279,0 +393,0 @@ if (this.repositoryRoot !== this.sourceRoot && !!this.repositoryRoot) {

@@ -118,3 +118,3 @@ 'use strict'

function isObject (val) {
return typeof val === 'object' && val !== null && !(val instanceof Array)
return val !== null && typeof val === 'object' && !Array.isArray(val)
}

@@ -121,0 +121,0 @@

@@ -9,3 +9,3 @@ 'use strict'

const errorPages = ['/404', '/500', '/_error', '/_not-found']
const errorPages = ['/404', '/500', '/_error', '/_not-found', '/_not-found/page']

@@ -124,3 +124,2 @@ class NextPlugin extends ServerPlugin {

})
web.setRoute(req, page)

@@ -127,0 +126,0 @@ }

@@ -10,2 +10,3 @@ 'use strict'

const { MEASURED } = require('../../../ext/tags')
const { estimateTokens } = require('./token-estimator')

@@ -19,2 +20,13 @@ // String#replaceAll unavailable on Node.js@v14 (dd-trace@<=v3)

function safeRequire (path) {
try {
// eslint-disable-next-line import/no-extraneous-dependencies
return require(path)
} catch {
return null
}
}
const encodingForModel = safeRequire('tiktoken')?.encoding_for_model
class OpenApiPlugin extends TracingPlugin {

@@ -24,2 +36,5 @@ static get id () { return 'openai' }

static get system () { return 'openai' }
static get prefix () {
return 'tracing:apm:openai:request'
}

@@ -49,4 +64,6 @@ constructor (...args) {

start ({ methodName, args, basePath, apiKey }) {
bindStart (ctx) {
const { methodName, args, basePath, apiKey } = ctx
const payload = normalizeRequestPayload(methodName, args)
const store = storage.getStore() || {}

@@ -82,7 +99,5 @@ const span = this.startSpan('openai.request', {

}
})
}, false)
const fullStore = storage.getStore() || {} // certain request body fields are later used for logs
const store = Object.create(null)
fullStore.openai = store // namespacing these fields
const openaiStore = Object.create(null)

@@ -92,5 +107,5 @@ const tags = {} // The remaining tags are added one at a time

// createChatCompletion, createCompletion, createImage, createImageEdit, createTranscription, createTranslation
if ('prompt' in payload) {
if (payload.prompt) {
const prompt = payload.prompt
store.prompt = prompt
openaiStore.prompt = prompt
if (typeof prompt === 'string' || (Array.isArray(prompt) && typeof prompt[0] === 'number')) {

@@ -108,10 +123,10 @@ // This is a single prompt, either String or [Number]

// createEdit, createEmbedding, createModeration
if ('input' in payload) {
if (payload.input) {
const normalized = normalizeStringOrTokenArray(payload.input, false)
tags['openai.request.input'] = truncateText(normalized)
store.input = normalized
openaiStore.input = normalized
}
// createChatCompletion, createCompletion
if (typeof payload.logit_bias === 'object' && payload.logit_bias) {
if (payload.logit_bias !== null && typeof payload.logit_bias === 'object') {
for (const [tokenId, bias] of Object.entries(payload.logit_bias)) {

@@ -122,4 +137,10 @@ tags[`openai.request.logit_bias.${tokenId}`] = bias

if (payload.stream) {
tags['openai.request.stream'] = payload.stream
}
switch (methodName) {
case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
createFineTuneRequestExtraction(tags, payload)

@@ -129,13 +150,19 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
commonCreateImageRequestExtraction(tags, payload, store)
case 'images.createVariation':
commonCreateImageRequestExtraction(tags, payload, openaiStore)
break
case 'createChatCompletion':
createChatCompletionRequestExtraction(tags, payload, store)
case 'chat.completions.create':
createChatCompletionRequestExtraction(tags, payload, openaiStore)
break
case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
commonFileRequestExtraction(tags, payload)

@@ -145,7 +172,10 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
commonCreateAudioRequestExtraction(tags, payload, store)
case 'audio.translations.create':
commonCreateAudioRequestExtraction(tags, payload, openaiStore)
break
case 'retrieveModel':
case 'models.retrieve':
retrieveModelRequestExtraction(tags, payload)

@@ -155,5 +185,12 @@ break

case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonLookupFineTuneRequestExtraction(tags, payload)

@@ -163,3 +200,4 @@ break

case 'createEdit':
createEditRequestExtraction(tags, payload, store)
case 'edits.create':
createEditRequestExtraction(tags, payload, openaiStore)
break

@@ -169,6 +207,28 @@ }

span.addTags(tags)
ctx.currentStore = { ...store, span, openai: openaiStore }
return ctx.currentStore
}
finish ({ headers, body, method, path }) {
const span = this.activeSpan
asyncEnd (ctx) {
const { result } = ctx
const store = ctx.currentStore
const span = store?.span
if (!span) return
const error = !!span.context()._tags.error
let headers, body, method, path
if (!error) {
headers = result.headers
body = result.data
method = result.request.method
path = result.request.path
}
if (!error && headers?.constructor.name === 'Headers') {
headers = Object.fromEntries(headers)
}
const methodName = span._spanContext._tags['resource.name']

@@ -178,103 +238,195 @@

const fullStore = storage.getStore()
const store = fullStore.openai
const openaiStore = store.openai
if (!error && (path?.startsWith('https://') || path?.startsWith('http://'))) {
// basic checking for if the path was set as a full URL
// not using a full regex as it will likely be "https://api.openai.com/..."
path = new URL(path).pathname
}
const endpoint = lookupOperationEndpoint(methodName, path)
const tags = {
'openai.request.endpoint': endpoint,
'openai.request.method': method,
const tags = error
? {}
: {
'openai.request.endpoint': endpoint,
'openai.request.method': method.toUpperCase(),
'openai.organization.id': body.organization_id, // only available in fine-tunes endpoints
'openai.organization.name': headers['openai-organization'],
'openai.organization.id': body.organization_id, // only available in fine-tunes endpoints
'openai.organization.name': headers['openai-organization'],
'openai.response.model': headers['openai-model'] || body.model, // specific model, often undefined
'openai.response.id': body.id, // common creation value, numeric epoch
'openai.response.deleted': body.deleted, // common boolean field in delete responses
'openai.response.model': headers['openai-model'] || body.model, // specific model, often undefined
'openai.response.id': body.id, // common creation value, numeric epoch
'openai.response.deleted': body.deleted, // common boolean field in delete responses
// The OpenAI API appears to use both created and created_at in different places
// Here we're conciously choosing to surface this inconsistency instead of normalizing
'openai.response.created': body.created,
'openai.response.created_at': body.created_at
}
// The OpenAI API appears to use both created and created_at in different places
// Here we're conciously choosing to surface this inconsistency instead of normalizing
'openai.response.created': body.created,
'openai.response.created_at': body.created_at
}
responseDataExtractionByMethod(methodName, tags, body, store)
responseDataExtractionByMethod(methodName, tags, body, openaiStore)
span.addTags(tags)
super.finish()
this.sendLog(methodName, span, tags, store, false)
this.sendMetrics(headers, body, endpoint, span._duration)
span.finish()
this.sendLog(methodName, span, tags, openaiStore, error)
this.sendMetrics(headers, body, endpoint, span._duration, error, tags)
}
error (...args) {
super.error(...args)
sendMetrics (headers, body, endpoint, duration, error, spanTags) {
const tags = [`error:${Number(!!error)}`]
if (error) {
this.metrics.increment('openai.request.error', 1, tags)
} else {
tags.push(`org:${headers['openai-organization']}`)
tags.push(`endpoint:${endpoint}`) // just "/v1/models", no method
tags.push(`model:${headers['openai-model'] || body.model}`)
}
const span = this.activeSpan
const methodName = span._spanContext._tags['resource.name']
this.metrics.distribution('openai.request.duration', duration * 1000, tags)
const fullStore = storage.getStore()
const store = fullStore.openai
const promptTokens = spanTags['openai.response.usage.prompt_tokens']
const promptTokensEstimated = spanTags['openai.response.usage.prompt_tokens_estimated']
// We don't know most information about the request when it fails
const completionTokens = spanTags['openai.response.usage.completion_tokens']
const completionTokensEstimated = spanTags['openai.response.usage.completion_tokens_estimated']
const tags = ['error:1']
this.metrics.distribution('openai.request.duration', span._duration * 1000, tags)
this.metrics.increment('openai.request.error', 1, tags)
if (!error) {
if (promptTokensEstimated) {
this.metrics.distribution(
'openai.tokens.prompt', promptTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.prompt', promptTokens, tags)
}
if (completionTokensEstimated) {
this.metrics.distribution(
'openai.tokens.completion', completionTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.completion', completionTokens, tags)
}
this.sendLog(methodName, span, {}, store, true)
}
if (promptTokensEstimated || completionTokensEstimated) {
this.metrics.distribution(
'openai.tokens.total', promptTokens + completionTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.total', promptTokens + completionTokens, tags)
}
}
sendMetrics (headers, body, endpoint, duration) {
const tags = [
`org:${headers['openai-organization']}`,
`endpoint:${endpoint}`, // just "/v1/models", no method
`model:${headers['openai-model']}`,
'error:0'
]
if (headers) {
if (headers['x-ratelimit-limit-requests']) {
this.metrics.gauge('openai.ratelimit.requests', Number(headers['x-ratelimit-limit-requests']), tags)
}
this.metrics.distribution('openai.request.duration', duration * 1000, tags)
if (headers['x-ratelimit-remaining-requests']) {
this.metrics.gauge(
'openai.ratelimit.remaining.requests', Number(headers['x-ratelimit-remaining-requests']), tags
)
}
if (body && ('usage' in body)) {
const promptTokens = body.usage.prompt_tokens
const completionTokens = body.usage.completion_tokens
this.metrics.distribution('openai.tokens.prompt', promptTokens, tags)
this.metrics.distribution('openai.tokens.completion', completionTokens, tags)
this.metrics.distribution('openai.tokens.total', promptTokens + completionTokens, tags)
if (headers['x-ratelimit-limit-tokens']) {
this.metrics.gauge('openai.ratelimit.tokens', Number(headers['x-ratelimit-limit-tokens']), tags)
}
if (headers['x-ratelimit-remaining-tokens']) {
this.metrics.gauge('openai.ratelimit.remaining.tokens', Number(headers['x-ratelimit-remaining-tokens']), tags)
}
}
}
if ('x-ratelimit-limit-requests' in headers) {
this.metrics.gauge('openai.ratelimit.requests', Number(headers['x-ratelimit-limit-requests']), tags)
sendLog (methodName, span, tags, openaiStore, error) {
if (!openaiStore) return
if (!Object.keys(openaiStore).length) return
if (!this.sampler.isSampled()) return
const log = {
status: error ? 'error' : 'info',
message: `sampled ${methodName}`,
...openaiStore
}
if ('x-ratelimit-remaining-requests' in headers) {
this.metrics.gauge('openai.ratelimit.remaining.requests', Number(headers['x-ratelimit-remaining-requests']), tags)
this.logger.log(log, span, tags)
}
}
// Counts (or estimates) the number of prompt tokens sent in a completion /
// chat-completion request. Returns { promptTokens, promptEstimated } where
// `promptEstimated` is true when the fallback estimator was used instead of
// an exact tokenizer (see countTokens).
function countPromptTokens (methodName, payload, model) {
  let promptTokens = 0
  let promptEstimated = false
  if (methodName === 'chat.completions.create') {
    const messages = payload.messages
    for (const message of messages) {
      const content = message.content
      if (typeof content === 'string') {
        const { tokens, estimated } = countTokens(content, model)
        promptTokens += tokens
        promptEstimated = estimated
      } else if (Array.isArray(content)) {
        // content can be an array of typed parts (text / image_url)
        for (const c of content) {
          if (c.type === 'text') {
            const { tokens, estimated } = countTokens(c.text, model)
            promptTokens += tokens
            promptEstimated = estimated
          }
          // unsupported token computation for image_url
          // as even though URL is a string, its true token count
          // is based on the image itself, something onerous to do client-side
        }
      }
    }
  } else if (methodName === 'completions.create') {
    // `prompt` may be a single string or an array of strings
    let prompt = payload.prompt
    if (!Array.isArray(prompt)) prompt = [prompt]

    for (const p of prompt) {
      const { tokens, estimated } = countTokens(p, model)
      promptTokens += tokens
      promptEstimated = estimated
    }
  }

  return { promptTokens, promptEstimated }
}
// Counts (or estimates) the number of completion tokens in a response body.
// Handles both non-streamed (`choice.message`) and streamed (`choice.delta`)
// shapes. Returns { completionTokens, completionEstimated }.
function countCompletionTokens (body, model) {
  let completionTokens = 0
  let completionEstimated = false
  if (body?.choices) {
    for (const choice of body.choices) {
      const message = choice.message || choice.delta // delta for streamed responses
      const text = choice.text
      const content = text || message?.content

      const { tokens, estimated } = countTokens(content, model)
      completionTokens += tokens
      completionEstimated = estimated
    }
  }

  return { completionTokens, completionEstimated }
}
const log = {
status: error ? 'error' : 'info',
message: `sampled ${methodName}`,
...store
// Counts tokens for `content` under `model`. Prefers the exact tiktoken
// encoder when it was loadable (estimated: false); otherwise falls back to a
// heuristic estimate (estimated: true).
function countTokens (content, model) {
  if (encodingForModel) {
    try {
      // try using tiktoken if it was available
      const encoder = encodingForModel(model)
      const tokens = encoder.encode(content).length
      encoder.free() // release the native encoder's memory
      return { tokens, estimated: false }
    } catch {
      // possible errors from tiktoken:
      // * model not available for token counts
      // * issue encoding content
    }
  }

  return {
    tokens: estimateTokens(content),
    estimated: true
  }
}
// Tags the edit instruction of a createEdit request and stashes it on the
// per-request store so it can be included in sampled logs later.
function createEditRequestExtraction (tags, payload, openaiStore) {
  const instruction = payload.instruction
  tags['openai.request.instruction'] = instruction
  openaiStore.instruction = instruction
}

@@ -286,28 +438,30 @@

// Tags each chat message's role/name/finish_reason plus its content (via
// tagChatCompletionRequestContent) and stashes the raw messages on the
// per-request store for later log sampling. No-op when `messages` is not a
// non-empty array.
function createChatCompletionRequestExtraction (tags, payload, openaiStore) {
  const messages = payload.messages
  if (!defensiveArrayLength(messages)) return

  openaiStore.messages = messages
  for (let i = 0; i < messages.length; i++) {
    const message = messages[i]
    tagChatCompletionRequestContent(message.content, i, tags)
    tags[`openai.request.messages.${i}.role`] = message.role
    tags[`openai.request.messages.${i}.name`] = message.name
    tags[`openai.request.messages.${i}.finish_reason`] = message.finish_reason
  }
}
// Shared request extraction for image endpoints. Tags the basenames of the
// image (createImageEdit, createImageVariation — accepted under either
// `file` or `image`) and of the mask (createImageEdit), and stashes them on
// the per-request store.
function commonCreateImageRequestExtraction (tags, payload, openaiStore) {
  // createImageEdit, createImageVariation
  const img = payload.file || payload.image
  if (img !== null && typeof img === 'object' && img.path) {
    const file = path.basename(img.path)
    tags['openai.request.image'] = file
    openaiStore.file = file
  }

  // createImageEdit
  if (payload.mask !== null && typeof payload.mask === 'object' && payload.mask.path) {
    const mask = path.basename(payload.mask.path)
    tags['openai.request.mask'] = mask
    openaiStore.mask = mask
  }
}

@@ -320,5 +474,6 @@

function responseDataExtractionByMethod (methodName, tags, body, store) {
function responseDataExtractionByMethod (methodName, tags, body, openaiStore) {
switch (methodName) {
case 'createModeration':
case 'moderations.create':
createModerationResponseExtraction(tags, body)

@@ -328,10 +483,18 @@ break

case 'createCompletion':
case 'completions.create':
case 'createChatCompletion':
case 'chat.completions.create':
case 'createEdit':
commonCreateResponseExtraction(tags, body, store)
case 'edits.create':
commonCreateResponseExtraction(tags, body, openaiStore, methodName)
break
case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
commonListCountResponseExtraction(tags, body)

@@ -341,7 +504,10 @@ break

case 'createEmbedding':
createEmbeddingResponseExtraction(tags, body)
case 'embeddings.create':
createEmbeddingResponseExtraction(tags, body, openaiStore)
break
case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
createRetrieveFileResponseExtraction(tags, body)

@@ -351,2 +517,3 @@ break

case 'deleteFile':
case 'files.del':
deleteFileResponseExtraction(tags, body)

@@ -356,2 +523,4 @@ break

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
downloadFileResponseExtraction(tags, body)

@@ -361,4 +530,10 @@ break

case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonFineTuneResponseExtraction(tags, body)

@@ -368,3 +543,5 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
case 'audio.translations.create':
createAudioResponseExtraction(tags, body)

@@ -374,4 +551,7 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
case 'images.createVariation':
commonImageResponseExtraction(tags, body)

@@ -381,2 +561,3 @@ break

case 'listModels':
case 'models.list':
listModelsResponseExtraction(tags, body)

@@ -386,2 +567,3 @@ break

case 'retrieveModel':
case 'models.retrieve':
retrieveModelResponseExtraction(tags, body)

@@ -459,11 +641,15 @@ break

tags['openai.response.fine_tuned_model'] = body.fine_tuned_model
if (body.hyperparams) {
tags['openai.response.hyperparams.n_epochs'] = body.hyperparams.n_epochs
tags['openai.response.hyperparams.batch_size'] = body.hyperparams.batch_size
tags['openai.response.hyperparams.prompt_loss_weight'] = body.hyperparams.prompt_loss_weight
tags['openai.response.hyperparams.learning_rate_multiplier'] = body.hyperparams.learning_rate_multiplier
const hyperparams = body.hyperparams || body.hyperparameters
const hyperparamsKey = body.hyperparams ? 'hyperparams' : 'hyperparameters'
if (hyperparams) {
tags[`openai.response.${hyperparamsKey}.n_epochs`] = hyperparams.n_epochs
tags[`openai.response.${hyperparamsKey}.batch_size`] = hyperparams.batch_size
tags[`openai.response.${hyperparamsKey}.prompt_loss_weight`] = hyperparams.prompt_loss_weight
tags[`openai.response.${hyperparamsKey}.learning_rate_multiplier`] = hyperparams.learning_rate_multiplier
}
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files)
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files || body.training_file)
tags['openai.response.result_files_count'] = defensiveArrayLength(body.result_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files || body.validation_file)
tags['openai.response.updated_at'] = body.updated_at

@@ -483,10 +669,10 @@ tags['openai.response.status'] = body.status

// Shared request extraction for audio endpoints (transcription/translation).
// Tags response_format and language, plus the uploaded file's basename,
// which is also stashed on the per-request store.
function commonCreateAudioRequestExtraction (tags, body, openaiStore) {
  tags['openai.request.response_format'] = body.response_format
  tags['openai.request.language'] = body.language

  if (body.file !== null && typeof body.file === 'object' && body.file.path) {
    const filename = path.basename(body.file.path)
    tags['openai.request.filename'] = filename
    openaiStore.file = filename
  }
}

@@ -501,3 +687,3 @@ }

// This is a best effort attempt to extract the filename during the request
if (body.file && typeof body.file === 'object' && body.file.path) {
if (body.file !== null && typeof body.file === 'object' && body.file.path) {
tags['openai.request.filename'] = path.basename(body.file.path)

@@ -515,4 +701,4 @@ }

function createEmbeddingResponseExtraction (tags, body) {
usageExtraction(tags, body)
function createEmbeddingResponseExtraction (tags, body, openaiStore) {
usageExtraction(tags, body, openaiStore)

@@ -551,4 +737,4 @@ if (!body.data) return

// createCompletion, createChatCompletion, createEdit
function commonCreateResponseExtraction (tags, body, store) {
usageExtraction(tags, body)
function commonCreateResponseExtraction (tags, body, openaiStore, methodName) {
usageExtraction(tags, body, methodName, openaiStore)

@@ -559,16 +745,31 @@ if (!body.choices) return

store.choices = body.choices
openaiStore.choices = body.choices
for (let i = 0; i < body.choices.length; i++) {
const choice = body.choices[i]
tags[`openai.response.choices.${i}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${i}.logprobs`] = ('logprobs' in choice) ? 'returned' : undefined
tags[`openai.response.choices.${i}.text`] = truncateText(choice.text)
for (let choiceIdx = 0; choiceIdx < body.choices.length; choiceIdx++) {
const choice = body.choices[choiceIdx]
// logprobs can be null and we still want to tag it as 'returned' even when set to 'null'
const specifiesLogProb = Object.keys(choice).indexOf('logprobs') !== -1
tags[`openai.response.choices.${choiceIdx}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${choiceIdx}.logprobs`] = specifiesLogProb ? 'returned' : undefined
tags[`openai.response.choices.${choiceIdx}.text`] = truncateText(choice.text)
// createChatCompletion only
if ('message' in choice) {
const message = choice.message
tags[`openai.response.choices.${i}.message.role`] = message.role
tags[`openai.response.choices.${i}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${i}.message.name`] = truncateText(message.name)
const message = choice.message || choice.delta // delta for streamed responses
if (message) {
tags[`openai.response.choices.${choiceIdx}.message.role`] = message.role
tags[`openai.response.choices.${choiceIdx}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${choiceIdx}.message.name`] = truncateText(message.name)
if (message.tool_calls) {
const toolCalls = message.tool_calls
for (let toolIdx = 0; toolIdx < toolCalls.length; toolIdx++) {
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.function.name`] =
toolCalls[toolIdx].function.name
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.function.arguments`] =
toolCalls[toolIdx].function.arguments
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.id`] =
toolCalls[toolIdx].id
}
}
}

@@ -579,7 +780,36 @@ }

// createCompletion, createChatCompletion, createEdit, createEmbedding
// Tags token usage for a response. Prefers the server-reported `usage`
// block; when absent (e.g. streamed (chat) completions), falls back to
// client-side token counting for the completions/chat-completions methods,
// marking the counts as estimated. Zero counts are not tagged.
function usageExtraction (tags, body, methodName, openaiStore) {
  let promptTokens = 0
  let completionTokens = 0
  let totalTokens = 0
  if (body && body.usage) {
    promptTokens = body.usage.prompt_tokens
    completionTokens = body.usage.completion_tokens
    totalTokens = body.usage.total_tokens
  } else if (body?.model && ['chat.completions.create', 'completions.create'].includes(methodName)) {
    // guard with body?.model so a null/undefined body cannot throw here
    // estimate tokens based on method name for completions and chat completions
    const { model } = body
    let promptEstimated = false
    let completionEstimated = false

    // prompt tokens
    const payload = openaiStore
    const promptTokensCount = countPromptTokens(methodName, payload, model)
    promptTokens = promptTokensCount.promptTokens
    promptEstimated = promptTokensCount.promptEstimated

    // completion tokens
    const completionTokensCount = countCompletionTokens(body, model)
    completionTokens = completionTokensCount.completionTokens
    completionEstimated = completionTokensCount.completionEstimated

    // total tokens
    totalTokens = promptTokens + completionTokens

    if (promptEstimated) tags['openai.response.usage.prompt_tokens_estimated'] = true
    if (completionEstimated) tags['openai.response.usage.completion_tokens_estimated'] = true
  }

  if (promptTokens) tags['openai.response.usage.prompt_tokens'] = promptTokens
  if (completionTokens) tags['openai.response.usage.completion_tokens'] = completionTokens
  if (totalTokens) tags['openai.response.usage.total_tokens'] = totalTokens
}

@@ -596,2 +826,3 @@

if (!text) return
if (typeof text !== 'string' || !text || (typeof text === 'string' && text.length === 0)) return

@@ -609,2 +840,24 @@ text = text

// Tags a chat message's content. `contents` is either a plain string or an
// array of typed parts (text / image_url); any other shape is left untagged.
function tagChatCompletionRequestContent (contents, messageIdx, tags) {
  if (typeof contents === 'string') {
    tags[`openai.request.messages.${messageIdx}.content`] = contents
  } else if (Array.isArray(contents)) {
    // content can also be an array of objects
    // which represent text input or image url
    // use an index loop instead of for...in: for...in yields string keys and
    // would also visit inherited enumerable properties
    for (let contentIdx = 0; contentIdx < contents.length; contentIdx++) {
      const content = contents[contentIdx]
      const type = content.type
      tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.type`] = type
      if (type === 'text') {
        tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.text`] = truncateText(content.text)
      } else if (type === 'image_url') {
        tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.image_url.url`] =
          truncateText(content.image_url.url)
      }
      // unsupported type otherwise, won't be tagged
    }
  }
  // unsupported type otherwise, won't be tagged
}
// The server almost always responds with JSON

@@ -614,6 +867,19 @@ function coerceResponseBody (body, methodName) {

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file: body }
}
return typeof body === 'object' ? body : {}
const type = typeof body
if (type === 'string') {
try {
return JSON.parse(body)
} catch {
return body
}
} else if (type === 'object') {
return body
} else {
return {}
}
}

@@ -625,20 +891,35 @@

case 'deleteModel':
case 'models.del':
case 'retrieveModel':
case 'models.retrieve':
return '/v1/models/*'
case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
return '/v1/files/*'
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return '/v1/files/*/content'
case 'retrieveFineTune':
case 'fine-tune.retrieve':
return '/v1/fine-tunes/*'
case 'fine_tuning.jobs.retrieve':
return '/v1/fine_tuning/jobs/*'
case 'listFineTuneEvents':
case 'fine-tune.listEvents':
return '/v1/fine-tunes/*/events'
case 'fine_tuning.jobs.listEvents':
return '/v1/fine_tuning/jobs/*/events'
case 'cancelFineTune':
case 'fine-tune.cancel':
return '/v1/fine-tunes/*/cancel'
case 'fine_tuning.jobs.cancel':
return '/v1/fine_tuning/jobs/*/cancel'
}

@@ -657,4 +938,8 @@

case 'listModels':
case 'models.list':
case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
// no argument

@@ -664,2 +949,3 @@ return {}

case 'retrieveModel':
case 'models.retrieve':
return { id: args[0] }

@@ -674,7 +960,13 @@

case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file_id: args[0] }
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
return {

@@ -686,4 +978,9 @@ fine_tune_id: args[0],

case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
return { fine_tune_id: args[0] }

@@ -745,5 +1042,14 @@

// Length helper tolerant of the API's singular/plural field variants:
// returns the length for an array, 1 for a truthy non-array value
// (ie body.training_file vs body.training_files), undefined otherwise.
function defensiveArrayLength (maybeArray) {
  if (!maybeArray) return undefined
  return Array.isArray(maybeArray) ? maybeArray.length : 1
}
module.exports = OpenApiPlugin

@@ -17,3 +17,4 @@ 'use strict'

TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED
TEST_EARLY_FLAKE_ENABLED,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/plugins/util/test')

@@ -63,2 +64,3 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
appClosingTelemetry()

@@ -121,3 +123,3 @@ this.tracer._exporter.flush(onDone)

})
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags, isNew, isEfdRetry }) => {
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags, isNew, isEfdRetry, isRetry }) => {
const store = storage.getStore()

@@ -141,2 +143,5 @@ const span = store && store.span

}
if (isRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}

@@ -164,4 +169,2 @@ steps.forEach(step => {

span.finish()
if (testStatus === 'fail') {

@@ -174,4 +177,9 @@ this.numFailedTests++

'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS],
isNew,
browserDriver: 'playwright'
}
)
span.finish()

@@ -178,0 +186,0 @@ finishAllTraceSpans(span)

@@ -57,5 +57,5 @@ 'use strict'

// True for non-null, non-array objects. Array.isArray (rather than
// `instanceof Array`) also handles arrays from other realms.
function isObject (val) {
  return val !== null && typeof val === 'object' && !Array.isArray(val)
}
module.exports = SharedbPlugin
'use strict'
const log = require('../../dd-trace/src/log')
// Use a weak map to avoid polluting the wrapped function/method.

@@ -21,5 +23,8 @@ const unwrappers = new WeakMap()

function wrapFn (original, delegate) {
assertFunction(delegate)
assertNotClass(original) // TODO: support constructors of native classes
function wrapFunction (original, wrapper) {
if (typeof original === 'function') assertNotClass(original)
// TODO This needs to be re-done so that this and wrapMethod are distinct.
const target = { func: original }
wrapMethod(target, 'func', wrapper, typeof original !== 'function')
let delegate = target.func

@@ -34,3 +39,3 @@ const shim = function shim () {

copyProperties(original, shim)
if (typeof original === 'function') copyProperties(original, shim)

@@ -40,8 +45,135 @@ return shim

function wrapMethod (target, name, wrapper) {
assertMethod(target, name)
assertFunction(wrapper)
// Deprecated two-argument form of `wrap()`: kept only to fail loudly so
// stale callers migrate to `wrapFunction` instead of silently misbehaving.
const wrapFn = function (original, delegate) {
  throw new Error('calling `wrap()` with 2 args is deprecated. Use wrapFunction instead.')
}
// This is only used in safe mode. It's a simple state machine to track if the
// original method was called and if it returned. We need this to determine if
// an error was thrown by the original method, or by us. We'll use one of these
// per call to a wrapped method.
// Per-invocation state machine: records whether the wrapped original method
// was entered (`called`), whether it returned/resolved (`completed`), and
// what it produced (`retVal`), so errors can be attributed to the original
// method versus our instrumentation.
class CallState {
  constructor () {
    this.called = this.completed = false
    this.retVal = undefined
  }

  // The original method has been entered.
  startCall () {
    this.called = true
  }

  // The original method finished, producing `retVal`.
  endCall (retVal) {
    this.retVal = retVal
    this.completed = true
  }
}
// Duck-types a promise: any object exposing a callable `then`.
// (Falsy inputs are returned as-is, matching short-circuit semantics.)
function isPromise (obj) {
  if (!obj) return obj
  if (typeof obj !== 'object') return false
  return typeof obj.then === 'function'
}
let safeMode = !!process.env.DD_INEJCTION_ENABLED
// Toggles safe-mode at runtime (e.g. from tests); the initial value is
// derived from the environment at module load.
function setSafe (value) {
  safeMode = value
}
function wrapMethod (target, name, wrapper, noAssert) {
if (!noAssert) {
assertMethod(target, name)
assertFunction(wrapper)
}
const original = target[name]
const wrapped = wrapper(original)
let wrapped
if (safeMode && original) {
// In this mode, we make a best-effort attempt to handle errors that are thrown
// by us, rather than wrapped code. With such errors, we log them, and then attempt
// to return the result as if no wrapping was done at all.
//
// Caveats:
// * If the original function is called in a later iteration of the event loop,
// and we throw _then_, then it won't be caught by this. In practice, we always call
// the original function synchronously, so this is not a problem.
// * While async errors are dealt with here, errors in callbacks are not. This
// is because we don't necessarily know _for sure_ that any function arguments
// are wrapped by us. We could wrap them all anyway and just make that assumption,
// or just assume that the last argument is always a callback set by us if it's a
// function, but those don't seem like things we can rely on. We could add a
// `shimmer.markCallbackAsWrapped()` function that's a no-op outside safe-mode,
// but that means modifying every instrumentation. Even then, the complexity of
// this code increases because then we'd need to effectively do the reverse of
// what we're doing for synchronous functions. This is a TODO.
// We're going to hold on to current callState in this variable in this scope,
// which is fine because any time we reference it, we're referencing it synchronously.
// We'll use it in our wrapper (which, again, is called synchronously), and in the
// errorHandler, which will already have been bound to this callState.
let currentCallState
// Rather than calling the original function directly from the shim wrapper, we wrap
// it again so that we can track if it was called and if it returned. This is because
// we need to know if an error was thrown by the original function, or by us.
// We could do this inside the `wrapper` function defined below, which would simplify
// managing the callState, but then we'd be calling `wrapper` on each invocation, so
// instead we do it here, once.
const innerWrapped = wrapper(function (...args) {
// We need to stash the callState here because of recursion.
const callState = currentCallState
callState.startCall()
const retVal = original.apply(this, args)
if (isPromise(retVal)) {
retVal.then(callState.endCall.bind(callState))
} else {
callState.endCall(retVal)
}
return retVal
})
// This is the crux of what we're doing in safe mode. It handles errors
// that _we_ cause, by logging them, and transparently providing results
// as if no wrapping was done at all. That means detecting (via callState)
// whether the function has already run or not, and if it has, returning
// the result, and otherwise calling the original function unwrapped.
const handleError = function (args, callState, e) {
if (callState.completed) {
// error was thrown after original function returned/resolved, so
// it was us. log it.
log.error(e)
// original ran and returned something. return it.
return callState.retVal
}
if (!callState.called) {
// error was thrown before original function was called, so
// it was us. log it.
log.error(e)
// original never ran. call it unwrapped.
return original.apply(this, args)
}
// error was thrown during original function execution, so
// it was them. throw.
throw e
}
// The wrapped function is the one that will be called by the user.
// It calls our version of the original function, which manages the
// callState. That way when we use the errorHandler, it can tell where
// the error originated.
wrapped = function (...args) {
currentCallState = new CallState()
const errorHandler = handleError.bind(this, args, currentCallState)
try {
const retVal = innerWrapped.apply(this, args)
return isPromise(retVal) ? retVal.catch(errorHandler) : retVal
} catch (e) {
return errorHandler(e)
}
}
} else {
// In non-safe mode, we just wrap the original function directly.
wrapped = wrapper(original)
}
const descriptor = Object.getOwnPropertyDescriptor(target, name)

@@ -54,3 +186,3 @@

copyProperties(original, wrapped)
if (typeof original === 'function') copyProperties(original, wrapped)

@@ -163,5 +295,7 @@ if (descriptor) {

wrap,
wrapFunction,
massWrap,
unwrap,
massUnwrap
massUnwrap,
setSafe
}

@@ -7,3 +7,3 @@ 'use strict'

sample (span, measured, measuredByDefault) {
if (typeof measured === 'object') {
if (measured !== null && typeof measured === 'object') {
this.sample(span, measured[span.context()._name], measuredByDefault)

@@ -10,0 +10,0 @@ } else if (measured !== undefined) {

@@ -18,3 +18,3 @@ 'use strict'

HTTP_OUTGOING_BODY: 'server.response.body',
HTTP_INCOMING_RESPONSE_BODY: 'server.response.body',

@@ -24,3 +24,5 @@ HTTP_CLIENT_IP: 'http.client_ip',

USER_ID: 'usr.id',
WAF_CONTEXT_PROCESSOR: 'waf.context.processor'
WAF_CONTEXT_PROCESSOR: 'waf.context.processor',
HTTP_OUTGOING_URL: 'server.io.net.url'
}

@@ -11,4 +11,7 @@ 'use strict'

let templateGraphqlJson = blockedTemplates.graphqlJson
let blockingConfiguration
let defaultBlockingActionParameters
const responseBlockedSet = new WeakSet()
const specificBlockingTypes = {

@@ -26,4 +29,4 @@ GRAPHQL: 'graphql'

function getBlockWithRedirectData (rootSpan) {
let statusCode = blockingConfiguration.parameters.status_code
function getBlockWithRedirectData (actionParameters) {
let statusCode = actionParameters.status_code
if (!statusCode || statusCode < 300 || statusCode >= 400) {

@@ -33,9 +36,5 @@ statusCode = 303

const headers = {
Location: blockingConfiguration.parameters.location
Location: actionParameters.location
}
rootSpan.addTags({
'appsec.blocked': 'true'
})
return { headers, statusCode }

@@ -54,6 +53,5 @@ }

function getBlockWithContentData (req, specificType, rootSpan) {
function getBlockWithContentData (req, specificType, actionParameters) {
let type
let body
let statusCode

@@ -71,3 +69,3 @@ const specificBlockingType = specificType || detectedSpecificEndpoints[getSpecificKey(req.method, req.url)]

if (!blockingConfiguration || blockingConfiguration.parameters.type === 'auto') {
if (!actionParameters || actionParameters.type === 'auto') {
if (accept?.includes('text/html') && !accept.includes('application/json')) {

@@ -81,3 +79,3 @@ type = 'text/html; charset=utf-8'

} else {
if (blockingConfiguration.parameters.type === 'html') {
if (actionParameters.type === 'html') {
type = 'text/html; charset=utf-8'

@@ -92,7 +90,3 @@ body = templateHtml

if (blockingConfiguration?.type === 'block_request' && blockingConfiguration.parameters.status_code) {
statusCode = blockingConfiguration.parameters.status_code
} else {
statusCode = 403
}
const statusCode = actionParameters?.status_code || 403

@@ -104,18 +98,14 @@ const headers = {

rootSpan.addTags({
'appsec.blocked': 'true'
})
return { body, statusCode, headers }
}
// Builds the blocking response for a request. A redirect action (one that
// carries a `location`) takes precedence over returning a blocking body.
function getBlockingData (req, specificType, actionParameters) {
  if (actionParameters?.location) {
    return getBlockWithRedirectData(actionParameters)
  }
  return getBlockWithContentData(req, specificType, actionParameters)
}
function block (req, res, rootSpan, abortController, type) {
function block (req, res, rootSpan, abortController, actionParameters = defaultBlockingActionParameters) {
if (res.headersSent) {

@@ -126,9 +116,24 @@ log.warn('Cannot send blocking response when headers have already been sent')

const { body, headers, statusCode } = getBlockingData(req, type, rootSpan)
const { body, headers, statusCode } = getBlockingData(req, null, actionParameters)
rootSpan.addTags({
'appsec.blocked': 'true'
})
for (const headerName of res.getHeaderNames()) {
res.removeHeader(headerName)
}
res.writeHead(statusCode, headers).end(body)
responseBlockedSet.add(res)
abortController?.abort()
}
// Picks the blocking action out of a WAF result.
// waf only returns one action, but it prioritizes redirect over block
function getBlockingAction (actions) {
  if (!actions) return undefined
  return actions.redirect_request || actions.block_request
}
function setTemplates (config) {

@@ -154,6 +159,12 @@ if (config.appsec.blockedTemplateHtml) {

function updateBlockingConfiguration (newBlockingConfiguration) {
blockingConfiguration = newBlockingConfiguration
// Whether a blocking response has already been written for this `res`
// (tracked in the module-level responseBlockedSet WeakSet by block()).
function isBlocked (res) {
  return responseBlockedSet.has(res)
}
// Caches the parameters of the remote-config 'block' action (if present) so
// they can serve as the default when block() is invoked without explicit
// action parameters.
function setDefaultBlockingActionParameters (actions) {
  const isBlockAction = (action) => action.id === 'block'
  defaultBlockingActionParameters = actions?.find(isBlockAction)?.parameters
}
module.exports = {

@@ -164,4 +175,6 @@ addSpecificEndpoint,

getBlockingData,
getBlockingAction,
setTemplates,
updateBlockingConfiguration
isBlocked,
setDefaultBlockingActionParameters
}

@@ -20,3 +20,8 @@ 'use strict'

nextQueryParsed: dc.channel('apm:next:query-parsed'),
responseBody: dc.channel('datadog:express:response:json:start')
responseBody: dc.channel('datadog:express:response:json:start'),
responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'),
httpClientRequestStart: dc.channel('apm:http:client:request:start'),
responseSetHeader: dc.channel('datadog:http:server:response:set-header:start'),
setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start')
}
'use strict'
const { storage } = require('../../../datadog-core')
const { addSpecificEndpoint, specificBlockingTypes, getBlockingData } = require('./blocking')
const {
addSpecificEndpoint,
specificBlockingTypes,
getBlockingData,
getBlockingAction
} = require('./blocking')
const waf = require('./waf')

@@ -35,6 +40,8 @@ const addresses = require('./addresses')

const actions = waf.run({ ephemeral: { [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo } }, req)
if (actions?.includes('block')) {
const blockingAction = getBlockingAction(actions)
if (blockingAction) {
const requestData = graphqlRequestData.get(req)
if (requestData?.isInGraphqlRequest) {
requestData.blocked = true
requestData.wafAction = blockingAction
context?.abortController?.abort()

@@ -91,3 +98,3 @@ }

const blockingData = getBlockingData(req, specificBlockingTypes.GRAPHQL, rootSpan)
const blockingData = getBlockingData(req, specificBlockingTypes.GRAPHQL, requestData.wafAction)
abortData.statusCode = blockingData.statusCode

@@ -97,2 +104,4 @@ abortData.headers = blockingData.headers

rootSpan.setTag('appsec.blocked', 'true')
abortController?.abort()

@@ -99,0 +108,0 @@ }

'use strict'
module.exports = {
CODE_INJECTION_ANALYZER: require('./code-injection-analyzer'),
COMMAND_INJECTION_ANALYZER: require('./command-injection-analyzer'),

@@ -5,0 +6,0 @@ HARCODED_PASSWORD_ANALYZER: require('./hardcoded-password-analyzer'),

@@ -51,2 +51,3 @@ 'use strict'

column: match.location.column,
ident: match.location.ident,
data: match.ruleId

@@ -53,0 +54,0 @@ }))

@@ -12,4 +12,8 @@ 'use strict'

}
_getEvidence (value) {
return { value: `${value.ident}` }
}
}
module.exports = new HardcodedPasswordAnalyzer()

@@ -16,3 +16,3 @@ 'use strict'

function iterateObjectStrings (target, fn, levelKeys = [], depth = 20, visited = new Set()) {
if (target && typeof target === 'object') {
if (target !== null && typeof target === 'object') {
Object.keys(target).forEach((key) => {

@@ -19,0 +19,0 @@ const nextLevelKeys = [...levelKeys, key]

@@ -50,2 +50,4 @@ 'use strict'

_isExcluded (location) {
if (!location) return false
return EXCLUDED_LOCATIONS.some(excludedLocation => {

@@ -52,0 +54,0 @@ return location.path.includes(excludedLocation)

@@ -5,31 +5,5 @@ 'use strict'

const log = require('../../log')
const { calculateDDBasePath } = require('../../util')
const telemetryLog = dc.channel('datadog:telemetry:log')
const ddBasePath = calculateDDBasePath(__dirname)
const EOL = '\n'
const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
function sanitize (logEntry, stack) {
if (!stack) return logEntry
let stackLines = stack.split(EOL)
const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX))
const isDDCode = firstIndex > -1 && stackLines[firstIndex].includes(ddBasePath)
stackLines = stackLines
.filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath))
.map(line => line.replace(ddBasePath, ''))
logEntry.stack_trace = stackLines.join(EOL)
if (!isDDCode) {
logEntry.message = 'omitted'
}
return logEntry
}
function getTelemetryLog (data, level) {

@@ -46,14 +20,9 @@ try {

let logEntry = {
const logEntry = {
message,
level
}
if (data.stack) {
logEntry = sanitize(logEntry, data.stack)
if (logEntry.stack_trace === '') {
return
}
logEntry.stack_trace = data.stack
}
return logEntry

@@ -114,3 +83,4 @@ } catch (e) {

this.error(data)
return this.publish(data, 'ERROR')
// publish is done automatically by log.error()
return this
}

@@ -117,0 +87,0 @@ }

@@ -6,2 +6,3 @@ 'use strict'

const { calculateDDBasePath } = require('../../util')
const { getCallSiteList } = require('../stack_trace')
const pathLine = {

@@ -28,20 +29,2 @@ getFirstNonDDPathAndLine,

function getCallSiteInfo () {
const previousPrepareStackTrace = Error.prepareStackTrace
const previousStackTraceLimit = Error.stackTraceLimit
let callsiteList
Error.stackTraceLimit = 100
try {
Error.prepareStackTrace = function (_, callsites) {
callsiteList = callsites
}
const e = new Error()
e.stack
} finally {
Error.prepareStackTrace = previousPrepareStackTrace
Error.stackTraceLimit = previousStackTraceLimit
}
return callsiteList
}
function getFirstNonDDPathAndLineFromCallsites (callsites, externallyExcludedPaths) {

@@ -96,3 +79,3 @@ if (callsites) {

function getFirstNonDDPathAndLine (externallyExcludedPaths) {
return getFirstNonDDPathAndLineFromCallsites(getCallSiteInfo(), externallyExcludedPaths)
return getFirstNonDDPathAndLineFromCallsites(getCallSiteList(), externallyExcludedPaths)
}

@@ -99,0 +82,0 @@

@@ -5,2 +5,3 @@ 'use strict'

{ src: 'concat' },
{ src: 'join' },
{ src: 'parse' },

@@ -13,5 +14,8 @@ { src: 'plusOperator', operator: true },

{ src: 'substring' },
{ src: 'toLowerCase', dst: 'stringCase' },
{ src: 'toUpperCase', dst: 'stringCase' },
{ src: 'trim' },
{ src: 'trimEnd' },
{ src: 'trimStart', dst: 'trim' }
{ src: 'trimStart', dst: 'trim' },
{ src: 'eval', allowedWithoutCallee: true }
]

@@ -18,0 +22,0 @@

@@ -7,3 +7,3 @@ 'use strict'

function taintObject (iastContext, object, type, keyTainting, keyType) {
function taintObject (iastContext, object, type) {
let result = object

@@ -26,5 +26,2 @@ const transactionId = iastContext?.[IAST_TRANSACTION_ID]

result = tainted
} else if (keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = tainted
} else {

@@ -39,7 +36,2 @@ parent[key] = tainted

}
if (parent && keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = value
}
}

@@ -46,0 +38,0 @@ } catch (e) {

'use strict'
const dc = require('dc-polyfill')
const TaintedUtils = require('@datadog/native-iast-taint-tracking')

@@ -8,5 +9,11 @@ const { IAST_TRANSACTION_ID } = require('../iast-context')

const { isInfoAllowed } = require('../telemetry/verbosity')
const { getTaintTrackingImpl, getTaintTrackingNoop } = require('./taint-tracking-impl')
const {
getTaintTrackingImpl,
getTaintTrackingNoop,
lodashTaintTrackingHandler
} = require('./taint-tracking-impl')
const { taintObject } = require('./operations-taint-object')
const lodashOperationCh = dc.channel('datadog:lodash:operation')
function createTransaction (id, iastContext) {

@@ -96,2 +103,3 @@ if (id && iastContext) {

global._ddiast = getTaintTrackingImpl(telemetryVerbosity)
lodashOperationCh.subscribe(lodashTaintTrackingHandler)
}

@@ -101,2 +109,3 @@

global._ddiast = getTaintTrackingNoop()
lodashOperationCh.unsubscribe(lodashTaintTrackingHandler)
}

@@ -103,0 +112,0 @@

@@ -48,3 +48,3 @@ 'use strict'

({ req }) => {
if (req && req.body && typeof req.body === 'object') {
if (req && req.body !== null && typeof req.body === 'object') {
const iastContext = getIastContext(storage.getStore())

@@ -67,3 +67,3 @@ if (iastContext && iastContext.body !== req.body) {

({ req }) => {
if (req && req.params && typeof req.params === 'object') {
if (req && req.params !== null && typeof req.params === 'object') {
this._taintTrackingHandler(HTTP_REQUEST_PATH_PARAM, req, 'params')

@@ -100,8 +100,10 @@ }

const iastContext = getIastContext(storage.getStore())
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE, true, HTTP_REQUEST_COOKIE_NAME)
// Prevent tainting cookie names since it leads to taint literal string with same value.
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE)
}
taintHeaders (headers, iastContext) {
// Prevent tainting header names since it leads to taint literal string with same value.
this.execSource({
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE, true, HTTP_REQUEST_HEADER_NAME),
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE),
tags: REQ_HEADER_TAGS,

@@ -108,0 +110,0 @@ iastContext

@@ -30,3 +30,3 @@ 'use strict'

if (key && typeof key === 'object') {
if (key !== null && typeof key === 'object') {
shimmer.wrap(key, 'toString',

@@ -38,3 +38,3 @@ toString => this.getToStringWrap(toString, iastContext, KAFKA_MESSAGE_KEY))

if (value && typeof value === 'object') {
if (value !== null && typeof value === 'object') {
shimmer.wrap(value, 'toString',

@@ -41,0 +41,0 @@ toString => this.getToStringWrap(toString, iastContext, KAFKA_MESSAGE_VALUE))

@@ -13,2 +13,3 @@ 'use strict'

const mathRandomCallCh = dc.channel('datadog:random:call')
const evalCallCh = dc.channel('datadog:eval:call')

@@ -22,2 +23,4 @@ const JSON_VALUE = 'json.value'

concat: noop,
eval: noop,
join: noop,
parse: noop,

@@ -30,2 +33,3 @@ plusOperator: noop,

substring: noop,
stringCase: noop,
trim: noop,

@@ -119,2 +123,9 @@ trimEnd: noop

stringCase: getCsiFn(
(transactionId, res, target) => TaintedUtils.stringCase(transactionId, res, target),
getContext,
String.prototype.toLowerCase,
String.prototype.toUpperCase
),
trim: getCsiFn(

@@ -134,2 +145,11 @@ (transactionId, res, target) => TaintedUtils.trim(transactionId, res, target),

eval: function (res, fn, target, script) {
// eslint-disable-next-line no-eval
if (evalCallCh.hasSubscribers && fn === globalThis.eval) {
evalCallCh.publish({ script })
}
return res
},
parse: function (res, fn, target, json) {

@@ -156,2 +176,18 @@ if (fn === JSON.parse) {

return res
},
join: function (res, fn, target, separator) {
if (fn === Array.prototype.join) {
try {
const iastContext = getContext()
const transactionId = getTransactionId(iastContext)
if (transactionId) {
res = TaintedUtils.arrayJoin(transactionId, res, target, separator)
}
} catch (e) {
iastLog.error(e)
}
}
return res
}

@@ -185,5 +221,34 @@ }

const lodashFns = {
join: TaintedUtils.arrayJoin,
toLower: TaintedUtils.stringCase,
toUpper: TaintedUtils.stringCase,
trim: TaintedUtils.trim,
trimEnd: TaintedUtils.trimEnd,
trimStart: TaintedUtils.trim
}
function getLodashTaintedUtilFn (lodashFn) {
return lodashFns[lodashFn] || ((transactionId, result) => result)
}
function lodashTaintTrackingHandler (message) {
try {
if (!message.result) return
const context = getContextDefault()
const transactionId = getTransactionId(context)
if (transactionId) {
message.result = getLodashTaintedUtilFn(message.operation)(transactionId, message.result, ...message.arguments)
}
} catch (e) {
iastLog.error(`Error invoking CSI lodash ${message.operation}`)
.errorAndPublish(e)
}
}
module.exports = {
getTaintTrackingImpl,
getTaintTrackingNoop
getTaintTrackingNoop,
lodashTaintTrackingHandler
}

@@ -8,3 +8,5 @@ 'use strict'

const codeInjectionSensitiveAnalyzer = require('./sensitive-analyzers/code-injection-sensitive-analyzer')
const commandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer')
const hardcodedPasswordAnalyzer = require('./sensitive-analyzers/hardcoded-password-analyzer')
const headerSensitiveAnalyzer = require('./sensitive-analyzers/header-sensitive-analyzer')

@@ -26,2 +28,3 @@ const jsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer')

this._sensitiveAnalyzers = new Map()
this._sensitiveAnalyzers.set(vulnerabilities.CODE_INJECTION, codeInjectionSensitiveAnalyzer)
this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, commandSensitiveAnalyzer)

@@ -36,2 +39,5 @@ this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, jsonSensitiveAnalyzer)

})
this._sensitiveAnalyzers.set(vulnerabilities.HARDCODED_PASSWORD, (evidence) => {
return hardcodedPasswordAnalyzer(evidence, this._valuePattern)
})
}

@@ -57,3 +63,5 @@

const sensitiveRanges = sensitiveAnalyzer(evidence)
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
if (evidence.ranges || sensitiveRanges?.length) {
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
}
}

@@ -74,3 +82,3 @@ return null

let nextTainted = ranges.shift()
let nextTainted = ranges?.shift()
let nextSensitive = sensitive.shift()

@@ -77,0 +85,0 @@

// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)'
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)'
// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,}))'
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})'

@@ -6,0 +6,0 @@ module.exports = {

@@ -46,2 +46,4 @@ 'use strict'

if (evidence.value == null) return { valueParts }
if (typeof evidence.value === 'object' && evidence.rangesToApply) {

@@ -53,2 +55,6 @@ const { value, ranges } = stringifyWithRanges(evidence.value, evidence.rangesToApply)

if (!evidence.ranges) {
return { value: evidence.value }
}
evidence.ranges.forEach((range, rangeIndex) => {

@@ -70,8 +76,4 @@ if (fromIndex < range.start) {

formatEvidence (type, evidence, sourcesIndexes, sources) {
if (!evidence.ranges && !evidence.rangesToApply) {
if (typeof evidence.value === 'undefined') {
return undefined
} else {
return { value: evidence.value }
}
if (evidence.value === undefined) {
return undefined
}

@@ -78,0 +80,0 @@

module.exports = {
COMMAND_INJECTION: 'COMMAND_INJECTION',
CODE_INJECTION: 'CODE_INJECTION',
HARDCODED_PASSWORD: 'HARDCODED_PASSWORD',

@@ -4,0 +5,0 @@ HARDCODED_SECRET: 'HARDCODED_SECRET',

@@ -7,2 +7,3 @@ 'use strict'

const { IAST_ENABLED_TAG_KEY, IAST_JSON_TAG_KEY } = require('./tags')
const standalone = require('../standalone')

@@ -61,2 +62,5 @@ const VULNERABILITIES_KEY = 'vulnerabilities'

span.addTags(tags)
standalone.sample(span)
if (!rootSpan) span.finish()

@@ -63,0 +67,0 @@ }

@@ -15,3 +15,5 @@ 'use strict'

nextQueryParsed,
responseBody
responseBody,
responseWriteHead,
responseSetHeader
} = require('./channels')

@@ -26,6 +28,7 @@ const waf = require('./waf')

const { HTTP_CLIENT_IP } = require('../../../../ext/tags')
const { block, setTemplates } = require('./blocking')
const { isBlocked, block, setTemplates, getBlockingAction } = require('./blocking')
const { passportTrackEvent } = require('./passport')
const { storage } = require('../../../datadog-core')
const graphql = require('./graphql')
const rasp = require('./rasp')

@@ -42,2 +45,6 @@ let isEnabled = false

if (_config.appsec.rasp.enabled) {
rasp.enable(_config)
}
setTemplates(_config)

@@ -61,2 +68,4 @@

responseBody.subscribe(onResponseBody)
responseWriteHead.subscribe(onResponseWriteHead)
responseSetHeader.subscribe(onResponseSetHeader)

@@ -112,11 +121,4 @@ if (_config.appsec.eventTracking.enabled) {

function incomingHttpEndTranslator ({ req, res }) {
// TODO: this doesn't support headers sent with res.writeHead()
const responseHeaders = Object.assign({}, res.getHeaders())
delete responseHeaders['set-cookie']
const persistent = {}
const persistent = {
[addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + res.statusCode,
[addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders
}
// we need to keep this to support other body parsers

@@ -129,3 +131,3 @@ // TODO: no need to analyze it if it was already done by the body-parser hook

// TODO: temporary express instrumentation, will use express plugin later
if (req.params && typeof req.params === 'object') {
if (req.params !== null && typeof req.params === 'object') {
persistent[addresses.HTTP_INCOMING_PARAMS] = req.params

@@ -135,11 +137,13 @@ }

// we need to keep this to support other cookie parsers
if (req.cookies && typeof req.cookies === 'object') {
if (req.cookies !== null && typeof req.cookies === 'object') {
persistent[addresses.HTTP_INCOMING_COOKIES] = req.cookies
}
if (req.query && typeof req.query === 'object') {
if (req.query !== null && typeof req.query === 'object') {
persistent[addresses.HTTP_INCOMING_QUERY] = req.query
}
waf.run({ persistent }, req)
if (Object.keys(persistent).length) {
waf.run({ persistent }, req)
}

@@ -213,3 +217,3 @@ waf.disposeContext(req)

persistent: {
[addresses.HTTP_OUTGOING_BODY]: body
[addresses.HTTP_INCOMING_RESPONSE_BODY]: body
}

@@ -231,7 +235,46 @@ }, req)

const responseAnalyzedSet = new WeakSet()
function onResponseWriteHead ({ req, res, abortController, statusCode, responseHeaders }) {
// avoid "write after end" error
if (isBlocked(res)) {
abortController?.abort()
return
}
// avoid double waf call
if (responseAnalyzedSet.has(res)) {
return
}
const rootSpan = web.root(req)
if (!rootSpan) return
responseHeaders = Object.assign({}, responseHeaders)
delete responseHeaders['set-cookie']
const results = waf.run({
persistent: {
[addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + statusCode,
[addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders
}
}, req)
responseAnalyzedSet.add(res)
handleResults(results, req, res, rootSpan, abortController)
}
function onResponseSetHeader ({ res, abortController }) {
if (isBlocked(res)) {
abortController?.abort()
}
}
function handleResults (actions, req, res, rootSpan, abortController) {
if (!actions || !req || !res || !rootSpan || !abortController) return
if (actions.includes('block')) {
block(req, res, rootSpan, abortController)
const blockingAction = getBlockingAction(actions)
if (blockingAction) {
block(req, res, rootSpan, abortController, blockingAction)
}

@@ -248,2 +291,3 @@ }

graphql.disable()
rasp.disable()

@@ -262,2 +306,4 @@ remoteConfig.disableWafUpdate()

if (passportVerify.hasSubscribers) passportVerify.unsubscribe(onPassportVerify)
if (responseWriteHead.hasSubscribers) responseWriteHead.unsubscribe(onResponseWriteHead)
if (responseSetHeader.hasSubscribers) responseSetHeader.unsubscribe(onResponseSetHeader)
}

@@ -264,0 +310,0 @@

@@ -16,3 +16,3 @@ 'use strict'

if (tags && typeof tags === 'object') {
if (tags !== null && typeof tags === 'object') {
called = Object.entries(tags).some(([key, value]) => regexSdkEvent.test(key) && value === 'true')

@@ -19,0 +19,0 @@ }

@@ -9,2 +9,3 @@ 'use strict'

ASM_REQUEST_BLOCKING: 1n << 5n,
ASM_RESPONSE_BLOCKING: 1n << 6n,
ASM_USER_BLOCKING: 1n << 7n,

@@ -19,3 +20,4 @@ ASM_CUSTOM_RULES: 1n << 8n,

APM_TRACING_CUSTOM_TAGS: 1n << 15n,
APM_TRACING_ENABLED: 1n << 19n
APM_TRACING_ENABLED: 1n << 19n,
APM_TRACING_SAMPLE_RULES: 1n << 29n
}

@@ -11,3 +11,3 @@ 'use strict'

function enable (config) {
function enable (config, appsec) {
rc = new RemoteConfigManager(config)

@@ -19,2 +19,3 @@ rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_CUSTOM_TAGS, true)

rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_ENABLED, true)
rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_SAMPLE_RULES, true)

@@ -32,7 +33,7 @@ const activation = Activation.fromConfig(config)

rc.on('ASM_FEATURES', (action, rcConfig) => {
rc.setProductHandler('ASM_FEATURES', (action, rcConfig) => {
if (!rcConfig) return
if (activation === Activation.ONECLICK) {
enableOrDisableAppsec(action, rcConfig, config)
enableOrDisableAppsec(action, rcConfig, config, appsec)
}

@@ -47,3 +48,3 @@

function enableOrDisableAppsec (action, rcConfig, config) {
function enableOrDisableAppsec (action, rcConfig, config, appsec) {
if (typeof rcConfig.asm?.enabled === 'boolean') {

@@ -59,5 +60,5 @@ let shouldEnable

if (shouldEnable) {
require('..').enable(config)
appsec.enable(config)
} else {
require('..').disable()
appsec.disable()
}

@@ -78,2 +79,3 @@ }

rc.updateCapabilities(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true)

@@ -83,5 +85,6 @@ rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true)

rc.on('ASM_DATA', noop)
rc.on('ASM_DD', noop)
rc.on('ASM', noop)
// TODO: delete noop handlers and kPreUpdate and replace with batched handlers
rc.setProductHandler('ASM_DATA', noop)
rc.setProductHandler('ASM_DD', noop)
rc.setProductHandler('ASM', noop)

@@ -101,2 +104,3 @@ rc.on(RemoteConfigManager.kPreUpdate, RuleManager.updateWafFromRC)

rc.updateCapabilities(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false)

@@ -106,5 +110,5 @@ rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false)

rc.off('ASM_DATA', noop)
rc.off('ASM_DD', noop)
rc.off('ASM', noop)
rc.removeProductHandler('ASM_DATA')
rc.removeProductHandler('ASM_DD')
rc.removeProductHandler('ASM')

@@ -111,0 +115,0 @@ rc.off(RemoteConfigManager.kPreUpdate, RuleManager.updateWafFromRC)

@@ -18,2 +18,3 @@ 'use strict'

const kPreUpdate = Symbol('kPreUpdate')
const kSupportsAckCallback = Symbol('kSupportsAckCallback')

@@ -36,2 +37,5 @@ // There MUST NOT exist separate instances of RC clients in a tracer making separate ClientGetConfigsRequest

this._handlers = new Map()
const appliedConfigs = this.appliedConfigs = new Map()
this.scheduler = new Scheduler((cb) => this.poll(cb), pollInterval)

@@ -41,6 +45,15 @@

client: {
state: { // updated by `parseConfig()`
state: { // updated by `parseConfig()` and `poll()`
root_version: 1,
targets_version: 0,
config_states: [],
// Use getter so `apply_*` can be updated async and still affect the content of `config_states`
get config_states () {
return Array.from(appliedConfigs.values()).map((conf) => ({
id: conf.id,
version: conf.version,
product: conf.product,
apply_state: conf.apply_state,
apply_error: conf.apply_error
}))
},
has_error: false,

@@ -66,4 +79,2 @@ error: '',

}
this.appliedConfigs = new Map()
}

@@ -89,28 +100,20 @@

on (event, listener) {
super.on(event, listener)
setProductHandler (product, handler) {
this._handlers.set(product, handler)
this.updateProducts()
if (this.state.client.products.length) {
if (this.state.client.products.length === 1) {
this.scheduler.start()
}
return this
}
off (event, listener) {
super.off(event, listener)
removeProductHandler (product) {
this._handlers.delete(product)
this.updateProducts()
if (!this.state.client.products.length) {
if (this.state.client.products.length === 0) {
this.scheduler.stop()
}
return this
}
updateProducts () {
this.state.client.products = this.eventNames().filter(e => typeof e === 'string')
this.state.client.products = Array.from(this._handlers.keys())
}

@@ -128,3 +131,6 @@

method: 'POST',
path: '/v0.7/config'
path: '/v0.7/config',
headers: {
'Content-Type': 'application/json; charset=utf-8'
}
}

@@ -234,20 +240,7 @@

this.state.client.state.config_states = []
this.state.cached_target_files = []
for (const conf of this.appliedConfigs.values()) {
this.state.client.state.config_states.push({
id: conf.id,
version: conf.version,
product: conf.product,
apply_state: conf.apply_state,
apply_error: conf.apply_error
})
this.state.cached_target_files.push({
path: conf.path,
length: conf.length,
hashes: Object.entries(conf.hashes).map((entry) => ({ algorithm: entry[0], hash: entry[1] }))
})
}
this.state.cached_target_files = Array.from(this.appliedConfigs.values()).map((conf) => ({
path: conf.path,
length: conf.length,
hashes: Object.entries(conf.hashes).map((entry) => ({ algorithm: entry[0], hash: entry[1] }))
}))
}

@@ -261,17 +254,4 @@ }

// in case the item was already handled by kPreUpdate
if (item.apply_state === UNACKNOWLEDGED || action === 'unapply') {
try {
// TODO: do we want to pass old and new config ?
const hadListeners = this.emit(item.product, action, item.file, item.id)
this._callHandlerFor(action, item)
if (hadListeners) {
item.apply_state = ACKNOWLEDGED
}
} catch (err) {
item.apply_state = ERROR
item.apply_error = err.toString()
}
}
if (action === 'unapply') {

@@ -284,2 +264,45 @@ this.appliedConfigs.delete(item.path)

}
_callHandlerFor (action, item) {
// in case the item was already handled by kPreUpdate
if (item.apply_state !== UNACKNOWLEDGED && action !== 'unapply') return
const handler = this._handlers.get(item.product)
if (!handler) return
try {
if (supportsAckCallback(handler)) {
// If the handler accepts an `ack` callback, expect that to be called and set `apply_state` accordinly
// TODO: do we want to pass old and new config ?
handler(action, item.file, item.id, (err) => {
if (err) {
item.apply_state = ERROR
item.apply_error = err.toString()
} else if (item.apply_state !== ERROR) {
item.apply_state = ACKNOWLEDGED
}
})
} else {
// If the handler doesn't accept an `ack` callback, assume `apply_state` is `ACKNOWLEDGED`,
// unless it returns a promise, in which case we wait for the promise to be resolved or rejected.
// TODO: do we want to pass old and new config ?
const result = handler(action, item.file, item.id)
if (result instanceof Promise) {
result.then(
() => { item.apply_state = ACKNOWLEDGED },
(err) => {
item.apply_state = ERROR
item.apply_error = err.toString()
}
)
} else {
item.apply_state = ACKNOWLEDGED
}
}
} catch (err) {
item.apply_state = ERROR
item.apply_error = err.toString()
}
}
}

@@ -308,2 +331,20 @@

function supportsAckCallback (handler) {
if (kSupportsAckCallback in handler) return handler[kSupportsAckCallback]
const numOfArgs = handler.length
let result = false
if (numOfArgs >= 4) {
result = true
} else if (numOfArgs !== 0) {
const source = handler.toString()
result = source.slice(0, source.indexOf(')')).includes('...')
}
handler[kSupportsAckCallback] = result
return result
}
module.exports = RemoteConfigManager

@@ -10,2 +10,3 @@ 'use strict'

updateWafRequestsMetricTags,
updateRaspRequestsMetricTags,
incrementWafUpdatesMetric,

@@ -16,2 +17,4 @@ incrementWafRequestsMetric,

const zlib = require('zlib')
const { MANUAL_KEEP } = require('../../../../ext/tags')
const standalone = require('./standalone')

@@ -31,3 +34,3 @@ // default limiter, configurable with setRateLimit()

const REQUEST_HEADERS_MAP = mapHeaderAndTags([
const EVENT_HEADERS_MAP = mapHeaderAndTags([
...ipHeaderList,

@@ -38,4 +41,2 @@ 'forwarded',

'host',
'user-agent',
'accept',
'accept-encoding',

@@ -45,3 +46,3 @@ 'accept-language'

const IDENTIFICATION_HEADERS_MAP = mapHeaderAndTags([
const identificationHeaders = [
'x-amzn-trace-id',

@@ -55,2 +56,10 @@ 'cloudfront-viewer-ja3-fingerprint',

'akamai-user-risk'
]
// these request headers are always collected - it breaks the expected spec orders
const REQUEST_HEADERS_MAP = mapHeaderAndTags([
'content-type',
'user-agent',
'accept',
...identificationHeaders
], 'http.request.headers.')

@@ -96,3 +105,3 @@

metricsQueue.set('manual.keep', 'true')
metricsQueue.set(MANUAL_KEEP, 'true')

@@ -102,3 +111,3 @@ incrementWafInitMetric(wafVersion, rulesVersion)

function reportMetrics (metrics) {
function reportMetrics (metrics, raspRuleType) {
const store = storage.getStore()

@@ -111,4 +120,7 @@ const rootSpan = store?.req && web.root(store.req)

}
updateWafRequestsMetricTags(metrics, store.req)
if (raspRuleType) {
updateRaspRequestsMetricTags(metrics, store.req, raspRuleType)
} else {
updateWafRequestsMetricTags(metrics, store.req)
}
}

@@ -124,8 +136,10 @@

const newTags = filterHeaders(req.headers, REQUEST_HEADERS_MAP)
const newTags = {
'appsec.event': 'true'
}
newTags['appsec.event'] = 'true'
if (limiter.isAllowed()) {
newTags[MANUAL_KEEP] = 'true'
if (limiter.isAllowed()) {
newTags['manual.keep'] = 'true' // TODO: figure out how to keep appsec traces with sampling revamp
standalone.sample(rootSpan)
}

@@ -147,7 +161,2 @@

const ua = newTags['http.request.headers.user-agent']
if (ua) {
newTags['http.useragent'] = ua
}
newTags['network.client.ip'] = req.socket.remoteAddress

@@ -182,2 +191,4 @@

standalone.sample(rootSpan)
metricsQueue.clear()

@@ -195,12 +206,27 @@ }

if (metrics?.raspDuration) {
rootSpan.setTag('_dd.appsec.rasp.duration', metrics.raspDuration)
}
if (metrics?.raspDurationExt) {
rootSpan.setTag('_dd.appsec.rasp.duration_ext', metrics.raspDurationExt)
}
if (metrics?.raspEvalCount) {
rootSpan.setTag('_dd.appsec.rasp.rule.eval', metrics.raspEvalCount)
}
incrementWafRequestsMetric(req)
// collect some headers even when no attack is detected
rootSpan.addTags(filterHeaders(req.headers, IDENTIFICATION_HEADERS_MAP))
const mandatoryTags = filterHeaders(req.headers, REQUEST_HEADERS_MAP)
rootSpan.addTags(mandatoryTags)
if (!rootSpan.context()._tags['appsec.event']) return
const tags = rootSpan.context()._tags
if (!shouldCollectEventHeaders(tags)) return
const newTags = filterHeaders(res.getHeaders(), RESPONSE_HEADERS_MAP)
Object.assign(newTags, filterHeaders(req.headers, EVENT_HEADERS_MAP))
if (req.route && typeof req.route.path === 'string') {
if (tags['appsec.event'] === 'true' && typeof req.route?.path === 'string') {
newTags['http.endpoint'] = req.route.path

@@ -212,2 +238,16 @@ }

function shouldCollectEventHeaders (tags = {}) {
if (tags['appsec.event'] === 'true') {
return true
}
for (const tagName of Object.keys(tags)) {
if (tagName.startsWith('appsec.events.')) {
return true
}
}
return false
}
function setRateLimit (rateLimit) {

@@ -214,0 +254,0 @@ limiter = new Limiter(rateLimit)

@@ -6,2 +6,3 @@ 'use strict'

const { ACKNOWLEDGED, ERROR } = require('./remote_config/apply_states')
const blocking = require('./blocking')

@@ -25,5 +26,3 @@

if (defaultRules.actions) {
blocking.updateBlockingConfiguration(defaultRules.actions.find(action => action.id === 'block'))
}
blocking.setDefaultBlockingActionParameters(defaultRules?.actions)
}

@@ -73,3 +72,3 @@

} else {
if (file && file.rules && file.rules.length) {
if (file?.rules?.length) {
const { version, metadata, rules, processors, scanners } = file

@@ -84,26 +83,19 @@

} else if (product === 'ASM') {
let batchConfiguration = false
if (file && file.rules_override && file.rules_override.length) {
batchConfiguration = true
if (file?.rules_override?.length) {
newRulesOverride.set(id, file.rules_override)
}
if (file && file.exclusions && file.exclusions.length) {
batchConfiguration = true
if (file?.exclusions?.length) {
newExclusions.set(id, file.exclusions)
}
if (file && file.custom_rules && file.custom_rules.length) {
batchConfiguration = true
if (file?.custom_rules?.length) {
newCustomRules.set(id, file.custom_rules)
}
if (file && file.actions && file.actions.length) {
if (file?.actions?.length) {
newActions.set(id, file.actions)
}
// "actions" data is managed by tracer and not by waf
if (batchConfiguration) {
batch.add(item)
}
batch.add(item)
}

@@ -119,3 +111,5 @@ }

newExclusions.modified ||
newCustomRules.modified) {
newCustomRules.modified ||
newActions.modified
) {
const payload = newRuleset || {}

@@ -135,2 +129,5 @@

}
if (newActions.modified) {
payload.actions = concatArrays(newActions)
}

@@ -155,2 +152,7 @@ try {

}
if (newActions.modified) {
appliedActions = newActions
blocking.setDefaultBlockingActionParameters(concatArrays(newActions))
}
} catch (err) {

@@ -166,7 +168,2 @@ newApplyState = ERROR

}
if (newActions.modified) {
blocking.updateBlockingConfiguration(concatArrays(newActions).find(action => action.id === 'block'))
appliedActions = newActions
}
}

@@ -253,3 +250,2 @@

waf.destroy()
blocking.updateBlockingConfiguration(undefined)

@@ -264,2 +260,4 @@ defaultRules = undefined

appliedActions.clear()
blocking.setDefaultBlockingActionParameters(undefined)
}

@@ -266,0 +264,0 @@

@@ -7,2 +7,3 @@ 'use strict'

const { setUserTags } = require('./set_user')
const standalone = require('../standalone')

@@ -77,2 +78,4 @@ function trackUserLoginSuccessEvent (tracer, user, metadata) {

rootSpan.addTags(tags)
standalone.sample(rootSpan)
}

@@ -79,0 +82,0 @@

@@ -6,3 +6,3 @@ 'use strict'

const { getRootSpan } = require('./utils')
const { block } = require('../blocking')
const { block, getBlockingAction } = require('../blocking')
const { storage } = require('../../../../datadog-core')

@@ -14,6 +14,3 @@ const { setUserTags } = require('./set_user')

const actions = waf.run({ persistent: { [USER_ID]: user.id } })
if (!actions) return false
return actions.includes('block')
return !!getBlockingAction(actions)
}

@@ -20,0 +17,0 @@

@@ -34,3 +34,6 @@ 'use strict'

duration: 0,
durationExt: 0
durationExt: 0,
raspDuration: 0,
raspDurationExt: 0,
raspEvalCount: 0
}

@@ -80,2 +83,24 @@ }

/**
 * Record telemetry for a single RASP (Runtime App Self-Protection) WAF run.
 * Request-scoped duration metrics are always accumulated on the request's
 * store; counter metrics are only emitted when telemetry is enabled.
 *
 * @param {Object} metrics - WAF run metrics (wafVersion, wafTimeout, ruleTriggered, ...)
 * @param {Object} req - incoming request the run belongs to
 * @param {string} raspRuleType - RASP rule type, used as a metric tag
 */
function updateRaspRequestsMetricTags (metrics, req, raspRuleType) {
  if (!req) return

  const requestStore = getStore(req)

  // it does not depend on whether telemetry is enabled or not
  addRaspRequestMetrics(requestStore, metrics)

  if (!enabled) return

  const tags = { rule_type: raspRuleType, waf_version: metrics.wafVersion }

  appsecMetrics.count('rasp.rule.eval', tags).inc(1)

  if (metrics.wafTimeout) {
    appsecMetrics.count('rasp.timeout', tags).inc(1)
  }

  if (metrics.ruleTriggered) {
    appsecMetrics.count('rasp.rule.match', tags).inc(1)
  }
}
function updateWafRequestsMetricTags (metrics, req) {

@@ -146,2 +171,8 @@ if (!req) return

/**
 * Accumulate RASP timing metrics onto a request-scoped telemetry store.
 * Missing duration values count as 0; the eval counter always advances.
 *
 * @param {Object} store - request store holding the DD_TELEMETRY_REQUEST_METRICS entry
 * @param {Object} metrics - WAF run metrics; duration/durationExt may be absent
 */
function addRaspRequestMetrics (store, { duration, durationExt }) {
  const requestMetrics = store[DD_TELEMETRY_REQUEST_METRICS]
  requestMetrics.raspDuration += duration || 0
  requestMetrics.raspDurationExt += durationExt || 0
  requestMetrics.raspEvalCount++
}
function getRequestMetrics (req) {

@@ -159,2 +190,3 @@ if (req) {

updateWafRequestsMetricTags,
updateRaspRequestsMetricTags,
incrementWafInitMetric,

@@ -161,0 +193,0 @@ incrementWafUpdatesMetric,

@@ -49,3 +49,3 @@ 'use strict'

function run (data, req) {
function run (data, req, raspRuleType) {
if (!req) {

@@ -63,3 +63,3 @@ const store = storage.getStore()

return wafContext.run(data)
return wafContext.run(data, raspRuleType)
}

@@ -66,0 +66,0 @@

@@ -6,2 +6,3 @@ 'use strict'

const addresses = require('../addresses')
const { getBlockingAction } = require('../blocking')

@@ -22,3 +23,8 @@ // TODO: remove once ephemeral addresses are implemented

run ({ persistent, ephemeral }) {
run ({ persistent, ephemeral }, raspRuleType) {
if (this.ddwafContext.disposed) {
log.warn('Calling run on a disposed context')
return
}
const payload = {}

@@ -29,3 +35,3 @@ let payloadHasData = false

if (persistent && typeof persistent === 'object') {
if (persistent !== null && typeof persistent === 'object') {
// TODO: possible optimization: only send params that haven't already been sent with same value to this wafContext

@@ -66,4 +72,5 @@ for (const key of Object.keys(persistent)) {

const ruleTriggered = !!result.events?.length
const blockTriggered = result.actions?.includes('block')
const blockTriggered = !!getBlockingAction(result.actions)
Reporter.reportMetrics({

@@ -77,3 +84,3 @@ duration: result.totalRuntime / 1e3,

wafTimeout: result.timeout
})
}, raspRuleType)

@@ -80,0 +87,0 @@ if (ruleTriggered) {

const request = require('../../exporters/common/request')
const id = require('../../id')
const log = require('../../log')
const {
incrementCountMetric,
distributionMetric,
TELEMETRY_KNOWN_TESTS,
TELEMETRY_KNOWN_TESTS_MS,
TELEMETRY_KNOWN_TESTS_ERRORS,
TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS,
TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES
} = require('../../ci-visibility/telemetry')
/**
 * Count the total number of individual tests in a known-tests payload.
 * The payload nests as: module -> suite -> { key: array of test names }.
 *
 * @param {Object} knownTests - known tests keyed by module, then suite
 * @returns {number} total number of tests across all modules and suites
 */
function getNumTests (knownTests) {
  return Object.values(knownTests).reduce((moduleTotal, testModule) =>
    moduleTotal + Object.values(testModule).reduce((suiteTotal, testSuite) =>
      suiteTotal + Object.values(testSuite).reduce((listTotal, testList) =>
        listTotal + testList.length, 0), 0), 0)
}
function getKnownTests ({

@@ -67,4 +92,10 @@ url,

request(data, options, (err, res) => {
incrementCountMetric(TELEMETRY_KNOWN_TESTS)
const startTime = Date.now()
request(data, options, (err, res, statusCode) => {
distributionMetric(TELEMETRY_KNOWN_TESTS_MS, {}, Date.now() - startTime)
if (err) {
incrementCountMetric(TELEMETRY_KNOWN_TESTS_ERRORS, { statusCode })
done(err)

@@ -74,2 +105,10 @@ } else {

const { data: { attributes: { tests: knownTests } } } = JSON.parse(res)
const numTests = getNumTests(knownTests)
incrementCountMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS, {}, numTests)
distributionMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES, {}, res.length)
log.debug(() => `Number of received known tests: ${numTests}`)
done(null, knownTests)

@@ -76,0 +115,0 @@ } catch (err) {

@@ -15,4 +15,3 @@ 'use strict'

TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED,
getErrorTypeFromStatusCode
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED
} = require('../../../ci-visibility/telemetry')

@@ -60,6 +59,5 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(
TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
{ endpoint: 'code_coverage', errorType }
{ endpoint: 'code_coverage', statusCode }
)

@@ -66,0 +64,0 @@ incrementCountMetric(

@@ -15,4 +15,3 @@ 'use strict'

TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED,
getErrorTypeFromStatusCode
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED
} = require('../../../ci-visibility/telemetry')

@@ -61,6 +60,5 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(
TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
{ endpoint: 'test_cycle', errorType }
{ endpoint: 'test_cycle', statusCode }
)

@@ -79,4 +77,8 @@ incrementCountMetric(

}
setMetadataTags (tags) {
this._encoder.setMetadataTags(tags)
}
}
module.exports = Writer

@@ -193,3 +193,4 @@ 'use strict'

earlyFlakeDetectionNumRetries,
earlyFlakeDetectionFaultyThreshold
earlyFlakeDetectionFaultyThreshold,
isFlakyTestRetriesEnabled
} = remoteConfiguration

@@ -203,3 +204,5 @@ return {

earlyFlakeDetectionNumRetries,
earlyFlakeDetectionFaultyThreshold
earlyFlakeDetectionFaultyThreshold,
isFlakyTestRetriesEnabled: isFlakyTestRetriesEnabled && this._config.isFlakyTestRetriesEnabled,
flakyTestRetriesCount: this._config.flakyTestRetriesCount
}

@@ -293,4 +296,17 @@ }

}
// By the time setMetadataTags is called, the agent info request might not have finished
setMetadataTags (tags) {
if (this._writer?.setMetadataTags) {
this._writer.setMetadataTags(tags)
} else {
this._canUseCiVisProtocolPromise.then(() => {
if (this._writer?.setMetadataTags) {
this._writer.setMetadataTags(tags)
}
})
}
}
}
module.exports = CiVisibilityExporter

@@ -14,3 +14,4 @@ const fs = require('fs')

isShallowRepository,
unshallowRepository
unshallowRepository,
isGitAvailable
} = require('../../../plugins/util/git')

@@ -28,4 +29,3 @@

TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS,
TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES
} = require('../../../ci-visibility/telemetry')

@@ -97,4 +97,3 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { statusCode })
const error = new Error(`Error fetching commits to exclude: ${err.message}`)

@@ -184,4 +183,3 @@ return callback(error)

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, { statusCode })
const error = new Error(`Could not upload packfiles: status code ${statusCode}: ${err.message}`)

@@ -252,2 +250,5 @@ return callback(error, uploadSize)

function sendGitMetadata (url, { isEvpProxy, evpProxyPrefix }, configRepositoryUrl, callback) {
if (!isGitAvailable()) {
return callback(new Error('Git is not available'))
}
let repositoryUrl = configRepositoryUrl

@@ -254,0 +255,0 @@ if (!repositoryUrl) {

@@ -11,4 +11,3 @@ const request = require('../../exporters/common/request')

TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS,
TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES
} = require('../../ci-visibility/telemetry')

@@ -87,4 +86,3 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, { statusCode })
done(err)

@@ -91,0 +89,0 @@ } else {

@@ -10,4 +10,3 @@ const request = require('../../exporters/common/request')

TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS,
TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE,
getErrorTypeFromStatusCode
TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE
} = require('../telemetry')

@@ -85,4 +84,3 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, { statusCode })
done(err)

@@ -98,3 +96,4 @@ } else {

require_git: requireGit,
early_flake_detection: earlyFlakeDetectionConfig
early_flake_detection: earlyFlakeDetectionConfig,
flaky_test_retries_enabled: isFlakyTestRetriesEnabled
}

@@ -113,3 +112,4 @@ }

earlyFlakeDetectionFaultyThreshold:
earlyFlakeDetectionConfig?.faulty_session_threshold ?? DEFAULT_EARLY_FLAKE_DETECTION_ERROR_THRESHOLD
earlyFlakeDetectionConfig?.faulty_session_threshold ?? DEFAULT_EARLY_FLAKE_DETECTION_ERROR_THRESHOLD,
isFlakyTestRetriesEnabled
}

@@ -116,0 +116,0 @@

@@ -13,3 +13,6 @@ const telemetryMetrics = require('../telemetry/metrics')

hasCodeOwners: 'has_code_owners',
isUnsupportedCIProvider: 'is_unsupported_ci'
isUnsupportedCIProvider: 'is_unsupported_ci',
isNew: 'is_new',
isRum: 'is_rum',
browserDriver: 'browser_driver'
}

@@ -21,2 +24,10 @@

return Object.keys(tagsDictionary).reduce((acc, tagKey) => {
if (tagKey === 'statusCode') {
const statusCode = tagsDictionary[tagKey]
if (isStatusCode400(statusCode)) {
acc.push(`status_code:${statusCode}`)
}
acc.push(`error_type:${getErrorTypeFromStatusCode(statusCode)}`)
return acc
}
const formattedTagKey = formattedTags[tagKey] || tagKey

@@ -41,2 +52,3 @@ if (tagsDictionary[tagKey] === true) {

// CI Visibility telemetry events
const TELEMETRY_TEST_SESSION = 'test_session'
const TELEMETRY_EVENT_CREATED = 'event_created'

@@ -80,3 +92,13 @@ const TELEMETRY_EVENT_FINISHED = 'event_finished'

const TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES = 'itr_skippable_tests.response_bytes'
// early flake detection
const TELEMETRY_KNOWN_TESTS = 'early_flake_detection.request'
const TELEMETRY_KNOWN_TESTS_MS = 'early_flake_detection.request_ms'
const TELEMETRY_KNOWN_TESTS_ERRORS = 'early_flake_detection.request_errors'
const TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS = 'early_flake_detection.response_tests'
const TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES = 'early_flake_detection.response_bytes'
// True when the status code is an HTTP client error (the 4xx range).
function isStatusCode400 (statusCode) {
  const isAtLeastBadRequest = statusCode >= 400
  const isBelowServerError = statusCode < 500
  return isAtLeastBadRequest && isBelowServerError
}
function getErrorTypeFromStatusCode (statusCode) {

@@ -95,2 +117,3 @@ if (statusCode >= 400 && statusCode < 500) {

distributionMetric,
TELEMETRY_TEST_SESSION,
TELEMETRY_EVENT_CREATED,

@@ -134,3 +157,7 @@ TELEMETRY_EVENT_FINISHED,

TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_KNOWN_TESTS,
TELEMETRY_KNOWN_TESTS_MS,
TELEMETRY_KNOWN_TESTS_ERRORS,
TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS,
TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES
}

@@ -5,3 +5,3 @@ 'use strict'

const os = require('os')
const uuid = require('crypto-randomuuid')
const uuid = require('crypto-randomuuid') // we need to keep the old uuid dep because of cypress
const URL = require('url').URL

@@ -19,5 +19,116 @@ const log = require('./log')

const { updateConfig } = require('./telemetry')
const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless')
const telemetryMetrics = require('./telemetry/metrics')
const { getIsGCPFunction, getIsAzureFunction } = require('./serverless')
const { ORIGIN_KEY } = require('./constants')
const { appendRules } = require('./payload-tagging/config')
// Telemetry namespace used for tracer configuration metrics.
const tracerMetrics = telemetryMetrics.manager.namespace('tracers')

// Registry of counters created for OTEL/DD env-var conflict events,
// keyed by event name, then otel tag, then dd tag (see getCounter).
const telemetryCounters = {
  'otel.env.hiding': {},
  'otel.env.invalid': {}
}
/**
 * Return the telemetry counter for an OTEL/DD env-var conflict event,
 * tagged with the lower-cased variable names involved.
 *
 * Counters are memoized per (event, otelVar, ddVar) in telemetryCounters;
 * previously the cache was populated but never consulted, so every call
 * created a fresh counter instead of reusing the existing one.
 *
 * @param {string} event - 'otel.env.hiding' or 'otel.env.invalid'
 * @param {string} [ddVar] - Datadog environment variable name
 * @param {string} [otelVar] - OpenTelemetry environment variable name
 * @returns {Object} counter object exposing inc()
 */
function getCounter (event, ddVar, otelVar) {
  const counters = telemetryCounters[event]
  const tags = []
  const ddVarPrefix = 'config_datadog:'
  const otelVarPrefix = 'config_opentelemetry:'
  if (ddVar) {
    ddVar = ddVarPrefix + ddVar.toLowerCase()
    tags.push(ddVar)
  }
  if (otelVar) {
    otelVar = otelVarPrefix + otelVar.toLowerCase()
    tags.push(otelVar)
  }

  if (!(otelVar in counters)) counters[otelVar] = {}
  // Reuse a previously created counter for this pair when available.
  if (!(ddVar in counters[otelVar])) {
    counters[otelVar][ddVar] = tracerMetrics.count(event, tags)
  }
  return counters[otelVar][ddVar]
}
// Maps each supported OpenTelemetry environment variable to the Datadog
// environment variable that hides it when both are set (reported via the
// 'otel.env.hiding' telemetry event). `undefined` means the OTEL variable
// has no Datadog equivalent.
const otelDdEnvMapping = {
  OTEL_LOG_LEVEL: 'DD_TRACE_LOG_LEVEL',
  OTEL_PROPAGATORS: 'DD_TRACE_PROPAGATION_STYLE',
  OTEL_SERVICE_NAME: 'DD_SERVICE',
  OTEL_TRACES_SAMPLER: 'DD_TRACE_SAMPLE_RATE',
  OTEL_TRACES_SAMPLER_ARG: 'DD_TRACE_SAMPLE_RATE',
  OTEL_TRACES_EXPORTER: 'DD_TRACE_ENABLED',
  OTEL_METRICS_EXPORTER: 'DD_RUNTIME_METRICS_ENABLED',
  OTEL_RESOURCE_ATTRIBUTES: 'DD_TAGS',
  OTEL_SDK_DISABLED: 'DD_TRACE_OTEL_ENABLED',
  OTEL_LOGS_EXPORTER: undefined
}
// Propagation style values accepted from OTEL_PROPAGATORS.
const VALID_PROPAGATION_STYLES = new Set(['datadog', 'tracecontext', 'b3', 'b3 single header', 'none'])
// Log level values accepted from OTEL_LOG_LEVEL.
const VALID_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error'])
/**
 * Translate an OTEL_TRACES_SAMPLER value (plus the optional
 * OTEL_TRACES_SAMPLER_ARG) into the equivalent DD_TRACE_SAMPLE_RATE value.
 *
 * @param {string} otelTracesSampler - OTEL sampler name
 * @param {string} [otelTracesSamplerArg] - ratio used by *_traceidratio samplers
 * @returns {string|undefined} Datadog sample rate, or undefined for unknown samplers
 */
function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) {
  // A switch avoids the previous object-literal lookup, which also matched
  // inherited Object.prototype keys (e.g. 'toString' resolved to a function
  // and therefore passed the `!== undefined` validity check upstream).
  switch (otelTracesSampler) {
    case 'always_on':
    case 'parentbased_always_on':
      return '1.0'
    case 'always_off':
    case 'parentbased_always_off':
      return '0.0'
    case 'traceidratio':
    case 'parentbased_traceidratio':
      return otelTracesSamplerArg
    default:
      return undefined
  }
}
/**
 * Warn and bump the invalid-config telemetry counter for each propagation
 * style in OTEL_PROPAGATORS that dd-trace does not support. Only runs when
 * no Datadog propagation env var takes precedence over OTEL_PROPAGATORS.
 *
 * @param {string[]} propagators - resolved propagation style names
 */
function validateOtelPropagators (propagators) {
  if (!process.env.PROPAGATION_STYLE_EXTRACT &&
    !process.env.PROPAGATION_STYLE_INJECT &&
    !process.env.DD_TRACE_PROPAGATION_STYLE &&
    process.env.OTEL_PROPAGATORS) {
    // Must iterate values with for...of: the previous for...in walked the
    // array's indexes ('0', '1', ...), so the Set lookup never saw an
    // actual style name and every configured style was flagged invalid.
    for (const style of propagators) {
      if (!VALID_PROPAGATION_STYLES.has(style)) {
        log.warn('unexpected value for OTEL_PROPAGATORS environment variable')
        getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc()
      }
    }
  }
}
/**
 * Check that the current process.env value of a supported OTEL_* variable
 * is well-formed for that variable. Unknown variables are never valid.
 *
 * @param {string} envVar - OTEL environment variable name
 * @returns {boolean} true when the value is acceptable
 */
function validateEnvVarType (envVar) {
  const value = process.env[envVar]
  if (envVar === 'OTEL_LOG_LEVEL') {
    return VALID_LOG_LEVELS.has(value)
  }
  if (envVar === 'OTEL_PROPAGATORS' || envVar === 'OTEL_RESOURCE_ATTRIBUTES' || envVar === 'OTEL_SERVICE_NAME') {
    return typeof value === 'string'
  }
  if (envVar === 'OTEL_TRACES_SAMPLER') {
    return getFromOtelSamplerMap(value, process.env.OTEL_TRACES_SAMPLER_ARG) !== undefined
  }
  if (envVar === 'OTEL_TRACES_SAMPLER_ARG') {
    return !isNaN(parseFloat(value))
  }
  if (envVar === 'OTEL_SDK_DISABLED') {
    const lowered = value.toLowerCase()
    return lowered === 'true' || lowered === 'false'
  }
  if (envVar === 'OTEL_TRACES_EXPORTER' || envVar === 'OTEL_METRICS_EXPORTER' || envVar === 'OTEL_LOGS_EXPORTER') {
    return value.toLowerCase() === 'none'
  }
  return false
}
/**
 * Inspect the environment for OTEL/DD configuration conflicts: warn when a
 * Datadog variable hides its OpenTelemetry counterpart, and warn when a set
 * OTEL variable has an unexpected value. Each finding also bumps the
 * corresponding telemetry counter.
 */
function checkIfBothOtelAndDdEnvVarSet () {
  for (const otelEnvVar of Object.keys(otelDdEnvMapping)) {
    const ddEnvVar = otelDdEnvMapping[otelEnvVar]
    const otelValue = process.env[otelEnvVar]

    if (ddEnvVar && process.env[ddEnvVar] && otelValue) {
      log.warn(`both ${ddEnvVar} and ${otelEnvVar} environment variables are set`)
      getCounter('otel.env.hiding', ddEnvVar, otelEnvVar).inc()
    }

    if (otelValue && !validateEnvVarType(otelEnvVar)) {
      log.warn(`unexpected value for ${otelEnvVar} environment variable`)
      getCounter('otel.env.invalid', ddEnvVar, otelEnvVar).inc()
    }
  }
}
const fromEntries = Object.fromEntries || (entries =>

@@ -29,5 +140,5 @@ entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {}))

// eslint-disable-next-line max-len
const defaultWafObfuscatorKeyRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization'
const defaultWafObfuscatorKeyRegex = '(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt'
// eslint-disable-next-line max-len
const defaultWafObfuscatorValueRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}'
const defaultWafObfuscatorValueRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}'
const runtimeId = uuid()

@@ -69,2 +180,17 @@

/**
 * Given a string of comma-separated paths, return the array of paths.
 * If a blank path is provided a null is returned to signal that the feature is disabled.
 * An empty array means the feature is enabled but that no rules need to be applied.
 *
 * @param {string} input
 * @returns {[string]|null}
 */
function splitJSONPathRules (input) {
  if (!input) {
    return null
  }
  if (Array.isArray(input)) {
    return input
  }
  return input === 'all' ? [] : input.split(',')
}
// Shallow clone with property name remapping

@@ -80,5 +206,5 @@ function remapify (input, mappings) {

function propagationStyle (key, option, defaultValue) {
function propagationStyle (key, option) {
// Extract by key if in object-form value
if (typeof option === 'object' && !Array.isArray(option)) {
if (option !== null && typeof option === 'object' && !Array.isArray(option)) {
option = option[key]

@@ -91,3 +217,3 @@ }

// If it's not an array but not undefined there's something wrong with the input
if (typeof option !== 'undefined') {
if (option !== undefined) {
log.warn('Unexpected input for config.tracePropagationStyle')

@@ -98,4 +224,5 @@ }

const envKey = `DD_TRACE_PROPAGATION_STYLE_${key.toUpperCase()}`
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE)
if (typeof envVar !== 'undefined') {
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE, process.env.OTEL_PROPAGATORS)
if (envVar !== undefined) {
return envVar.split(',')

@@ -105,44 +232,33 @@ .filter(v => v !== '')

}
}
return defaultValue
/**
 * Convert span sampling rules from their snake_case wire format to the
 * camelCase keys used internally.
 *
 * @param {Array<Object>|null|undefined} rules - raw rules, possibly absent
 * @returns {Array<Object>|null|undefined} remapped rules, or the falsy input unchanged
 */
function reformatSpanSamplingRules (rules) {
  if (!rules) return rules
  const keyMapping = {
    sample_rate: 'sampleRate',
    max_per_second: 'maxPerSecond'
  }
  return rules.map(rule => remapify(rule, keyMapping))
}
class Config {
constructor (options) {
options = options || {}
options = this.options = {
constructor (options = {}) {
options = {
...options,
appsec: options.appsec != null ? options.appsec : options.experimental?.appsec,
iastOptions: options.experimental?.iast
iast: options.iast != null ? options.iast : options.experimental?.iast
}
// Configure the logger first so it can be used to warn about other configs
this.debug = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
false
))
this.logger = options.logger
this.logLevel = coalesce(
options.logLevel,
process.env.DD_TRACE_LOG_LEVEL,
'debug'
)
const logConfig = log.getConfig()
this.debug = logConfig.enabled
this.logger = coalesce(options.logger, logConfig.logger)
this.logLevel = coalesce(options.logLevel, logConfig.logLevel)
log.use(this.logger)
log.toggle(this.debug, this.logLevel, this)
log.toggle(this.debug, this.logLevel)
const DD_TRACE_MEMCACHED_COMMAND_ENABLED = coalesce(
process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED,
false
)
checkIfBothOtelAndDdEnvVarSet()
const DD_SERVICE_MAPPING = coalesce(
options.serviceMapping,
process.env.DD_SERVICE_MAPPING
? fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
)
: {}
)
const DD_API_KEY = coalesce(

@@ -153,13 +269,2 @@ process.env.DATADOG_API_KEY,

// TODO: Remove the experimental env vars as a major?
const DD_TRACE_B3_ENABLED = coalesce(
options.experimental && options.experimental.b3,
process.env.DD_TRACE_EXPERIMENTAL_B3_ENABLED,
false
)
const defaultPropagationStyle = ['datadog', 'tracecontext']
if (isTrue(DD_TRACE_B3_ENABLED)) {
defaultPropagationStyle.push('b3')
defaultPropagationStyle.push('b3 single header')
}
if (process.env.DD_TRACE_PROPAGATION_STYLE && (

@@ -175,17 +280,10 @@ process.env.DD_TRACE_PROPAGATION_STYLE_INJECT ||

}
const DD_TRACE_PROPAGATION_STYLE_INJECT = propagationStyle(
const PROPAGATION_STYLE_INJECT = propagationStyle(
'inject',
options.tracePropagationStyle,
defaultPropagationStyle
this._getDefaultPropagationStyle(options)
)
const DD_TRACE_PROPAGATION_STYLE_EXTRACT = propagationStyle(
'extract',
options.tracePropagationStyle,
defaultPropagationStyle
)
const DD_TRACE_PROPAGATION_EXTRACT_FIRST = coalesce(
process.env.DD_TRACE_PROPAGATION_EXTRACT_FIRST,
false
)
validateOtelPropagators(PROPAGATION_STYLE_INJECT)
if (typeof options.appsec === 'boolean') {

@@ -199,29 +297,2 @@ options.appsec = {

const DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = coalesce(
maybeFile(options.appsec.blockedTemplateGraphql),
maybeFile(process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON)
)
const DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = coalesce(
options.appsec.eventTracking && options.appsec.eventTracking.mode,
process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING,
'safe'
).toLowerCase()
const DD_API_SECURITY_ENABLED = coalesce(
options.appsec?.apiSecurity?.enabled,
process.env.DD_API_SECURITY_ENABLED && isTrue(process.env.DD_API_SECURITY_ENABLED),
process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED && isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED),
true
)
const DD_API_SECURITY_REQUEST_SAMPLE_RATE = coalesce(
options.appsec?.apiSecurity?.requestSampling,
parseFloat(process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE),
0.1
)
// 0: disabled, 1: logging, 2: garbage collection + logging
const DD_TRACE_SPAN_LEAK_DEBUG = coalesce(
process.env.DD_TRACE_SPAN_LEAK_DEBUG,
0
)
const DD_INSTRUMENTATION_INSTALL_ID = coalesce(

@@ -240,51 +311,26 @@ process.env.DD_INSTRUMENTATION_INSTALL_ID,

const sampler = {
rules: coalesce(
options.samplingRules,
safeJsonParse(process.env.DD_TRACE_SAMPLING_RULES),
[]
).map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate'
})
}),
spanSamplingRules: coalesce(
options.spanSamplingRules,
safeJsonParse(maybeFile(process.env.DD_SPAN_SAMPLING_RULES_FILE)),
safeJsonParse(process.env.DD_SPAN_SAMPLING_RULES),
[]
).map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate',
max_per_second: 'maxPerSecond'
})
})
}
const DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = splitJSONPathRules(
coalesce(
process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING,
options.cloudPayloadTagging?.request,
''
))
const DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = splitJSONPathRules(
coalesce(
process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING,
options.cloudPayloadTagging?.response,
''
))
const DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = coalesce(
process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH,
options.cloudPayloadTagging?.maxDepth,
10
)
// TODO: refactor
this.apiKey = DD_API_KEY
this.serviceMapping = DD_SERVICE_MAPPING
this.tracePropagationStyle = {
inject: DD_TRACE_PROPAGATION_STYLE_INJECT,
extract: DD_TRACE_PROPAGATION_STYLE_EXTRACT
}
this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST)
this.sampler = sampler
this.appsec = {
blockedTemplateGraphql: DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,
eventTracking: {
enabled: ['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING),
mode: DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING
},
apiSecurity: {
enabled: DD_API_SECURITY_ENABLED,
// Coerce value between 0 and 1
requestSampling: Math.min(1, Math.max(0, DD_API_SECURITY_REQUEST_SAMPLE_RATE))
}
}
// Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED)
this.isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG)
// sent in telemetry event app-started
this.installSignature = {

@@ -296,2 +342,11 @@ id: DD_INSTRUMENTATION_INSTALL_ID,

this.cloudPayloadTagging = {
requestsEnabled: !!DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING,
responsesEnabled: !!DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING,
maxDepth: DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH,
rules: appendRules(
DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING
)
}
this._applyDefaults()

@@ -364,7 +419,22 @@ this._applyEnvironment()

_getDefaultPropagationStyle (options) {
// TODO: Remove the experimental env vars as a major?
const DD_TRACE_B3_ENABLED = coalesce(
options.experimental && options.experimental.b3,
process.env.DD_TRACE_EXPERIMENTAL_B3_ENABLED,
false
)
const defaultPropagationStyle = ['datadog', 'tracecontext']
if (isTrue(DD_TRACE_B3_ENABLED)) {
defaultPropagationStyle.push('b3')
defaultPropagationStyle.push('b3 single header')
}
return defaultPropagationStyle
}
_isInServerlessEnvironment () {
const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
const isGCPFunction = getIsGCPFunction()
const isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
return inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan
const isAzureFunction = getIsAzureFunction()
return inAWSLambda || isGCPFunction || isAzureFunction
}

@@ -388,11 +458,22 @@

const defaults = this._defaults = {}
const defaults = setHiddenProperty(this, '_defaults', {})
this._setValue(defaults, 'appsec.apiSecurity.enabled', true)
this._setValue(defaults, 'appsec.apiSecurity.requestSampling', 0.1)
this._setValue(defaults, 'appsec.blockedTemplateGraphql', undefined)
this._setValue(defaults, 'appsec.blockedTemplateHtml', undefined)
this._setValue(defaults, 'appsec.blockedTemplateJson', undefined)
this._setValue(defaults, 'appsec.enabled', undefined)
this._setValue(defaults, 'appsec.eventTracking.enabled', true)
this._setValue(defaults, 'appsec.eventTracking.mode', 'safe')
this._setValue(defaults, 'appsec.obfuscatorKeyRegex', defaultWafObfuscatorKeyRegex)
this._setValue(defaults, 'appsec.obfuscatorValueRegex', defaultWafObfuscatorValueRegex)
this._setValue(defaults, 'appsec.rasp.enabled', true)
this._setValue(defaults, 'appsec.rateLimit', 100)
this._setValue(defaults, 'appsec.rules', undefined)
this._setValue(defaults, 'appsec.sca.enabled', null)
this._setValue(defaults, 'appsec.standalone.enabled', undefined)
this._setValue(defaults, 'appsec.stackTrace.enabled', true)
this._setValue(defaults, 'appsec.stackTrace.maxDepth', 32)
this._setValue(defaults, 'appsec.stackTrace.maxStackTraces', 2)
this._setValue(defaults, 'appsec.wafTimeout', 5e3) // µs

@@ -423,4 +504,8 @@ this._setValue(defaults, 'clientIpEnabled', false)

this._setValue(defaults, 'iast.telemetryVerbosity', 'INFORMATION')
this._setValue(defaults, 'injectionEnabled', [])
this._setValue(defaults, 'isAzureFunction', false)
this._setValue(defaults, 'isCiVisibility', false)
this._setValue(defaults, 'isEarlyFlakeDetectionEnabled', false)
this._setValue(defaults, 'isFlakyTestRetriesEnabled', false)
this._setValue(defaults, 'flakyTestRetriesCount', 5)
this._setValue(defaults, 'isGCPFunction', false)

@@ -430,4 +515,6 @@ this._setValue(defaults, 'isGitUploadEnabled', false)

this._setValue(defaults, 'isManualApiEnabled', false)
this._setValue(defaults, 'ciVisibilitySessionName', '')
this._setValue(defaults, 'logInjection', false)
this._setValue(defaults, 'lookup', undefined)
this._setValue(defaults, 'memcachedCommandEnabled', false)
this._setValue(defaults, 'openAiLogsEnabled', false)

@@ -438,5 +525,6 @@ this._setValue(defaults, 'openaiSpanCharLimit', 128)

this._setValue(defaults, 'port', '8126')
this._setValue(defaults, 'profiling.enabled', false)
this._setValue(defaults, 'profiling.enabled', undefined)
this._setValue(defaults, 'profiling.exporters', 'agent')
this._setValue(defaults, 'profiling.sourceMap', true)
this._setValue(defaults, 'profiling.longLivedThreshold', undefined)
this._setValue(defaults, 'protocolVersion', '0.4')

@@ -450,7 +538,11 @@ this._setValue(defaults, 'queryStringObfuscation', qsRegex)

this._setValue(defaults, 'sampler.rateLimit', undefined)
this._setValue(defaults, 'sampler.rules', [])
this._setValue(defaults, 'sampler.spanSamplingRules', [])
this._setValue(defaults, 'scope', undefined)
this._setValue(defaults, 'service', service)
this._setValue(defaults, 'serviceMapping', {})
this._setValue(defaults, 'site', 'datadoghq.com')
this._setValue(defaults, 'spanAttributeSchema', 'v0')
this._setValue(defaults, 'spanComputePeerService', false)
this._setValue(defaults, 'spanLeakDebug', 0)
this._setValue(defaults, 'spanRemoveIntegrationFromService', false)

@@ -465,9 +557,14 @@ this._setValue(defaults, 'startupLogs', false)

this._setValue(defaults, 'telemetry.heartbeatInterval', 60000)
this._setValue(defaults, 'telemetry.logCollection', false)
this._setValue(defaults, 'telemetry.logCollection', true)
this._setValue(defaults, 'telemetry.metrics', true)
this._setValue(defaults, 'traceId128BitGenerationEnabled', true)
this._setValue(defaults, 'traceId128BitLoggingEnabled', false)
this._setValue(defaults, 'tracePropagationExtractFirst', false)
this._setValue(defaults, 'tracePropagationStyle.inject', ['datadog', 'tracecontext'])
this._setValue(defaults, 'tracePropagationStyle.extract', ['datadog', 'tracecontext'])
this._setValue(defaults, 'tracePropagationStyle.otelPropagators', false)
this._setValue(defaults, 'tracing', true)
this._setValue(defaults, 'url', undefined)
this._setValue(defaults, 'version', pkg.version)
this._setValue(defaults, 'instrumentation_config_id', undefined)
}

@@ -479,8 +576,17 @@

DD_AGENT_HOST,
DD_API_SECURITY_ENABLED,
DD_API_SECURITY_REQUEST_SAMPLE_RATE,
DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING,
DD_APPSEC_ENABLED,
DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
DD_APPSEC_MAX_STACK_TRACES,
DD_APPSEC_MAX_STACK_TRACE_DEPTH,
DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
DD_APPSEC_RULES,
DD_APPSEC_SCA_ENABLED,
DD_APPSEC_STACK_TRACE_ENABLED,
DD_APPSEC_RASP_ENABLED,
DD_APPSEC_TRACE_RATE_LIMIT,

@@ -493,2 +599,4 @@ DD_APPSEC_WAF_TIMEOUT,

DD_ENV,
DD_EXPERIMENTAL_API_SECURITY_ENABLED,
DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED,
DD_EXPERIMENTAL_PROFILING_ENABLED,

@@ -505,3 +613,5 @@ JEST_WORKER_ID,

DD_IAST_TELEMETRY_VERBOSITY,
DD_INJECTION_ENABLED,
DD_INSTRUMENTATION_TELEMETRY_ENABLED,
DD_INSTRUMENTATION_CONFIG_ID,
DD_LOGS_INJECTION,

@@ -513,2 +623,3 @@ DD_OPENAI_LOGS_ENABLED,

DD_PROFILING_SOURCE_MAP,
DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD,
DD_REMOTE_CONFIGURATION_ENABLED,

@@ -518,4 +629,7 @@ DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS,

DD_SERVICE,
DD_SERVICE_MAPPING,
DD_SERVICE_NAME,
DD_SITE,
DD_SPAN_SAMPLING_RULES,
DD_SPAN_SAMPLING_RULES_FILE,
DD_TAGS,

@@ -540,5 +654,10 @@ DD_TELEMETRY_DEBUG,

DD_TRACE_HEADER_TAGS,
DD_TRACE_MEMCACHED_COMMAND_ENABLED,
DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
DD_TRACE_PARTIAL_FLUSH_MIN_SPANS,
DD_TRACE_PEER_SERVICE_MAPPING,
DD_TRACE_PROPAGATION_EXTRACT_FIRST,
DD_TRACE_PROPAGATION_STYLE,
DD_TRACE_PROPAGATION_STYLE_INJECT,
DD_TRACE_PROPAGATION_STYLE_EXTRACT,
DD_TRACE_RATE_LIMIT,

@@ -548,4 +667,6 @@ DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED,

DD_TRACE_SAMPLE_RATE,
DD_TRACE_SAMPLING_RULES,
DD_TRACE_SCOPE,
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA,
DD_TRACE_SPAN_LEAK_DEBUG,
DD_TRACE_STARTUP_LOGS,

@@ -556,8 +677,16 @@ DD_TRACE_TAGS,

DD_TRACING_ENABLED,
DD_VERSION
DD_VERSION,
OTEL_METRICS_EXPORTER,
OTEL_PROPAGATORS,
OTEL_RESOURCE_ATTRIBUTES,
OTEL_SERVICE_NAME,
OTEL_TRACES_SAMPLER,
OTEL_TRACES_SAMPLER_ARG
} = process.env
const tags = {}
const env = this._env = {}
const env = setHiddenProperty(this, '_env', {})
setHiddenProperty(this, '_envUnprocessed', {})
tagger.add(tags, OTEL_RESOURCE_ATTRIBUTES, true)
tagger.add(tags, DD_TAGS)

@@ -567,10 +696,34 @@ tagger.add(tags, DD_TRACE_TAGS)

this._setBoolean(env, 'appsec.apiSecurity.enabled', coalesce(
DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED),
DD_EXPERIMENTAL_API_SECURITY_ENABLED && isTrue(DD_EXPERIMENTAL_API_SECURITY_ENABLED)
))
this._setUnit(env, 'appsec.apiSecurity.requestSampling', DD_API_SECURITY_REQUEST_SAMPLE_RATE)
this._setValue(env, 'appsec.blockedTemplateGraphql', maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON))
this._setValue(env, 'appsec.blockedTemplateHtml', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML))
this._envUnprocessed['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML
this._setValue(env, 'appsec.blockedTemplateJson', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON))
this._envUnprocessed['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON
this._setBoolean(env, 'appsec.enabled', DD_APPSEC_ENABLED)
if (DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING) {
this._setValue(env, 'appsec.eventTracking.enabled',
['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING.toLowerCase()))
this._setValue(env, 'appsec.eventTracking.mode', DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING.toLowerCase())
}
this._setString(env, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
this._setString(env, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP)
this._setBoolean(env, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED)
this._setValue(env, 'appsec.rateLimit', maybeInt(DD_APPSEC_TRACE_RATE_LIMIT))
this._envUnprocessed['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT
this._setString(env, 'appsec.rules', DD_APPSEC_RULES)
// DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend
this._setBoolean(env, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED)
this._setBoolean(env, 'appsec.standalone.enabled', DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED)
this._setBoolean(env, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED)
this._setValue(env, 'appsec.stackTrace.maxDepth', maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH))
this._envUnprocessed['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH
this._setValue(env, 'appsec.stackTrace.maxStackTraces', maybeInt(DD_APPSEC_MAX_STACK_TRACES))
this._envUnprocessed['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES
this._setValue(env, 'appsec.wafTimeout', maybeInt(DD_APPSEC_WAF_TIMEOUT))
this._envUnprocessed['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT
this._setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED)

@@ -588,2 +741,3 @@ this._setString(env, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER)

this._setValue(env, 'flushMinSpans', maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS))
this._envUnprocessed.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS
this._setBoolean(env, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED)

@@ -595,3 +749,5 @@ this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS)

this._setValue(env, 'iast.maxConcurrentRequests', maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS))
this._envUnprocessed['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS
this._setValue(env, 'iast.maxContextOperations', maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS))
this._envUnprocessed['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS
this._setBoolean(env, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED))

@@ -604,16 +760,34 @@ this._setString(env, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN)

}
this._envUnprocessed['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING
this._setString(env, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY)
this._setArray(env, 'injectionEnabled', DD_INJECTION_ENABLED)
this._setBoolean(env, 'isAzureFunction', getIsAzureFunction())
this._setBoolean(env, 'isGCPFunction', getIsGCPFunction())
this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION)
// Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
this._setBoolean(env, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED)
this._setBoolean(env, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED)
this._setValue(env, 'openaiSpanCharLimit', maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT))
this._envUnprocessed.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT
if (DD_TRACE_PEER_SERVICE_MAPPING) {
this._setValue(env, 'peerServiceMapping', fromEntries(
process.env.DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
))
this._envUnprocessed.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING
}
this._setString(env, 'port', DD_TRACE_AGENT_PORT)
this._setBoolean(env, 'profiling.enabled', coalesce(DD_EXPERIMENTAL_PROFILING_ENABLED, DD_PROFILING_ENABLED))
const profilingEnabledEnv = coalesce(DD_EXPERIMENTAL_PROFILING_ENABLED, DD_PROFILING_ENABLED)
const profilingEnabled = isTrue(profilingEnabledEnv)
? 'true'
: isFalse(profilingEnabledEnv)
? 'false'
: profilingEnabledEnv === 'auto' ? 'auto' : undefined
this._setString(env, 'profiling.enabled', profilingEnabled)
this._setString(env, 'profiling.exporters', DD_PROFILING_EXPORTERS)
this._setBoolean(env, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP))
if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) {
// This is only used in testing to not have to wait 30s
this._setValue(env, 'profiling.longLivedThreshold', Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD))
}
this._setString(env, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION)

@@ -626,12 +800,33 @@ this._setString(env, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP)

this._setValue(env, 'remoteConfig.pollInterval', maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS))
this._envUnprocessed['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS
this._setBoolean(env, 'reportHostname', DD_TRACE_REPORT_HOSTNAME)
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED)
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE)
// only used to explicitly set runtimeMetrics to false
const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none'
? false
: undefined
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED ||
otelSetRuntimeMetrics)
this._setArray(env, 'sampler.spanSamplingRules', reformatSpanSamplingRules(coalesce(
safeJsonParse(maybeFile(DD_SPAN_SAMPLING_RULES_FILE)),
safeJsonParse(DD_SPAN_SAMPLING_RULES)
)))
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE ||
getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG))
this._setValue(env, 'sampler.rateLimit', DD_TRACE_RATE_LIMIT)
this._setSamplingRule(env, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES))
this._envUnprocessed['sampler.rules'] = DD_TRACE_SAMPLING_RULES
this._setString(env, 'scope', DD_TRACE_SCOPE)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service || OTEL_SERVICE_NAME)
if (DD_SERVICE_MAPPING) {
this._setValue(env, 'serviceMapping', fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
))
}
this._setString(env, 'site', DD_SITE)
if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) {
this._setString(env, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA))
this._envUnprocessed.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
}
// 0: disabled, 1: logging, 2: garbage collection + logging
this._setValue(env, 'spanLeakDebug', maybeInt(DD_TRACE_SPAN_LEAK_DEBUG))
this._setBoolean(env, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)

@@ -646,9 +841,18 @@ this._setBoolean(env, 'startupLogs', DD_TRACE_STARTUP_LOGS)

))
this._setString(env, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID)
this._setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG)
this._setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
this._setValue(env, 'telemetry.heartbeatInterval', maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)))
this._setBoolean(env, 'telemetry.logCollection', coalesce(DD_TELEMETRY_LOG_COLLECTION_ENABLED, DD_IAST_ENABLED))
this._envUnprocessed['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000
this._setBoolean(env, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED)
this._setBoolean(env, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
this._setBoolean(env, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
this._setBoolean(env, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
this._setBoolean(env, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST)
this._setBoolean(env, 'tracePropagationStyle.otelPropagators',
DD_TRACE_PROPAGATION_STYLE ||
DD_TRACE_PROPAGATION_STYLE_INJECT ||
DD_TRACE_PROPAGATION_STYLE_EXTRACT
? false
: !!OTEL_PROPAGATORS)
this._setBoolean(env, 'tracing', DD_TRACING_ENABLED)

@@ -659,17 +863,38 @@ this._setString(env, 'version', DD_VERSION || tags.version)

_applyOptions (options) {
const opts = this._options = this._options || {}
const opts = setHiddenProperty(this, '_options', this._options || {})
const tags = {}
setHiddenProperty(this, '_optsUnprocessed', {})
options = this.options = Object.assign({ ingestion: {} }, options, opts)
options = setHiddenProperty(this, '_optionsArg', Object.assign({ ingestion: {} }, options, opts))
tagger.add(tags, options.tags)
this._setBoolean(opts, 'appsec.apiSecurity.enabled', options.appsec.apiSecurity?.enabled)
this._setUnit(opts, 'appsec.apiSecurity.requestSampling', options.appsec.apiSecurity?.requestSampling)
this._setValue(opts, 'appsec.blockedTemplateGraphql', maybeFile(options.appsec.blockedTemplateGraphql))
this._setValue(opts, 'appsec.blockedTemplateHtml', maybeFile(options.appsec.blockedTemplateHtml))
this._optsUnprocessed['appsec.blockedTemplateHtml'] = options.appsec.blockedTemplateHtml
this._setValue(opts, 'appsec.blockedTemplateJson', maybeFile(options.appsec.blockedTemplateJson))
this._optsUnprocessed['appsec.blockedTemplateJson'] = options.appsec.blockedTemplateJson
this._setBoolean(opts, 'appsec.enabled', options.appsec.enabled)
let eventTracking = options.appsec.eventTracking?.mode
if (eventTracking) {
eventTracking = eventTracking.toLowerCase()
this._setValue(opts, 'appsec.eventTracking.enabled', ['extended', 'safe'].includes(eventTracking))
this._setValue(opts, 'appsec.eventTracking.mode', eventTracking)
}
this._setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec.obfuscatorKeyRegex)
this._setString(opts, 'appsec.obfuscatorValueRegex', options.appsec.obfuscatorValueRegex)
this._setBoolean(opts, 'appsec.rasp.enabled', options.appsec.rasp?.enabled)
this._setValue(opts, 'appsec.rateLimit', maybeInt(options.appsec.rateLimit))
this._optsUnprocessed['appsec.rateLimit'] = options.appsec.rateLimit
this._setString(opts, 'appsec.rules', options.appsec.rules)
this._setBoolean(opts, 'appsec.standalone.enabled', options.experimental?.appsec?.standalone?.enabled)
this._setBoolean(opts, 'appsec.stackTrace.enabled', options.appsec.stackTrace?.enabled)
this._setValue(opts, 'appsec.stackTrace.maxDepth', maybeInt(options.appsec.stackTrace?.maxDepth))
this._optsUnprocessed['appsec.stackTrace.maxDepth'] = options.appsec.stackTrace?.maxDepth
this._setValue(opts, 'appsec.stackTrace.maxStackTraces', maybeInt(options.appsec.stackTrace?.maxStackTraces))
this._optsUnprocessed['appsec.stackTrace.maxStackTraces'] = options.appsec.stackTrace?.maxStackTraces
this._setValue(opts, 'appsec.wafTimeout', maybeInt(options.appsec.wafTimeout))
this._optsUnprocessed['appsec.wafTimeout'] = options.appsec.wafTimeout
this._setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled)

@@ -689,20 +914,24 @@ this._setString(opts, 'clientIpHeader', options.clientIpHeader)

this._setValue(opts, 'flushInterval', maybeInt(options.flushInterval))
this._optsUnprocessed.flushInterval = options.flushInterval
this._setValue(opts, 'flushMinSpans', maybeInt(options.flushMinSpans))
this._optsUnprocessed.flushMinSpans = options.flushMinSpans
this._setArray(opts, 'headerTags', options.headerTags)
this._setString(opts, 'hostname', options.hostname)
this._setBoolean(opts, 'iast.deduplicationEnabled', options.iastOptions && options.iastOptions.deduplicationEnabled)
this._setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled)
this._setBoolean(opts, 'iast.enabled',
options.iastOptions && (options.iastOptions === true || options.iastOptions.enabled === true))
const iastRequestSampling = maybeInt(options.iastOptions?.requestSampling)
options.iast && (options.iast === true || options.iast.enabled === true))
this._setValue(opts, 'iast.maxConcurrentRequests',
maybeInt(options.iastOptions?.maxConcurrentRequests))
this._setValue(opts, 'iast.maxContextOperations',
maybeInt(options.iastOptions && options.iastOptions.maxContextOperations))
this._setBoolean(opts, 'iast.redactionEnabled', options.iastOptions && options.iastOptions.redactionEnabled)
this._setString(opts, 'iast.redactionNamePattern', options.iastOptions?.redactionNamePattern)
this._setString(opts, 'iast.redactionValuePattern', options.iastOptions?.redactionValuePattern)
maybeInt(options.iast?.maxConcurrentRequests))
this._optsUnprocessed['iast.maxConcurrentRequests'] = options.iast?.maxConcurrentRequests
this._setValue(opts, 'iast.maxContextOperations', maybeInt(options.iast?.maxContextOperations))
this._optsUnprocessed['iast.maxContextOperations'] = options.iast?.maxContextOperations
this._setBoolean(opts, 'iast.redactionEnabled', options.iast?.redactionEnabled)
this._setString(opts, 'iast.redactionNamePattern', options.iast?.redactionNamePattern)
this._setString(opts, 'iast.redactionValuePattern', options.iast?.redactionValuePattern)
const iastRequestSampling = maybeInt(options.iast?.requestSampling)
if (iastRequestSampling > -1 && iastRequestSampling < 101) {
this._setValue(opts, 'iast.requestSampling', iastRequestSampling)
this._optsUnprocessed['iast.requestSampling'] = options.iast?.requestSampling
}
this._setString(opts, 'iast.telemetryVerbosity', options.iastOptions && options.iastOptions.telemetryVerbosity)
this._setString(opts, 'iast.telemetryVerbosity', options.iast && options.iast.telemetryVerbosity)
this._setBoolean(opts, 'isCiVisibility', options.isCiVisibility)

@@ -715,16 +944,24 @@ this._setBoolean(opts, 'logInjection', options.logInjection)

this._setString(opts, 'port', options.port)
this._setBoolean(opts, 'profiling.enabled', options.profiling)
const strProfiling = String(options.profiling)
if (['true', 'false', 'auto'].includes(strProfiling)) {
this._setString(opts, 'profiling.enabled', strProfiling)
}
this._setString(opts, 'protocolVersion', options.protocolVersion)
if (options.remoteConfig) {
this._setValue(opts, 'remoteConfig.pollInterval', maybeFloat(options.remoteConfig.pollInterval))
this._optsUnprocessed['remoteConfig.pollInterval'] = options.remoteConfig.pollInterval
}
this._setBoolean(opts, 'reportHostname', options.reportHostname)
this._setBoolean(opts, 'runtimeMetrics', options.runtimeMetrics)
this._setArray(opts, 'sampler.spanSamplingRules', reformatSpanSamplingRules(options.spanSamplingRules))
this._setUnit(opts, 'sampleRate', coalesce(options.sampleRate, options.ingestion.sampleRate))
const ingestion = options.ingestion || {}
this._setValue(opts, 'sampler.rateLimit', coalesce(options.rateLimit, ingestion.rateLimit))
this._setSamplingRule(opts, 'sampler.rules', options.samplingRules)
this._setString(opts, 'service', options.service || tags.service)
this._setValue(opts, 'serviceMapping', options.serviceMapping)
this._setString(opts, 'site', options.site)
if (options.spanAttributeSchema) {
this._setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema))
this._optsUnprocessed.spanAttributeSchema = options.spanAttributeSchema
}

@@ -734,4 +971,2 @@ this._setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService)

this._setTags(opts, 'tags', tags)
this._setBoolean(opts, 'telemetry.logCollection', options.iastOptions &&
(options.iastOptions === true || options.iastOptions.enabled === true))
this._setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled)

@@ -744,3 +979,3 @@ this._setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled)

return coalesce(
this.options.isCiVisibility,
this._optionsArg.isCiVisibility,
this._defaults.isCiVisibility

@@ -761,5 +996,5 @@ )

? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(this._getTraceAgentUrl(), this.options)
: getAgentUrl(this._getTraceAgentUrl(), this._optionsArg)
const DD_AGENT_HOST = coalesce(
this.options.hostname,
this._optionsArg.hostname,
process.env.DD_AGENT_HOST,

@@ -775,3 +1010,3 @@ process.env.DD_TRACE_AGENT_HOSTNAME,

coalesce(
this.options.spanAttributeSchema,
this._optionsArg.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA

@@ -782,7 +1017,7 @@ )

const peerServiceSet = (
this.options.hasOwnProperty('spanComputePeerService') ||
this._optionsArg.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
this.options.spanComputePeerService,
this._optionsArg.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED

@@ -818,5 +1053,5 @@ )

return coalesce(
this.options.stats,
this._optionsArg.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
getIsGCPFunction() || getIsAzureFunctionConsumptionPlan()
getIsGCPFunction() || getIsAzureFunction()
)

@@ -827,3 +1062,3 @@ }

return coalesce(
this.options.url,
this._optionsArg.url,
process.env.DD_TRACE_AGENT_URL,

@@ -837,6 +1072,10 @@ process.env.DD_TRACE_URL,

_applyCalculated () {
const calc = this._calculated = {}
const calc = setHiddenProperty(this, '_calculated', {})
const {
DD_CIVISIBILITY_AGENTLESS_URL
DD_CIVISIBILITY_AGENTLESS_URL,
DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED,
DD_CIVISIBILITY_FLAKY_RETRY_ENABLED,
DD_CIVISIBILITY_FLAKY_RETRY_COUNT,
DD_SESSION_NAME
} = process.env

@@ -847,9 +1086,13 @@

} else {
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this.options))
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this._optionsArg))
}
if (this._isCiVisibility()) {
this._setBoolean(calc, 'isEarlyFlakeDetectionEnabled',
coalesce(process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, true))
coalesce(DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, true))
this._setBoolean(calc, 'isFlakyTestRetriesEnabled',
coalesce(DD_CIVISIBILITY_FLAKY_RETRY_ENABLED, true))
this._setValue(calc, 'flakyTestRetriesCount', coalesce(maybeInt(DD_CIVISIBILITY_FLAKY_RETRY_COUNT), 5))
this._setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(this._isCiVisibilityItrEnabled()))
this._setBoolean(calc, 'isManualApiEnabled', this._isCiVisibilityManualApiEnabled())
this._setString(calc, 'ciVisibilitySessionName', DD_SESSION_NAME)
}

@@ -861,6 +1104,20 @@ this._setString(calc, 'dogstatsd.hostname', this._getHostname())

this._setBoolean(calc, 'stats.enabled', this._isTraceStatsComputationEnabled())
const defaultPropagationStyle = this._getDefaultPropagationStyle(this._optionsArg)
this._setValue(calc, 'tracePropagationStyle.inject', propagationStyle(
'inject',
this._optionsArg.tracePropagationStyle
))
this._setValue(calc, 'tracePropagationStyle.extract', propagationStyle(
'extract',
this._optionsArg.tracePropagationStyle
))
if (defaultPropagationStyle.length > 2) {
calc['tracePropagationStyle.inject'] = calc['tracePropagationStyle.inject'] || defaultPropagationStyle
calc['tracePropagationStyle.extract'] = calc['tracePropagationStyle.extract'] || defaultPropagationStyle
}
}
_applyRemote (options) {
const opts = this._remote = this._remote || {}
const opts = setHiddenProperty(this, '_remote', this._remote || {})
setHiddenProperty(this, '_remoteUnprocessed', {})
const tags = {}

@@ -881,4 +1138,19 @@ const headerTags = options.tracing_header_tags

this._setBoolean(opts, 'tracing', options.tracing_enabled)
this._remoteUnprocessed['sampler.rules'] = options.tracing_sampling_rules
this._setSamplingRule(opts, 'sampler.rules', this._reformatTags(options.tracing_sampling_rules))
}
_reformatTags (samplingRules) {
for (const rule of (samplingRules || [])) {
const reformattedTags = {}
if (rule.tags) {
for (const tag of (rule.tags || {})) {
reformattedTags[tag.key] = tag.value_glob
}
rule.tags = reformattedTags
}
}
return samplingRules
}
_setBoolean (obj, name, value) {

@@ -908,3 +1180,3 @@ if (value === undefined || value === null) {

_setArray (obj, name, value) {
if (value === null || value === undefined) {
if (value == null) {
return this._setValue(obj, name, null)

@@ -914,3 +1186,3 @@ }

if (typeof value === 'string') {
value = value && value.split(',')
value = value.split(',')
}

@@ -923,2 +1195,21 @@

_setSamplingRule (obj, name, value) {
if (value == null) {
return this._setValue(obj, name, null)
}
if (typeof value === 'string') {
value = value.split(',')
}
if (Array.isArray(value)) {
value = value.map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate'
})
})
this._setValue(obj, name, value)
}
}
_setString (obj, name, value) {

@@ -949,2 +1240,3 @@ obj[name] = value ? String(value) : undefined // unset for empty strings

const origins = ['remote_config', 'code', 'env_var', 'calculated', 'default']
const unprocessedValues = [this._remoteUnprocessed, this._optsUnprocessed, this._envUnprocessed, {}, {}]
const changes = []

@@ -955,11 +1247,14 @@

const container = containers[i]
const origin = origins[i]
const value = container[name]
if ((container[name] !== null && container[name] !== undefined) || container === this._defaults) {
if (get(this, name) === container[name] && has(this, name)) break
if ((value !== null && value !== undefined) || container === this._defaults) {
if (get(this, name) === value && has(this, name)) break
const value = container[name]
set(this, name, value)
changes.push({ name, value, origin })
changes.push({
name,
value: unprocessedValues[i][name] || value,
origin: origins[i]
})

@@ -980,2 +1275,3 @@ break

}
function maybeFloat (number) {

@@ -1003,2 +1299,11 @@ const parsed = parseFloat(number)

/**
 * Attach `name` to `obj` as a non-enumerable (but writable) property so it is
 * skipped by Object.keys / JSON.stringify, and return the stored value.
 */
function setHiddenProperty (obj, name, value) {
  Object.defineProperty(obj, name, { value, enumerable: false, writable: true })
  return obj[name]
}
module.exports = Config

@@ -18,2 +18,4 @@ 'use strict'

SAMPLING_MECHANISM_SPAN: 8,
SAMPLING_MECHANISM_REMOTE_USER: 11,
SAMPLING_MECHANISM_REMOTE_DYNAMIC: 12,
SPAN_SAMPLING_MECHANISM: '_dd.span_sampling.mechanism',

@@ -34,3 +36,8 @@ SPAN_SAMPLING_RULE_RATE: '_dd.span_sampling.rule_rate',

SCI_REPOSITORY_URL: '_dd.git.repository_url',
SCI_COMMIT_SHA: '_dd.git.commit.sha'
SCI_COMMIT_SHA: '_dd.git.commit.sha',
APM_TRACING_ENABLED_KEY: '_dd.apm.enabled',
APPSEC_PROPAGATION_KEY: '_dd.p.appsec',
PAYLOAD_TAG_REQUEST_PREFIX: 'aws.request.body',
PAYLOAD_TAG_RESPONSE_PREFIX: 'aws.response.body',
PAYLOAD_TAGGING_MAX_TAGS: 758
}

@@ -20,7 +20,9 @@ // encoding used here is sha256

function computeHash (service, env, edgeTags, parentHash) {
const key = `${service}${env}` + edgeTags.join('') + parentHash.toString()
const hashableEdgeTags = edgeTags.filter(item => item !== 'manual_checkpoint:true')
const key = `${service}${env}` + hashableEdgeTags.join('') + parentHash.toString()
if (cache.get(key)) {
return cache.get(key)
}
const currentHash = shaHash(`${service}${env}` + edgeTags.join(''))
const currentHash = shaHash(`${service}${env}` + hashableEdgeTags.join(''))
const buf = Buffer.concat([currentHash, parentHash], 16)

@@ -27,0 +29,0 @@ const val = shaHash(buf.toString())

@@ -135,3 +135,3 @@ const os = require('os')

}
if (typeof obj === 'object') {
if (obj !== null && typeof obj === 'object') {
try {

@@ -215,3 +215,4 @@ return getHeadersSize(obj)

Version: this.version,
Lang: 'javascript'
Lang: 'javascript',
Tags: Object.entries(this.tags).map(([key, value]) => `${key}:${value}`)
}

@@ -218,0 +219,0 @@ this.writer.flush(payload)

@@ -86,9 +86,13 @@ 'use strict'

if (span.type) {
if (span.type && span.meta_struct) {
bytes.buffer[bytes.length++] = 0x8d
} else if (span.type || span.meta_struct) {
bytes.buffer[bytes.length++] = 0x8c
} else {
bytes.buffer[bytes.length++] = 0x8b
}
if (span.type) {
this._encodeString(bytes, 'type')
this._encodeString(bytes, span.type)
} else {
bytes.buffer[bytes.length++] = 0x8b
}

@@ -118,2 +122,6 @@

this._encodeMap(bytes, span.metrics)
if (span.meta_struct) {
this._encodeString(bytes, 'meta_struct')
this._encodeMetaStruct(bytes, span.meta_struct)
}
}

@@ -268,2 +276,80 @@ }

_encodeMetaStruct (bytes, value) {
const keys = Array.isArray(value) ? [] : Object.keys(value)
const validKeys = keys.filter(key => {
const v = value[key]
return typeof v === 'string' ||
typeof v === 'number' ||
(v !== null && typeof v === 'object')
})
this._encodeMapPrefix(bytes, validKeys.length)
for (const key of validKeys) {
const v = value[key]
this._encodeString(bytes, key)
this._encodeObjectAsByteArray(bytes, v)
}
}
  // Encodes `value` as a msgpack bin 32 payload: a 0xc6 marker followed by a
  // 4-byte big-endian length, then the msgpack encoding of the object itself.
  _encodeObjectAsByteArray (bytes, value) {
    // bin 32 header: 1 marker byte + 4 length bytes.
    const prefixLength = 5
    const offset = bytes.length
    bytes.reserve(prefixLength)
    bytes.length += prefixLength
    this._encodeObject(bytes, value)
    // we should do it after encoding the object to know the real length
    const length = bytes.length - offset - prefixLength
    bytes.buffer[offset] = 0xc6
    // Big-endian 32-bit length, one byte at a time.
    bytes.buffer[offset + 1] = length >> 24
    bytes.buffer[offset + 2] = length >> 16
    bytes.buffer[offset + 3] = length >> 8
    bytes.buffer[offset + 4] = length
  }
  // Recursively encodes a plain value (array, object, string, or number).
  // `circularReferencesDetector` accumulates every visited reference so that
  // cycles (and repeated references within one tree) are skipped instead of
  // recursing forever. Values of any other type are silently dropped.
  _encodeObject (bytes, value, circularReferencesDetector = new Set()) {
    circularReferencesDetector.add(value)
    // Order matters: arrays are objects too, so test Array.isArray first.
    if (Array.isArray(value)) {
      this._encodeObjectAsArray(bytes, value, circularReferencesDetector)
    } else if (value !== null && typeof value === 'object') {
      this._encodeObjectAsMap(bytes, value, circularReferencesDetector)
    } else if (typeof value === 'string' || typeof value === 'number') {
      this._encodeValue(bytes, value)
    }
  }
_encodeObjectAsMap (bytes, value, circularReferencesDetector) {
const keys = Object.keys(value)
const validKeys = keys.filter(key => {
const v = value[key]
return typeof v === 'string' ||
typeof v === 'number' ||
(v !== null && typeof v === 'object' && !circularReferencesDetector.has(v))
})
this._encodeMapPrefix(bytes, validKeys.length)
for (const key of validKeys) {
const v = value[key]
this._encodeString(bytes, key)
this._encodeObject(bytes, v, circularReferencesDetector)
}
}
_encodeObjectAsArray (bytes, value, circularReferencesDetector) {
const validValue = value.filter(item =>
typeof item === 'string' ||
typeof item === 'number' ||
(item !== null && typeof item === 'object' && !circularReferencesDetector.has(item)))
this._encodeArrayPrefix(bytes, validValue)
for (const item of validValue) {
this._encodeObject(bytes, item, circularReferencesDetector)
}
}
_cacheString (value) {

@@ -270,0 +356,0 @@ if (!(value in this._stringMap)) {

@@ -46,5 +46,11 @@ 'use strict'

this.metadataTags = {}
this.reset()
}
  // Replaces the metadata tags that get merged into the serialized payload's
  // metadata section (see the `...this.metadataTags` spread in makePayload).
  setMetadataTags (tags) {
    this.metadataTags = tags
  }
_encodeTestSuite (bytes, content) {

@@ -281,2 +287,6 @@ let keysLength = TEST_SUITE_KEYS_LENGTH

_encode (bytes, trace) {
if (this._isReset) {
this._encodePayloadStart(bytes)
this._isReset = false
}
const startTime = Date.now()

@@ -335,3 +345,4 @@

library_version: ddTraceVersion
}
},
...this.metadataTags
},

@@ -355,2 +366,18 @@ events: []

this._encodeMap(bytes, payload.metadata['*'])
if (payload.metadata.test) {
this._encodeString(bytes, 'test')
this._encodeMap(bytes, payload.metadata.test)
}
if (payload.metadata.test_suite_end) {
this._encodeString(bytes, 'test_suite_end')
this._encodeMap(bytes, payload.metadata.test_suite_end)
}
if (payload.metadata.test_module_end) {
this._encodeString(bytes, 'test_module_end')
this._encodeMap(bytes, payload.metadata.test_module_end)
}
if (payload.metadata.test_session_end) {
this._encodeString(bytes, 'test_session_end')
this._encodeMap(bytes, payload.metadata.test_session_end)
}
this._encodeString(bytes, 'events')

@@ -366,3 +393,3 @@ // Get offset of the events list to update the length of the array when calling `makePayload`

this._eventCount = 0
this._encodePayloadStart(this._traceBytes)
this._isReset = true
}

@@ -369,0 +396,0 @@ }

@@ -21,3 +21,5 @@ 'use strict'

case exporters.JEST_WORKER:
return require('./ci-visibility/exporters/jest-worker')
case exporters.CUCUMBER_WORKER:
case exporters.MOCHA_WORKER:
return require('./ci-visibility/exporters/test-worker')
default:

@@ -24,0 +26,0 @@ return inAWSLambda && !usingLambdaExtension ? require('./exporters/log') : require('./exporters/agent')

@@ -10,3 +10,3 @@ 'use strict'

this._config = config
const { url, hostname, port, lookup, protocolVersion, stats = {} } = config
const { url, hostname, port, lookup, protocolVersion, stats = {}, appsec } = config
this._url = url || new URL(format({

@@ -19,3 +19,3 @@ protocol: 'http:',

const headers = {}
if (stats.enabled) {
if (stats.enabled || appsec?.standalone?.enabled) {
headers['Datadog-Client-Computed-Stats'] = 'yes'

@@ -22,0 +22,0 @@ }

@@ -37,2 +37,3 @@ 'use strict'

extractSpanLinks(formatted, span)
extractSpanEvents(formatted, span)
extractRootTags(formatted, span)

@@ -56,2 +57,3 @@ extractChunkTags(formatted, span)

meta: {},
meta_struct: span.meta_struct,
metrics: {},

@@ -93,2 +95,18 @@ start: Math.round(span._startTime * 1e6),

// Serializes the span's recorded events into the trace's `events` meta tag
// as a JSON string. Does nothing when the span has no events.
function extractSpanEvents (trace, span) {
  if (!span._events || span._events.length === 0) return
  const formatted = span._events.map(event => {
    const hasAttributes = event.attributes && Object.keys(event.attributes).length > 0
    return {
      name: event.name,
      // NOTE(review): startTime appears to be milliseconds, converted to
      // nanoseconds here — confirm against the event producer.
      time_unix_nano: Math.round(event.startTime * 1e6),
      // Empty attribute maps are omitted (undefined is dropped by JSON.stringify).
      attributes: hasAttributes ? event.attributes : undefined
    }
  })
  trace.meta.events = JSON.stringify(formatted)
}
function extractTags (trace, span) {

@@ -140,3 +158,6 @@ const context = span.context()

if (context._name !== 'fs.operation') {
trace.error = 1
// HACK: to ensure otel.recordException does not influence trace.error
if (tags.setTraceError) {
trace.error = 1
}
} else {

@@ -149,3 +170,2 @@ break

}
setSingleSpanIngestionTags(trace, context._spanSampling)

@@ -152,0 +172,0 @@

@@ -8,4 +8,8 @@ 'use strict'

module.exports = isFalse(process.env.DD_TRACE_ENABLED) || inJestWorker
const ddTraceDisabled = process.env.DD_TRACE_ENABLED
? isFalse(process.env.DD_TRACE_ENABLED)
: String(process.env.OTEL_TRACES_EXPORTER).toLowerCase() === 'none'
module.exports = ddTraceDisabled || inJestWorker
? require('./noop/proxy')
: require('./proxy')

@@ -96,4 +96,5 @@ 'use strict'

}
clearTimeout(__lambdaTimeout)
return result
}
}

@@ -5,2 +5,13 @@ 'use strict'

/**
 * Register the AWS Lambda hook unless the user disabled the `lambda`
 * instrumentation via the DD_TRACE_DISABLED_INSTRUMENTATIONS env var.
 *
 * It is safe to do it this way, since customers will never be expected to
 * disable this specific instrumentation through the init config object.
 */
const _DD_TRACE_DISABLED_INSTRUMENTATIONS = process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS || ''
const _disabledInstrumentations = new Set(
  // Trim entries so values like "foo, lambda" are still honored.
  _DD_TRACE_DISABLED_INSTRUMENTATIONS
    ? _DD_TRACE_DISABLED_INSTRUMENTATIONS.split(',').map(name => name.trim())
    : []
)
if (!_disabledInstrumentations.has('lambda')) {
  registerLambdaHook()
}

@@ -6,42 +6,67 @@ 'use strict'

// Numeric log levels, based on: https://github.com/trentm/node-bunyan#levels
// `trace` shares debug's weight and `critical` shares error's; `off` is above
// every channel so nothing subscribes.
const Level = {
  trace: 20,
  debug: 20,
  info: 30,
  warn: 40,
  error: 50,
  critical: 50,
  off: 100
}

// One diagnostics channel per log level.
const debugChannel = channel('datadog:log:debug')
const infoChannel = channel('datadog:log:info')
const warnChannel = channel('datadog:log:warn')
const errorChannel = channel('datadog:log:error')

const defaultLevel = Level.debug
/**
 * Resolves a level name (e.g. 'warn', ' INFO ') to its numeric weight.
 * Non-string, empty, or unknown input falls back to the default level.
 */
function getChannelLogLevel (level) {
  if (typeof level !== 'string' || level === '') {
    return defaultLevel
  }
  const normalized = level.toLowerCase().trim()
  return Level[normalized] || defaultLevel
}
/**
 * Wires a logger's methods to the per-level diagnostics channels,
 * honoring the configured minimum level.
 */
class LogChannel {
  constructor (level) {
    this._level = getChannelLogLevel(level)
  }

  // Subscribe only the channels whose weight is at or above this._level.
  subscribe (logger) {
    if (Level.debug >= this._level) {
      debugChannel.subscribe(logger.debug)
    }
    if (Level.info >= this._level) {
      infoChannel.subscribe(logger.info)
    }
    if (Level.warn >= this._level) {
      warnChannel.subscribe(logger.warn)
    }
    if (Level.error >= this._level) {
      errorChannel.subscribe(logger.error)
    }
  }

  // hasSubscribers guards avoid unsubscribing channels that were never
  // subscribed (e.g. when the level excluded them at subscribe time).
  unsubscribe (logger) {
    if (debugChannel.hasSubscribers) {
      debugChannel.unsubscribe(logger.debug)
    }
    if (infoChannel.hasSubscribers) {
      infoChannel.unsubscribe(logger.info)
    }
    if (warnChannel.hasSubscribers) {
      warnChannel.unsubscribe(logger.warn)
    }
    if (errorChannel.hasSubscribers) {
      errorChannel.unsubscribe(logger.error)
    }
  }
}
module.exports = {
Level,
getChannelLogLevel,
LogChannel,
debugChannel: logChannels[Level.Debug],
infoChannel: logChannels[Level.Info],
warnChannel: logChannels[Level.Warn],
errorChannel: logChannels[Level.Error]
debugChannel,
infoChannel,
warnChannel,
errorChannel
}
'use strict'
const coalesce = require('koalas')
const { isTrue } = require('../util')
const { debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')

@@ -23,4 +25,18 @@ const logWriter = require('./writer')

const config = {
enabled: false,
logger: undefined,
logLevel: 'debug'
}
const log = {
/**
* @returns Read-only version of logging config. To modify config, call `log.use` and `log.toggle`
*/
getConfig () {
return { ...config }
},
use (logger) {
config.logger = logger
logWriter.use(logger)

@@ -31,2 +47,4 @@ return this

toggle (enabled, logLevel) {
config.enabled = enabled
config.logLevel = logLevel
logWriter.toggle(enabled, logLevel)

@@ -81,2 +99,16 @@ return this

const enabled = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
process.env.OTEL_LOG_LEVEL === 'debug',
config.enabled
))
const logLevel = coalesce(
process.env.DD_TRACE_LOG_LEVEL,
process.env.OTEL_LOG_LEVEL,
config.logLevel
)
log.toggle(enabled, logLevel)
module.exports = log
'use strict'
const { storage } = require('../../../datadog-core')
const { getChannelLogLevel, debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')
const { LogChannel } = require('./channels')
const defaultLogger = {

@@ -15,3 +14,3 @@ debug: msg => console.debug(msg), /* eslint-disable-line no-console */

let logger = defaultLogger
let logLevel = getChannelLogLevel()
let logChannel = new LogChannel()

@@ -27,32 +26,11 @@ function withNoop (fn) {

function unsubscribeAll () {
if (debugChannel.hasSubscribers) {
debugChannel.unsubscribe(onDebug)
}
if (infoChannel.hasSubscribers) {
infoChannel.unsubscribe(onInfo)
}
if (warnChannel.hasSubscribers) {
warnChannel.unsubscribe(onWarn)
}
if (errorChannel.hasSubscribers) {
errorChannel.unsubscribe(onError)
}
logChannel.unsubscribe({ debug, info, warn, error })
}
function toggleSubscription (enable) {
function toggleSubscription (enable, level) {
unsubscribeAll()
if (enable) {
if (debugChannel.logLevel >= logLevel) {
debugChannel.subscribe(onDebug)
}
if (infoChannel.logLevel >= logLevel) {
infoChannel.subscribe(onInfo)
}
if (warnChannel.logLevel >= logLevel) {
warnChannel.subscribe(onWarn)
}
if (errorChannel.logLevel >= logLevel) {
errorChannel.subscribe(onError)
}
logChannel = new LogChannel(level)
logChannel.subscribe({ debug, info, warn, error })
}

@@ -62,7 +40,4 @@ }

function toggle (enable, level) {
if (level !== undefined) {
logLevel = getChannelLogLevel(level)
}
enabled = enable
toggleSubscription(enabled)
toggleSubscription(enabled, level)
}

@@ -79,22 +54,5 @@

enabled = false
logLevel = getChannelLogLevel()
toggleSubscription(false)
}
function onError (err) {
if (enabled) error(err)
}
function onWarn (message) {
if (enabled) warn(message)
}
function onInfo (message) {
if (enabled) info(message)
}
function onDebug (message) {
if (enabled) debug(message)
}
function error (err) {

@@ -101,0 +59,0 @@ if (typeof err !== 'object' || !err) {

@@ -5,2 +5,3 @@ 'use strict'

const { trace, ROOT_CONTEXT } = require('@opentelemetry/api')
const DataDogSpanContext = require('../opentracing/span_context')

@@ -10,31 +11,2 @@ const SpanContext = require('./span_context')

// Horrible hack to acquire the otherwise inaccessible SPAN_KEY so we can redirect it...
// This is used for getting the current span context in OpenTelemetry, but the SPAN_KEY value is
// not exposed as it's meant to be read-only from outside the module. We want to hijack this logic
// so we can instead get the span context from the datadog context manager instead.
let SPAN_KEY
trace.getSpan({
getValue (key) {
SPAN_KEY = key
}
})
// Whenever a value is acquired from the context map we should mostly delegate to the real getter,
// but when accessing the current span we should hijack that access to instead provide a fake span
// which we can use to get an OTel span context wrapping the datadog active scope span context.
function wrappedGetValue (target) {
return (key) => {
if (key === SPAN_KEY) {
return {
spanContext () {
const activeSpan = tracer.scope().active()
const context = activeSpan && activeSpan.context()
return new SpanContext(context)
}
}
}
return target.getValue(key)
}
}
class ContextManager {

@@ -46,9 +18,18 @@ constructor () {

active () {
const active = this._store.getStore() || ROOT_CONTEXT
const activeSpan = tracer.scope().active()
const store = this._store.getStore()
const context = (activeSpan && activeSpan.context()) || store || ROOT_CONTEXT
return new Proxy(active, {
get (target, key) {
return key === 'getValue' ? wrappedGetValue(target) : target[key]
}
})
if (!(context instanceof DataDogSpanContext)) {
return context
}
if (!context._otelSpanContext) {
const newSpanContext = new SpanContext(context)
context._otelSpanContext = newSpanContext
}
if (store && trace.getSpanContext(store) === context._otelSpanContext) {
return store
}
return trace.setSpanContext(store || ROOT_CONTEXT, context._otelSpanContext)
}

@@ -59,6 +40,10 @@

const ddScope = tracer.scope()
return ddScope.activate(span._ddSpan, () => {
const run = () => {
const cb = thisArg == null ? fn : fn.bind(thisArg)
return this._store.run(context, cb, ...args)
})
}
if (span && span._ddSpan) {
return ddScope.activate(span._ddSpan, run)
}
return run()
}

@@ -73,7 +58,5 @@

// Not part of the spec but the Node.js API expects these
enable () {}
disable () {}
}
module.exports = ContextManager

@@ -27,7 +27,7 @@ 'use strict'

get traceId () {
return this._ddContext._traceId.toString(16)
return this._ddContext.toTraceId(true)
}
get spanId () {
return this._ddContext._spanId.toString(16)
return this._ddContext.toSpanId(true)
}

@@ -34,0 +34,0 @@

@@ -23,2 +23,16 @@ 'use strict'

/**
 * Returns true when `startTime` is a valid OTel TimeInput: an epoch number,
 * a Date, or an hrtime tuple `[seconds, nanoseconds]` of two numbers.
 */
function isTimeInput (startTime) {
  if (typeof startTime === 'number' || startTime instanceof Date) {
    return true
  }
  return Array.isArray(startTime) &&
    startTime.length === 2 &&
    startTime.every(part => typeof part === 'number')
}
const spanKindNames = {

@@ -183,2 +197,6 @@ [api.SpanKind.INTERNAL]: kinds.INTERNAL,

setAttribute (key, value) {
if (key === 'http.response.status_code') {
this._ddSpan.setTag('http.status_code', value.toString())
}
this._ddSpan.setTag(key, value)

@@ -189,2 +207,6 @@ return this

setAttributes (attributes) {
if ('http.response.status_code' in attributes) {
attributes['http.status_code'] = attributes['http.response.status_code'].toString()
}
this._ddSpan.addTags(attributes)

@@ -194,7 +216,2 @@ return this

addEvent (name, attributesOrStartTime, startTime) {
api.diag.warn('Events not supported')
return this
}
addLink (context, attributes) {

@@ -243,8 +260,25 @@ // extract dd context

recordException (exception) {
addEvent (name, attributesOrStartTime, startTime) {
startTime = attributesOrStartTime && isTimeInput(attributesOrStartTime) ? attributesOrStartTime : startTime
const hrStartTime = timeInputToHrTime(startTime || (performance.now() + timeOrigin))
startTime = hrTimeToMilliseconds(hrStartTime)
this._ddSpan.addEvent(name, attributesOrStartTime, startTime)
return this
}
recordException (exception, timeInput) {
// HACK: identifier is added so that trace.error remains unchanged after a call to otel.recordException
this._ddSpan.addTags({
[ERROR_TYPE]: exception.name,
[ERROR_MESSAGE]: exception.message,
[ERROR_STACK]: exception.stack
[ERROR_STACK]: exception.stack,
doNotSetTraceError: true
})
const attributes = {}
if (exception.message) attributes['exception.message'] = exception.message
if (exception.type) attributes['exception.type'] = exception.type
if (exception.escaped) attributes['exception.escaped'] = exception.escaped
if (exception.stack) attributes['exception.stacktrace'] = exception.stack
this.addEvent(exception.name, attributes, timeInput)
}

@@ -257,3 +291,3 @@

get ended () {
return typeof this.duration !== 'undefined'
return this.duration !== undefined
}

@@ -260,0 +294,0 @@ }

'use strict'
const { trace, context } = require('@opentelemetry/api')
const { trace, context, propagation } = require('@opentelemetry/api')
const { W3CTraceContextPropagator } = require('@opentelemetry/core')

@@ -55,2 +56,9 @@ const tracer = require('../../')

}
// The default propagator used is the W3C Trace Context propagator, users should be able to pass in others
// as needed
if (config.propagator) {
propagation.setGlobalPropagator(config.propagator)
} else {
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
}
}

@@ -57,0 +65,0 @@

@@ -10,2 +10,3 @@ 'use strict'

const SpanContext = require('./span_context')
const TextMapPropagator = require('../opentracing/propagation/text_map')

@@ -19,2 +20,3 @@ class Tracer {

this.instrumentationLibrary = library
this._spanLimits = {}
}

@@ -26,2 +28,20 @@

// Creates a child Datadog span context from an in-process parent context:
// inherits trace id, sampling decision, baggage (copied), trace object and
// tracestate, and mints a fresh span id with the parent's id as parentId.
_createSpanContextFromParent (parentSpanContext) {
return new SpanContext({
traceId: parentSpanContext._traceId,
spanId: id(),
parentId: parentSpanContext._spanId,
sampling: parentSpanContext._sampling,
// Copy so the child cannot mutate the parent's baggage.
baggageItems: Object.assign({}, parentSpanContext._baggageItems),
trace: parentSpanContext._trace,
tracestate: parentSpanContext._tracestate
})
}
// Creates a span context when the parent is a plain OTel span context (no
// _ddContext): converts the OTel trace/span ids, trace flags and tracestate
// into a Datadog span context via the text-map propagator helper.
_createSpanContextForNewSpan (context) {
const { traceId, spanId, traceFlags, traceState } = context
return TextMapPropagator._convertOtelContextToDatadog(traceId, spanId, traceFlags, traceState)
}
startSpan (name, options = {}, context = api.context.active()) {

@@ -34,17 +54,7 @@ // remove span from context in case a root span is requested via options

const parentSpanContext = parentSpan && parentSpan.spanContext()
let spanContext
// TODO: Need a way to get 128-bit trace IDs for the validity check API to work...
// if (parent && api.trace.isSpanContextValid(parent)) {
if (parentSpanContext && parentSpanContext.traceId) {
const parent = parentSpanContext._ddContext
spanContext = new SpanContext({
traceId: parent._traceId,
spanId: id(),
parentId: parent._spanId,
sampling: parent._sampling,
baggageItems: Object.assign({}, parent._baggageItems),
trace: parent._trace,
tracestate: parent._tracestate
})
if (parentSpanContext && api.trace.isSpanContextValid(parentSpanContext)) {
spanContext = parentSpanContext._ddContext
? this._createSpanContextFromParent(parentSpanContext._ddContext)
: this._createSpanContextForNewSpan(parentSpanContext)
} else {

@@ -125,4 +135,9 @@ spanContext = new SpanContext()

}
// not used in our codebase but needed for compatibility. See issue #1244
getSpanLimits () {
return this._spanLimits
}
}
module.exports = Tracer

@@ -18,3 +18,3 @@ 'use strict'

if (this._config.traceId128BitLoggingEnabled && spanContext._trace.tags['_dd.p.tid']) {
carrier.dd.trace_id = spanContext._trace.tags['_dd.p.tid'] + spanContext._traceId.toString(16)
carrier.dd.trace_id = spanContext.toTraceId(true)
} else {

@@ -21,0 +21,0 @@ carrier.dd.trace_id = spanContext.toTraceId()

@@ -6,7 +6,13 @@ 'use strict'

const DatadogSpanContext = require('../span_context')
const OtelSpanContext = require('../../opentelemetry/span_context')
const log = require('../../log')
const TraceState = require('./tracestate')
const tags = require('../../../../../ext/tags')
const { channel } = require('dc-polyfill')
const { AUTO_KEEP, AUTO_REJECT, USER_KEEP } = require('../../../../../ext/priority')
const injectCh = channel('dd-trace:span:inject')
const extractCh = channel('dd-trace:span:extract')
const traceKey = 'x-datadog-trace-id'

@@ -43,2 +49,3 @@ const spanKey = 'x-datadog-parent-id'

const invalidSegment = /^0+$/
const zeroTraceId = '0000000000000000'

@@ -57,2 +64,6 @@ class TextMapPropagator {

if (injectCh.hasSubscribers) {
injectCh.publish({ spanContext, carrier })
}
log.debug(() => `Inject into carrier: ${JSON.stringify(pick(carrier, logKeys))}.`)

@@ -66,2 +77,6 @@ }

if (extractCh.hasSubscribers) {
extractCh.publish({ spanContext, carrier })
}
log.debug(() => `Extract from carrier: ${JSON.stringify(pick(carrier, logKeys))}.`)

@@ -182,5 +197,5 @@

state.set('p', spanContext._spanId)
} else if (spanContext._trace.tags['_dd.parent_id']) {
} else if (spanContext._trace.tags[tags.DD_PARENT_ID]) {
// Propagate the last Datadog span id set on the remote span.
state.set('p', spanContext._trace.tags['_dd.parent_id'])
state.set('p', spanContext._trace.tags[tags.DD_PARENT_ID])
}

@@ -222,5 +237,51 @@ state.set('s', priority)

// True when the W3C tracecontext extraction and the first-extracted context
// agree on the 128-bit trace id but disagree on the span id — i.e. two
// propagation styles point at different parent spans of the same trace.
_hasTraceIdConflict (w3cSpanContext, firstSpanContext) {
return w3cSpanContext !== null &&
firstSpanContext.toTraceId(true) === w3cSpanContext.toTraceId(true) &&
firstSpanContext.toSpanId() !== w3cSpanContext.toSpanId()
}
// True when the span context carries the `_dd.parent_id` trace tag
// (set from the `p` entry of the Datadog tracestate list member).
_hasParentIdInTags (spanContext) {
return tags.DD_PARENT_ID in spanContext._trace.tags
}
// Copies the span id from the Datadog (x-datadog-*) headers, zero-padded to
// 16 characters, into the first context's `_dd.parent_id` tag so the backend
// can reparent correctly. No-op when no Datadog headers are present.
_updateParentIdFromDdHeaders (carrier, firstSpanContext) {
const ddCtx = this._extractDatadogContext(carrier)
if (ddCtx !== null) {
firstSpanContext._trace.tags[tags.DD_PARENT_ID] = ddCtx._spanId.toString().padStart(16, '0')
}
}
// Reconciles a tracecontext extraction with the first-extracted context when
// they share a trace id but name different span ids: keeps the first context
// object, but adopts the tracecontext span id and records the superseded
// parent id in the `_dd.parent_id` tag.
_resolveTraceContextConflicts (w3cSpanContext, firstSpanContext, carrier) {
if (!this._hasTraceIdConflict(w3cSpanContext, firstSpanContext)) {
return firstSpanContext
}
if (this._hasParentIdInTags(w3cSpanContext)) {
// tracecontext headers contain a p value, ensure this value is sent to backend
firstSpanContext._trace.tags[tags.DD_PARENT_ID] = w3cSpanContext._trace.tags[tags.DD_PARENT_ID]
} else {
// if p value is not present in tracestate, use the parent id from the datadog headers
this._updateParentIdFromDdHeaders(carrier, firstSpanContext)
}
// the span_id in tracecontext takes precedence over the first extracted propagation style
firstSpanContext._spanId = w3cSpanContext._spanId
return firstSpanContext
}
_extractSpanContext (carrier) {
let spanContext = null
for (const extractor of this._config.tracePropagationStyle.extract) {
let spanContext = null
// add logic to ensure tracecontext headers takes precedence over other extracted headers
if (spanContext !== null) {
if (this._config.tracePropagationExtractFirst) {
return spanContext
}
if (extractor !== 'tracecontext') {
continue
}
spanContext = this._resolveTraceContextConflicts(
this._extractTraceparentContext(carrier), spanContext, carrier)
break
}
switch (extractor) {

@@ -233,17 +294,19 @@ case 'datadog':

break
case 'b3': // TODO: should match "b3 single header" in next major
case 'b3' && this
._config
.tracePropagationStyle
.otelPropagators: // TODO: should match "b3 single header" in next major
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
case 'b3':
case 'b3multi':
spanContext = this._extractB3MultiContext(carrier)
break
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
default:
log.warn(`Unknown propagation style: ${extractor}`)
}
if (spanContext !== null) {
return spanContext
}
}
return this._extractSqsdContext(carrier)
return spanContext || this._extractSqsdContext(carrier)
}

@@ -358,3 +421,3 @@

case 'p': {
spanContext._trace.tags['_dd.parent_id'] = value
spanContext._trace.tags[tags.DD_PARENT_ID] = value
break

@@ -392,6 +455,2 @@ }

if (!spanContext._trace.tags['_dd.parent_id']) {
spanContext._trace.tags['_dd.parent_id'] = '0000000000000000'
}
this._extractBaggageItems(carrier, spanContext)

@@ -537,3 +596,3 @@ return spanContext

if (tid === '0000000000000000') return
if (tid === zeroTraceId) return

@@ -568,4 +627,63 @@ spanContext._trace.tags['_dd.p.tid'] = tid

}
// Builds a Datadog span context from raw OTel traceparent parts plus an
// optional tracestate object (`ts`) carrying a `dd` vendor list member.
// The remote span id becomes the parentId; a fresh local span id is minted.
static _convertOtelContextToDatadog (traceId, spanId, traceFlag, ts, meta = {}) {
const origin = null
let samplingPriority = traceFlag
ts = ts?.traceparent || null
if (ts) {
// Use TraceState.fromString to parse the tracestate header
const traceState = TraceState.fromString(ts)
let ddTraceStateData = null
// Capture the Datadog-specific ('dd') tracestate list member, if any.
traceState.forVendor('dd', (state) => {
ddTraceStateData = state
return state // forVendor expects the (possibly updated) state back; we only read it
})
if (ddTraceStateData) {
// ddTraceStateData is Map-like: read sampling ('s') and origin ('o').
const samplingPriorityTs = ddTraceStateData.get('s')
const origin = ddTraceStateData.get('o')
// NOTE(review): this `origin` shadows the outer `const origin = null`, so the
// `_trace = { origin }` assignment below always stores null — confirm intended.
const otherPropagatedTags = Object.fromEntries(ddTraceStateData.entries())
// Propagate every dd tracestate entry as trace meta tags.
Object.assign(meta, otherPropagatedTags)
samplingPriority = TextMapPropagator._getSamplingPriority(traceFlag, parseInt(samplingPriorityTs, 10), origin)
} else {
log.debug(`no dd list member in tracestate from incoming request: ${ts}`)
}
}
const spanContext = new OtelSpanContext({
traceId: id(traceId, 16), spanId: id(), tags: meta, parentId: id(spanId, 16)
})
spanContext._sampling = { priority: samplingPriority }
// NOTE(review): this replaces the `_trace` object created by the constructor —
// verify no constructor-initialized fields are lost; `origin` here is the
// outer null binding, not the tracestate `o` value (see shadowing note above).
spanContext._trace = { origin }
return spanContext
}
// Derives the Datadog sampling priority from the traceparent sampled flag
// (0 or 1) and the optional `s` value of the dd tracestate list member.
// NOTE(review): a tracestateSamplingPriority of 0 (or NaN from parseInt) is
// falsy, so the `!tracestateSamplingPriority` checks treat it like "absent" —
// confirm this matches the intended spec behavior.
static _getSamplingPriority (traceparentSampled, tracestateSamplingPriority, origin = null) {
const fromRumWithoutPriority = !tracestateSamplingPriority && origin === 'rum'
let samplingPriority
// traceparent says "drop" and tracestate does not contradict it (absent or >= 0).
if (!fromRumWithoutPriority && traceparentSampled === 0 &&
(!tracestateSamplingPriority || tracestateSamplingPriority >= 0)) {
samplingPriority = 0
// traceparent says "keep" and tracestate does not contradict it (absent or < 0).
} else if (!fromRumWithoutPriority && traceparentSampled === 1 &&
(!tracestateSamplingPriority || tracestateSamplingPriority < 0)) {
samplingPriority = 1
} else {
// Otherwise defer to the tracestate-provided priority (may be undefined/NaN).
samplingPriority = tracestateSamplingPriority
}
return samplingPriority
}
}
module.exports = TextMapPropagator

@@ -30,2 +30,3 @@ 'use strict'

}
this._otelSpanContext = undefined
}

@@ -32,0 +33,0 @@

@@ -70,2 +70,4 @@ 'use strict'

this._events = []
// For internal use only. You probably want `context()._name`.

@@ -101,3 +103,6 @@ // This name property is not updated when the span name changes.

spanleak.addSpan(this, operationName)
startCh.publish(this)
if (startCh.hasSubscribers) {
startCh.publish({ span: this, fields })
}
}

@@ -168,2 +173,15 @@

// Records a span event. The second argument is overloaded: an object is
// treated as the event's attributes (sanitized to allowed types), anything
// else truthy is treated as the start time. Falls back to the span clock
// (this._getTime()) when no start time is provided.
addEvent (name, attributesOrStartTime, startTime) {
const event = { name }
if (attributesOrStartTime) {
if (typeof attributesOrStartTime === 'object') {
event.attributes = this._sanitizeEventAttributes(attributesOrStartTime)
} else {
startTime = attributesOrStartTime
}
}
event.startTime = startTime || this._getTime()
this._events.push(event)
}
finish (finishTime) {

@@ -227,3 +245,26 @@ if (this._duration !== undefined) {

})
return sanitizedAttributes
}
// Filters event attributes down to values whose typeof is in ALLOWED.
// Arrays are filtered element-wise (disallowed elements are dropped);
// any dropped value logs a warning. Returns a new object.
// NOTE(review): `for...in` over an array iterates index keys and any
// inherited enumerable properties — consider `for...of`; confirm intent.
_sanitizeEventAttributes (attributes = {}) {
const sanitizedAttributes = {}
for (const key in attributes) {
const value = attributes[key]
if (Array.isArray(value)) {
const newArray = []
for (const subkey in value) {
if (ALLOWED.includes(typeof value[subkey])) {
newArray.push(value[subkey])
} else {
log.warn('Dropping span event attribute. It is not of an allowed type')
}
}
sanitizedAttributes[key] = newArray
} else if (ALLOWED.includes(typeof value)) {
sanitizedAttributes[key] = value
} else {
log.warn('Dropping span event attribute. It is not of an allowed type')
}
}
return sanitizedAttributes

@@ -230,0 +271,0 @@ }

@@ -8,2 +8,3 @@ 'use strict'

const TextMapPropagator = require('./propagation/text_map')
const DSMTextMapPropagator = require('./propagation/text_map_dsm')
const HttpPropagator = require('./propagation/http')

@@ -23,3 +24,3 @@ const BinaryPropagator = require('./propagation/binary')

class DatadogTracer {
constructor (config) {
constructor (config, prioritySampler) {
const Exporter = getExporter(config.experimental.exporter)

@@ -33,3 +34,3 @@

this._debug = config.debug
this._prioritySampler = new PrioritySampler(config.env, config.sampler)
this._prioritySampler = prioritySampler ?? new PrioritySampler(config.env, config.sampler)
this._exporter = new Exporter(config, this._prioritySampler)

@@ -44,3 +45,4 @@ this._processor = new SpanProcessor(this._exporter, this._prioritySampler, config)

[formats.BINARY]: new BinaryPropagator(config),
[formats.LOG]: new LogPropagator(config)
[formats.LOG]: new LogPropagator(config),
[formats.TEXT_MAP_DSM]: new DSMTextMapPropagator(config)
}

@@ -78,10 +80,12 @@ if (config.reportHostname) {

inject (spanContext, format, carrier) {
if (spanContext instanceof Span) {
spanContext = spanContext.context()
inject (context, format, carrier) {
if (context instanceof Span) {
context = context.context()
}
try {
this._prioritySampler.sample(spanContext)
this._propagators[format].inject(spanContext, carrier)
if (format !== 'text_map_dsm') {
this._prioritySampler.sample(context)
}
this._propagators[format].inject(context, carrier)
} catch (e) {

@@ -88,0 +92,0 @@ log.error(e)

@@ -139,6 +139,15 @@ 'use strict'

clientIpEnabled,
memcachedCommandEnabled
memcachedCommandEnabled,
ciVisibilitySessionName
} = this._tracerConfig
const sharedConfig = {}
const sharedConfig = {
dbmPropagationMode,
dsmEnabled,
memcachedCommandEnabled,
site,
url,
headers: headerTags || [],
ciVisibilitySessionName
}

@@ -153,6 +162,2 @@ if (logInjection !== undefined) {

sharedConfig.dbmPropagationMode = dbmPropagationMode
sharedConfig.dsmEnabled = dsmEnabled
sharedConfig.memcachedCommandEnabled = memcachedCommandEnabled
if (serviceMapping && serviceMapping[name]) {

@@ -166,8 +171,4 @@ sharedConfig.service = serviceMapping[name]

sharedConfig.site = site
sharedConfig.url = url
sharedConfig.headers = headerTags || []
return sharedConfig
}
}
const {
getTestEnvironmentMetadata,
getTestSessionName,
getCodeOwnersFileEntries,

@@ -16,6 +17,9 @@ getTestParentSpan,

TEST_MODULE,
TEST_SESSION_NAME,
getTestSuiteCommonTags,
TEST_STATUS,
TEST_SKIPPED_BY_ITR,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE,
TEST_LEVEL_EVENT_TYPES
} = require('./util/test')

@@ -78,2 +82,15 @@ const Plugin = require('./plugin')

const testSessionName = getTestSessionName(this.config, this.command, this.testEnvironmentMetadata)
const metadataTags = {}
for (const testLevel of TEST_LEVEL_EVENT_TYPES) {
metadataTags[testLevel] = {
[TEST_SESSION_NAME]: testSessionName
}
}
// tracer might not be initialized correctly
if (this.tracer._exporter.setMetadataTags) {
this.tracer._exporter.setMetadataTags(metadataTags)
}
this.testSessionSpan = this.tracer.startSpan(`${this.constructor.id}.test_session`, {

@@ -96,2 +113,10 @@ childOf,

})
// only for vitest
// These are added for the worker threads to use
if (this.constructor.id === 'vitest') {
process.env.DD_CIVISIBILITY_TEST_SESSION_ID = this.testSessionSpan.context().toTraceId()
process.env.DD_CIVISIBILITY_TEST_MODULE_ID = this.testModuleSpan.context().toSpanId()
process.env.DD_CIVISIBILITY_TEST_COMMAND = this.command
}
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'module')

@@ -143,3 +168,3 @@ })

testFramework,
isUnsupportedCIProvider: this.isUnsupportedCIProvider,
isUnsupportedCIProvider: !this.ciProviderName,
...tags

@@ -178,3 +203,3 @@ })

this.isUnsupportedCIProvider = !ciProviderName
this.ciProviderName = ciProviderName

@@ -208,3 +233,9 @@ this.testConfiguration = {

const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
const { [TEST_SOURCE_FILE]: testSourceFile } = extraTags
// We'll try with the test source file if available (it could be different from the test suite)
let codeOwners = getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
if (!codeOwners) {
codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
}
if (codeOwners) {

@@ -211,0 +242,0 @@ testTags[TEST_CODE_OWNERS] = codeOwners

'use strict'
const StoragePlugin = require('./storage')
const { PEER_SERVICE_KEY } = require('../constants')
const { PEER_SERVICE_KEY, PEER_SERVICE_SOURCE_KEY } = require('../constants')

@@ -31,3 +31,3 @@ class DatabasePlugin extends StoragePlugin {

createDBMPropagationCommentService (serviceName) {
createDBMPropagationCommentService (serviceName, span) {
this.encodingServiceTags('dddbs', 'encodedDddbs', serviceName)

@@ -37,7 +37,22 @@ this.encodingServiceTags('dde', 'encodedDde', this.tracer._env)

this.encodingServiceTags('ddpv', 'encodedDdpv', this.tracer._version)
if (span.context()._tags['out.host']) {
this.encodingServiceTags('ddh', 'encodedDdh', span._spanContext._tags['out.host'])
}
if (span.context()._tags['db.name']) {
this.encodingServiceTags('dddb', 'encodedDddb', span._spanContext._tags['db.name'])
}
const { encodedDddbs, encodedDde, encodedDdps, encodedDdpv } = this.serviceTags
const { encodedDddb, encodedDddbs, encodedDde, encodedDdh, encodedDdps, encodedDdpv } = this.serviceTags
return `dddbs='${encodedDddbs}',dde='${encodedDde}',` +
let dbmComment = `dddb='${encodedDddb}',dddbs='${encodedDddbs}',dde='${encodedDde}',ddh='${encodedDdh}',` +
`ddps='${encodedDdps}',ddpv='${encodedDdpv}'`
const peerData = this.getPeerService(span.context()._tags)
if (peerData !== undefined && peerData[PEER_SERVICE_SOURCE_KEY] === PEER_SERVICE_KEY) {
this.encodingServiceTags('ddprs', 'encodedDdprs', peerData[PEER_SERVICE_KEY])
const { encodedDdprs } = this.serviceTags
dbmComment += `,ddprs='${encodedDdprs}'`
}
return dbmComment
}

@@ -61,3 +76,3 @@

const servicePropagation = this.createDBMPropagationCommentService(dbmService)
const servicePropagation = this.createDBMPropagationCommentService(dbmService, span)

@@ -64,0 +79,0 @@ if (isPreparedStatement || mode === 'service') {

@@ -21,2 +21,3 @@ 'use strict'

get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
get '@vitest/runner' () { return require('../../../datadog-plugin-vitest/src') },
get aerospike () { return require('../../../datadog-plugin-aerospike/src') },

@@ -48,3 +49,2 @@ get amqp10 () { return require('../../../datadog-plugin-amqp10/src') },

get 'jest-environment-jsdom' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-jasmine2' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-runtime' () { return require('../../../datadog-plugin-jest/src') },

@@ -60,2 +60,4 @@ get 'jest-worker' () { return require('../../../datadog-plugin-jest/src') },

get 'mocha-each' () { return require('../../../datadog-plugin-mocha/src') },
get vitest () { return require('../../../datadog-plugin-vitest/src') },
get workerpool () { return require('../../../datadog-plugin-mocha/src') },
get moleculer () { return require('../../../datadog-plugin-moleculer/src') },

@@ -73,2 +75,3 @@ get mongodb () { return require('../../../datadog-plugin-mongodb-core/src') },

get 'node:net' () { return require('../../../datadog-plugin-net/src') },
get nyc () { return require('../../../datadog-plugin-nyc/src') },
get oracledb () { return require('../../../datadog-plugin-oracledb/src') },

@@ -88,3 +91,4 @@ get openai () { return require('../../../datadog-plugin-openai/src') },

get tedious () { return require('../../../datadog-plugin-tedious/src') },
get undici () { return require('../../../datadog-plugin-undici/src') },
get winston () { return require('../../../datadog-plugin-winston/src') }
}

@@ -6,2 +6,3 @@ 'use strict'

const dc = require('dc-polyfill')
const logger = require('../log')
const { storage } = require('../../../datadog-core')

@@ -76,3 +77,13 @@

addSub (channelName, handler) {
this._subscriptions.push(new Subscription(channelName, handler))
const plugin = this
const wrappedHandler = function () {
try {
return handler.apply(this, arguments)
} catch (e) {
logger.error('Error in plugin handler:', e)
logger.info('Disabling plugin:', plugin.id)
plugin.configure(false)
}
}
this._subscriptions.push(new Subscription(channelName, wrappedHandler))
}

@@ -79,0 +90,0 @@

@@ -80,2 +80,14 @@ const cp = require('child_process')

/**
 * Checks whether a `git` executable can be located on the PATH, using
 * `where` on Windows and `which` elsewhere. On failure, increments the
 * git-command-errors telemetry counter and returns false.
 */
function isGitAvailable () {
  const locator = os.platform() === 'win32' ? 'where' : 'which'
  try {
    cp.execFileSync(locator, ['git'], { stdio: 'pipe' })
    return true
  } catch (e) {
    incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'check_git', exitCode: 'missing' })
    return false
  }
}
function isShallowRepository () {

@@ -346,3 +358,4 @@ return sanitizedExec(

isShallowRepository,
unshallowRepository
unshallowRepository,
isGitAvailable
}
'use strict'
const BlockList = require('./ip_blocklist')
const { BlockList } = require('net')
const net = require('net')

@@ -5,0 +5,0 @@

@@ -22,3 +22,4 @@ const path = require('path')

CI_WORKSPACE_PATH,
CI_PIPELINE_URL
CI_PIPELINE_URL,
CI_JOB_NAME
} = require('./tags')

@@ -32,2 +33,5 @@ const id = require('../../id')

// session tags
const TEST_SESSION_NAME = 'test_session.name'
const TEST_FRAMEWORK = 'test.framework'

@@ -66,2 +70,5 @@ const TEST_FRAMEWORK_VERSION = 'test.framework_version'

const CUCUMBER_IS_PARALLEL = 'test.cucumber.is_parallel'
const MOCHA_IS_PARALLEL = 'test.mocha.is_parallel'
const TEST_ITR_TESTS_SKIPPED = '_dd.ci.itr.tests_skipped'

@@ -88,2 +95,8 @@ const TEST_ITR_SKIPPING_ENABLED = 'test.itr.tests_skipping.enabled'

// cucumber worker variables
const CUCUMBER_WORKER_TRACE_PAYLOAD_CODE = 70
// mocha worker variables
const MOCHA_WORKER_TRACE_PAYLOAD_CODE = 80
// Early flake detection util strings

@@ -93,4 +106,12 @@ const EFD_STRING = "Retried by Datadog's Early Flake Detection"

const TEST_LEVEL_EVENT_TYPES = [
'test',
'test_suite_end',
'test_module_end',
'test_session_end'
]
module.exports = {
TEST_CODE_OWNERS,
TEST_SESSION_NAME,
TEST_FRAMEWORK,

@@ -100,2 +121,4 @@ TEST_FRAMEWORK_VERSION,

JEST_DISPLAY_NAME,
CUCUMBER_IS_PARALLEL,
MOCHA_IS_PARALLEL,
TEST_TYPE,

@@ -113,2 +136,4 @@ TEST_NAME,

JEST_WORKER_COVERAGE_PAYLOAD_CODE,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE,
MOCHA_WORKER_TRACE_PAYLOAD_CODE,
TEST_SOURCE_START,

@@ -164,3 +189,5 @@ TEST_SKIPPED_BY_ITR,

TEST_BROWSER_NAME,
TEST_BROWSER_VERSION
TEST_BROWSER_VERSION,
getTestSessionName,
TEST_LEVEL_EVENT_TYPES
}

@@ -613,1 +640,11 @@

}
// Resolves the display name for a test session.
// Precedence: an explicit session name from config wins, then
// "<CI job name>-<test command>", falling back to the raw test command.
function getTestSessionName (config, testCommand, envTags) {
  const explicitName = config.ciVisibilitySessionName
  if (explicitName) return explicitName

  const jobName = envTags[CI_JOB_NAME]
  return jobName ? `${jobName}-${testCommand}` : testCommand
}

@@ -7,2 +7,3 @@ 'use strict'

const SamplingRule = require('./sampling_rule')
const { hasOwn } = require('./util')

@@ -14,2 +15,4 @@ const {

SAMPLING_MECHANISM_MANUAL,
SAMPLING_MECHANISM_REMOTE_USER,
SAMPLING_MECHANISM_REMOTE_DYNAMIC,
SAMPLING_RULE_DECISION,

@@ -46,5 +49,5 @@ SAMPLING_LIMIT_DECISION,

configure (env, { sampleRate, rateLimit = 100, rules = [] } = {}) {
configure (env, { sampleRate, provenance = undefined, rateLimit = 100, rules = [] } = {}) {
this._env = env
this._rules = this._normalizeRules(rules, sampleRate, rateLimit)
this._rules = this._normalizeRules(rules, sampleRate, rateLimit, provenance)
this._limiter = new RateLimiter(rateLimit)

@@ -70,3 +73,3 @@

const tag = this._getPriorityFromTags(context._tags)
const tag = this._getPriorityFromTags(context._tags, context)

@@ -144,2 +147,4 @@ if (this.validate(tag)) {

context._sampling.mechanism = SAMPLING_MECHANISM_RULE
if (rule.provenance === 'customer') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_USER
if (rule.provenance === 'dynamic') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_DYNAMIC

@@ -189,7 +194,7 @@ return rule.sample() && this._isSampledByRateLimit(context)

_normalizeRules (rules, sampleRate, rateLimit) {
_normalizeRules (rules, sampleRate, rateLimit, provenance) {
rules = [].concat(rules || [])
return rules
.concat({ sampleRate, maxPerSecond: rateLimit })
.concat({ sampleRate, maxPerSecond: rateLimit, provenance })
.map(rule => ({ ...rule, sampleRate: parseFloat(rule.sampleRate) }))

@@ -207,6 +212,2 @@ .filter(rule => !isNaN(rule.sampleRate))

// True when `prop` is an own (non-inherited) property of `object`,
// regardless of any hasOwnProperty override on the object itself.
function hasOwn (object, prop) {
  return Object.hasOwn(object, prop)
}
module.exports = PrioritySampler

@@ -11,3 +11,3 @@ 'use strict'

start: config => {
const { service, version, env, url, hostname, port, tags, repositoryUrl, commitSHA } = config
const { service, version, env, url, hostname, port, tags, repositoryUrl, commitSHA, injectionEnabled } = config
const { enabled, sourceMap, exporters } = config.profiling

@@ -21,4 +21,13 @@ const logger = {

const libraryInjected = injectionEnabled.length > 0
let activation
if (enabled === 'auto') {
activation = 'auto'
} else if (enabled === 'true') {
activation = 'manual'
} else if (injectionEnabled.includes('profiler')) {
activation = 'injection'
} // else activation = undefined
return profiler.start({
enabled,
service,

@@ -35,3 +44,5 @@ version,

repositoryUrl,
commitSHA
commitSHA,
libraryInjected,
activation
})

@@ -38,0 +49,0 @@ },

@@ -26,3 +26,2 @@ 'use strict'

DD_PROFILING_DEBUG_SOURCE_MAPS,
DD_PROFILING_ENABLED,
DD_PROFILING_ENDPOINT_COLLECTION_ENABLED,

@@ -53,3 +52,2 @@ DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED,

const enabled = isTrue(coalesce(options.enabled, DD_PROFILING_ENABLED, true))
const env = coalesce(options.env, DD_ENV)

@@ -69,3 +67,2 @@ const service = options.service || DD_SERVICE || 'node'

this.enabled = enabled
this.service = service

@@ -106,2 +103,7 @@ this.env = env

if (option && !condition) {
// injection hardening: all of these can only happen if user explicitly
// sets an environment variable to its non-default value on the platform.
// In practical terms, it'd require someone explicitly turning on OOM
// monitoring, code hotspots, endpoint profiling, or CPU profiling on
// Windows, where it is not supported.
throw new Error(`${description} not supported on ${process.platform}.`)

@@ -135,2 +137,4 @@ }

this.libraryInjected = options.libraryInjected
this.activation = options.activation
this.exporters = ensureExporters(options.exporters || [

@@ -137,0 +141,0 @@ new AgentExporter(this)

@@ -19,6 +19,18 @@ 'use strict'

} else {
const injectionEnabled = (process.env.DD_INJECTION_ENABLED || '').split(',')
const libraryInjected = injectionEnabled.length > 0
const profilingEnabled = (process.env.DD_PROFILING_ENABLED || '').toLowerCase()
const activation = ['true', '1'].includes(profilingEnabled)
? 'manual'
: profilingEnabled === 'auto'
? 'auto'
: injectionEnabled.includes('profiling')
? 'injection'
: 'unknown'
return new AgentExporter({
url,
logger,
uploadTimeout: timeoutMs
uploadTimeout: timeoutMs,
libraryInjected,
activation
})

@@ -25,0 +37,0 @@ }

@@ -56,3 +56,3 @@ 'use strict'

class AgentExporter {
constructor ({ url, logger, uploadTimeout, env, host, service, version } = {}) {
constructor ({ url, logger, uploadTimeout, env, host, service, version, libraryInjected, activation } = {}) {
this._url = url

@@ -69,2 +69,4 @@ this._logger = logger

this._appVersion = version
this._libraryInjected = !!libraryInjected
this._activation = activation || 'unknown'
}

@@ -110,2 +112,6 @@

profiler: {
activation: this._activation,
ssi: {
mechanism: this._libraryInjected ? 'injected_agent' : 'none'
},
version

@@ -112,0 +118,0 @@ },

@@ -18,2 +18,8 @@ 'use strict'

// Forwards an error to the given logger; a missing/falsy logger is a no-op.
function logError (logger, err) {
  if (!logger) return
  logger.error(err)
}
class Profiler extends EventEmitter {

@@ -32,5 +38,3 @@ constructor () {

return this._start(options).catch((err) => {
if (options.logger) {
options.logger.error(err)
}
logError(options.logger, err)
return false

@@ -40,2 +44,6 @@ })

_logError (err) {
logError(this._logger, err)
}
async _start (options) {

@@ -45,3 +53,2 @@ if (this._enabled) return true

const config = this._config = new Config(options)
if (!config.enabled) return false

@@ -60,3 +67,3 @@ this._logger = config.logger

mapper = await maybeSourceMap(config.sourceMap, SourceMapper, config.debugSourceMaps)
if (config.SourceMap && config.debugSourceMaps) {
if (config.sourceMap && config.debugSourceMaps) {
this._logger.debug(() => {

@@ -69,3 +76,3 @@ return mapper.infoMap.size === 0

} catch (err) {
this._logger.error(err)
this._logError(err)
}

@@ -87,3 +94,3 @@

} catch (e) {
this._logger.error(e)
this._logError(e)
this._stop()

@@ -177,3 +184,3 @@ return false

} catch (err) {
this._logger.error(err)
this._logError(err)
this._stop()

@@ -193,3 +200,3 @@ }

const task = exporter.export({ profiles, start, end, tags })
.catch(err => this._logger.error(err))
.catch(err => this._logError(err))

@@ -196,0 +203,0 @@ tasks.push(task)

@@ -15,12 +15,7 @@ 'use strict'

function scheduleProfileSubmit () {
timerId = setTimeout(emitProfileSubmit, flushInterval)
}
function emitProfileSubmit () {
timerId = setTimeout(() => {
profileSubmittedChannel.publish()
scheduleProfileSubmit()
}
scheduleProfileSubmit()
timerId.refresh()
}, flushInterval)
timerId.unref()
},

@@ -27,0 +22,0 @@

@@ -16,4 +16,20 @@ 'use strict'

const spanleak = require('./spanleak')
const { SSITelemetry } = require('./profiling/ssi-telemetry')
const { SSIHeuristics } = require('./profiling/ssi-heuristics')
const appsecStandalone = require('./appsec/standalone')
// Defers loading a module until it is first enabled, so optional
// subsystems cost nothing unless actually turned on. The loaded module
// is exposed as `this.module` for callers that need direct access.
class LazyModule {
  constructor (provider) {
    this.provider = provider
  }

  // Loads (or reloads) the underlying module, then enables it with the
  // given arguments.
  enable (...args) {
    const loaded = this.provider()
    this.module = loaded
    loaded.enable(...args)
  }

  // No-op when the module was never loaded.
  disable () {
    this.module?.disable()
  }
}
class Tracer extends NoopProxy {

@@ -28,2 +44,9 @@ constructor () {

this._tracingInitialized = false
this._flare = new LazyModule(() => require('./flare'))
// these requires must work with esm bundler
this._modules = {
appsec: new LazyModule(() => require('./appsec')),
iast: new LazyModule(() => require('./appsec/iast'))
}
}

@@ -63,5 +86,5 @@

if (config.remoteConfig.enabled && !config.isCiVisibility) {
const rc = remoteConfig.enable(config)
const rc = remoteConfig.enable(config, this._modules.appsec)
rc.on('APM_TRACING', (action, conf) => {
rc.setProductHandler('APM_TRACING', (action, conf) => {
if (action === 'unapply') {

@@ -74,24 +97,54 @@ config.configure({}, true)

})
rc.setProductHandler('AGENT_CONFIG', (action, conf) => {
if (!conf?.name?.startsWith('flare-log-level.')) return
if (action === 'unapply') {
this._flare.disable()
} else if (conf.config?.log_level) {
this._flare.enable(config)
this._flare.module.prepare(conf.config.log_level)
}
})
rc.setProductHandler('AGENT_TASK', (action, conf) => {
if (action === 'unapply' || !conf) return
if (conf.task_type !== 'tracer_flare' || !conf.args) return
this._flare.enable(config)
this._flare.module.send(conf.args)
})
}
if (config.isGCPFunction || config.isAzureFunctionConsumptionPlan) {
if (config.isGCPFunction || config.isAzureFunction) {
require('./serverless').maybeStartServerlessMiniAgent(config)
}
const ssiTelemetry = new SSITelemetry()
ssiTelemetry.start()
if (config.profiling.enabled) {
// do not stop tracer initialization if the profiler fails to be imported
try {
const profiler = require('./profiler')
this._profilerStarted = profiler.start(config)
} catch (e) {
log.error(e)
if (config.profiling.enabled !== 'false') {
const ssiHeuristics = new SSIHeuristics(config)
ssiHeuristics.start()
let mockProfiler = null
if (config.profiling.enabled === 'true') {
this._profilerStarted = this._startProfiler(config)
} else if (ssiHeuristics.emitsTelemetry) {
// Start a mock profiler that emits mock profile-submitted events for the telemetry.
// It will be stopped if the real profiler is started by the heuristics.
mockProfiler = require('./profiling/ssi-telemetry-mock-profiler')
mockProfiler.start(config)
}
} else if (ssiTelemetry.enabled()) {
require('./profiling/ssi-telemetry-mock-profiler').start(config)
if (ssiHeuristics.heuristicsActive) {
ssiHeuristics.onTriggered(() => {
if (mockProfiler) {
mockProfiler.stop()
}
this._startProfiler(config)
ssiHeuristics.onTriggered() // deregister this callback
})
}
if (!this._profilerStarted) {
this._profilerStarted = Promise.resolve(false)
}
}
if (!this._profilerStarted) {
this._profilerStarted = Promise.resolve(false)
}

@@ -118,10 +171,20 @@ if (config.runtimeMetrics) {

_startProfiler (config) {
// do not stop tracer initialization if the profiler fails to be imported
try {
return require('./profiler').start(config)
} catch (e) {
log.error(e)
}
}
_enableOrDisableTracing (config) {
if (config.tracing !== false) {
// dirty require for now so zero appsec code is executed unless explicitly enabled
if (config.appsec.enabled) {
require('./appsec').enable(config)
this._modules.appsec.enable(config)
}
if (!this._tracingInitialized) {
this._tracer = new DatadogTracer(config)
const prioritySampler = appsecStandalone.configure(config)
this._tracer = new DatadogTracer(config, prioritySampler)
this.dataStreamsCheckpointer = this._tracer.dataStreamsCheckpointer
this.appsec = new AppsecSdk(this._tracer, config)

@@ -131,7 +194,7 @@ this._tracingInitialized = true

if (config.iast.enabled) {
require('./appsec/iast').enable(config, this._tracer)
this._modules.iast.enable(config, this._tracer)
}
} else if (this._tracingInitialized) {
require('./appsec').disable()
require('./appsec/iast').disable()
this._modules.appsec.disable()
this._modules.iast.disable()
}

@@ -148,2 +211,3 @@

if (!this._profilerStarted) {
// injection hardening: this is only ever invoked from tests.
throw new Error('profilerStarted() must be called after init()')

@@ -150,0 +214,0 @@ }

@@ -6,5 +6,5 @@ 'use strict'

class RateLimiter {
constructor (rateLimit) {
constructor (rateLimit, interval = 'second') {
this._rateLimit = parseInt(rateLimit)
this._limiter = new limiter.RateLimiter(this._rateLimit, 'second')
this._limiter = new limiter.RateLimiter(this._rateLimit, interval)
this._tokensRequested = 0

@@ -11,0 +11,0 @@ this._prevIntervalTokens = 0

@@ -53,4 +53,16 @@ 'use strict'

const _origRequire = Module.prototype.require
patchedRequire = Module.prototype.require = function (request) {
const filename = Module._resolveFilename(request, this)
/*
If resolving the filename for a `require(...)` fails, defer to the wrapped
require implementation rather than failing right away. This allows a
possibly monkey patched `require` to work.
*/
let filename
try {
filename = Module._resolveFilename(request, this)
} catch (resolveErr) {
return _origRequire.apply(this, arguments)
}
const core = filename.indexOf(path.sep) === -1

@@ -57,0 +69,0 @@ let name, basedir, hooks

@@ -67,3 +67,3 @@ 'use strict'

class SamplingRule {
constructor ({ name, service, resource, tags, sampleRate = 1.0, maxPerSecond } = {}) {
constructor ({ name, service, resource, tags, sampleRate = 1.0, provenance = undefined, maxPerSecond } = {}) {
this.matchers = []

@@ -86,2 +86,3 @@

this._limiter = undefined
this.provenance = provenance

@@ -88,0 +89,0 @@ if (Number.isFinite(maxPerSecond)) {

@@ -56,9 +56,7 @@ 'use strict'

// True when running inside an Azure Functions host, detected by the
// presence of both environment variables the Functions runtime sets.
// (The former consumption-plan check on WEBSITE_SKU was removed; any
// plan now counts as an Azure Function.)
function getIsAzureFunction () {
  const isAzureFunction =
    process.env.FUNCTIONS_EXTENSION_VERSION !== undefined && process.env.FUNCTIONS_WORKER_RUNTIME !== undefined
  return isAzureFunction
}

@@ -69,4 +67,4 @@

getIsGCPFunction,
getIsAzureFunctionConsumptionPlan,
getIsAzureFunction,
getRustBinaryPath
}

@@ -33,2 +33,6 @@ const { identityService, httpPluginClientService, awsServiceV0 } = require('../util')

serviceName: awsServiceV0
},
undici: {
opName: () => 'undici.request',
serviceName: httpPluginClientService
}

@@ -35,0 +39,0 @@ },

@@ -32,2 +32,6 @@ const { identityService, httpPluginClientService } = require('../util')

serviceName: identityService
},
undici: {
opName: () => 'undici.request',
serviceName: httpPluginClientService
}

@@ -34,0 +38,0 @@ },

@@ -129,3 +129,4 @@ const os = require('os')

env,
tags
tags,
appsec
} = {}) {

@@ -142,3 +143,3 @@ this.exporter = new SpanStatsExporter({

this.hostname = os.hostname()
this.enabled = enabled
this.enabled = enabled && !appsec?.standalone?.enabled
this.env = env

@@ -148,3 +149,3 @@ this.tags = tags || {}

if (enabled) {
if (this.enabled) {
this.timer = setInterval(this.onInterval.bind(this), interval * 1e3)

@@ -151,0 +152,0 @@ this.timer.unref()

@@ -40,2 +40,19 @@ 'use strict'

const out = tracerInfo({ agentError })
if (agentError) {
out.agent_error = agentError.message
}
info('DATADOG TRACER CONFIGURATION - ' + out)
if (agentError) {
warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message)
errors.agentError = {
code: agentError.code ? agentError.code : '',
message: `Agent Error:${agentError.message}`
}
}
}
function tracerInfo () {
const url = config.url || `http://${config.hostname || 'localhost'}:${config.port}`

@@ -63,5 +80,2 @@

out.agent_url = url
if (agentError) {
out.agent_error = agentError.message
}
out.debug = !!config.debug

@@ -92,14 +106,3 @@ out.sample_rate = config.sampler.sampleRate

info('DATADOG TRACER CONFIGURATION - ' + out)
if (agentError) {
warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message)
errors.agentError = {
code: agentError.code ? agentError.code : '',
message: `Agent Error:${agentError.message}`
}
}
config = undefined
pluginManager = undefined
samplingRules = undefined
return out
}

@@ -124,3 +127,4 @@

setSamplingRules,
tracerInfo,
errors
}
'use strict'
const constants = require('./constants')
const log = require('./log')
const ERROR_MESSAGE = constants.ERROR_MESSAGE
const ERROR_STACK = constants.ERROR_STACK
const ERROR_TYPE = constants.ERROR_TYPE
function add (carrier, keyValuePairs) {
const otelTagMap = {
'deployment.environment': 'env',
'service.name': 'service',
'service.version': 'version'
}
function add (carrier, keyValuePairs, parseOtelTags = false) {
if (!carrier || !keyValuePairs) return

@@ -11,3 +21,2 @@

}
try {

@@ -17,11 +26,21 @@ if (typeof keyValuePairs === 'string') {

for (const segment of segments) {
const separatorIndex = segment.indexOf(':')
const separatorIndex = parseOtelTags ? segment.indexOf('=') : segment.indexOf(':')
if (separatorIndex === -1) continue
const key = segment.slice(0, separatorIndex)
let key = segment.slice(0, separatorIndex)
const value = segment.slice(separatorIndex + 1)
if (parseOtelTags && key in otelTagMap) {
key = otelTagMap[key]
}
carrier[key.trim()] = value.trim()
}
} else {
// HACK: to ensure otel.recordException does not influence trace.error
if (ERROR_MESSAGE in keyValuePairs || ERROR_STACK in keyValuePairs || ERROR_TYPE in keyValuePairs) {
if (!('doNotSetTraceError' in keyValuePairs)) {
carrier.setTraceError = true
}
}
Object.assign(carrier, keyValuePairs)

@@ -28,0 +47,0 @@ }

@@ -9,3 +9,4 @@ 'use strict'

const { manager: metricsManager } = require('./metrics')
const logs = require('./logs')
const telemetryLogger = require('./logs')
const logger = require('../log')

@@ -92,3 +93,3 @@ const telemetryStartChannel = dc.channel('datadog:telemetry:start')

version: tracerVersion,
enabled: config.profiling.enabled
enabled: profilingEnabledToBoolean(config.profiling.enabled)
}

@@ -141,2 +142,3 @@ }

metricsManager.send(config, application, host)
telemetryLogger.send(config, application, host)
}

@@ -217,3 +219,3 @@

metricsManager.send(config, application, host)
logs.send(config, application, host)
telemetryLogger.send(config, application, host)

@@ -242,2 +244,6 @@ const { reqType, payload } = createPayload('app-heartbeat')

if (!aConfig.telemetry.enabled) {
if (aConfig.sca?.enabled) {
logger.warn('DD_APPSEC_SCA_ENABLED requires enabling telemetry to work.')
}
return

@@ -253,3 +259,3 @@ }

dependencies.start(config, application, host, getRetryData, updateRetryData)
logs.start(config)
telemetryLogger.start(config)

@@ -317,6 +323,7 @@ sendData(config, application, host, 'app-started', appStarted(config))

headerTags: 'DD_TRACE_HEADER_TAGS',
tags: 'DD_TAGS'
tags: 'DD_TAGS',
'sampler.rules': 'DD_TRACE_SAMPLING_RULES'
}
const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping'])
const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping', 'serviceMapping'])

@@ -328,2 +335,3 @@ const configuration = []

const name = nameMapping[change.name] || change.name
names.push(name)

@@ -333,6 +341,13 @@ const { origin, value } = change

if (Array.isArray(value)) entry.value = value.join(',')
if (namesNeedFormatting.has(entry.name)) entry.value = formatMapForTelemetry(entry.value)
if (entry.name === 'url' && entry.value) entry.value = entry.value.toString()
if (namesNeedFormatting.has(entry.name)) {
entry.value = formatMapForTelemetry(entry.value)
} else if (entry.name === 'url') {
if (entry.value) {
entry.value = entry.value.toString()
}
} else if (entry.name === 'DD_TRACE_SAMPLING_RULES') {
entry.value = JSON.stringify(entry.value)
} else if (Array.isArray(entry.value)) {
entry.value = value.join(',')
}
configuration.push(entry)

@@ -356,2 +371,15 @@ }

// Normalizes the tri-state profiling "enabled" setting for telemetry:
// booleans pass through, 'true'/'auto' map to true, 'false' maps to
// false, and anything else yields undefined.
function profilingEnabledToBoolean (profilingEnabled) {
  if (typeof profilingEnabled === 'boolean') return profilingEnabled

  switch (profilingEnabled) {
    case 'true':
    case 'auto':
      return true
    case 'false':
      return false
    default:
      return undefined
  }
}
module.exports = {

@@ -358,0 +386,0 @@ start,

@@ -8,2 +8,3 @@ 'use strict'

const telemetryLog = dc.channel('datadog:telemetry:log')
const errorLog = dc.channel('datadog:log:error')

@@ -37,2 +38,17 @@ let enabled = false

// Adapts messages published on the error-log channel into telemetry log
// entries: Error instances carry their stack trace, plain strings carry
// only the message, and anything else is ignored.
function onErrorLog (msg) {
  if (msg instanceof Error) {
    onLog({ level: 'ERROR', message: msg.message, stack_trace: msg.stack })
  } else if (typeof msg === 'string') {
    onLog({ level: 'ERROR', message: msg })
  }
}
function start (config) {

@@ -44,2 +60,4 @@ if (!config.telemetry.logCollection || enabled) return

telemetryLog.subscribe(onLog)
errorLog.subscribe(onErrorLog)
}

@@ -53,2 +71,4 @@

}
errorLog.unsubscribe(onErrorLog)
}

@@ -55,0 +75,0 @@

'use strict'
const log = require('../../log')
const { calculateDDBasePath } = require('../../util')

@@ -32,2 +33,33 @@ const logs = new Map()

const ddBasePath = calculateDDBasePath(__dirname)
const EOL = '\n'
const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
// Strips frames that are not inside the tracer (ddBasePath) from a
// telemetry log entry's stack trace, so customer code paths do not leak
// into telemetry. Mutates and returns the entry; returns null when no
// useful stack would remain.
function sanitize (logEntry) {
  const stack = logEntry.stack_trace
  if (!stack) return logEntry

  const frames = stack.split(EOL)
  const firstFrameIndex = frames.findIndex(frame => frame.match(STACK_FRAME_LINE_REGEX))
  // The error originates in DD code when the first real stack frame
  // points inside the tracer install path.
  const isDDCode = firstFrameIndex > -1 && frames[firstFrameIndex].includes(ddBasePath)

  const kept = []
  frames.forEach((frame, index) => {
    // Keep the header lines before the first frame (only when the error
    // is DD-originated) plus every frame inside the tracer, with the
    // install prefix removed.
    if ((isDDCode && index < firstFrameIndex) || frame.includes(ddBasePath)) {
      kept.push(frame.replace(ddBasePath, ''))
    }
  })
  logEntry.stack_trace = kept.join(EOL)

  if (logEntry.stack_trace === '') {
    // If the entire stack was removed, we'd just log a message saying
    // "omitted" — in that case don't log the entry at all.
    return null
  }
  if (!isDDCode) {
    logEntry.message = 'omitted'
  }
  return logEntry
}
const logCollector = {

@@ -41,5 +73,9 @@ add (logEntry) {

overflowedCount++
return
return false
}
logEntry = sanitize(logEntry)
if (!logEntry) {
return false
}
const hash = createHash(logEntry)

@@ -56,2 +92,7 @@ if (!logs.has(hash)) {

// Used for testing
hasEntry (logEntry) {
return logs.has(createHash(logEntry))
},
drain () {

@@ -58,0 +99,0 @@ if (logs.size === 0) return

@@ -14,2 +14,5 @@ 'use strict'

const DataStreamsContext = require('./data_streams_context')
const { DataStreamsCheckpointer } = require('./data_streams')
const { flushStartupLogs } = require('../../datadog-instrumentations/src/check_require_cache')
const log = require('./log/writer')

@@ -22,7 +25,9 @@ const SPAN_TYPE = tags.SPAN_TYPE

class DatadogTracer extends Tracer {
constructor (config) {
super(config)
constructor (config, prioritySampler) {
super(config, prioritySampler)
this._dataStreamsProcessor = new DataStreamsProcessor(config)
this.dataStreamsCheckpointer = new DataStreamsCheckpointer(this)
this._scope = new Scope()
setStartupLogConfig(config)
flushStartupLogs(log)
}

@@ -29,0 +34,0 @@

@@ -72,2 +72,6 @@ 'use strict'

// Own-property check that bypasses any hasOwnProperty override (or a
// null prototype) on `object`.
function hasOwn (object, prop) {
  const ownProp = Object.prototype.hasOwnProperty
  return ownProp.call(object, prop)
}
module.exports = {

@@ -78,3 +82,4 @@ isTrue,

globMatch,
calculateDDBasePath
calculateDDBasePath,
hasOwn
}

@@ -5,3 +5,2 @@ # `dd-trace`: Node.js APM Tracer Library

[![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node16)
[![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=blue&label=dd-trace%40v3&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node14)
[![codecov](https://codecov.io/gh/DataDog/dd-trace-js/branch/master/graph/badge.svg)](https://codecov.io/gh/DataDog/dd-trace-js)

@@ -32,8 +31,8 @@

| [`v2`](https://github.com/DataDog/dd-trace-js/tree/v2.x) | ![npm v2](https://img.shields.io/npm/v/dd-trace/latest-node12?color=white&label=%20&style=flat-square) | `>= v12` | **End of Life** | 2022-01-28 | 2023-08-15 |
| [`v3`](https://github.com/DataDog/dd-trace-js/tree/v3.x) | ![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=white&label=%20&style=flat-square) | `>= v14` | **Maintenance** | 2022-08-15 | 2024-05-15 |
| [`v3`](https://github.com/DataDog/dd-trace-js/tree/v3.x) | ![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=white&label=%20&style=flat-square) | `>= v14` | **End of Life** | 2022-08-15 | 2024-05-15 |
| [`v4`](https://github.com/DataDog/dd-trace-js/tree/v4.x) | ![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=white&label=%20&style=flat-square) | `>= v16` | **Maintenance** | 2023-05-12 | 2025-01-11 |
| [`v5`](https://github.com/DataDog/dd-trace-js/tree/v5.x) | ![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=white&label=%20&style=flat-square) | `>= v18` | **Current** | 2024-01-11 | Unknown |
We currently maintain two release lines, namely `v5` and `v4`.
Features and bug fixes that are merged are released to the `v5` line and, if appropriate, also `v4`.

@@ -47,3 +46,3 @@ For any new projects it is recommended to use the `v5` release line:

However, existing projects that already use the `v4` & `v3` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines.
However, existing projects that already use the `v4` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines.
This is done by specifying the version when installing the package.

@@ -73,15 +72,6 @@

## Experimental ESM Support
## EcmaScript Modules (ESM) Support
> **Warning**
>
> ESM support has been temporarily disabled starting from Node 20 as significant
> changes are in progress.
ESM support requires an additional command-line argument. Use the following to enable experimental ESM support with your application:
ESM support is currently in the experimental stages, while CJS has been supported
since inception. This means that code loaded using `require()` should work fine
but code loaded using `import` might not always work.
Use the following command to enable experimental ESM support with your application:
Node.js < v20.6

@@ -115,2 +105,9 @@

Please refer to the [SECURITY.md](https://github.com/DataDog/dd-trace-js/blob/master/SECURITY.md) document if you have found a security issue.
## Datadog With OpenTelemetery
Please refer to the [Node.js Custom Instrumentation using OpenTelemetry API](https://docs.datadoghq.com/tracing/trace_collection/custom_instrumentation/nodejs/otel/) document. It includes information on how to use the OpenTelemetry API with dd-trace-js.
Note that our internal implementation of the OpenTelemetry API is currently set within the version range `>=1.0.0 <1.9.0`. This range will be updated at a regular cadence therefore, we recommend updating your tracer to the latest release to ensure up to date support.

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display