dd-trace - npm Package Compare versions

Comparing version 6.0.0-pre-4a869d5 to 6.0.0-pre-4df8c4d

ci/cypress/after-run.js

ci/init.js
/* eslint-disable no-console */
const tracer = require('../packages/dd-trace')
const { ORIGIN_KEY } = require('../packages/dd-trace/src/constants')
const { isTrue } = require('../packages/dd-trace/src/util')
const isJestWorker = !!process.env.JEST_WORKER_ID
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
const isMochaWorker = !!process.env.MOCHA_WORKER_ID
const options = {
startupLogs: false,
tags: {
[ORIGIN_KEY]: 'ciapp-test'
},
isCiVisibility: true,

@@ -27,5 +25,5 @@ flushInterval: isJestWorker ? 0 : 5000

} else {
console.error(`DD_CIVISIBILITY_AGENTLESS_ENABLED is set, \
but neither DD_API_KEY nor DATADOG_API_KEY are set in your environment, \
so dd-trace will not be initialized.`)
console.error('DD_CIVISIBILITY_AGENTLESS_ENABLED is set, but neither ' +
'DD_API_KEY nor DATADOG_API_KEY are set in your environment, so ' +
'dd-trace will not be initialized.')
shouldInit = false

@@ -45,7 +43,20 @@ }

if (isCucumberWorker) {
options.experimental = {
exporter: 'cucumber_worker'
}
}
if (isMochaWorker) {
options.experimental = {
exporter: 'mocha_worker'
}
}
if (shouldInit) {
tracer.init(options)
tracer.use('fs', false)
tracer.use('child_process', false)
}
module.exports = tracer
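
This init file only calls tracer.init() when the agentless prerequisites are satisfied, and it disables the fs and child_process integrations that would add noise to test traces. A short sketch of how this entry point is typically preloaded (environment values are illustrative):

// Preload the CI entry point before the test runner starts, e.g.:
//   NODE_OPTIONS="-r dd-trace/ci/init" DD_API_KEY=<key> yarn test
// Worker processes (JEST_WORKER_ID, CUCUMBER_WORKER_ID, MOCHA_WORKER_ID) are
// detected above and given a matching *_worker exporter so their payloads can
// be reported through the main process.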

@@ -6,5 +6,7 @@ declare const exporters: {

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker',
MOCHA_WORKER: 'mocha_worker'
}
export = exporters

@@ -7,3 +7,5 @@ 'use strict'

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker',
MOCHA_WORKER: 'mocha_worker'
}

@@ -8,4 +8,5 @@ import * as opentracing from 'opentracing'

LOG: 'log'
TEXT_MAP_DSM: 'text_map_dsm'
}
export = formats

@@ -7,3 +7,4 @@ 'use strict'

BINARY: 'binary',
LOG: 'log'
LOG: 'log',
TEXT_MAP_DSM: 'text_map_dsm'
}

@@ -13,2 +13,3 @@ declare const tags: {

BASE_SERVICE: '_dd.base_service'
DD_PARENT_ID: '_dd.parent_id'
HTTP_URL: 'http.url'

@@ -15,0 +16,0 @@ HTTP_METHOD: 'http.method'

@@ -16,2 +16,3 @@ 'use strict'

BASE_SERVICE: '_dd.base_service',
DD_PARENT_ID: '_dd.parent_id',

@@ -18,0 +19,0 @@ // HTTP

'use strict'
const tracer = require('.')
const path = require('path')
const Module = require('module')
const semver = require('semver')
const log = require('./packages/dd-trace/src/log')
const { isTrue } = require('./packages/dd-trace/src/util')
const telemetry = require('./packages/dd-trace/src/telemetry/init-telemetry')
tracer.init()
let initBailout = false
let clobberBailout = false
const forced = isTrue(process.env.DD_INJECT_FORCE)
module.exports = tracer
if (process.env.DD_INJECTION_ENABLED) {
// If we're running via single-step install, and we're not in the app's
// node_modules, then we should not initialize the tracer. This prevents
// single-step-installed tracer from clobbering the manually-installed tracer.
let resolvedInApp
const entrypoint = process.argv[1]
try {
resolvedInApp = Module.createRequire(entrypoint).resolve('dd-trace')
} catch (e) {
// Ignore. If we can't resolve the module, we assume it's not in the app.
}
if (resolvedInApp) {
const ourselves = path.join(__dirname, 'index.js')
if (ourselves !== resolvedInApp) {
clobberBailout = true
}
}
// If we're running via single-step install, and the runtime doesn't match
// the engines field in package.json, then we should not initialize the tracer.
if (!clobberBailout) {
const { engines } = require('./package.json')
const version = process.versions.node
if (!semver.satisfies(version, engines.node)) {
initBailout = true
telemetry([
{ name: 'abort', tags: ['reason:incompatible_runtime'] },
{ name: 'abort.runtime', tags: [] }
])
log.info('Aborting application instrumentation due to incompatible_runtime.')
log.info(`Found incompatible runtime nodejs ${version}, Supported runtimes: nodejs ${engines.node}.`)
if (forced) {
log.info('DD_INJECT_FORCE enabled, allowing unsupported runtimes and continuing.')
}
}
}
}
if (!clobberBailout && (!initBailout || forced)) {
const tracer = require('.')
tracer.init()
module.exports = tracer
telemetry('complete', [`injection_forced:${forced && initBailout ? 'true' : 'false'}`])
log.info('Application instrumentation bootstrapping complete')
}
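
The engines gate above is a plain semver range check. A minimal sketch of the decision (the range is hypothetical here; the real one comes from package.json):

const semver = require('semver')
const engines = { node: '>=18' } // illustrative; the real code reads require('./package.json').engines
console.log(semver.satisfies('16.20.2', engines.node)) // false -> initBailout = true (unless DD_INJECT_FORCE)
console.log(semver.satisfies('20.12.0', engines.node)) // true  -> tracer.init() proceeds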
{
"name": "dd-trace",
"version": "6.0.0-pre-4a869d5",
"version": "6.0.0-pre-4df8c4d",
"description": "Datadog APM tracing client for JavaScript",

@@ -17,16 +17,16 @@ "main": "index.js",

"services": "node ./scripts/install_plugin_modules && node packages/dd-trace/test/setup/services",
"test": "SERVICES=* yarn services && mocha --colors --exit --expose-gc 'packages/dd-trace/test/setup/node.js' 'packages/*/test/**/*.spec.js'",
"test:appsec": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" \"packages/dd-trace/test/appsec/**/*.spec.js\"",
"test": "SERVICES=* yarn services && mocha --expose-gc 'packages/dd-trace/test/setup/node.js' 'packages/*/test/**/*.spec.js'",
"test:appsec": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" \"packages/dd-trace/test/appsec/**/*.spec.js\"",
"test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec",
"test:appsec:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"",
"test:appsec:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"",
"test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins",
"test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"",
"test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"",
"test:instrumentations": "mocha --colors -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'",
"test:instrumentations": "mocha -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'",
"test:instrumentations:ci": "nyc --no-clean --include 'packages/datadog-instrumentations/src/**/*.js' -- npm run test:instrumentations",
"test:core": "tap \"packages/datadog-core/test/**/*.spec.js\"",
"test:core:ci": "npm run test:core -- --coverage --nyc-arg=--include=\"packages/datadog-core/src/**/*.js\"",
"test:lambda": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/lambda/**/*.spec.js\"",
"test:lambda": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/lambda/**/*.spec.js\"",
"test:lambda:ci": "nyc --no-clean --include \"packages/dd-trace/src/lambda/**/*.js\" -- npm run test:lambda",
"test:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\"",
"test:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\"",
"test:plugins:ci": "yarn services && nyc --no-clean --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS)).js\" --include \"packages/datadog-instrumentations/src/@($(echo $PLUGINS))/**/*.js\" --include \"packages/datadog-plugin-@($(echo $PLUGINS))/src/**/*.js\" -- npm run test:plugins",

@@ -36,14 +36,17 @@ "test:plugins:upstream": "node ./packages/dd-trace/test/plugins/suite.js",

"test:profiler:ci": "npm run test:profiler -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/profiling/**/*.js\"",
"test:integration": "mocha --colors --timeout 30000 \"integration-tests/*.spec.js\"",
"test:integration:cucumber": "mocha --colors --timeout 30000 \"integration-tests/cucumber/*.spec.js\"",
"test:integration:cypress": "mocha --colors --timeout 30000 \"integration-tests/cypress/*.spec.js\"",
"test:integration:playwright": "mocha --colors --timeout 30000 \"integration-tests/playwright/*.spec.js\"",
"test:integration:profiler": "mocha --colors --timeout 90000 \"integration-tests/profiler/*.spec.js\"",
"test:integration:serverless": "mocha --colors --timeout 30000 \"integration-tests/serverless/*.spec.js\"",
"test:integration:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:unit:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:shimmer": "mocha --colors 'packages/datadog-shimmer/test/**/*.spec.js'",
"test:shimmer:ci": "nyc --no-clean --include 'packages/datadog-shimmer/src/**/*.js' -- npm run test:shimmer",
"leak:core": "node ./scripts/install_plugin_modules && (cd packages/memwatch && yarn) && NODE_PATH=./packages/memwatch/node_modules node --no-warnings ./node_modules/.bin/tape 'packages/dd-trace/test/leak/**/*.js'",
"leak:plugins": "yarn services && (cd packages/memwatch && yarn) && NODE_PATH=./packages/memwatch/node_modules node --no-warnings ./node_modules/.bin/tape \"packages/datadog-plugin-@($(echo $PLUGINS))/test/leak.js\""
"test:integration": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/*.spec.js\"",
"test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"",
"test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"",
"test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"",
"test:integration:jest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/jest/*.spec.js\"",
"test:integration:mocha": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/mocha/*.spec.js\"",
"test:integration:playwright": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/playwright/*.spec.js\"",
"test:integration:selenium": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/selenium/*.spec.js\"",
"test:integration:vitest": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/vitest/*.spec.js\"",
"test:integration:profiler": "mocha --timeout 180000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/profiler/*.spec.js\"",
"test:integration:serverless": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/serverless/*.spec.js\"",
"test:integration:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:unit:plugins": "mocha -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"",
"test:shimmer": "mocha 'packages/datadog-shimmer/test/**/*.spec.js'",
"test:shimmer:ci": "nyc --no-clean --include 'packages/datadog-shimmer/src/**/*.js' -- npm run test:shimmer"
},

@@ -74,16 +77,15 @@ "repository": {

"dependencies": {
"@datadog/native-appsec": "7.0.0",
"@datadog/native-iast-rewriter": "2.2.2",
"@datadog/native-iast-taint-tracking": "1.6.4",
"@datadog/native-appsec": "8.0.1",
"@datadog/native-iast-rewriter": "2.4.1",
"@datadog/native-iast-taint-tracking": "3.1.0",
"@datadog/native-metrics": "^2.0.0",
"@datadog/pprof": "5.0.0",
"@datadog/pprof": "5.3.0",
"@datadog/sketches-js": "^2.1.0",
"@opentelemetry/api": "^1.0.0",
"@opentelemetry/api": ">=1.0.0 <1.9.0",
"@opentelemetry/core": "^1.14.0",
"crypto-randomuuid": "^1.0.0",
"dc-polyfill": "^0.1.2",
"dc-polyfill": "^0.1.4",
"ignore": "^5.2.4",
"import-in-the-middle": "^1.7.3",
"import-in-the-middle": "^1.8.1",
"int64-buffer": "^0.1.9",
"ipaddr.js": "^2.1.0",
"istanbul-lib-coverage": "3.2.0",

@@ -93,24 +95,20 @@ "jest-docblock": "^29.7.0",

"limiter": "1.1.5",
"lodash.kebabcase": "^4.1.1",
"lodash.pick": "^4.4.0",
"lodash.sortby": "^4.7.0",
"lodash.uniq": "^4.5.0",
"lru-cache": "^7.14.0",
"methods": "^1.1.2",
"module-details-from-path": "^1.0.3",
"msgpack-lite": "^0.1.26",
"node-abort-controller": "^3.1.1",
"opentracing": ">=0.12.1",
"path-to-regexp": "^0.1.2",
"pprof-format": "^2.0.7",
"pprof-format": "^2.1.0",
"protobufjs": "^7.2.5",
"retry": "^0.13.1",
"semver": "^7.5.4",
"shell-quote": "^1.8.1",
"tlhunter-sorted-set": "^0.1.0"
},
"devDependencies": {
"@types/node": ">=18",
"@types/node": "^16.18.103",
"autocannon": "^4.5.2",
"aws-sdk": "^2.1446.0",
"axios": "^0.21.2",
"axios": "^1.6.7",
"benchmark": "^2.1.4",

@@ -124,10 +122,8 @@ "body-parser": "^1.20.2",

"esbuild": "0.16.12",
"eslint": "^8.23.0",
"eslint-config-standard": "^11.0.0-beta.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-mocha": "^10.1.0",
"eslint-plugin-n": "^15.7.0",
"eslint-plugin-node": "^5.2.1",
"eslint-plugin-promise": "^3.6.0",
"eslint-plugin-standard": "^3.0.1",
"eslint": "^8.57.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-mocha": "^10.4.3",
"eslint-plugin-n": "^16.6.2",
"eslint-plugin-promise": "^6.4.0",
"express": "^4.18.2",

@@ -140,3 +136,3 @@ "get-port": "^3.2.0",

"mkdirp": "^3.0.1",
"mocha": "8",
"mocha": "^9",
"multer": "^1.4.5-lts.1",

@@ -150,4 +146,4 @@ "nock": "^11.3.3",

"tap": "^16.3.7",
"tape": "^5.6.5"
"tiktoken": "^1.0.15"
}
}

@@ -5,11 +5,2 @@ 'use strict'

const semver = require('semver')
// https://github.com/nodejs/node/pull/33801
const hasJavaScriptAsyncHooks = semver.satisfies(process.versions.node, '>=14.5')
if (hasJavaScriptAsyncHooks) {
module.exports = require('./async_resource')
} else {
module.exports = require('./async_hooks')
}
module.exports = require('./async_resource')

@@ -7,5 +7,10 @@ 'use strict'

const hooks = require('../datadog-instrumentations/src/helpers/hooks.js')
const extractPackageAndModulePath = require('../datadog-instrumentations/src/utils/src/extract-package-and-module-path')
for (const hook of Object.values(hooks)) {
hook()
if (typeof hook === 'object') {
hook.fn()
} else {
hook()
}
}
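// The registry now mixes two hook shapes (entries illustrative, mirroring the
// hooks map shown later in this diff):
//   express: () => require('../express')                        // plain function hook
//   vitest: { esmFirst: true, fn: () => require('../vitest') }  // object hook with options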

@@ -25,3 +30,2 @@

const NM = 'node_modules/'
const INSTRUMENTED = Object.keys(instrumentations)

@@ -80,3 +84,6 @@ const RAW_BUILTINS = require('module').builtinModules

} catch (err) {
console.warn(`MISSING: Unable to find "${args.path}". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${args.path}".` +
"Unless it's dead code this could cause a problem at runtime.")
}
return

@@ -95,7 +102,10 @@ }

try {
pathToPackageJson = require.resolve(`${extracted.pkg}/package.json`, { paths: [ args.resolveDir ] })
pathToPackageJson = require.resolve(`${extracted.pkg}/package.json`, { paths: [args.resolveDir] })
} catch (err) {
if (err.code === 'MODULE_NOT_FOUND') {
if (!internal) {
console.warn(`MISSING: Unable to find "${extracted.pkg}/package.json". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${extracted.pkg}/package.json".` +
"Unless it's dead code this could cause a problem at runtime.")
}
}

@@ -180,33 +190,3 @@ return

return require.resolve(path, { paths: [ directory ] })
return require.resolve(path, { paths: [directory] })
}
/**
* For a given full path to a module,
* return the package name it belongs to and the local path to the module
* input: '/foo/node_modules/@co/stuff/foo/bar/baz.js'
* output: { pkg: '@co/stuff', path: 'foo/bar/baz.js' }
*/
function extractPackageAndModulePath (fullPath) {
const nm = fullPath.lastIndexOf(NM)
if (nm < 0) {
return { pkg: null, path: null }
}
const subPath = fullPath.substring(nm + NM.length)
const firstSlash = subPath.indexOf('/')
if (subPath[0] === '@') {
const secondSlash = subPath.substring(firstSlash + 1).indexOf('/')
return {
pkg: subPath.substring(0, firstSlash + 1 + secondSlash),
path: subPath.substring(firstSlash + 1 + secondSlash + 1)
}
}
return {
pkg: subPath.substring(0, firstSlash),
path: subPath.substring(firstSlash + 1)
}
}
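
The docstring's contract can be exercised directly; sample calls based on it:

extractPackageAndModulePath('/foo/node_modules/@co/stuff/foo/bar/baz.js')
// => { pkg: '@co/stuff', path: 'foo/bar/baz.js' }
extractPackageAndModulePath('/app/node_modules/semver/functions/satisfies.js')
// => { pkg: 'semver', path: 'functions/satisfies.js' }
extractPackageAndModulePath('/app/src/index.js') // not under node_modules/
// => { pkg: null, path: null }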

@@ -8,5 +8,8 @@ 'use strict'

} = require('./helpers/instrument')
const kebabCase = require('lodash.kebabcase')
const kebabCase = require('../../datadog-core/src/utils/src/kebabcase')
const shimmer = require('../../datadog-shimmer')
const { NODE_MAJOR, NODE_MINOR } = require('../../../version')
const MIN_VERSION = ((NODE_MAJOR > 22) || (NODE_MAJOR === 22 && NODE_MINOR >= 2)) ? '>=0.5.3' : '>=0.5.0'
const startCh = channel('apm:amqplib:command:start')

@@ -18,3 +21,3 @@ const finishCh = channel('apm:amqplib:command:finish')

addHook({ name: 'amqplib', file: 'lib/defs.js', versions: ['>=0.5'] }, defs => {
addHook({ name: 'amqplib', file: 'lib/defs.js', versions: [MIN_VERSION] }, defs => {
methods = Object.keys(defs)

@@ -27,3 +30,3 @@ .filter(key => Number.isInteger(defs[key]))

addHook({ name: 'amqplib', file: 'lib/channel.js', versions: ['>=0.5'] }, channel => {
addHook({ name: 'amqplib', file: 'lib/channel.js', versions: [MIN_VERSION] }, channel => {
shimmer.wrap(channel.Channel.prototype, 'sendImmediately', sendImmediately => function (method, fields) {

@@ -34,3 +37,3 @@ return instrument(sendImmediately, this, arguments, methods[method], fields)

shimmer.wrap(channel.Channel.prototype, 'sendMessage', sendMessage => function (fields) {
return instrument(sendMessage, this, arguments, 'basic.publish', fields)
return instrument(sendMessage, this, arguments, 'basic.publish', fields, arguments[2])
})

@@ -37,0 +40,0 @@

'use strict'
const { AbortController } = require('node-abort-controller')
const { addHook } = require('./helpers/instrument')

@@ -5,0 +4,0 @@ const shimmer = require('../../datadog-shimmer')

'use strict'
const { AbortController } = require('node-abort-controller')
const dc = require('dc-polyfill')

@@ -40,3 +39,3 @@

resolve({
headers: headers,
headers,
status: abortData.statusCode,

@@ -43,0 +42,0 @@ body: {

@@ -23,3 +23,4 @@ 'use strict'

this.on('complete', innerAr.bind(response => {
channel(`apm:aws:request:complete:${channelSuffix}`).publish({ response })
const cbExists = typeof cb === 'function'
channel(`apm:aws:request:complete:${channelSuffix}`).publish({ response, cbExists })
}))

@@ -115,2 +116,3 @@

function wrapCb (cb, serviceName, request, ar) {
// eslint-disable-next-line n/handle-callback-err
return function wrappedCb (err, response) {

@@ -166,5 +168,10 @@ const obj = { request, response }

's3',
'sfn',
'sns',
'sqs'
].includes(name) ? name : 'default'
'sqs',
'states',
'stepfunctions'
].includes(name)
? name
: 'default'
}
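// Effect of the widened lookup above (illustrative):
//   'sqs' -> 'sqs', 'states' -> 'states', 'stepfunctions' -> 'stepfunctions', 'ec2' -> 'default'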

@@ -171,0 +178,0 @@

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const shimmer = require('../../datadog-shimmer')
const { channel, addHook } = require('./helpers/instrument')
const { channel, addHook, AsyncResource } = require('./helpers/instrument')

@@ -27,5 +26,17 @@ const bodyParserReadCh = channel('datadog:body-parser:read:finish')

file: 'lib/read.js',
versions: ['>=1.4.0']
versions: ['>=1.4.0 <1.20.0']
}, read => {
return shimmer.wrap(read, function (req, res, next) {
const nextResource = new AsyncResource('bound-anonymous-fn')
arguments[2] = nextResource.bind(publishRequestBodyAndNext(req, res, next))
return read.apply(this, arguments)
})
})
addHook({
name: 'body-parser',
file: 'lib/read.js',
versions: ['>=1.20.0']
}, read => {
return shimmer.wrap(read, function (req, res, next) {
arguments[2] = publishRequestBodyAndNext(req, res, next)

@@ -32,0 +43,0 @@ return read.apply(this, arguments)
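
The hook is split because body-parser changed how lib/read.js invokes next in 1.20.0, so only older versions need the AsyncResource binding. The publishRequestBodyAndNext helper is not shown in this diff; a hedged sketch of what such a wrapper could look like:

function publishRequestBodyAndNext (req, res, next) {
  return function () {
    // Publish the parsed body so subscribers (e.g. AppSec) can inspect it,
    // then fall through to the original next(). The channel name comes from
    // the surrounding file; the payload shape here is an assumption.
    if (bodyParserReadCh.hasSubscribers && req.body) {
      bodyParserReadCh.publish({ req, res, body: req.body })
    }
    return next.apply(this, arguments)
  }
}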

@@ -13,3 +13,3 @@ 'use strict'

const errorCh = channel('apm:cassandra-driver:query:error')
const connectCh = channel(`apm:cassandra-driver:query:connect`)
const connectCh = channel('apm:cassandra-driver:query:connect')

@@ -16,0 +16,0 @@ addHook({ name: 'cassandra-driver', versions: ['>=3.0.0'] }, cassandra => {

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const shimmer = require('../../datadog-shimmer')

@@ -5,0 +4,0 @@ const { channel, addHook } = require('./helpers/instrument')

'use strict'
const { createCoverageMap } = require('istanbul-lib-coverage')
const { NUM_FAILED_TEST_RETRIES } = require('../../dd-trace/src/plugins/util/test')
const { addHook, channel, AsyncResource } = require('./helpers/instrument')

@@ -9,2 +10,3 @@ const shimmer = require('../../datadog-shimmer')

const testStartCh = channel('ci:cucumber:test:start')
const testRetryCh = channel('ci:cucumber:test:retry')
const testFinishCh = channel('ci:cucumber:test:finish') // used for test steps too

@@ -20,3 +22,4 @@

const itrConfigurationCh = channel('ci:cucumber:itr-configuration')
const libraryConfigurationCh = channel('ci:cucumber:library-configuration')
const knownTestsCh = channel('ci:cucumber:known-tests')
const skippableSuitesCh = channel('ci:cucumber:test-suite:skippable')

@@ -26,4 +29,8 @@ const sessionStartCh = channel('ci:cucumber:session:start')

const workerReportTraceCh = channel('ci:cucumber:worker-report:trace')
const itrSkippedSuitesCh = channel('ci:cucumber:itr:skipped-suites')
const getCodeCoverageCh = channel('ci:nyc:get-coverage')
const {

@@ -34,3 +41,4 @@ getCoveredFilenamesFromCoverage,

fromCoverageMapToCoverage,
getTestSuitePath
getTestSuitePath,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE
} = require('../../dd-trace/src/plugins/util/test')

@@ -48,4 +56,11 @@

const lastStatusByPickleId = new Map()
const numRetriesByPickleId = new Map()
const numAttemptToAsyncResource = new Map()
let pickleByFile = {}
const pickleResultByFile = {}
const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
let skippableSuites = []

@@ -55,2 +70,9 @@ let itrCorrelationId = ''

let isUnskippable = false
let isSuitesSkippingEnabled = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let isFlakyTestRetriesEnabled = false
let knownTests = []
let skippedSuites = []
let isSuitesSkipped = false

@@ -93,2 +115,54 @@ function getSuiteStatusFromTestStatuses (testStatuses) {

function isNewTest (testSuite, testName) {
const testsForSuite = knownTests.cucumber?.[testSuite] || []
return !testsForSuite.includes(testName)
}
function getTestStatusFromRetries (testStatuses) {
if (testStatuses.every(status => status === 'fail')) {
return 'fail'
}
if (testStatuses.some(status => status === 'pass')) {
return 'pass'
}
return 'pass'
}
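// Worked examples of the rule above (a single passing attempt reports a pass):
//   getTestStatusFromRetries(['fail', 'fail', 'fail']) // => 'fail'
//   getTestStatusFromRetries(['fail', 'pass', 'fail']) // => 'pass' (flaky, but the run is not failed)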
function getChannelPromise (channelToPublishTo) {
return new Promise(resolve => {
sessionAsyncResource.runInAsyncScope(() => {
channelToPublishTo.publish({ onDone: resolve })
})
})
}
function getFilteredPickles (runtime, suitesToSkip) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
const testSuitePath = getTestSuitePath(test.uri, process.cwd())
const isUnskippable = isMarkedAsUnskippable(test)
const isSkipped = suitesToSkip.includes(testSuitePath)
if (isSkipped && !isUnskippable) {
acc.skippedSuites.add(testSuitePath)
} else {
acc.picklesToRun.push(pickleId)
}
return acc
}, { skippedSuites: new Set(), picklesToRun: [] })
}
function getPickleByFile (runtime) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
if (acc[test.uri]) {
acc[test.uri].push(test)
} else {
acc[test.uri] = [test]
}
return acc
}, {})
}
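// Illustrative result shapes for the two reducers above (values hypothetical):
//   getFilteredPickles -> { skippedSuites: Set { 'features/cart.feature' }, picklesToRun: ['pickle-1'] }
//   getPickleByFile    -> { 'features/login.feature': [pickleLogin1, pickleLogin2] }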
function wrapRun (pl, isLatestVersion) {

@@ -104,62 +178,76 @@ if (patched.has(pl)) return

let numAttempt = 0
const asyncResource = new AsyncResource('bound-anonymous-fn')
return asyncResource.runInAsyncScope(() => {
const testSuiteFullPath = this.pickle.uri
if (!pickleResultByFile[testSuiteFullPath]) { // first test in suite
isUnskippable = isMarkedAsUnskippable(this.pickle)
const testSuitePath = getTestSuitePath(testSuiteFullPath, process.cwd())
isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath)
numAttemptToAsyncResource.set(numAttempt, asyncResource)
testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId })
}
const testFileAbsolutePath = this.pickle.uri
const testSourceLine = this.gherkinDocument &&
this.gherkinDocument.feature &&
this.gherkinDocument.feature.location &&
this.gherkinDocument.feature.location.line
const testSourceLine = this.gherkinDocument?.feature?.location?.line
testStartCh.publish({
testName: this.pickle.name,
fullTestSuite: testSuiteFullPath,
testSourceLine
})
try {
const promise = run.apply(this, arguments)
promise.finally(() => {
const result = this.getWorstStepResult()
const { status, skipReason, errorMessage } = isLatestVersion
? getStatusFromResultLatest(result) : getStatusFromResult(result)
const testStartPayload = {
testName: this.pickle.name,
testFileAbsolutePath,
testSourceLine,
isParallel: !!process.env.CUCUMBER_WORKER_ID
}
asyncResource.runInAsyncScope(() => {
testStartCh.publish(testStartPayload)
})
try {
this.eventBroadcaster.on('envelope', (testCase) => {
// Only supported from >=8.0.0
if (testCase?.testCaseFinished) {
const { testCaseFinished: { willBeRetried } } = testCase
if (willBeRetried) { // test case failed and will be retried
const failedAttemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
failedAttemptAsyncResource.runInAsyncScope(() => {
testRetryCh.publish(numAttempt++ > 0) // the current span will be finished and a new one will be created
})
if (!pickleResultByFile[testSuiteFullPath]) {
pickleResultByFile[testSuiteFullPath] = [status]
} else {
pickleResultByFile[testSuiteFullPath].push(status)
const newAsyncResource = new AsyncResource('bound-anonymous-fn')
numAttemptToAsyncResource.set(numAttempt, newAsyncResource)
newAsyncResource.runInAsyncScope(() => {
testStartCh.publish(testStartPayload) // a new span will be created
})
}
testFinishCh.publish({ status, skipReason, errorMessage })
// last test in suite
if (pickleResultByFile[testSuiteFullPath].length === pickleByFile[testSuiteFullPath].length) {
const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testSuiteFullPath])
if (global.__coverage__) {
const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)
}
})
let promise
testSuiteCodeCoverageCh.publish({
coverageFiles,
suiteFile: testSuiteFullPath
})
// We need to reset coverage to get a code coverage per suite
// Before that, we preserve the original coverage
mergeCoverage(global.__coverage__, originalCoverageMap)
resetCoverage(global.__coverage__)
}
asyncResource.runInAsyncScope(() => {
promise = run.apply(this, arguments)
})
promise.finally(() => {
const result = this.getWorstStepResult()
const { status, skipReason, errorMessage } = isLatestVersion
? getStatusFromResultLatest(result)
: getStatusFromResult(result)
testSuiteFinishCh.publish(testSuiteStatus)
}
if (lastStatusByPickleId.has(this.pickle.id)) {
lastStatusByPickleId.get(this.pickle.id).push(status)
} else {
lastStatusByPickleId.set(this.pickle.id, [status])
}
let isNew = false
let isEfdRetry = false
if (isEarlyFlakeDetectionEnabled && status !== 'skip') {
const numRetries = numRetriesByPickleId.get(this.pickle.id)
isNew = numRetries !== undefined
isEfdRetry = numRetries > 0
}
const attemptAsyncResource = numAttemptToAsyncResource.get(numAttempt)
attemptAsyncResource.runInAsyncScope(() => {
testFinishCh.publish({ status, skipReason, errorMessage, isNew, isEfdRetry, isFlakyRetry: numAttempt > 0 })
})
return promise
} catch (err) {
errorCh.publish(err)
throw err
}
})
})
return promise
} catch (err) {
errorCh.publish(err)
throw err
}
})

@@ -187,3 +275,4 @@ shimmer.wrap(pl.prototype, 'runStep', runStep => function () {

const { status, skipReason, errorMessage } = isLatestVersion
? getStatusFromResultLatest(result) : getStatusFromResult(result)
? getStatusFromResultLatest(result)
: getStatusFromResult(result)

@@ -202,8 +291,2 @@ testFinishCh.publish({ isStep: true, status, skipReason, errorMessage })

function pickleHook (PickleRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return PickleRunner
}
const pl = PickleRunner.default

@@ -217,8 +300,2 @@

function testCaseHook (TestCaseRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return TestCaseRunner
}
const pl = TestCaseRunner.default

@@ -231,91 +308,45 @@

addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
function getWrappedStart (start, frameworkVersion, isParallel = false) {
return async function () {
if (!libraryConfigurationCh.hasSubscribers) {
return start.apply(this, arguments)
}
let errorSkippableRequest
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
const configurationResponse = await getChannelPromise(libraryConfigurationCh)
function getFilteredPickles (runtime, suitesToSkip) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
const testSuitePath = getTestSuitePath(test.uri, process.cwd())
isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled
isFlakyTestRetriesEnabled = configurationResponse.libraryConfig?.isFlakyTestRetriesEnabled
const isUnskippable = isMarkedAsUnskippable(test)
const isSkipped = suitesToSkip.includes(testSuitePath)
if (isSkipped && !isUnskippable) {
acc.skippedSuites.add(testSuitePath)
} else {
acc.picklesToRun.push(pickleId)
if (isEarlyFlakeDetectionEnabled) {
const knownTestsResponse = await getChannelPromise(knownTestsCh)
if (!knownTestsResponse.err) {
knownTests = knownTestsResponse.knownTests
} else {
isEarlyFlakeDetectionEnabled = false
}
}
return acc
}, { skippedSuites: new Set(), picklesToRun: [] })
}
function getPickleByFile (runtime) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
if (acc[test.uri]) {
acc[test.uri].push(test)
} else {
acc[test.uri] = [test]
}
return acc
}, {})
}
if (isSuitesSkippingEnabled) {
const skippableResponse = await getChannelPromise(skippableSuitesCh)
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.0.0'],
file: 'lib/runtime/index.js'
}, (runtimePackage, frameworkVersion) => {
shimmer.wrap(runtimePackage.default.prototype, 'start', start => async function () {
const asyncResource = new AsyncResource('bound-anonymous-fn')
let onDone
errorSkippableRequest = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
const configPromise = new Promise(resolve => {
onDone = resolve
})
if (!errorSkippableRequest) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
asyncResource.runInAsyncScope(() => {
itrConfigurationCh.publish({ onDone })
})
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
await configPromise
this.pickleIds = picklesToRun
const skippableSuitesPromise = new Promise(resolve => {
onDone = resolve
})
asyncResource.runInAsyncScope(() => {
skippableSuitesCh.publish({ onDone })
})
const skippableResponse = await skippableSuitesPromise
const err = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
let skippedSuites = []
let isSuitesSkipped = false
if (!err) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
this.pickleIds = picklesToRun
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
}
}

@@ -328,7 +359,11 @@

asyncResource.runInAsyncScope(() => {
if (isFlakyTestRetriesEnabled && !this.options.retry) {
this.options.retry = NUM_FAILED_TEST_RETRIES
}
sessionAsyncResource.runInAsyncScope(() => {
sessionStartCh.publish({ command, frameworkVersion })
})
if (!err && skippedSuites.length) {
if (!errorSkippableRequest && skippedSuites.length) {
itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })

@@ -339,2 +374,7 @@ }

let untestedCoverage
if (getCodeCoverageCh.hasSubscribers) {
untestedCoverage = await getChannelPromise(getCodeCoverageCh)
}
let testCodeCoverageLinesTotal

@@ -344,2 +384,5 @@

try {
if (untestedCoverage) {
originalCoverageMap.merge(fromCoverageMapToCoverage(untestedCoverage))
}
testCodeCoverageLinesTotal = originalCoverageMap.getCoverageSummary().lines.pct

@@ -353,3 +396,3 @@ } catch (e) {

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionFinishCh.publish({

@@ -361,9 +404,214 @@ status: success ? 'pass' : 'fail',

hasUnskippableSuites: isUnskippable,
hasForcedToRunSuites: isForcedToRun
hasForcedToRunSuites: isForcedToRun,
isEarlyFlakeDetectionEnabled,
isParallel
})
})
return success
})
}
}
function getWrappedRunTest (runTestFunction) {
return async function (pickleId) {
const test = this.eventDataCollector.getPickle(pickleId)
const testFileAbsolutePath = test.uri
const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd())
if (!pickleResultByFile[testFileAbsolutePath]) { // first test in suite
isUnskippable = isMarkedAsUnskippable(test)
isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath)
testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId })
}
let isNew = false
if (isEarlyFlakeDetectionEnabled) {
isNew = isNewTest(testSuitePath, test.name)
if (isNew) {
numRetriesByPickleId.set(pickleId, 0)
}
}
const runTestCaseResult = await runTestFunction.apply(this, arguments)
const testStatuses = lastStatusByPickleId.get(pickleId)
const lastTestStatus = testStatuses[testStatuses.length - 1]
// If it's a new test and it hasn't been skipped, we run it again
if (isEarlyFlakeDetectionEnabled && lastTestStatus !== 'skip' && isNew) {
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
numRetriesByPickleId.set(pickleId, retryIndex + 1)
await runTestFunction.apply(this, arguments)
}
}
let testStatus = lastTestStatus
if (isEarlyFlakeDetectionEnabled) {
/**
* If Early Flake Detection (EFD) is enabled the logic is as follows:
* - If all attempts for a test are failing, the test has failed and we will let the test process fail.
* - If just a single attempt passes, we will prevent the test process from failing.
* The rationale behind is the following: you may still be able to block your CI pipeline by gating
* on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
*/
testStatus = getTestStatusFromRetries(testStatuses)
if (testStatus === 'pass') {
this.success = true
}
}
if (!pickleResultByFile[testFileAbsolutePath]) {
pickleResultByFile[testFileAbsolutePath] = [testStatus]
} else {
pickleResultByFile[testFileAbsolutePath].push(testStatus)
}
// last test in suite
if (pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) {
const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testFileAbsolutePath])
if (global.__coverage__) {
const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)
testSuiteCodeCoverageCh.publish({
coverageFiles,
suiteFile: testFileAbsolutePath,
testSuitePath
})
// We need to reset coverage to get a code coverage per suite
// Before that, we preserve the original coverage
mergeCoverage(global.__coverage__, originalCoverageMap)
resetCoverage(global.__coverage__)
}
testSuiteFinishCh.publish({ status: testSuiteStatus, testSuitePath })
}
return runTestCaseResult
}
}
function getWrappedParseWorkerMessage (parseWorkerMessageFunction) {
return function (worker, message) {
// If the message is an array, it's a dd-trace message, so we need to stop cucumber processing,
// or cucumber will throw an error
// TODO: identify the message better
if (Array.isArray(message)) {
const [messageCode, payload] = message
if (messageCode === CUCUMBER_WORKER_TRACE_PAYLOAD_CODE) {
sessionAsyncResource.runInAsyncScope(() => {
workerReportTraceCh.publish(payload)
})
return
}
}
const { jsonEnvelope } = message
if (!jsonEnvelope) {
return parseWorkerMessageFunction.apply(this, arguments)
}
let parsed = jsonEnvelope
if (typeof parsed === 'string') {
try {
parsed = JSON.parse(jsonEnvelope)
} catch (e) {
// ignore errors and continue
return parseWorkerMessageFunction.apply(this, arguments)
}
}
if (parsed.testCaseStarted) {
const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId]
const pickle = this.eventDataCollector.getPickle(pickleId)
const testFileAbsolutePath = pickle.uri
// First test in suite
if (!pickleResultByFile[testFileAbsolutePath]) {
pickleResultByFile[testFileAbsolutePath] = []
testSuiteStartCh.publish({
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
const parseWorkerResponse = parseWorkerMessageFunction.apply(this, arguments)
// after calling `parseWorkerMessageFunction`, the test status can already be read
if (parsed.testCaseFinished) {
const { pickle, worstTestStepResult } =
this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId)
const { status } = getStatusFromResultLatest(worstTestStepResult)
const testFileAbsolutePath = pickle.uri
const finished = pickleResultByFile[testFileAbsolutePath]
finished.push(status)
if (finished.length === pickleByFile[testFileAbsolutePath].length) {
testSuiteFinishCh.publish({
status: getSuiteStatusFromTestStatuses(finished),
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
return parseWorkerResponse
}
}
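// Hedged sketch of the two message kinds distinguished above (payloads illustrative):
//   dd-trace worker message, consumed before cucumber tries to parse it:
//     [CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, { traces: [] }]
//   regular cucumber envelope, passed through to the original parseWorkerMessage:
//     { jsonEnvelope: '{"testCaseStarted":{"testCaseId":"abc"}}' }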
// Test start / finish for older versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
// Test start / finish for newer versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
// From 7.3.0 onwards, runPickle becomes runTestCase. Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/index.js'
}, (runtimePackage, frameworkVersion) => {
shimmer.wrap(runtimePackage.default.prototype, 'runTestCase', runTestCase => getWrappedRunTest(runTestCase))
shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion))
return runtimePackage
})
// Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.0.0 <7.3.0'],
file: 'lib/runtime/index.js'
}, (runtimePackage, frameworkVersion) => {
shimmer.wrap(runtimePackage.default.prototype, 'runPickle', runPickle => getWrappedRunTest(runPickle))
shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion))
return runtimePackage
})
// Only executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedGiveWork` generates suite start events and sets pickleResultByFile (used by suite finish events)
// `getWrappedParseWorkerMessage` generates suite finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=8.0.0'],
file: 'lib/runtime/parallel/coordinator.js'
}, (coordinatorPackage, frameworkVersion) => {
shimmer.wrap(coordinatorPackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion, true))
shimmer.wrap(
coordinatorPackage.default.prototype,
'parseWorkerMessage',
parseWorkerMessage => getWrappedParseWorkerMessage(parseWorkerMessage)
)
return coordinatorPackage
})

@@ -6,3 +6,2 @@ 'use strict'

const { addHook, channel } = require('./helpers/instrument')
const { AbortController } = require('node-abort-controller')

@@ -23,2 +22,19 @@ const handleChannel = channel('apm:express:request:handle')

const responseJsonChannel = channel('datadog:express:response:json:start')
function wrapResponseJson (json) {
return function wrappedJson (obj) {
if (responseJsonChannel.hasSubscribers) {
// backward compat as express 4.x supports deprecated 3.x signature
if (arguments.length === 2 && typeof arguments[1] !== 'number') {
obj = arguments[1]
}
responseJsonChannel.publish({ req: this.req, body: obj })
}
return json.apply(this, arguments)
}
}
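// The guard above covers the deprecated Express 3.x signatures (illustrative calls):
//   res.json(500, { error: 'boom' })  // status first: body is arguments[1]
//   res.json({ error: 'boom' }, 500)  // arguments[1] is a number: body stays arguments[0]
//   res.json({ error: 'boom' })       // modern 4.x call: body is arguments[0]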
addHook({ name: 'express', versions: ['>=4'] }, express => {

@@ -29,2 +45,5 @@ shimmer.wrap(express.application, 'handle', wrapHandle)

shimmer.wrap(express.response, 'json', wrapResponseJson)
shimmer.wrap(express.response, 'jsonp', wrapResponseJson)
return express

@@ -31,0 +50,0 @@ })

'use strict'
const shimmer = require('../../datadog-shimmer')
const { channel } = require('./helpers/instrument')
const { tracingChannel } = require('dc-polyfill')
const { createWrapFetch } = require('./helpers/fetch')
const startChannel = channel('apm:fetch:request:start')
const finishChannel = channel('apm:fetch:request:finish')
const errorChannel = channel('apm:fetch:request:error')
if (globalThis.fetch) {
const ch = tracingChannel('apm:fetch:request')
const wrapFetch = createWrapFetch(globalThis.Request, ch)
function wrapFetch (fetch, Request) {
if (typeof fetch !== 'function') return fetch
return function (input, init) {
if (!startChannel.hasSubscribers) return fetch.apply(this, arguments)
const req = new Request(input, init)
const headers = req.headers
const message = { req, headers }
return startChannel.runStores(message, () => {
// Request object is read-only so we need new objects to change headers.
arguments[0] = message.req
arguments[1] = { headers: message.headers }
return fetch.apply(this, arguments)
.then(
res => {
message.res = res
finishChannel.publish(message)
return res
},
err => {
if (err.name !== 'AbortError') {
message.error = err
errorChannel.publish(message)
}
finishChannel.publish(message)
throw err
}
)
})
}
globalThis.fetch = shimmer.wrap(fetch, wrapFetch(fetch))
}
if (globalThis.fetch) {
globalThis.fetch = shimmer.wrap(fetch, wrapFetch(fetch, globalThis.Request))
}
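
A hedged sketch of consuming the channels wired up above (subscriber code is illustrative; diagnostics channels are shared process-wide by name):

const { channel } = require('dc-polyfill')
channel('apm:fetch:request:start').subscribe(message => {
  // message.req and message.headers are fresh objects created by the wrapper,
  // so header edits here are applied when the fetch arguments are rebuilt
  message.headers.set('x-example-header', '1')
})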

@@ -1,2 +0,1 @@

'use strict'

@@ -276,3 +275,3 @@

arguments[lastIndex] = innerResource.bind(function (e) {
if (typeof e === 'object') { // fs.exists receives a boolean
if (e !== null && typeof e === 'object') { // fs.exists receives a boolean
errorChannel.publish(e)

@@ -279,0 +278,0 @@ }

@@ -14,3 +14,3 @@ 'use strict'

const receiveStartCh = channel(`apm:google-cloud-pubsub:receive:start`)
const receiveStartCh = channel('apm:google-cloud-pubsub:receive:start')
const receiveFinishCh = channel('apm:google-cloud-pubsub:receive:finish')

@@ -17,0 +17,0 @@ const receiveErrorCh = channel('apm:google-cloud-pubsub:receive:error')

'use strict'
const { AbortController } = require('node-abort-controller')
const {

@@ -6,0 +4,0 @@ addHook,

@@ -18,3 +18,3 @@ 'use strict'

function createWrapMakeRequest (type) {
function createWrapMakeRequest (type, hasPeer = false) {
return function wrapMakeRequest (makeRequest) {

@@ -24,3 +24,3 @@ return function (path) {

return callMethod(this, makeRequest, args, path, args[4], type)
return callMethod(this, makeRequest, args, path, args[4], type, hasPeer)
}

@@ -30,3 +30,3 @@ }

function createWrapLoadPackageDefinition () {
function createWrapLoadPackageDefinition (hasPeer = false) {
return function wrapLoadPackageDefinition (loadPackageDefinition) {

@@ -38,3 +38,3 @@ return function (packageDef) {

wrapPackageDefinition(result)
wrapPackageDefinition(result, hasPeer)

@@ -46,9 +46,7 @@ return result

function createWrapMakeClientConstructor () {
function createWrapMakeClientConstructor (hasPeer = false) {
return function wrapMakeClientConstructor (makeClientConstructor) {
return function (methods) {
const ServiceClient = makeClientConstructor.apply(this, arguments)
wrapClientConstructor(ServiceClient, methods)
wrapClientConstructor(ServiceClient, methods, hasPeer)
return ServiceClient

@@ -59,9 +57,9 @@ }

function wrapPackageDefinition (def) {
function wrapPackageDefinition (def, hasPeer = false) {
for (const name in def) {
if (def[name].format) continue
if (def[name].service && def[name].prototype) {
wrapClientConstructor(def[name], def[name].service)
wrapClientConstructor(def[name], def[name].service, hasPeer)
} else {
wrapPackageDefinition(def[name])
wrapPackageDefinition(def[name], hasPeer)
}

@@ -71,3 +69,3 @@ }

function wrapClientConstructor (ServiceClient, methods) {
function wrapClientConstructor (ServiceClient, methods, hasPeer = false) {
const proto = ServiceClient.prototype

@@ -86,7 +84,7 @@

if (methods[name]) {
proto[name] = wrapMethod(proto[name], path, type)
proto[name] = wrapMethod(proto[name], path, type, hasPeer)
}
if (originalName) {
proto[originalName] = wrapMethod(proto[originalName], path, type)
proto[originalName] = wrapMethod(proto[originalName], path, type, hasPeer)
}

@@ -96,3 +94,3 @@ })

function wrapMethod (method, path, type) {
function wrapMethod (method, path, type, hasPeer) {
if (typeof method !== 'function' || patched.has(method)) {

@@ -104,4 +102,3 @@ return method

const args = ensureMetadata(this, arguments, 1)
return callMethod(this, method, args, path, args[1], type)
return callMethod(this, method, args, path, args[1], type, hasPeer)
}

@@ -130,3 +127,16 @@

function createWrapEmit (ctx) {
function createWrapEmit (ctx, hasPeer = false) {
const onStatusWithPeer = function (ctx, arg1, thisArg) {
ctx.result = arg1
ctx.peer = thisArg.getPeer()
finishChannel.publish(ctx)
}
const onStatusWithoutPeer = function (ctx, arg1, thisArg) {
ctx.result = arg1
finishChannel.publish(ctx)
}
const onStatus = hasPeer ? onStatusWithPeer : onStatusWithoutPeer
return function wrapEmit (emit) {

@@ -140,4 +150,3 @@ return function (event, arg1) {

case 'status':
ctx.result = arg1
finishChannel.publish(ctx)
onStatus(ctx, arg1, this)
break

@@ -153,3 +162,3 @@ }

function callMethod (client, method, args, path, metadata, type) {
function callMethod (client, method, args, path, metadata, type, hasPeer = false) {
if (!startChannel.hasSubscribers) return method.apply(client, args)

@@ -175,3 +184,3 @@

if (call && typeof call.emit === 'function') {
shimmer.wrap(call, 'emit', createWrapEmit(ctx))
shimmer.wrap(call, 'emit', createWrapEmit(ctx, hasPeer))
}

@@ -240,20 +249,22 @@

function patch (grpc) {
const proto = grpc.Client.prototype
function patch (hasPeer = false) {
return function patch (grpc) {
const proto = grpc.Client.prototype
instances.set(proto, grpc)
instances.set(proto, grpc)
shimmer.wrap(proto, 'makeBidiStreamRequest', createWrapMakeRequest(types.bidi))
shimmer.wrap(proto, 'makeClientStreamRequest', createWrapMakeRequest(types.clientStream))
shimmer.wrap(proto, 'makeServerStreamRequest', createWrapMakeRequest(types.serverStream))
shimmer.wrap(proto, 'makeUnaryRequest', createWrapMakeRequest(types.unary))
shimmer.wrap(proto, 'makeBidiStreamRequest', createWrapMakeRequest(types.bidi, hasPeer))
shimmer.wrap(proto, 'makeClientStreamRequest', createWrapMakeRequest(types.clientStream, hasPeer))
shimmer.wrap(proto, 'makeServerStreamRequest', createWrapMakeRequest(types.serverStream, hasPeer))
shimmer.wrap(proto, 'makeUnaryRequest', createWrapMakeRequest(types.unary, hasPeer))
return grpc
return grpc
}
}
if (nodeMajor <= 14) {
addHook({ name: 'grpc', versions: ['>=1.24.3'] }, patch)
addHook({ name: 'grpc', versions: ['>=1.24.3'] }, patch(true))
addHook({ name: 'grpc', versions: ['>=1.24.3'], file: 'src/client.js' }, client => {
shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor())
shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(true))

@@ -264,9 +275,18 @@ return client

addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3'] }, patch)
addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3 <1.1.4'] }, patch(false))
addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3'], file: 'build/src/make-client.js' }, client => {
shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor())
shimmer.wrap(client, 'loadPackageDefinition', createWrapLoadPackageDefinition())
addHook({ name: '@grpc/grpc-js', versions: ['>=1.0.3 <1.1.4'], file: 'build/src/make-client.js' }, client => {
shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(false))
shimmer.wrap(client, 'loadPackageDefinition', createWrapLoadPackageDefinition(false))
return client
})
addHook({ name: '@grpc/grpc-js', versions: ['>=1.1.4'] }, patch(true))
addHook({ name: '@grpc/grpc-js', versions: ['>=1.1.4'], file: 'build/src/make-client.js' }, client => {
shimmer.wrap(client, 'makeClientConstructor', createWrapMakeClientConstructor(true))
shimmer.wrap(client, 'loadPackageDefinition', createWrapLoadPackageDefinition(true))
return client
})

@@ -95,3 +95,5 @@ 'use strict'

break
case 'finish':
// Streams are always cancelled before `finish` since 1.10.0 so we have
// to use `prefinish` instead to avoid cancellation false positives.
case 'prefinish':
if (call.status) {

@@ -98,0 +100,0 @@ updateChannel.publish(call.status)

@@ -41,3 +41,3 @@ 'use strict'

return function (events, method, options) {
if (typeof events === 'object') {
if (events !== null && typeof events === 'object') {
arguments[0] = wrapEvents(events)

@@ -44,0 +44,0 @@ } else {

@@ -14,5 +14,10 @@ 'use strict'

*/
function Hook (modules, onrequire) {
if (!(this instanceof Hook)) return new Hook(modules, onrequire)
function Hook (modules, hookOptions, onrequire) {
if (!(this instanceof Hook)) return new Hook(modules, hookOptions, onrequire)
if (typeof hookOptions === 'function') {
onrequire = hookOptions
hookOptions = {}
}
this._patched = Object.create(null)
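// Both call styles remain valid after this change (sketch; the options object
// is forwarded to import-in-the-middle as shown below):
//   Hook(['express'], moduleExports => moduleExports)                     // options omitted
//   Hook(['vitest'], { internals: true }, moduleExports => moduleExports) // with options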

@@ -32,3 +37,3 @@

this._ritmHook = ritm(modules, {}, safeHook)
this._iitmHook = iitm(modules, {}, (moduleExports, moduleName, moduleBaseDir) => {
this._iitmHook = iitm(modules, hookOptions, (moduleExports, moduleName, moduleBaseDir) => {
// TODO: Move this logic to import-in-the-middle and only do it for CommonJS

@@ -35,0 +40,0 @@ // modules and not ESM. In the meantime, all the modules we instrument are

@@ -5,2 +5,3 @@ 'use strict'

'@apollo/server': () => require('../apollo-server'),
'@apollo/gateway': () => require('../apollo'),
'apollo-server-core': () => require('../apollo-server-core'),

@@ -26,32 +27,33 @@ '@aws-sdk/smithy-client': () => require('../aws-sdk'),

'@smithy/smithy-client': () => require('../aws-sdk'),
'aerospike': () => require('../aerospike'),
'amqp10': () => require('../amqp10'),
'amqplib': () => require('../amqplib'),
'@vitest/runner': { esmFirst: true, fn: () => require('../vitest') },
aerospike: () => require('../aerospike'),
amqp10: () => require('../amqp10'),
amqplib: () => require('../amqplib'),
'aws-sdk': () => require('../aws-sdk'),
'bluebird': () => require('../bluebird'),
bluebird: () => require('../bluebird'),
'body-parser': () => require('../body-parser'),
'bunyan': () => require('../bunyan'),
bunyan: () => require('../bunyan'),
'cassandra-driver': () => require('../cassandra-driver'),
'child_process': () => require('../child-process'),
'connect': () => require('../connect'),
'cookie': () => require('../cookie'),
child_process: () => require('../child_process'),
connect: () => require('../connect'),
cookie: () => require('../cookie'),
'cookie-parser': () => require('../cookie-parser'),
'couchbase': () => require('../couchbase'),
'crypto': () => require('../crypto'),
'cypress': () => require('../cypress'),
'dns': () => require('../dns'),
'elasticsearch': () => require('../elasticsearch'),
'express': () => require('../express'),
couchbase: () => require('../couchbase'),
crypto: () => require('../crypto'),
cypress: () => require('../cypress'),
dns: () => require('../dns'),
elasticsearch: () => require('../elasticsearch'),
express: () => require('../express'),
'express-mongo-sanitize': () => require('../express-mongo-sanitize'),
'fastify': () => require('../fastify'),
fastify: () => require('../fastify'),
'find-my-way': () => require('../find-my-way'),
'fs': () => require('../fs'),
fs: () => require('../fs'),
'generic-pool': () => require('../generic-pool'),
'graphql': () => require('../graphql'),
'grpc': () => require('../grpc'),
'hapi': () => require('../hapi'),
'http': () => require('../http'),
'http2': () => require('../http2'),
'https': () => require('../http'),
'ioredis': () => require('../ioredis'),
graphql: () => require('../graphql'),
grpc: () => require('../grpc'),
hapi: () => require('../hapi'),
http: () => require('../http'),
http2: () => require('../http2'),
https: () => require('../http'),
ioredis: () => require('../ioredis'),
'jest-circus': () => require('../jest'),

@@ -61,24 +63,26 @@ 'jest-config': () => require('../jest'),

'jest-environment-jsdom': () => require('../jest'),
'jest-jasmine2': () => require('../jest'),
'jest-runtime': () => require('../jest'),
'jest-worker': () => require('../jest'),
'knex': () => require('../knex'),
'koa': () => require('../koa'),
knex: () => require('../knex'),
koa: () => require('../koa'),
'koa-router': () => require('../koa'),
'kafkajs': () => require('../kafkajs'),
'ldapjs': () => require('../ldapjs'),
kafkajs: () => require('../kafkajs'),
ldapjs: () => require('../ldapjs'),
'limitd-client': () => require('../limitd-client'),
'mariadb': () => require('../mariadb'),
'memcached': () => require('../memcached'),
lodash: () => require('../lodash'),
mariadb: () => require('../mariadb'),
memcached: () => require('../memcached'),
'microgateway-core': () => require('../microgateway-core'),
'mocha': () => require('../mocha'),
mocha: () => require('../mocha'),
'mocha-each': () => require('../mocha'),
'moleculer': () => require('../moleculer'),
'mongodb': () => require('../mongodb'),
moleculer: () => require('../moleculer'),
mongodb: () => require('../mongodb'),
'mongodb-core': () => require('../mongodb-core'),
'mongoose': () => require('../mongoose'),
'mysql': () => require('../mysql'),
'mysql2': () => require('../mysql2'),
'net': () => require('../net'),
'next': () => require('../next'),
'node:child_process': () => require('../child-process'),
mongoose: () => require('../mongoose'),
mquery: () => require('../mquery'),
mysql: () => require('../mysql'),
mysql2: () => require('../mysql2'),
net: () => require('../net'),
next: () => require('../next'),
'node:child_process': () => require('../child_process'),
'node:crypto': () => require('../crypto'),

@@ -90,24 +94,29 @@ 'node:dns': () => require('../dns'),

'node:net': () => require('../net'),
'oracledb': () => require('../oracledb'),
'openai': () => require('../openai'),
'paperplane': () => require('../paperplane'),
nyc: () => require('../nyc'),
oracledb: () => require('../oracledb'),
openai: () => require('../openai'),
paperplane: () => require('../paperplane'),
'passport-http': () => require('../passport-http'),
'passport-local': () => require('../passport-local'),
'pg': () => require('../pg'),
'pino': () => require('../pino'),
pg: () => require('../pg'),
pino: () => require('../pino'),
'pino-pretty': () => require('../pino'),
'playwright': () => require('../playwright'),
playwright: () => require('../playwright'),
'promise-js': () => require('../promise-js'),
'promise': () => require('../promise'),
'q': () => require('../q'),
'qs': () => require('../qs'),
'redis': () => require('../redis'),
'restify': () => require('../restify'),
'rhea': () => require('../rhea'),
'router': () => require('../router'),
'sharedb': () => require('../sharedb'),
'sequelize': () => require('../sequelize'),
'tedious': () => require('../tedious'),
'when': () => require('../when'),
'winston': () => require('../winston')
promise: () => require('../promise'),
q: () => require('../q'),
qs: () => require('../qs'),
redis: () => require('../redis'),
restify: () => require('../restify'),
rhea: () => require('../rhea'),
router: () => require('../router'),
'selenium-webdriver': () => require('../selenium'),
sequelize: () => require('../sequelize'),
sharedb: () => require('../sharedb'),
tedious: () => require('../tedious'),
undici: () => require('../undici'),
vitest: { esmFirst: true, fn: () => require('../vitest') },
when: () => require('../when'),
winston: () => require('../winston'),
workerpool: () => require('../mocha')
}
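// Editorial note on the map above: most entries are plain `() => require(...)`
// loader functions, but an entry may instead be an object carrying loader
// options, as with `vitest` ({ esmFirst: true, fn }). A minimal sketch of how
// the two shapes can be normalized (mirroring the Hook() setup later in this
// diff; `resolveHook` is an illustrative helper, not part of the package):
function resolveHook (entry) {
  if (typeof entry === 'object') {
    // object form: loader options plus the actual hook function
    return { options: { internals: entry.esmFirst }, fn: entry.fn }
  }
  // plain form: no options, the entry is the hook function itself
  return { options: {}, fn: entry }
}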

@@ -20,6 +20,7 @@ 'use strict'

* @param {string[]} args.versions array of semver range strings
* @param {string} args.file path to file within package to instrument?
* @param {string} args.file path to file within package to instrument
* @param {string} args.filePattern pattern to match files within package to instrument
* @param Function hook
*/
exports.addHook = function addHook ({ name, versions, file }, hook) {
exports.addHook = function addHook ({ name, versions, file, filePattern }, hook) {
if (typeof name === 'string') {

@@ -33,3 +34,3 @@ name = [name]

}
instrumentations[val].push({ name: val, versions, file, hook })
instrumentations[val].push({ name: val, versions, file, filePattern, hook })
}

@@ -61,3 +62,3 @@ }

Object.defineProperties(bound, {
'length': {
length: {
configurable: true,

@@ -68,3 +69,3 @@ enumerable: false,

},
'asyncResource': {
asyncResource: {
configurable: true,

@@ -71,0 +72,0 @@ enumerable: true,

@@ -9,4 +9,9 @@ 'use strict'

const log = require('../../../dd-trace/src/log')
const checkRequireCache = require('../check_require_cache')
const telemetry = require('../../../dd-trace/src/telemetry/init-telemetry')
const { DD_TRACE_DISABLED_INSTRUMENTATIONS = '' } = process.env
const {
DD_TRACE_DISABLED_INSTRUMENTATIONS = '',
DD_TRACE_DEBUG = ''
} = process.env

@@ -28,13 +33,33 @@ const hooks = require('./hooks')

if (!disabledInstrumentations.has('process')) {
require('../process')
}
const HOOK_SYMBOL = Symbol('hookExportsMap')
if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') {
checkRequireCache.checkForRequiredModules()
setImmediate(checkRequireCache.checkForPotentialConflicts)
}
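// (Assumption from the helper names, not stated in this diff: these debug-only
// checks inspect Node's require cache, warning about instrumentable modules
// loaded before dd-trace and about potentially conflicting copies.)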
const seenCombo = new Set()
// TODO: make this more efficient
for (const packageName of names) {
if (disabledInstrumentations.has(packageName)) continue
Hook([packageName], (moduleExports, moduleName, moduleBaseDir, moduleVersion) => {
const hookOptions = {}
let hook = hooks[packageName]
if (typeof hook === 'object') {
hookOptions.internals = hook.esmFirst
hook = hook.fn
}
Hook([packageName], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion) => {
moduleName = moduleName.replace(pathSepExpr, '/')
// This executes the integration file thus adding its entries to `instrumentations`
hooks[packageName]()
hook()

@@ -45,4 +70,9 @@ if (!instrumentations[packageName]) {

for (const { name, file, versions, hook } of instrumentations[packageName]) {
const namesAndSuccesses = {}
for (const { name, file, versions, hook, filePattern } of instrumentations[packageName]) {
let fullFilePattern = filePattern
const fullFilename = filename(name, file)
if (fullFilePattern) {
fullFilePattern = filename(name, fullFilePattern)
}

@@ -55,9 +85,29 @@ // Create a WeakMap associated with the hook function so that patches on the same moduleExport only happens once

}
let matchesFile = false
if (moduleName === fullFilename) {
const version = moduleVersion || getVersion(moduleBaseDir)
matchesFile = moduleName === fullFilename
if (fullFilePattern) {
// Some libraries include a hash in their filenames when installed,
// so our instrumentation has to include a '.*' in the pattern to match them across versions.
matchesFile = matchesFile || new RegExp(fullFilePattern).test(moduleName)
}
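// Illustrative match (hypothetical package): with name 'somepkg', file
// 'esm/index.js' and filePattern 'esm/index.*\.js', a hashed build file such
// as 'somepkg/esm/index-B3xq2a.js' still matches via the RegExp branch above.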
if (matchesFile) {
let version = moduleVersion
try {
version = version || getVersion(moduleBaseDir)
} catch (e) {
log.error(`Error getting version for "${name}": ${e.message}`)
log.error(e)
continue
}
if (typeof namesAndSuccesses[`${name}@${version}`] === 'undefined') {
namesAndSuccesses[`${name}@${version}`] = false
}
if (matchVersion(version, versions)) {
// Check if the hook already has a set moduleExport
if (hook[HOOK_SYMBOL].has(moduleExports)) {
namesAndSuccesses[`${name}@${version}`] = true
return moduleExports

@@ -74,7 +124,26 @@ }

} catch (e) {
log.error(e)
log.info('Error during ddtrace instrumentation of application, aborting.')
log.info(e)
telemetry('error', [
`error_type:${e.constructor.name}`,
`integration:${name}`,
`integration_version:${version}`
])
}
namesAndSuccesses[`${name}@${version}`] = true
}
}
}
for (const nameVersion of Object.keys(namesAndSuccesses)) {
const [name, version] = nameVersion.split('@')
const success = namesAndSuccesses[nameVersion]
if (!success && !seenCombo.has(nameVersion)) {
telemetry('abort.integration', [
`integration:${name}`,
`integration_version:${version}`
])
log.info(`Found incompatible integration version: ${nameVersion}`)
seenCombo.add(nameVersion)
}
}

@@ -81,0 +150,0 @@ return moduleExports

@@ -46,4 +46,6 @@ 'use strict'

const ctx = { args, http }
const abortController = new AbortController()
const ctx = { args, http, abortController }
return startChannel.runStores(ctx, () => {

@@ -111,2 +113,6 @@ let finished = false

if (abortController.signal.aborted) {
req.destroy(abortController.signal.reason || new Error('Aborted'))
}
return req

@@ -137,3 +143,3 @@ } catch (e) {

function combineOptions (inputURL, inputOptions) {
if (typeof inputOptions === 'object') {
if (inputOptions !== null && typeof inputOptions === 'object') {
return Object.assign(inputURL || {}, inputOptions)

@@ -161,2 +167,3 @@ } else {

} catch (e) {
// eslint-disable-next-line n/no-deprecated-api
return url.parse(inputURL)

@@ -163,0 +170,0 @@ }

'use strict'
const { AbortController } = require('node-abort-controller') // AbortController is not available in node <15
const {

@@ -14,3 +13,5 @@ channel,

const finishServerCh = channel('apm:http:server:request:finish')
const startWriteHeadCh = channel('apm:http:server:response:writeHead:start')
const finishSetHeaderCh = channel('datadog:http:server:response:set-header:finish')
const startSetHeaderCh = channel('datadog:http:server:response:set-header:start')

@@ -25,2 +26,11 @@ const requestFinishedSet = new WeakSet()

shimmer.wrap(http.Server.prototype, 'emit', wrapEmit)
shimmer.wrap(http.ServerResponse.prototype, 'writeHead', wrapWriteHead)
shimmer.wrap(http.ServerResponse.prototype, 'write', wrapWrite)
shimmer.wrap(http.ServerResponse.prototype, 'end', wrapEnd)
shimmer.wrap(http.ServerResponse.prototype, 'setHeader', wrapSetHeader)
shimmer.wrap(http.ServerResponse.prototype, 'removeHeader', wrapAppendOrRemoveHeader)
// Added in node v16.17.0
if (http.ServerResponse.prototype.appendHeader) {
shimmer.wrap(http.ServerResponse.prototype, 'appendHeader', wrapAppendOrRemoveHeader)
}
return http

@@ -67,5 +77,3 @@ })

}
if (finishSetHeaderCh.hasSubscribers) {
wrapSetHeader(res)
}
return emit.apply(this, arguments)

@@ -84,10 +92,136 @@ } catch (err) {

function wrapSetHeader (res) {
shimmer.wrap(res, 'setHeader', setHeader => {
return function (name, value) {
const setHeaderResult = setHeader.apply(this, arguments)
finishSetHeaderCh.publish({ name, value, res })
return setHeaderResult
function wrapWriteHead (writeHead) {
return function wrappedWriteHead (statusCode, reason, obj) {
if (!startWriteHeadCh.hasSubscribers) {
return writeHead.apply(this, arguments)
}
})
const abortController = new AbortController()
if (typeof reason !== 'string') {
obj ??= reason
}
// support writeHead(200, ['key1', 'val1', 'key2', 'val2'])
if (Array.isArray(obj)) {
const headers = {}
for (let i = 0; i < obj.length; i += 2) {
headers[obj[i]] = obj[i + 1]
}
obj = headers
}
// this doesn't support explicit duplicate headers, but it's an edge case
const responseHeaders = Object.assign(this.getHeaders(), obj)
startWriteHeadCh.publish({
req: this.req,
res: this,
abortController,
statusCode,
responseHeaders
})
if (abortController.signal.aborted) {
return this
}
return writeHead.apply(this, arguments)
}
}
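// Worked example of the flat-array branch above:
//   res.writeHead(200, ['content-type', 'text/plain', 'x-req-id', '42'])
// is folded into { 'content-type': 'text/plain', 'x-req-id': '42' } before
// being merged with res.getHeaders() and published on the start channel.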
function wrapWrite (write) {
return function wrappedWrite () {
if (!startWriteHeadCh.hasSubscribers) {
return write.apply(this, arguments)
}
const abortController = new AbortController()
const responseHeaders = this.getHeaders()
startWriteHeadCh.publish({
req: this.req,
res: this,
abortController,
statusCode: this.statusCode,
responseHeaders
})
if (abortController.signal.aborted) {
return true
}
return write.apply(this, arguments)
}
}
function wrapSetHeader (setHeader) {
return function wrappedSetHeader (name, value) {
if (!startSetHeaderCh.hasSubscribers && !finishSetHeaderCh.hasSubscribers) {
return setHeader.apply(this, arguments)
}
if (startSetHeaderCh.hasSubscribers) {
const abortController = new AbortController()
startSetHeaderCh.publish({ res: this, abortController })
if (abortController.signal.aborted) {
return
}
}
const setHeaderResult = setHeader.apply(this, arguments)
if (finishSetHeaderCh.hasSubscribers) {
finishSetHeaderCh.publish({ name, value, res: this })
}
return setHeaderResult
}
}
function wrapAppendOrRemoveHeader (originalMethod) {
return function wrappedAppendOrRemoveHeader () {
if (!startSetHeaderCh.hasSubscribers) {
return originalMethod.apply(this, arguments)
}
const abortController = new AbortController()
startSetHeaderCh.publish({ res: this, abortController })
if (abortController.signal.aborted) {
return this
}
return originalMethod.apply(this, arguments)
}
}
function wrapEnd (end) {
return function wrappedEnd () {
if (!startWriteHeadCh.hasSubscribers) {
return end.apply(this, arguments)
}
const abortController = new AbortController()
const responseHeaders = this.getHeaders()
startWriteHeadCh.publish({
req: this.req,
res: this,
abortController,
statusCode: this.statusCode,
responseHeaders
})
if (abortController.signal.aborted) {
return this
}
return end.apply(this, arguments)
}
}
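// A minimal sketch of the consumer side of the abortController handshake used
// by the wrappers above (hypothetical subscriber and blocking flag, not part
// of this diff). Calling abort() inside the 'start' publication makes the
// wrapper skip the underlying writeHead/write/end call:
const dc = require('dc-polyfill')
dc.subscribe('apm:http:server:response:writeHead:start', ({ res, abortController }) => {
  if (res.req && res.req.blocked) {
    abortController.abort() // the wrapper then returns early instead of writing
  }
})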

@@ -12,3 +12,7 @@ 'use strict'

getTestSuitePath,
getTestParametersString
getTestParametersString,
addEfdStringToTestName,
removeEfdStringFromTestName,
getIsFaultyEarlyFlakeDetection,
NUM_FAILED_TEST_RETRIES
} = require('../../dd-trace/src/plugins/util/test')

@@ -20,3 +24,2 @@ const {

} = require('../../datadog-plugin-jest/src/util')
const { DD_MAJOR } = require('../../../version')

@@ -42,7 +45,18 @@ const testSessionStartCh = channel('ci:jest:session:start')

const skippableSuitesCh = channel('ci:jest:test-suite:skippable')
const jestItrConfigurationCh = channel('ci:jest:itr-configuration')
const libraryConfigurationCh = channel('ci:jest:library-configuration')
const knownTestsCh = channel('ci:jest:known-tests')
const itrSkippedSuitesCh = channel('ci:jest:itr:skipped-suites')
// Message sent by jest's main process to workers to run a test suite (=test file)
// https://github.com/jestjs/jest/blob/1d682f21c7a35da4d3ab3a1436a357b980ebd0fa/packages/jest-worker/src/types.ts#L37
const CHILD_MESSAGE_CALL = 1
// Maximum time we'll wait for the tracer to flush
const FLUSH_TIMEOUT = 10000
// eslint-disable-next-line
// https://github.com/jestjs/jest/blob/41f842a46bb2691f828c3a5f27fc1d6290495b82/packages/jest-circus/src/types.ts#L9C8-L9C54
const RETRY_TIMES = Symbol.for('RETRY_TIMES')
let skippableSuites = []
let knownTests = {}
let isCodeCoverageEnabled = false

@@ -55,15 +69,14 @@ let isSuitesSkippingEnabled = false

let hasForcedToRunSuites = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let earlyFlakeDetectionFaultyThreshold = 30
let isEarlyFlakeDetectionFaulty = false
let hasFilteredSkippableSuites = false
const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
const specStatusToTestStatus = {
'pending': 'skip',
'disabled': 'skip',
'todo': 'skip',
'passed': 'pass',
'failed': 'fail'
}
const asyncResources = new WeakMap()
const originalTestFns = new WeakMap()
const retriedTestsToNumAttempts = new Map()
const newTestsTestStatuses = new Map()

@@ -97,2 +110,9 @@ // based on https://github.com/facebook/jest/blob/main/packages/jest-circus/src/formatNodeAssertErrors.ts#L41

function getEfdStats (testStatuses) {
return testStatuses.reduce((acc, testStatus) => {
acc[testStatus]++
return acc
}, { pass: 0, fail: 0 })
}
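// e.g. getEfdStats(['fail', 'pass', 'fail']) -> { pass: 1, fail: 2 }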
function getWrappedEnvironment (BaseEnvironment, jestVersion) {

@@ -107,6 +127,75 @@ return class DatadogEnvironment extends BaseEnvironment {

this.global._ddtrace = global._ddtrace
this.hasSnapshotTests = undefined
this.displayName = config.projectConfig?.displayName?.name
this.testEnvironmentOptions = getTestEnvironmentOptions(config)
const repositoryRoot = this.testEnvironmentOptions._ddRepositoryRoot
if (repositoryRoot) {
this.testSourceFile = getTestSuitePath(context.testPath, repositoryRoot)
}
this.isEarlyFlakeDetectionEnabled = this.testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled
this.isFlakyTestRetriesEnabled = this.testEnvironmentOptions._ddIsFlakyTestRetriesEnabled
if (this.isEarlyFlakeDetectionEnabled) {
const hasKnownTests = !!knownTests.jest
earlyFlakeDetectionNumRetries = this.testEnvironmentOptions._ddEarlyFlakeDetectionNumRetries
try {
this.knownTestsForThisSuite = hasKnownTests
? (knownTests.jest[this.testSuite] || [])
: this.getKnownTestsForSuite(this.testEnvironmentOptions._ddKnownTests)
} catch (e) {
// If there has been an error parsing the tests, we'll disable Early Flake Detection
this.isEarlyFlakeDetectionEnabled = false
}
}
if (this.isFlakyTestRetriesEnabled) {
const currentNumRetries = this.global[RETRY_TIMES]
if (!currentNumRetries) {
this.global[RETRY_TIMES] = NUM_FAILED_TEST_RETRIES
}
}
}
getHasSnapshotTests () {
if (this.hasSnapshotTests !== undefined) {
return this.hasSnapshotTests
}
let hasSnapshotTests = true
try {
const { _snapshotData } = this.getVmContext().expect.getState().snapshotState
hasSnapshotTests = Object.keys(_snapshotData).length > 0
} catch (e) {
// if we can't be sure, we'll err on the side of caution and assume it has snapshots
}
this.hasSnapshotTests = hasSnapshotTests
return hasSnapshotTests
}
// Function that receives a list of known tests for a test service and
// returns the ones that belong to the current suite
getKnownTestsForSuite (knownTests) {
if (this.knownTestsForThisSuite) {
return this.knownTestsForThisSuite
}
let knownTestsForSuite = knownTests
// If jest is using workers, known tests are serialized to json.
// If jest runs in band, they are not.
if (typeof knownTestsForSuite === 'string') {
knownTestsForSuite = JSON.parse(knownTestsForSuite)
}
return knownTestsForSuite
}
// At the `add_test` event we don't have the test object yet, so
// we use its describe block to get the full name
getTestNameFromAddTestEvent (event, state) {
const describeSuffix = getJestTestName(state.currentDescribeBlock)
const fullTestName = describeSuffix ? `${describeSuffix} ${event.testName}` : event.testName
return removeEfdStringFromTestName(fullTestName)
}
async handleTestEvent (event, state) {

@@ -133,2 +222,4 @@ if (super.handleTestEvent) {

if (event.name === 'test_start') {
let isNewTest = false
let numEfdRetry = null
const testParameters = getTestParametersString(this.nameToParams, event.test.name)

@@ -139,9 +230,25 @@ // Async resource for this test is created here

asyncResources.set(event.test, asyncResource)
const testName = getJestTestName(event.test)
if (this.isEarlyFlakeDetectionEnabled) {
const originalTestName = removeEfdStringFromTestName(testName)
isNewTest = retriedTestsToNumAttempts.has(originalTestName)
if (isNewTest) {
numEfdRetry = retriedTestsToNumAttempts.get(originalTestName)
retriedTestsToNumAttempts.set(originalTestName, numEfdRetry + 1)
}
}
const isJestRetry = event.test?.invocations > 1
asyncResource.runInAsyncScope(() => {
testStartCh.publish({
name: getJestTestName(event.test),
name: removeEfdStringFromTestName(testName),
suite: this.testSuite,
testSourceFile: this.testSourceFile,
runner: 'jest-circus',
displayName: this.displayName,
testParameters,
frameworkVersion: jestVersion
frameworkVersion: jestVersion,
isNew: isNewTest,
isEfdRetry: numEfdRetry > 0,
isJestRetry
})

@@ -152,2 +259,26 @@ originalTestFns.set(event.test, event.test.fn)

}
if (event.name === 'add_test') {
if (this.isEarlyFlakeDetectionEnabled) {
const testName = this.getTestNameFromAddTestEvent(event, state)
const isNew = !this.knownTestsForThisSuite?.includes(testName)
const isSkipped = event.mode === 'todo' || event.mode === 'skip'
if (isNew && !isSkipped && !retriedTestsToNumAttempts.has(testName)) {
retriedTestsToNumAttempts.set(testName, 0)
// Retrying snapshots has proven to be problematic, so we'll skip them for now
// We'll still detect new tests, but we won't retry them.
// TODO: do not bail out of EFD with the whole test suite
if (this.getHasSnapshotTests()) {
log.warn('Early flake detection is disabled for suites with snapshots')
return
}
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
if (this.global.test) {
this.global.test(addEfdStringToTestName(event.testName, retryIndex), event.fn, event.timeout)
} else {
log.error('Early flake detection could not retry test because global.test is undefined')
}
}
}
}
}
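// Each retry registered above is an ordinary extra test: its name is tagged
// via addEfdStringToTestName so attempts can be told apart at runtime, and the
// tag is stripped again with removeEfdStringFromTestName before reporting.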
if (event.name === 'test_done') {

@@ -168,2 +299,15 @@ const asyncResource = asyncResources.get(event.test)

event.test.fn = originalTestFns.get(event.test)
// We'll store the test statuses of the retries
if (this.isEarlyFlakeDetectionEnabled) {
const testName = getJestTestName(event.test)
const originalTestName = removeEfdStringFromTestName(testName)
const isNewTest = retriedTestsToNumAttempts.has(originalTestName)
if (isNewTest) {
if (newTestsTestStatuses.has(originalTestName)) {
newTestsTestStatuses.get(originalTestName).push(status)
} else {
newTestsTestStatuses.set(originalTestName, [status])
}
}
}
})

@@ -177,3 +321,5 @@ }

suite: this.testSuite,
testSourceFile: this.testSourceFile,
runner: 'jest-circus',
displayName: this.displayName,
frameworkVersion: jestVersion,

@@ -198,2 +344,19 @@ testStartLine: getTestLineStart(event.test.asyncError, this.testSuite)

function applySuiteSkipping (originalTests, rootDir, frameworkVersion) {
const jestSuitesToRun = getJestSuitesToRun(skippableSuites, originalTests, rootDir || process.cwd())
hasFilteredSkippableSuites = true
log.debug(
() => `${jestSuitesToRun.suitesToRun.length} out of ${originalTests.length} suites are going to run.`
)
hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== originalTests.length
numSkippedSuites = jestSuitesToRun.skippedSuites.length
itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
return jestSuitesToRun.suitesToRun
}
addHook({

@@ -209,3 +372,48 @@ name: 'jest-environment-node',

function getWrappedScheduleTests (scheduleTests, frameworkVersion) {
return async function (tests) {
if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
return scheduleTests.apply(this, arguments)
}
const [test] = tests
const rootDir = test?.context?.config?.rootDir
arguments[0] = applySuiteSkipping(tests, rootDir, frameworkVersion)
return scheduleTests.apply(this, arguments)
}
}
addHook({
name: '@jest/core',
file: 'build/TestScheduler.js',
versions: ['>=27.0.0']
}, (testSchedulerPackage, frameworkVersion) => {
const oldCreateTestScheduler = testSchedulerPackage.createTestScheduler
const newCreateTestScheduler = async function () {
if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
return oldCreateTestScheduler.apply(this, arguments)
}
// If suite skipping is enabled and has not filtered skippable suites yet, we'll attempt to do it
const scheduler = await oldCreateTestScheduler.apply(this, arguments)
shimmer.wrap(scheduler, 'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion))
return scheduler
}
testSchedulerPackage.createTestScheduler = newCreateTestScheduler
return testSchedulerPackage
})
addHook({
name: '@jest/core',
file: 'build/TestScheduler.js',
versions: ['>=24.8.0 <27.0.0']
}, (testSchedulerPackage, frameworkVersion) => {
shimmer.wrap(
testSchedulerPackage.default.prototype,
'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion)
)
return testSchedulerPackage
})
addHook({
name: '@jest/test-sequencer',

@@ -217,25 +425,9 @@ versions: ['>=24.8.0']

if (!shardedTests.length) {
if (!shardedTests.length || !isSuitesSkippingEnabled || !skippableSuites.length) {
return shardedTests
}
// TODO: could we get the rootDir from each test?
const [test] = shardedTests
const rootDir = test && test.context && test.context.config && test.context.config.rootDir
const rootDir = test?.context?.config?.rootDir
const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd())
log.debug(
() => `${jestSuitesToRun.suitesToRun.length} out of ${shardedTests.length} suites are going to run.`
)
hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== shardedTests.length
numSkippedSuites = jestSuitesToRun.skippedSuites.length
itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
skippableSuites = []
return jestSuitesToRun.suitesToRun
return applySuiteSkipping(shardedTests, rootDir, frameworkVersion)
})

@@ -251,3 +443,3 @@ return sequencerPackage

})
if (!jestItrConfigurationCh.hasSubscribers) {
if (!libraryConfigurationCh.hasSubscribers) {
return runCLI.apply(this, arguments)

@@ -257,10 +449,13 @@ }

sessionAsyncResource.runInAsyncScope(() => {
jestItrConfigurationCh.publish({ onDone })
libraryConfigurationCh.publish({ onDone })
})
try {
const { err, itrConfig } = await configurationPromise
const { err, libraryConfig } = await configurationPromise
if (!err) {
isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled
isSuitesSkippingEnabled = itrConfig.isSuitesSkippingEnabled
isCodeCoverageEnabled = libraryConfig.isCodeCoverageEnabled
isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
earlyFlakeDetectionFaultyThreshold = libraryConfig.earlyFlakeDetectionFaultyThreshold
}

@@ -271,2 +466,24 @@ } catch (err) {

if (isEarlyFlakeDetectionEnabled) {
const knownTestsPromise = new Promise((resolve) => {
onDone = resolve
})
sessionAsyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
try {
const { err, knownTests: receivedKnownTests } = await knownTestsPromise
if (!err) {
knownTests = receivedKnownTests
} else {
// We disable EFD if there has been an error in the known tests request
isEarlyFlakeDetectionEnabled = false
}
} catch (err) {
log.error(err)
}
}
if (isSuitesSkippingEnabled) {

@@ -331,3 +548,18 @@ const skippableSuitesPromise = new Promise((resolve) => {

}
let timeoutId
// Pass the resolve callback to defer it to DC listener
const flushPromise = new Promise((resolve) => {
onDone = () => {
clearTimeout(timeoutId)
resolve()
}
})
const timeoutPromise = new Promise((resolve) => {
timeoutId = setTimeout(() => {
resolve('timeout')
}, FLUSH_TIMEOUT).unref()
})
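// .unref() above keeps this fallback timer from holding the Node.js process
// open once the flush has completed.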
sessionAsyncResource.runInAsyncScope(() => {

@@ -343,8 +575,38 @@ testSessionFinishCh.publish({

hasForcedToRunSuites,
error
error,
isEarlyFlakeDetectionEnabled,
isEarlyFlakeDetectionFaulty,
onDone
})
})
const waitingResult = await Promise.race([flushPromise, timeoutPromise])
if (waitingResult === 'timeout') {
log.error('Timeout waiting for the tracer to flush')
}
numSkippedSuites = 0
/**
* If Early Flake Detection (EFD) is enabled the logic is as follows:
* - If all attempts for a test are failing, the test has failed and we will let the test process fail.
* - If just a single attempt passes, we will prevent the test process from failing.
* The rationale behind this is the following: you may still be able to block your CI pipeline by gating
* on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
*/
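// Worked example (hypothetical statuses): a new test retried into
// ['fail', 'pass', 'fail'] gives getEfdStats -> { pass: 1, fail: 2 }; since
// pass > 0 its two failures are ignored, and if those were the only failures
// in the run, result.results.success is flipped back to true below.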
if (isEarlyFlakeDetectionEnabled) {
let numFailedTestsToIgnore = 0
for (const testStatuses of newTestsTestStatuses.values()) {
const { pass, fail } = getEfdStats(testStatuses)
if (pass > 0) { // as long as one passes, we'll consider the test passed
numFailedTestsToIgnore += fail
}
}
// If every test that failed was an EFD retry, we'll consider the suite passed
if (numFailedTestsToIgnore !== 0 && result.results.numFailedTests === numFailedTestsToIgnore) {
result.results.success = true
}
}
return result

@@ -363,6 +625,9 @@ })

* If ITR is active, we're running fewer tests, so of course the total code coverage is reduced.
* This calculation adds no value, so we'll skip it.
* This calculation adds no value, so we'll skip it, as long as the user has not manually opted in to code coverage,
* in which case we'll leave it.
*/
shimmer.wrap(CoverageReporter.prototype, '_addUntestedFiles', addUntestedFiles => async function () {
if (isSuitesSkippingEnabled) {
// If the user has added coverage manually, they're willing to pay the price of this execution, so
// we will not skip it.
if (isSuitesSkippingEnabled && !isUserCodeCoverageEnabled) {
return Promise.resolve()

@@ -406,2 +671,3 @@ }

testEnvironmentOptions: environment.testEnvironmentOptions,
displayName: environment.displayName,
frameworkVersion: jestVersion

@@ -466,2 +732,6 @@ })

if (readConfigsResult.globalConfig.forceExit) {
log.warn("Jest's '--forceExit' flag has been passed. This may cause loss of data.")
}
if (isCodeCoverageEnabled) {

@@ -523,2 +793,7 @@ const globalConfig = {

_ddItrCorrelationId,
_ddKnownTests,
_ddIsEarlyFlakeDetectionEnabled,
_ddEarlyFlakeDetectionNumRetries,
_ddRepositoryRoot,
_ddIsFlakyTestRetriesEnabled,
...restOfTestEnvironmentOptions

@@ -548,9 +823,22 @@ } = testEnvironmentOptions

shimmer.wrap(SearchSource.prototype, 'getTestPaths', getTestPaths => async function () {
if (!skippableSuites.length) {
return getTestPaths.apply(this, arguments)
const testPaths = await getTestPaths.apply(this, arguments)
const [{ rootDir, shard }] = arguments
if (isEarlyFlakeDetectionEnabled) {
const projectSuites = testPaths.tests.map(test => getTestSuitePath(test.path, test.context.config.rootDir))
const isFaulty =
getIsFaultyEarlyFlakeDetection(projectSuites, knownTests.jest || {}, earlyFlakeDetectionFaultyThreshold)
if (isFaulty) {
log.error('Early flake detection is disabled because the number of new suites is too high.')
isEarlyFlakeDetectionEnabled = false
const testEnvironmentOptions = testPaths.tests[0]?.context?.config?.testEnvironmentOptions
// Project config is shared among all tests, so we can modify it here
if (testEnvironmentOptions) {
testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled = false
}
isEarlyFlakeDetectionFaulty = true
}
}
const [{ rootDir, shard }] = arguments
if (shard && shard.shardIndex) {
if (shard?.shardCount > 1 || !isSuitesSkippingEnabled || !skippableSuites.length) {
// If the user is using jest sharding, we want to apply the filtering of tests in the shard process.

@@ -563,23 +851,8 @@ // The reason for this is the following:

// causing the shards to potentially run the same suite.
return getTestPaths.apply(this, arguments)
return testPaths
}
const testPaths = await getTestPaths.apply(this, arguments)
const { tests } = testPaths
const jestSuitesToRun = getJestSuitesToRun(skippableSuites, tests, rootDir)
log.debug(() => `${jestSuitesToRun.suitesToRun.length} out of ${tests.length} suites are going to run.`)
hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== tests.length
numSkippedSuites = jestSuitesToRun.skippedSuites.length
itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
skippableSuites = []
return { ...testPaths, tests: jestSuitesToRun.suitesToRun }
const suitesToRun = applySuiteSkipping(tests, rootDir, frameworkVersion)
return { ...testPaths, tests: suitesToRun }
})

@@ -601,42 +874,31 @@

function jasmineAsyncInstallWraper (jasmineAsyncInstallExport, jestVersion) {
log.warn('jest-jasmine2 support is removed from dd-trace@v4. Consider changing to jest-circus as `testRunner`.')
return function (globalConfig, globalInput) {
globalInput._ddtrace = global._ddtrace
shimmer.wrap(globalInput.jasmine.Spec.prototype, 'execute', execute => function (onComplete) {
const asyncResource = new AsyncResource('bound-anonymous-fn')
asyncResource.runInAsyncScope(() => {
const testSuite = getTestSuitePath(this.result.testPath, globalConfig.rootDir)
testStartCh.publish({
name: this.getFullName(),
suite: testSuite,
runner: 'jest-jasmine2',
frameworkVersion: jestVersion
})
const spec = this
const callback = asyncResource.bind(function () {
if (spec.result.failedExpectations && spec.result.failedExpectations.length) {
const formattedError = formatJestError(spec.result.failedExpectations[0].error)
testErrCh.publish(formattedError)
}
testRunFinishCh.publish({ status: specStatusToTestStatus[spec.result.status] })
onComplete.apply(this, arguments)
})
arguments[0] = callback
execute.apply(this, arguments)
})
})
return jasmineAsyncInstallExport.default(globalConfig, globalInput)
}
}
const LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE = [
'selenium-webdriver'
]
if (DD_MAJOR < 4) {
addHook({
name: 'jest-jasmine2',
versions: ['>=24.8.0'],
file: 'build/jasmineAsyncInstall.js'
}, jasmineAsyncInstallWraper)
function shouldBypassJestRequireEngine (moduleName) {
return (
LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE.some(library => moduleName.includes(library))
)
}
addHook({
name: 'jest-runtime',
versions: ['>=24.8.0']
}, (runtimePackage) => {
const Runtime = runtimePackage.default ? runtimePackage.default : runtimePackage
shimmer.wrap(Runtime.prototype, 'requireModuleOrMock', requireModuleOrMock => function (from, moduleName) {
// TODO: do this for every library that we instrument
if (shouldBypassJestRequireEngine(moduleName)) {
// To bypass jest's own require engine
return this._requireCoreModule(moduleName)
}
return requireModuleOrMock.apply(this, arguments)
})
return runtimePackage
})
addHook({
name: 'jest-worker',

@@ -647,2 +909,34 @@ versions: ['>=24.9.0'],

const ChildProcessWorker = childProcessWorker.default
shimmer.wrap(ChildProcessWorker.prototype, 'send', send => function (request) {
if (!isEarlyFlakeDetectionEnabled) {
return send.apply(this, arguments)
}
const [type] = request
// eslint-disable-next-line
// https://github.com/jestjs/jest/blob/1d682f21c7a35da4d3ab3a1436a357b980ebd0fa/packages/jest-worker/src/workers/ChildProcessWorker.ts#L424
if (type === CHILD_MESSAGE_CALL) {
// This is the message that the main process sends to the worker to run a test suite (=test file).
// In here we modify the config.testEnvironmentOptions to include the known tests for the suite.
// This way the suite only knows about the tests that are part of it.
const args = request[request.length - 1]
if (args.length > 1) {
return send.apply(this, arguments)
}
if (!args[0]?.config) {
return send.apply(this, arguments)
}
const [{ globalConfig, config, path: testSuiteAbsolutePath }] = args
const testSuite = getTestSuitePath(testSuiteAbsolutePath, globalConfig.rootDir || process.cwd())
const suiteKnownTests = knownTests.jest?.[testSuite] || []
args[0].config = {
...config,
testEnvironmentOptions: {
...config.testEnvironmentOptions,
_ddKnownTests: suiteKnownTests
}
}
}
return send.apply(this, arguments)
})
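// Rough shape of the intercepted message, per the jest-worker protocol linked
// above (field names illustrative): request = [CHILD_MESSAGE_CALL, <bool>,
// <methodName>, [{ globalConfig, config, path }]] -- only the trailing args
// array is rewritten here.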
shimmer.wrap(ChildProcessWorker.prototype, '_onMessage', _onMessage => function () {

@@ -649,0 +943,0 @@ const [code, data] = arguments[0]

@@ -41,3 +41,4 @@ 'use strict'

this._brokers = (options.brokers && typeof options.brokers !== 'function')
? options.brokers.join(',') : undefined
? options.brokers.join(',')
: undefined
}

@@ -62,3 +63,3 @@ }

for (const message of messages) {
if (typeof message === 'object') {
if (message !== null && typeof message === 'object') {
message.headers = message.headers || {}

@@ -72,3 +73,6 @@ }

result.then(
innerAsyncResource.bind(() => producerFinishCh.publish(undefined)),
innerAsyncResource.bind(res => {
producerFinishCh.publish(undefined)
producerCommitCh.publish(res)
}),
innerAsyncResource.bind(err => {

@@ -82,8 +86,2 @@ if (err) {

result.then(res => {
if (producerCommitCh.hasSubscribers) {
producerCommitCh.publish(res)
}
})
return result

@@ -90,0 +88,0 @@ } catch (e) {

@@ -64,3 +64,3 @@ 'use strict'

filter = options
} else if (typeof options === 'object' && options.filter) {
} else if (options !== null && typeof options === 'object' && options.filter) {
if (isString(options.filter)) {

@@ -80,4 +80,5 @@ filter = options.filter

const callback = arguments[callbackIndex]
// eslint-disable-next-line n/handle-callback-err
arguments[callbackIndex] = shimmer.wrap(callback, function (err, corkedEmitter) {
if (typeof corkedEmitter === 'object' && typeof corkedEmitter['on'] === 'function') {
if (corkedEmitter !== null && typeof corkedEmitter === 'object' && typeof corkedEmitter.on === 'function') {
wrapEmitter(corkedEmitter)

@@ -84,0 +85,0 @@ }

@@ -1,545 +0,5 @@

const { createCoverageMap } = require('istanbul-lib-coverage')
const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util')
const { addHook, channel, AsyncResource } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const log = require('../../dd-trace/src/log')
const {
getCoveredFilenamesFromCoverage,
resetCoverage,
mergeCoverage,
getTestSuitePath,
fromCoverageMapToCoverage,
getCallSites
} = require('../../dd-trace/src/plugins/util/test')
const testStartCh = channel('ci:mocha:test:start')
const errorCh = channel('ci:mocha:test:error')
const skipCh = channel('ci:mocha:test:skip')
const testFinishCh = channel('ci:mocha:test:finish')
const parameterizedTestCh = channel('ci:mocha:test:parameterize')
const itrConfigurationCh = channel('ci:mocha:itr-configuration')
const skippableSuitesCh = channel('ci:mocha:test-suite:skippable')
const testSessionStartCh = channel('ci:mocha:session:start')
const testSessionFinishCh = channel('ci:mocha:session:finish')
const testSuiteStartCh = channel('ci:mocha:test-suite:start')
const testSuiteFinishCh = channel('ci:mocha:test-suite:finish')
const testSuiteErrorCh = channel('ci:mocha:test-suite:error')
const testSuiteCodeCoverageCh = channel('ci:mocha:test-suite:code-coverage')
const itrSkippedSuitesCh = channel('ci:mocha:itr:skipped-suites')
// TODO: remove when root hooks and fixtures are implemented
const patched = new WeakSet()
const testToAr = new WeakMap()
const originalFns = new WeakMap()
const testFileToSuiteAr = new Map()
const testToStartLine = new WeakMap()
// `isWorker` is true if it's a Mocha worker
let isWorker = false
// We'll preserve the original coverage here
const originalCoverageMap = createCoverageMap()
let suitesToSkip = []
let frameworkVersion
let isSuitesSkipped = false
let skippedSuites = []
const unskippableSuites = []
let isForcedToRun = false
let itrCorrelationId = ''
function getSuitesByTestFile (root) {
const suitesByTestFile = {}
function getSuites (suite) {
if (suite.file) {
if (suitesByTestFile[suite.file]) {
suitesByTestFile[suite.file].push(suite)
} else {
suitesByTestFile[suite.file] = [suite]
}
}
suite.suites.forEach(suite => {
getSuites(suite)
})
}
getSuites(root)
const numSuitesByTestFile = Object.keys(suitesByTestFile).reduce((acc, testFile) => {
acc[testFile] = suitesByTestFile[testFile].length
return acc
}, {})
return { suitesByTestFile, numSuitesByTestFile }
}
if (process.env.MOCHA_WORKER_ID) {
require('./mocha/worker')
} else {
require('./mocha/main')
}
function getTestStatus (test) {
if (test.isPending()) {
return 'skip'
}
if (test.isFailed() || test.timedOut) {
return 'fail'
}
return 'pass'
}
function isRetry (test) {
return test._currentRetry !== undefined && test._currentRetry !== 0
}
function getTestAsyncResource (test) {
if (!test.fn) {
return testToAr.get(test)
}
if (!test.fn.asyncResource) {
return testToAr.get(test.fn)
}
const originalFn = originalFns.get(test.fn)
return testToAr.get(originalFn)
}
function getFilteredSuites (originalSuites) {
return originalSuites.reduce((acc, suite) => {
const testPath = getTestSuitePath(suite.file, process.cwd())
const shouldSkip = suitesToSkip.includes(testPath)
const isUnskippable = unskippableSuites.includes(suite.file)
if (shouldSkip && !isUnskippable) {
acc.skippedSuites.add(testPath)
} else {
acc.suitesToRun.push(suite)
}
return acc
}, { suitesToRun: [], skippedSuites: new Set() })
}
function mochaHook (Runner) {
if (patched.has(Runner)) return Runner
patched.add(Runner)
shimmer.wrap(Runner.prototype, 'run', run => function () {
if (!testStartCh.hasSubscribers || isWorker) {
return run.apply(this, arguments)
}
const { suitesByTestFile, numSuitesByTestFile } = getSuitesByTestFile(this.suite)
const testRunAsyncResource = new AsyncResource('bound-anonymous-fn')
this.once('end', testRunAsyncResource.bind(function () {
let status = 'pass'
let error
if (this.stats) {
status = this.stats.failures === 0 ? 'pass' : 'fail'
if (this.stats.tests === 0) {
status = 'skip'
}
} else if (this.failures !== 0) {
status = 'fail'
}
if (status === 'fail') {
error = new Error(`Failed tests: ${this.failures}.`)
}
testFileToSuiteAr.clear()
let testCodeCoverageLinesTotal
if (global.__coverage__) {
try {
testCodeCoverageLinesTotal = originalCoverageMap.getCoverageSummary().lines.pct
} catch (e) {
// ignore errors
}
// restore the original coverage
global.__coverage__ = fromCoverageMapToCoverage(originalCoverageMap)
}
testSessionFinishCh.publish({
status,
isSuitesSkipped,
testCodeCoverageLinesTotal,
numSkippedSuites: skippedSuites.length,
hasForcedToRunSuites: isForcedToRun,
hasUnskippableSuites: !!unskippableSuites.length,
error
})
}))
this.once('start', testRunAsyncResource.bind(function () {
const processArgv = process.argv.slice(2).join(' ')
const command = `mocha ${processArgv}`
testSessionStartCh.publish({ command, frameworkVersion })
if (skippedSuites.length) {
itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })
}
}))
this.on('suite', function (suite) {
if (suite.root || !suite.tests.length) {
return
}
let asyncResource = testFileToSuiteAr.get(suite.file)
if (!asyncResource) {
asyncResource = new AsyncResource('bound-anonymous-fn')
testFileToSuiteAr.set(suite.file, asyncResource)
const isUnskippable = unskippableSuites.includes(suite.file)
isForcedToRun = isUnskippable && suitesToSkip.includes(getTestSuitePath(suite.file, process.cwd()))
asyncResource.runInAsyncScope(() => {
testSuiteStartCh.publish({
testSuite: suite.file,
isUnskippable,
isForcedToRun,
itrCorrelationId
})
})
}
})
this.on('suite end', function (suite) {
if (suite.root) {
return
}
const suitesInTestFile = suitesByTestFile[suite.file]
const isLastSuite = --numSuitesByTestFile[suite.file] === 0
if (!isLastSuite) {
return
}
let status = 'pass'
if (suitesInTestFile.every(suite => suite.pending)) {
status = 'skip'
} else {
// has to check every test in the test file
suitesInTestFile.forEach(suite => {
suite.eachTest(test => {
if (test.state === 'failed' || test.timedOut) {
status = 'fail'
}
})
})
}
if (global.__coverage__) {
const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)
testSuiteCodeCoverageCh.publish({
coverageFiles,
suiteFile: suite.file
})
// We need to reset coverage to get a code coverage per suite
// Before that, we preserve the original coverage
mergeCoverage(global.__coverage__, originalCoverageMap)
resetCoverage(global.__coverage__)
}
const asyncResource = testFileToSuiteAr.get(suite.file)
asyncResource.runInAsyncScope(() => {
testSuiteFinishCh.publish(status)
})
})
this.on('test', (test) => {
if (isRetry(test)) {
return
}
const testStartLine = testToStartLine.get(test)
const asyncResource = new AsyncResource('bound-anonymous-fn')
testToAr.set(test.fn, asyncResource)
asyncResource.runInAsyncScope(() => {
testStartCh.publish({ test, testStartLine })
})
})
this.on('test end', (test) => {
const asyncResource = getTestAsyncResource(test)
const status = getTestStatus(test)
// if there are afterEach to be run, we don't finish the test yet
if (asyncResource && !test.parent._afterEach.length) {
asyncResource.runInAsyncScope(() => {
testFinishCh.publish(status)
})
}
})
// If the hook passes, 'hook end' will be emitted. Otherwise, 'fail' will be emitted
this.on('hook end', (hook) => {
const test = hook.ctx.currentTest
if (test && hook.parent._afterEach.includes(hook)) { // only if it's an afterEach
const isLastAfterEach = hook.parent._afterEach.indexOf(hook) === hook.parent._afterEach.length - 1
if (isLastAfterEach) {
const status = getTestStatus(test)
const asyncResource = getTestAsyncResource(test)
asyncResource.runInAsyncScope(() => {
testFinishCh.publish(status)
})
}
}
})
this.on('fail', (testOrHook, err) => {
const testFile = testOrHook.file
let test = testOrHook
const isHook = testOrHook.type === 'hook'
if (isHook && testOrHook.ctx) {
test = testOrHook.ctx.currentTest
}
let testAsyncResource
if (test) {
testAsyncResource = getTestAsyncResource(test)
}
if (testAsyncResource) {
testAsyncResource.runInAsyncScope(() => {
if (isHook) {
err.message = `${testOrHook.fullTitle()}: ${err.message}`
errorCh.publish(err)
// if it's a hook and it has failed, 'test end' will not be called
testFinishCh.publish('fail')
} else {
errorCh.publish(err)
}
})
}
const testSuiteAsyncResource = testFileToSuiteAr.get(testFile)
if (testSuiteAsyncResource) {
// we propagate the error to the suite
const testSuiteError = new Error(
`"${testOrHook.parent.fullTitle()}" failed with message "${err.message}"`
)
testSuiteError.stack = err.stack
testSuiteAsyncResource.runInAsyncScope(() => {
testSuiteErrorCh.publish(testSuiteError)
})
}
})
this.on('pending', (test) => {
const asyncResource = getTestAsyncResource(test)
if (asyncResource) {
asyncResource.runInAsyncScope(() => {
skipCh.publish(test)
})
} else {
// if there is no async resource, the test has been skipped through `test.skip`
// or the parent suite is skipped
const skippedTestAsyncResource = new AsyncResource('bound-anonymous-fn')
if (test.fn) {
testToAr.set(test.fn, skippedTestAsyncResource)
} else {
testToAr.set(test, skippedTestAsyncResource)
}
skippedTestAsyncResource.runInAsyncScope(() => {
skipCh.publish(test)
})
}
})
return run.apply(this, arguments)
})
return Runner
}
function mochaEachHook (mochaEach) {
if (patched.has(mochaEach)) return mochaEach
patched.add(mochaEach)
return shimmer.wrap(mochaEach, function () {
const [params] = arguments
const { it, ...rest } = mochaEach.apply(this, arguments)
return {
it: function (name) {
parameterizedTestCh.publish({ name, params })
it.apply(this, arguments)
},
...rest
}
})
}
addHook({
name: 'mocha',
versions: ['>=5.2.0'],
file: 'lib/mocha.js'
}, (Mocha, mochaVersion) => {
frameworkVersion = mochaVersion
const mochaRunAsyncResource = new AsyncResource('bound-anonymous-fn')
/**
* Get ITR configuration and skippable suites
* If ITR is disabled, `onDone` is called immediately on the subscriber
*/
shimmer.wrap(Mocha.prototype, 'run', run => function () {
if (this.options.parallel) {
log.warn(`Unable to initialize CI Visibility because Mocha is running in parallel mode.`)
return run.apply(this, arguments)
}
if (!itrConfigurationCh.hasSubscribers || this.isWorker) {
if (this.isWorker) {
isWorker = true
}
return run.apply(this, arguments)
}
this.options.delay = true
const runner = run.apply(this, arguments)
this.files.forEach(path => {
const isUnskippable = isMarkedAsUnskippable({ path })
if (isUnskippable) {
unskippableSuites.push(path)
}
})
const onReceivedSkippableSuites = ({ err, skippableSuites, itrCorrelationId: responseItrCorrelationId }) => {
if (err) {
suitesToSkip = []
} else {
suitesToSkip = skippableSuites
itrCorrelationId = responseItrCorrelationId
}
// We remove the suites that we skip through ITR
const filteredSuites = getFilteredSuites(runner.suite.suites)
const { suitesToRun } = filteredSuites
isSuitesSkipped = suitesToRun.length !== runner.suite.suites.length
log.debug(
() => `${suitesToRun.length} out of ${runner.suite.suites.length} suites are going to run.`
)
runner.suite.suites = suitesToRun
skippedSuites = Array.from(filteredSuites.skippedSuites)
global.run()
}
const onReceivedConfiguration = ({ err }) => {
if (err) {
return global.run()
}
if (!skippableSuitesCh.hasSubscribers) {
return global.run()
}
skippableSuitesCh.publish({
onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
})
}
mochaRunAsyncResource.runInAsyncScope(() => {
itrConfigurationCh.publish({
onDone: mochaRunAsyncResource.bind(onReceivedConfiguration)
})
})
return runner
})
return Mocha
})
addHook({
name: 'mocha',
versions: ['>=5.2.0'],
file: 'lib/suite.js'
}, (Suite) => {
shimmer.wrap(Suite.prototype, 'addTest', addTest => function (test) {
const callSites = getCallSites()
let startLine
const testCallSite = callSites.find(site => site.getFileName() === test.file)
if (testCallSite) {
startLine = testCallSite.getLineNumber()
testToStartLine.set(test, startLine)
}
return addTest.apply(this, arguments)
})
return Suite
})
addHook({
name: 'mocha',
versions: ['>=5.2.0'],
file: 'lib/runner.js'
}, mochaHook)
addHook({
name: 'mocha',
versions: ['>=5.2.0'],
file: 'lib/cli/run-helpers.js'
}, (run) => {
shimmer.wrap(run, 'runMocha', runMocha => async function () {
if (!testStartCh.hasSubscribers) {
return runMocha.apply(this, arguments)
}
const mocha = arguments[0]
/**
* This attaches `run` to the global context, which we'll call after
* our configuration and skippable suites requests
*/
if (!mocha.options.parallel) {
mocha.options.delay = true
}
return runMocha.apply(this, arguments)
})
return run
})
addHook({
name: 'mocha',
versions: ['>=5.2.0'],
file: 'lib/runnable.js'
}, (Runnable) => {
shimmer.wrap(Runnable.prototype, 'run', run => function () {
if (!testStartCh.hasSubscribers) {
return run.apply(this, arguments)
}
const isBeforeEach = this.parent._beforeEach.includes(this)
const isAfterEach = this.parent._afterEach.includes(this)
const isTestHook = isBeforeEach || isAfterEach
// we restore the original user defined function
if (this.fn.asyncResource) {
const originalFn = originalFns.get(this.fn)
this.fn = originalFn
}
if (isTestHook || this.type === 'test') {
const test = isTestHook ? this.ctx.currentTest : this
const asyncResource = getTestAsyncResource(test)
if (asyncResource) {
// we bind the test fn to the correct async resource
const newFn = asyncResource.bind(this.fn)
// we store the original function, not to lose it
originalFns.set(newFn, this.fn)
this.fn = newFn
// Temporarily keep functionality when .asyncResource is removed from node
// in https://github.com/nodejs/node/pull/46432
if (!this.fn.asyncResource) {
this.fn.asyncResource = asyncResource
}
}
}
return run.apply(this, arguments)
})
return Runnable
})
addHook({
name: 'mocha-each',
versions: ['>=2.0.1']
}, mochaEachHook)

@@ -10,5 +10,5 @@ 'use strict'

const startCh = channel(`apm:mongodb:query:start`)
const finishCh = channel(`apm:mongodb:query:finish`)
const errorCh = channel(`apm:mongodb:query:error`)
const startCh = channel('apm:mongodb:query:start')
const finishCh = channel('apm:mongodb:query:finish')
const errorCh = channel('apm:mongodb:query:error')

@@ -36,3 +36,3 @@ addHook({ name: 'mongodb-core', versions: ['2 - 3.1.9'] }, Server => {

addHook({ name: 'mongodb', versions: ['>=4.6.0'], file: 'lib/cmap/connection.js' }, Connection => {
addHook({ name: 'mongodb', versions: ['>=4.6.0 <6.4.0'], file: 'lib/cmap/connection.js' }, Connection => {
const proto = Connection.Connection.prototype

@@ -43,2 +43,8 @@ shimmer.wrap(proto, 'command', command => wrapConnectionCommand(command, 'command'))

addHook({ name: 'mongodb', versions: ['>=6.4.0'], file: 'lib/cmap/connection.js' }, Connection => {
const proto = Connection.Connection.prototype
shimmer.wrap(proto, 'command', command => wrapConnectionCommand(command, 'command', undefined, instrumentPromise))
return Connection
})
addHook({ name: 'mongodb', versions: ['>=3.3 <4'], file: 'lib/core/wireprotocol/index.js' }, wp => wrapWp(wp))

@@ -95,3 +101,3 @@

function wrapConnectionCommand (command, operation, name) {
function wrapConnectionCommand (command, operation, name, instrumentFn = instrument) {
const wrapped = function (ns, ops) {

@@ -108,3 +114,3 @@ if (!startCh.hasSubscribers) {

ns = `${ns.db}.${ns.collection}`
return instrument(operation, command, this, arguments, topology, ns, ops, { name })
return instrumentFn(operation, command, this, arguments, topology, ns, ops, { name })
}

@@ -187,1 +193,24 @@ return shimmer.wrap(command, wrapped)

}
function instrumentPromise (operation, command, ctx, args, server, ns, ops, options = {}) {
const name = options.name || (ops && Object.keys(ops)[0])
const serverInfo = server && server.s && server.s.options
const asyncResource = new AsyncResource('bound-anonymous-fn')
return asyncResource.runInAsyncScope(() => {
startCh.publish({ ns, ops, options: serverInfo, name })
const promise = command.apply(ctx, args)
return promise.then(function (res) {
finishCh.publish()
return res
}, function (err) {
errorCh.publish(err)
finishCh.publish()
return Promise.reject(err)
})
})
}
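// Note: both settle paths above publish finishCh, so the span is always
// closed, and the rejection is re-created with Promise.reject so caller-side
// error handling is unchanged.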

@@ -24,3 +24,4 @@ 'use strict'

}, mongoose => {
if (mongoose.Promise !== global.Promise) {
// As of Mongoose 7, custom promise libraries are no longer supported and mongoose.Promise may be undefined
if (mongoose.Promise && mongoose.Promise !== global.Promise) {
shimmer.wrap(mongoose.Promise.prototype, 'then', wrapThen)

@@ -83,17 +84,22 @@ }

let callbackWrapped = false
const lastArgumentIndex = arguments.length - 1
if (typeof arguments[lastArgumentIndex] === 'function') {
// is a callback, wrap it to execute finish()
shimmer.wrap(arguments, lastArgumentIndex, originalCb => {
return function () {
finish()
const wrapCallbackIfExist = (args) => {
const lastArgumentIndex = args.length - 1
return originalCb.apply(this, arguments)
}
})
if (typeof args[lastArgumentIndex] === 'function') {
// is a callback, wrap it to execute finish()
shimmer.wrap(args, lastArgumentIndex, originalCb => {
return function () {
finish()
callbackWrapped = true
return originalCb.apply(this, arguments)
}
})
callbackWrapped = true
}
}
wrapCallbackIfExist(arguments)
return asyncResource.runInAsyncScope(() => {

@@ -111,4 +117,12 @@ startCh.publish({

return function wrappedExec () {
if (!callbackWrapped) {
wrapCallbackIfExist(arguments)
}
const execResult = originalExec.apply(this, arguments)
if (callbackWrapped || typeof execResult?.then !== 'function') {
return execResult
}
// wrap the then method: wrap its resolve and reject handlers

@@ -115,0 +129,0 @@ shimmer.wrap(execResult, 'then', originalThen => {

@@ -18,3 +18,3 @@ 'use strict'

const connectionCh = channel(`apm:net:tcp:connection`)
const connectionCh = channel('apm:net:tcp:connection')

@@ -21,0 +21,0 @@ const names = ['net', 'node:net']

@@ -49,3 +49,3 @@ 'use strict'

return instrument(req, res, () => {
const page = (typeof match === 'object' && typeof match.definition === 'object')
const page = (match !== null && typeof match === 'object' && typeof match.definition === 'object')
? match.definition.pathname

@@ -294,5 +294,5 @@ : undefined

const body = await originalMethod.apply(this, arguments)
bodyParsedChannel.publish({
body
})
bodyParsedChannel.publish({ body })
return body

@@ -302,3 +302,17 @@ }

shimmer.wrap(request.NextRequest.prototype, 'formData', function (originalFormData) {
return async function wrappedFormData () {
const body = await originalFormData.apply(this, arguments)
let normalizedBody = body
if (typeof body.entries === 'function') {
normalizedBody = Object.fromEntries(body.entries())
}
bodyParsedChannel.publish({ body: normalizedBody })
return body
}
})
return request
})
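// Quick illustration of the normalization above, using a Map as a stand-in
// for FormData (both expose entries()):
//   Object.fromEntries(new Map([['user', 'ada']]).entries()) // -> { user: 'ada' }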
'use strict'
const {
channel,
addHook
} = require('./helpers/instrument')
const { addHook } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const startCh = channel('apm:openai:request:start')
const finishCh = channel('apm:openai:request:finish')
const errorCh = channel('apm:openai:request:error')
const tracingChannel = require('dc-polyfill').tracingChannel
const ch = tracingChannel('apm:openai:request')
const V4_PACKAGE_SHIMS = [
{
file: 'resources/chat/completions.js',
targetClass: 'Completions',
baseResource: 'chat.completions',
methods: ['create'],
streamedResponse: true
},
{
file: 'resources/completions.js',
targetClass: 'Completions',
baseResource: 'completions',
methods: ['create'],
streamedResponse: true
},
{
file: 'resources/embeddings.js',
targetClass: 'Embeddings',
baseResource: 'embeddings',
methods: ['create']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['create', 'del', 'list', 'retrieve']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['retrieveContent'],
versions: ['>=4.0.0 <4.17.1']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['content'], // replaced `retrieveContent` in v4.17.1
versions: ['>=4.17.1']
},
{
file: 'resources/images.js',
targetClass: 'Images',
baseResource: 'images',
methods: ['createVariation', 'edit', 'generate']
},
{
file: 'resources/fine-tuning/jobs/jobs.js',
targetClass: 'Jobs',
baseResource: 'fine_tuning.jobs',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.34.0'] // file location changed in 4.34.0
},
{
file: 'resources/fine-tuning/jobs.js',
targetClass: 'Jobs',
baseResource: 'fine_tuning.jobs',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.1.0 <4.34.0']
},
{
file: 'resources/fine-tunes.js', // deprecated after 4.1.0
targetClass: 'FineTunes',
baseResource: 'fine-tune',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.0.0 <4.1.0']
},
{
file: 'resources/models.js',
targetClass: 'Models',
baseResource: 'models',
methods: ['del', 'list', 'retrieve']
},
{
file: 'resources/moderations.js',
targetClass: 'Moderations',
baseResource: 'moderations',
methods: ['create']
},
{
file: 'resources/audio/transcriptions.js',
targetClass: 'Transcriptions',
baseResource: 'audio.transcriptions',
methods: ['create']
},
{
file: 'resources/audio/translations.js',
targetClass: 'Translations',
baseResource: 'audio.translations',
methods: ['create']
}
]
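// Illustrative mapping for the first shim entry above (assumes an openai v4
// client): it wraps the method reached via
//   const client = new OpenAI()
//   client.chat.completions.create({ model, messages, stream: true })
// and reports it as methodName 'chat.completions.create'.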
addHook({ name: 'openai', file: 'dist/api.js', versions: ['>=3.0.0 <4'] }, exports => {

@@ -19,7 +109,7 @@ const methodNames = Object.getOwnPropertyNames(exports.OpenAIApi.prototype)

shimmer.wrap(exports.OpenAIApi.prototype, methodName, fn => function () {
if (!startCh.hasSubscribers) {
if (!ch.start.hasSubscribers) {
return fn.apply(this, arguments)
}
startCh.publish({
const ctx = {
methodName,

@@ -29,24 +119,243 @@ args: arguments,

apiKey: this.configuration.apiKey
}
return ch.tracePromise(fn, ctx, this, ...arguments)
})
}
return exports
})
function addStreamedChunk (content, chunk) {
content.usage = chunk.usage // add usage if it was specified to be returned
for (const choice of chunk.choices) {
const choiceIdx = choice.index
const oldChoice = content.choices.find(choice => choice?.index === choiceIdx)
if (!oldChoice) {
// we don't know which choices arrive in which order
content.choices[choiceIdx] = choice
} else {
if (!oldChoice.finish_reason) {
oldChoice.finish_reason = choice.finish_reason
}
// delta exists on chat completions
const delta = choice.delta
if (delta) {
const content = delta.content
if (content) {
if (oldChoice.delta.content) { // we don't want to append to undefined
oldChoice.delta.content += content
} else {
oldChoice.delta.content = content
}
}
} else {
const text = choice.text
if (text) {
if (oldChoice.text) {
oldChoice.text += text
} else {
oldChoice.text = text
}
}
}
// tools only exist on chat completions
const tools = delta && choice.delta.tool_calls
if (tools) {
oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
const oldTool = oldChoice.delta.tool_calls[toolIdx]
if (oldTool) {
oldTool.function.arguments += newTool.function.arguments
}
return oldTool
})
}
}
}
}
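// Merge example (hypothetical chat chunks): with accumulated
//   { choices: [{ index: 0, delta: { content: 'Hel' } }] }
// a new chunk { choices: [{ index: 0, delta: { content: 'lo' }, finish_reason: 'stop' }] }
// leaves choices[0].delta.content === 'Hello' and finish_reason === 'stop'.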
function convertBufferstoObjects (chunks = []) {
return Buffer
.concat(chunks) // combine the buffers
.toString() // stringify
.split(/(?=data:)/) // split on "data:"
.map(chunk => chunk.split('\n').join('')) // remove newlines
.map(chunk => chunk.substring(6)) // remove 'data: ' from the front
.slice(0, -1) // remove the last [DONE] message
.map(JSON.parse) // parse all of the returned objects
}
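// Editor's illustrative sketch (not part of the diff): `convertBufferstoObjects`
// turns raw SSE buffers into parsed JSON chunks, dropping the trailing [DONE]:
// convertBufferstoObjects([
//   Buffer.from('data: {"id":"cmpl-1","choices":[{"index":0,"text":"Hi"}]}\n\n'),
//   Buffer.from('data: [DONE]\n\n')
// ])
// // -> [{ id: 'cmpl-1', choices: [{ index: 0, text: 'Hi' }] }]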
/**
* For streamed responses, we need to accumulate all of the content in
* the chunks, and let the combined content be the final response.
* This way, spans look the same as when not streamed.
*/
function wrapStreamIterator (response, options, n, ctx) {
let processChunksAsBuffers = false
let chunks = []
return function (itr) {
return function () {
const iterator = itr.apply(this, arguments)
shimmer.wrap(iterator, 'next', next => function () {
return next.apply(this, arguments)
.then(res => {
const { done, value: chunk } = res
if (chunk) {
chunks.push(chunk)
if (chunk instanceof Buffer) {
// this operation should be safe
// if one chunk is a buffer (versus a plain object), the rest should be as well
processChunksAsBuffers = true
}
}
if (done) {
let body = {}
chunks = chunks.filter(chunk => chunk != null) // filter null or undefined values
if (chunks) {
if (processChunksAsBuffers) {
chunks = convertBufferstoObjects(chunks)
}
if (chunks.length) {
// define the initial body having all the content outside of choices from the first chunk
// this will include important data like created, id, model, etc.
body = { ...chunks[0], choices: Array.from({ length: n }) }
// start from the first chunk, and add its choices into the body
for (let i = 0; i < chunks.length; i++) {
addStreamedChunk(body, chunks[i])
}
}
}
finish(ctx, {
headers: response.headers,
data: body,
request: {
path: response.url,
method: options.method
}
})
}
return res
})
.catch(err => {
finish(ctx, undefined, err)
throw err
})
})
return iterator
}
}
}
return fn.apply(this, arguments)
.then((response) => {
finishCh.publish({
headers: response.headers,
body: response.data,
path: response.request.path,
method: response.request.method
for (const shim of V4_PACKAGE_SHIMS) {
const { file, targetClass, baseResource, methods, versions, streamedResponse } = shim
addHook({ name: 'openai', file, versions: versions || ['>=4'] }, exports => {
const targetPrototype = exports[targetClass].prototype
for (const methodName of methods) {
shimmer.wrap(targetPrototype, methodName, methodFn => function () {
if (!ch.start.hasSubscribers) {
return methodFn.apply(this, arguments)
}
// The OpenAI library lets you set `stream: true` on the options arg to any method
// However, we only want to handle streamed responses in specific cases:
// chat.completions and completions
const stream = streamedResponse && getOption(arguments, 'stream', false)
// we need to compute how many prompts we are sending in streamed cases for completions
// not applicable for chat completions
let n
if (stream) {
n = getOption(arguments, 'n', 1)
const prompt = getOption(arguments, 'prompt')
if (Array.isArray(prompt) && typeof prompt[0] !== 'number') {
n *= prompt.length
}
}
const client = this._client || this.client
const ctx = {
methodName: `${baseResource}.${methodName}`,
args: arguments,
basePath: client.baseURL,
apiKey: client.apiKey
}
return ch.start.runStores(ctx, () => {
const apiProm = methodFn.apply(this, arguments)
// wrapping `parse` avoids problematic wrapping of `then` when trying to call
// `withResponse` in userland code after. This way, we can return the whole `APIPromise`
shimmer.wrap(apiProm, 'parse', origApiPromParse => function () {
return origApiPromParse.apply(this, arguments)
// the original response is wrapped in a promise, so we need to unwrap it
.then(body => Promise.all([this.responsePromise, body]))
.then(([{ response, options }, body]) => {
if (stream) {
if (body.iterator) {
shimmer.wrap(body, 'iterator', wrapStreamIterator(response, options, n, ctx))
} else {
shimmer.wrap(
body.response.body, Symbol.asyncIterator, wrapStreamIterator(response, options, n, ctx)
)
}
} else {
finish(ctx, {
headers: response.headers,
data: body,
request: {
path: response.url,
method: options.method
}
})
}
return body
})
.catch(error => {
finish(ctx, undefined, error)
throw error
})
.finally(() => {
// maybe we don't want to unwrap here in case the promise is re-used?
// on the other hand, we want to avoid resource leakage
shimmer.unwrap(apiProm, 'parse')
})
})
return response
return apiProm
})
.catch((err) => {
errorCh.publish({ err })
})
}
return exports
})
}
throw err
})
})
function finish (ctx, response, error) {
if (error) {
ctx.error = error
ch.error.publish(ctx)
}
return exports
})
ctx.result = response
ch.asyncEnd.publish(ctx)
}
function getOption (args, option, defaultValue) {
return args[args.length - 1]?.[option] || defaultValue
}
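// Editor's illustrative sketch (not part of the diff): `getOption` reads a
// request option off the last argument, falling back to a default. Note the
// `||` fallback also replaces falsy values such as 0 or false.
// getOption([{ model: 'gpt-4', stream: true, n: 2 }], 'n', 1) // -> 2
// getOption([{ model: 'gpt-4' }], 'stream', false)            // -> false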

@@ -24,3 +24,3 @@ 'use strict'

addHook({ name: 'oracledb', versions: ['5'] }, oracledb => {
addHook({ name: 'oracledb', versions: ['>=5'] }, oracledb => {
shimmer.wrap(oracledb.Connection.prototype, 'execute', execute => {

@@ -27,0 +27,0 @@ return function wrappedExecute (dbQuery, ...args) {

@@ -7,3 +7,8 @@ 'use strict'

if (process.env.DD_TRACE_OTEL_ENABLED) {
const otelSdkEnabled = process.env.DD_TRACE_OTEL_ENABLED ||
process.env.OTEL_SDK_DISABLED
? !process.env.OTEL_SDK_DISABLED
: undefined
if (otelSdkEnabled) {
addHook({

@@ -10,0 +15,0 @@ name: '@opentelemetry/sdk-trace-node',

@@ -13,2 +13,3 @@ 'use strict'

// eslint-disable-next-line n/handle-callback-err
return shimmer.wrap(verified, function (err, user, info) {

@@ -15,0 +16,0 @@ const credentials = { type, username }

@@ -38,3 +38,3 @@ 'use strict'

const pgQuery = arguments[0] && typeof arguments[0] === 'object'
const pgQuery = arguments[0] !== null && typeof arguments[0] === 'object'
? arguments[0]

@@ -113,3 +113,3 @@ : { text: arguments[0] }

const pgQuery = arguments[0] && typeof arguments[0] === 'object' ? arguments[0] : { text: arguments[0] }
const pgQuery = arguments[0] !== null && typeof arguments[0] === 'object' ? arguments[0] : { text: arguments[0] }

@@ -116,0 +116,0 @@ return asyncResource.runInAsyncScope(() => {

@@ -0,4 +1,7 @@

const semver = require('semver')
const { addHook, channel, AsyncResource } = require('./helpers/instrument')
const shimmer = require('../../datadog-shimmer')
const { parseAnnotations } = require('../../dd-trace/src/plugins/util/test')
const { parseAnnotations, getTestSuitePath, NUM_FAILED_TEST_RETRIES } = require('../../dd-trace/src/plugins/util/test')
const log = require('../../dd-trace/src/log')

@@ -11,2 +14,5 @@ const testStartCh = channel('ci:playwright:test:start')

const libraryConfigurationCh = channel('ci:playwright:library-configuration')
const knownTestsCh = channel('ci:playwright:known-tests')
const testSuiteStartCh = channel('ci:playwright:test-suite:start')

@@ -18,3 +24,7 @@ const testSuiteFinishCh = channel('ci:playwright:test-suite:finish')

const testSuiteToTestStatuses = new Map()
const testSuiteToErrors = new Map()
const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn')
let applyRepeatEachIndex = null
let startedSuites = []

@@ -30,3 +40,42 @@

let remainingTestsByFile = {}
let isEarlyFlakeDetectionEnabled = false
let isFlakyTestRetriesEnabled = false
let earlyFlakeDetectionNumRetries = 0
let knownTests = {}
let rootDir = ''
const MINIMUM_SUPPORTED_VERSION_EFD = '1.38.0'
function isNewTest (test) {
const testSuite = getTestSuitePath(test._requireFile, rootDir)
const testsForSuite = knownTests?.playwright?.[testSuite] || []
return !testsForSuite.includes(test.title)
}
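// Editor's illustrative sketch (not part of the diff): assuming knownTests has
// the shape { playwright: { '<relative suite path>': ['<test title>', ...] } },
// a test is "new" when its title is absent from its suite's list:
// knownTests = { playwright: { 'tests/login.spec.js': ['logs in'] } }
// -> a test titled 'logs in' in tests/login.spec.js is known (not new),
//    while one titled 'signs up' in the same file is new.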
function getSuiteType (test, type) {
let suite = test.parent
while (suite && suite._type !== type) {
suite = suite.parent
}
return suite
}
// Copy of Suite#_deepClone but with a function to filter tests
function deepCloneSuite (suite, filterTest) {
const copy = suite._clone()
for (const entry of suite._entries) {
if (entry.constructor.name === 'Suite') {
copy._addSuite(deepCloneSuite(entry, filterTest))
} else {
if (filterTest(entry)) {
const copiedTest = entry._clone()
copiedTest._ddIsNew = true
copiedTest._ddIsEfdRetry = true
copy._addTest(copiedTest)
}
}
}
return copy
}
function getTestsBySuiteFromTestGroups (testGroups) {

@@ -79,4 +128,4 @@ return testGroups.reduce((acc, { requireFile, tests }) => {

}
if (playwrightRunner._config && playwrightRunner._config.config) {
return playwrightRunner._config.config.rootDir
if (playwrightRunner._config) {
return playwrightRunner._config.config?.rootDir || process.cwd()
}

@@ -86,5 +135,107 @@ return process.cwd()

function testBeginHandler (test) {
const { _requireFile: testSuiteAbsolutePath, title: testName, _type, location: { line: testSourceLine } } = test
function getProjectsFromRunner (runner) {
const config = getPlaywrightConfig(runner)
return config.projects?.map((project) => {
if (project.project) {
return project.project
}
return project
})
}
function getProjectsFromDispatcher (dispatcher) {
const newConfig = dispatcher._config?.config?.projects
if (newConfig) {
return newConfig
}
// old
return dispatcher._loader?.fullConfig()?.projects
}
function getBrowserNameFromProjects (projects, test) {
if (!projects || !test) {
return null
}
const { _projectIndex, _projectId: testProjectId } = test
if (_projectIndex !== undefined) {
return projects[_projectIndex]?.name
}
return projects.find(({ __projectId, _id, name }) => {
if (__projectId !== undefined) {
return __projectId === testProjectId
}
if (_id !== undefined) {
return _id === testProjectId
}
return name === testProjectId
})?.name
}
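// Editor's illustrative sketch (not part of the diff): browser lookup matches
// whichever project id field the Playwright version exposes:
// getBrowserNameFromProjects(
//   [{ __projectId: 'p1', name: 'chromium' }],
//   { _projectId: 'p1' }
// ) // -> 'chromium'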
function formatTestHookError (error, hookType, isTimeout) {
let hookError = error
if (error) {
hookError.message = `Error in ${hookType} hook: ${error.message}`
}
if (!hookError && isTimeout) {
hookError = new Error(`${hookType} hook timed out`)
}
return hookError
}
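// Editor's illustrative sketch (not part of the diff):
// formatTestHookError(new Error('boom'), 'beforeAll', false).message
// // -> 'Error in beforeAll hook: boom'
// formatTestHookError(undefined, 'afterAll', true).message
// // -> 'afterAll hook timed out'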
function addErrorToTestSuite (testSuiteAbsolutePath, error) {
if (testSuiteToErrors.has(testSuiteAbsolutePath)) {
testSuiteToErrors.get(testSuiteAbsolutePath).push(error)
} else {
testSuiteToErrors.set(testSuiteAbsolutePath, [error])
}
}
function getTestSuiteError (testSuiteAbsolutePath) {
const errors = testSuiteToErrors.get(testSuiteAbsolutePath)
if (!errors) {
return null
}
if (errors.length === 1) {
return errors[0]
}
return new Error(`${errors.length} errors in this test suite:\n${errors.map(e => e.message).join('\n------\n')}`)
}
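// Editor's illustrative sketch (not part of the diff): with two stored errors,
// the suite error is a single aggregate Error whose message reads
// '2 errors in this test suite:\n<msg1>\n------\n<msg2>'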
function getTestByTestId (dispatcher, testId) {
if (dispatcher._testById) {
return dispatcher._testById.get(testId)?.test
}
const allTests = dispatcher._allTests || dispatcher._ddAllTests
if (allTests) {
return allTests.find(({ id }) => id === testId)
}
}
function getChannelPromise (channelToPublishTo) {
return new Promise(resolve => {
testSessionAsyncResource.runInAsyncScope(() => {
channelToPublishTo.publish({ onDone: resolve })
})
})
}
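// Editor's illustrative sketch (not part of the diff): the subscriber on the
// other side of the channel is expected to call `onDone` with its result,
// which resolves the promise. Hypothetical subscriber shape:
// libraryConfigurationCh.subscribe(({ onDone }) => {
//   loadConfigSomehow().then(libraryConfig => onDone({ libraryConfig }))
// })
// const { err, libraryConfig } = await getChannelPromise(libraryConfigurationCh)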
// eslint-disable-next-line
// Inspired by https://github.com/microsoft/playwright/blob/2b77ed4d7aafa85a600caa0b0d101b72c8437eeb/packages/playwright/src/reporters/base.ts#L293
// We can't use test.outcome() directly because it's set on follow up handlers:
// our `testEndHandler` is called before the outcome is set.
function testWillRetry (test, testStatus) {
return testStatus === 'fail' && test.results.length <= test.retries
}
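// Editor's illustrative sketch (not part of the diff):
// testWillRetry({ results: [{}], retries: 2 }, 'fail')         // -> true
// testWillRetry({ results: [{}, {}, {}], retries: 2 }, 'fail') // -> false
// testWillRetry({ results: [{}], retries: 2 }, 'pass')         // -> false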
function testBeginHandler (test, browserName) {
const {
_requireFile: testSuiteAbsolutePath,
title: testName,
_type,
location: {
line: testSourceLine
}
} = test
if (_type === 'beforeAll' || _type === 'afterAll') {

@@ -108,7 +259,7 @@ return

testAsyncResource.runInAsyncScope(() => {
testStartCh.publish({ testName, testSuiteAbsolutePath, testSourceLine })
testStartCh.publish({ testName, testSuiteAbsolutePath, testSourceLine, browserName })
})
}
function testEndHandler (test, annotations, testStatus, error) {
function testEndHandler (test, annotations, testStatus, error, isTimeout) {
let annotationTags

@@ -121,2 +272,7 @@ if (annotations.length) {

if (_type === 'beforeAll' || _type === 'afterAll') {
const hookError = formatTestHookError(error, _type, isTimeout)
if (hookError) {
addErrorToTestSuite(testSuiteAbsolutePath, hookError)
}
return

@@ -128,14 +284,29 @@ }

testAsyncResource.runInAsyncScope(() => {
testFinishCh.publish({ testStatus, steps: testResult.steps, error, extraTags: annotationTags })
testFinishCh.publish({
testStatus,
steps: testResult?.steps || [],
isRetry: testResult?.retry > 0,
error,
extraTags: annotationTags,
isNew: test._ddIsNew,
isEfdRetry: test._ddIsEfdRetry
})
})
if (!testSuiteToTestStatuses.has(testSuiteAbsolutePath)) {
if (testSuiteToTestStatuses.has(testSuiteAbsolutePath)) {
testSuiteToTestStatuses.get(testSuiteAbsolutePath).push(testStatus)
} else {
testSuiteToTestStatuses.set(testSuiteAbsolutePath, [testStatus])
} else {
testSuiteToTestStatuses.get(testSuiteAbsolutePath).push(testStatus)
}
remainingTestsByFile[testSuiteAbsolutePath] = remainingTestsByFile[testSuiteAbsolutePath]
.filter(currentTest => currentTest !== test)
if (error) {
addErrorToTestSuite(testSuiteAbsolutePath, error)
}
if (!testWillRetry(test, testStatus)) {
remainingTestsByFile[testSuiteAbsolutePath] = remainingTestsByFile[testSuiteAbsolutePath]
.filter(currentTest => currentTest !== test)
}
// Last test in the file, so we finish the suite
if (!remainingTestsByFile[testSuiteAbsolutePath].length) {

@@ -151,5 +322,6 @@ const testStatuses = testSuiteToTestStatuses.get(testSuiteAbsolutePath)

const suiteError = getTestSuiteError(testSuiteAbsolutePath)
const testSuiteAsyncResource = testSuiteToAr.get(testSuiteAbsolutePath)
testSuiteAsyncResource.runInAsyncScope(() => {
testSuiteFinishCh.publish(testSuiteStatus)
testSuiteFinishCh.publish({ status: testSuiteStatus, error: suiteError })
})

@@ -167,3 +339,8 @@ }

function dispatcherRunWrapperNew (run) {
return function () {
return function (testGroups) {
if (!this._allTests) {
// Removed in https://github.com/microsoft/playwright/commit/1e52c37b254a441cccf332520f60225a5acc14c7
// Not available in >=1.44.0
this._ddAllTests = testGroups.map(g => g.tests).flat()
}
remainingTestsByFile = getTestsBySuiteFromTestGroups(arguments[0])

@@ -179,7 +356,8 @@ return run.apply(this, arguments)

const worker = createWorker.apply(this, arguments)
worker.process.on('message', ({ method, params }) => {
if (method === 'testBegin') {
const { test } = dispatcher._testById.get(params.testId)
testBeginHandler(test)
const projects = getProjectsFromDispatcher(dispatcher)
const browser = getBrowserNameFromProjects(projects, test)
testBeginHandler(test, browser)
} else if (method === 'testEnd') {

@@ -191,3 +369,4 @@ const { test } = dispatcher._testById.get(params.testId)

testEndHandler(test, params.annotations, STATUS_TO_TEST_STATUS[testResult.status], testResult.error)
const isTimeout = testResult.status === 'timedOut'
testEndHandler(test, params.annotations, STATUS_TO_TEST_STATUS[testResult.status], testResult.error, isTimeout)
}

@@ -201,11 +380,2 @@ })

function getTestByTestId (dispatcher, testId) {
if (dispatcher._testById) {
return dispatcher._testById.get(testId)?.test
}
if (dispatcher._allTests) {
return dispatcher._allTests.find(({ id }) => id === testId)
}
}
function dispatcherHookNew (dispatcherExport, runWrapper) {

@@ -219,3 +389,5 @@ shimmer.wrap(dispatcherExport.Dispatcher.prototype, 'run', runWrapper)

const test = getTestByTestId(dispatcher, testId)
testBeginHandler(test)
const projects = getProjectsFromDispatcher(dispatcher)
const browser = getBrowserNameFromProjects(projects, test)
testBeginHandler(test, browser)
})

@@ -225,3 +397,4 @@ worker.on('testEnd', ({ testId, status, errors, annotations }) => {

testEndHandler(test, annotations, STATUS_TO_TEST_STATUS[status], errors && errors[0])
const isTimeout = status === 'timedOut'
testEndHandler(test, annotations, STATUS_TO_TEST_STATUS[status], errors && errors[0], isTimeout)
})

@@ -236,5 +409,6 @@

shimmer.wrap(runnerExport.Runner.prototype, 'runAllTests', runAllTests => async function () {
const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn')
const rootDir = getRootDir(this)
let onDone
rootDir = getRootDir(this)
const processArgv = process.argv.slice(2).join(' ')

@@ -246,2 +420,38 @@ const command = `playwright ${processArgv}`

try {
const { err, libraryConfig } = await getChannelPromise(libraryConfigurationCh)
if (!err) {
isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
isFlakyTestRetriesEnabled = libraryConfig.isFlakyTestRetriesEnabled
}
} catch (e) {
isEarlyFlakeDetectionEnabled = false
log.error(e)
}
if (isEarlyFlakeDetectionEnabled && semver.gte(playwrightVersion, MINIMUM_SUPPORTED_VERSION_EFD)) {
try {
const { err, knownTests: receivedKnownTests } = await getChannelPromise(knownTestsCh)
if (!err) {
knownTests = receivedKnownTests
} else {
isEarlyFlakeDetectionEnabled = false
}
} catch (err) {
isEarlyFlakeDetectionEnabled = false
log.error(err)
}
}
const projects = getProjectsFromRunner(this)
if (isFlakyTestRetriesEnabled) {
projects.forEach(project => {
if (project.retries === 0) { // Only if it hasn't been set by the user
project.retries = NUM_FAILED_TEST_RETRIES
}
})
}
const runAllTestsReturn = await runAllTests.apply(this, arguments)

@@ -254,3 +464,4 @@

tests.forEach(test => {
testBeginHandler(test)
const browser = getBrowserNameFromProjects(projects, test)
testBeginHandler(test, browser)
testEndHandler(test, [], 'skip')

@@ -262,3 +473,2 @@ })

let onDone
const flushWait = new Promise(resolve => {

@@ -268,3 +478,7 @@ onDone = resolve

testSessionAsyncResource.runInAsyncScope(() => {
testSessionFinishCh.publish({ status: STATUS_TO_TEST_STATUS[sessionStatus], onDone })
testSessionFinishCh.publish({
status: STATUS_TO_TEST_STATUS[sessionStatus],
isEarlyFlakeDetectionEnabled,
onDone
})
})

@@ -318,2 +532,3 @@ await flushWait

}, runnerHook)
addHook({

@@ -324,1 +539,51 @@ name: 'playwright',

}, (dispatcher) => dispatcherHookNew(dispatcher, dispatcherRunWrapperNew))
// Hook used for early flake detection. EFD only works from >=1.38.0
addHook({
name: 'playwright',
file: 'lib/common/suiteUtils.js',
versions: [`>=${MINIMUM_SUPPORTED_VERSION_EFD}`]
}, suiteUtilsPackage => {
// We grab `applyRepeatEachIndex` to use it later
// `applyRepeatEachIndex` needs to be applied to a cloned suite
applyRepeatEachIndex = suiteUtilsPackage.applyRepeatEachIndex
return suiteUtilsPackage
})
// Hook used for early flake detection. EFD only works from >=1.38.0
addHook({
name: 'playwright',
file: 'lib/runner/loadUtils.js',
versions: [`>=${MINIMUM_SUPPORTED_VERSION_EFD}`]
}, (loadUtilsPackage) => {
const oldCreateRootSuite = loadUtilsPackage.createRootSuite
async function newCreateRootSuite () {
const rootSuite = await oldCreateRootSuite.apply(this, arguments)
if (!isEarlyFlakeDetectionEnabled) {
return rootSuite
}
const newTests = rootSuite
.allTests()
.filter(isNewTest)
newTests.forEach(newTest => {
newTest._ddIsNew = true
if (newTest.expectedStatus !== 'skipped') {
const fileSuite = getSuiteType(newTest, 'file')
const projectSuite = getSuiteType(newTest, 'project')
for (let repeatEachIndex = 0; repeatEachIndex < earlyFlakeDetectionNumRetries; repeatEachIndex++) {
const copyFileSuite = deepCloneSuite(fileSuite, isNewTest)
applyRepeatEachIndex(projectSuite._fullProject, copyFileSuite, repeatEachIndex + 1)
projectSuite._addSuite(copyFileSuite)
}
}
})
return rootSuite
}
loadUtilsPackage.createRootSuite = newCreateRootSuite
return loadUtilsPackage
})
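// Editor's note (not part of the diff): with earlyFlakeDetectionNumRetries = 3,
// each new, non-skipped test keeps its original run and gains 3 extra copies,
// one per cloned file suite with repeatEachIndex 1..3.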

@@ -54,3 +54,3 @@ 'use strict'

const result = fn.apply(this, arguments)
if (result && typeof result === 'object' && typeof result.then === 'function') {
if (result !== null && typeof result === 'object' && typeof result.then === 'function') {
return result.then(function () {

@@ -57,0 +57,0 @@ nextChannel.publish({ req })

@@ -48,3 +48,5 @@ 'use strict'

const targetAddress = this.options && this.options.target &&
this.options.target.address ? this.options.target.address : undefined
this.options.target.address
? this.options.target.address
: undefined

@@ -191,3 +193,4 @@ const asyncResource = new AsyncResource('bound-anonymous-fn')

const state = remoteState && remoteState.constructor
? entry.remote_state.constructor.composite_type : undefined
? entry.remote_state.constructor.composite_type
: undefined
asyncResource.runInAsyncScope(() => {

@@ -194,0 +197,0 @@ exports.beforeFinish(entry, state)

'use strict'
const METHODS = require('methods').concat('all')
const METHODS = require('http').METHODS.map(v => v.toLowerCase()).concat('all')
const pathToRegExp = require('path-to-regexp')

@@ -5,0 +5,0 @@ const shimmer = require('../../datadog-shimmer')

@@ -10,3 +10,3 @@ 'use strict'

addHook({ name: 'tedious', versions: [ '>=1.0.0' ] }, tedious => {
addHook({ name: 'tedious', versions: ['>=1.0.0'] }, tedious => {
const startCh = channel('apm:tedious:request:start')

@@ -13,0 +13,0 @@ const finishCh = channel('apm:tedious:request:finish')

@@ -5,2 +5,4 @@ 'use strict'

const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getResourceName } = require('./util')

@@ -17,3 +19,3 @@

this.startSpan({
const span = this.startSpan({
childOf,

@@ -31,2 +33,13 @@ resource: getResourceName(method, fields),

})
if (
this.config.dsmEnabled && message?.properties?.headers &&
DsmPathwayCodec.contextExists(message.properties.headers)
) {
const payloadSize = getAmqpMessageSize({ headers: message.properties.headers, content: message.content })
const queue = fields.queue ? fields.queue : fields.routingKey
this.tracer.decodeDataStreamsContext(message.properties.headers)
this.tracer
.setCheckpoint(['direction:in', `topic:${queue}`, 'type:rabbitmq'], span, payloadSize)
}
}

@@ -33,0 +46,0 @@ }

@@ -6,2 +6,4 @@ 'use strict'

const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { getResourceName } = require('./util')

@@ -13,3 +15,3 @@

start ({ channel = {}, method, fields }) {
start ({ channel = {}, method, fields, message }) {
if (method !== 'basic.publish') return

@@ -35,2 +37,12 @@

this.tracer.inject(span, TEXT_MAP, fields.headers)
if (this.config.dsmEnabled) {
const hasRoutingKey = fields.routingKey != null
const payloadSize = getAmqpMessageSize({ content: message, headers: fields.headers })
const dataStreamsContext = this.tracer
.setCheckpoint(
['direction:out', `exchange:${fields.exchange}`, `has_routing_key:${hasRoutingKey}`, 'type:rabbitmq']
, span, payloadSize)
DsmPathwayCodec.encode(dataStreamsContext, fields.headers)
}
}

@@ -37,0 +49,0 @@ }

@@ -7,2 +7,3 @@ 'use strict'

const { isTrue } = require('../../dd-trace/src/util')
const coalesce = require('koalas')

@@ -41,6 +42,6 @@ class BaseAwsSdkPlugin extends ClientPlugin {

'aws.region': awsRegion,
'region': awsRegion,
'aws_service': awsService,
region: awsRegion,
aws_service: awsService,
'aws.service': awsService,
'component': 'aws-sdk'
component: 'aws-sdk'
}

@@ -69,3 +70,3 @@ if (this.requestTags) this.requestTags.set(request, tags)

this.addSub(`apm:aws:request:complete:${this.serviceIdentifier}`, ({ response }) => {
this.addSub(`apm:aws:request:complete:${this.serviceIdentifier}`, ({ response, cbExists = false }) => {
const store = storage.getStore()

@@ -75,2 +76,8 @@ if (!store) return

if (!span) return
// try to extract DSM context from the response if no callback exists, as extraction normally happens in the callback
if (!cbExists && this.serviceIdentifier === 'sqs') {
const params = response.request.params
const operation = response.request.operation
this.responseExtractDSMContext(operation, params, response.data ?? response, span)
}
this.addResponseTags(span, response)

@@ -133,4 +140,5 @@ this.finish(span, response, response.error)

if (err.requestId) {
span.addTags({ 'aws.response.request_id': err.requestId })
const requestId = err.RequestId || err.requestId
if (requestId) {
span.addTags({ 'aws.response.request_id': requestId })
}

@@ -164,4 +172,18 @@ }

// check if AWS batch propagation or AWS_[SERVICE] batch propagation is enabled via env variable
const serviceId = serviceIdentifier.toUpperCase()
const batchPropagationEnabled = isTrue(
coalesce(
specificConfig.batchPropagationEnabled,
process.env[`DD_TRACE_AWS_SDK_${serviceId}_BATCH_PROPAGATION_ENABLED`],
config.batchPropagationEnabled,
process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED,
false
)
)
// Merge the specific config back into the main config
return Object.assign({}, config, specificConfig, {
splitByAwsService: config.splitByAwsService !== false,
batchPropagationEnabled,
hooks

@@ -168,0 +190,0 @@ })

@@ -16,3 +16,3 @@ 'use strict'

'aws.cloudwatch.logs.log_group_name': params.logGroupName,
'loggroupname': params.logGroupName
loggroupname: params.logGroupName
})

@@ -19,0 +19,0 @@ }

@@ -17,3 +17,3 @@ 'use strict'

'aws.dynamodb.table_name': params.TableName,
'tablename': params.TableName
tablename: params.TableName
})

@@ -25,3 +25,3 @@ }

// dynamoDB batch TableName
if (params.RequestItems) {
if (params.RequestItems !== null) {
if (typeof params.RequestItems === 'object') {

@@ -35,3 +35,3 @@ if (Object.keys(params.RequestItems).length === 1) {

'aws.dynamodb.table_name': tableName,
'tablename': tableName
tablename: tableName
})

@@ -38,0 +38,0 @@ }

@@ -14,3 +14,3 @@ 'use strict'

'aws.eventbridge.source': `${params.source}`,
'rulename': `${rulename}`
rulename: `${rulename}`
}

@@ -17,0 +17,0 @@ }

@@ -10,4 +10,7 @@ 'use strict'

exports.s3 = require('./s3')
exports.sfn = require('./sfn')
exports.sns = require('./sns')
exports.sqs = require('./sqs')
exports.states = require('./states')
exports.stepfunctions = require('./stepfunctions')
exports.default = require('./default')
'use strict'
const {
getSizeOrZero
} = require('../../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')
const log = require('../../../dd-trace/src/log')
const BaseAwsSdkPlugin = require('../base')
const { storage } = require('../../../datadog-core')
class Kinesis extends BaseAwsSdkPlugin {

@@ -8,2 +14,54 @@ static get id () { return 'kinesis' }

constructor (...args) {
super(...args)
// TODO(bengl) Find a way to create the response span tags without this WeakMap being populated
// in the base class
this.requestTags = new WeakMap()
this.addSub('apm:aws:response:start:kinesis', obj => {
const { request, response } = obj
const store = storage.getStore()
const plugin = this
// if we have either of these operations, we want to store the streamName param
// since it is not typically available during get/put records requests
if (request.operation === 'getShardIterator' || request.operation === 'listShards') {
this.storeStreamName(request.params, request.operation, store)
return
}
if (request.operation === 'getRecords') {
let span
const responseExtraction = this.responseExtract(request.params, request.operation, response)
if (responseExtraction && responseExtraction.maybeChildOf) {
obj.needsFinish = true
const options = {
childOf: responseExtraction.maybeChildOf,
tags: Object.assign(
{},
this.requestTags.get(request) || {},
{ 'span.kind': 'server' }
)
}
span = plugin.tracer.startSpan('aws.response', options)
this.enter(span, store)
}
// get the stream name that should have been stored previously
const { streamName } = storage.getStore()
// extract DSM context after, as we might not have a parent-child relationship but may still have a DSM context
this.responseExtractDSMContext(
request.operation, request.params, response, span || null, { streamName }
)
}
})
this.addSub('apm:aws:response:finish:kinesis', err => {
const { span } = storage.getStore()
this.finish(span, null, err)
})
}
generateTags (params, operation, response) {

@@ -15,6 +73,56 @@ if (!params || !params.StreamName) return {}

'aws.kinesis.stream_name': params.StreamName,
'streamname': params.StreamName
streamname: params.StreamName
}
}
storeStreamName (params, operation, store) {
if (!operation || (operation !== 'getShardIterator' && operation !== 'listShards')) return
if (!params || !params.StreamName) return
const streamName = params.StreamName
storage.enterWith({ ...store, streamName })
}
responseExtract (params, operation, response) {
if (operation !== 'getRecords') return
if (params.Limit && params.Limit !== 1) return
if (!response || !response.Records || !response.Records[0]) return
const record = response.Records[0]
try {
const decodedData = JSON.parse(Buffer.from(record.Data).toString())
return {
maybeChildOf: this.tracer.extract('text_map', decodedData._datadog),
parsedAttributes: decodedData._datadog
}
} catch (e) {
log.error(e)
}
}
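// Editor's illustrative sketch (not part of the diff): for Kinesis, the trace
// context rides inside the record payload itself, under a `_datadog` key:
// record.Data = Buffer.from(JSON.stringify({
//   msg: 'hi',
//   _datadog: { 'x-datadog-trace-id': '123', 'x-datadog-parent-id': '456' }
// }))
// // responseExtract parses record.Data and extracts context from `_datadog`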
responseExtractDSMContext (operation, params, response, span, kwargs = {}) {
const { streamName } = kwargs
if (!this.config.dsmEnabled) return
if (operation !== 'getRecords') return
if (!response || !response.Records || !response.Records[0]) return
// we only want to set the payloadSize on the span if we have one message, not repeatedly
span = response.Records.length > 1 ? null : span
response.Records.forEach(record => {
const parsedAttributes = JSON.parse(Buffer.from(record.Data).toString())
if (
parsedAttributes?._datadog && streamName && DsmPathwayCodec.contextExists(parsedAttributes._datadog)
) {
const payloadSize = getSizeOrZero(record.Data)
this.tracer.decodeDataStreamsContext(parsedAttributes._datadog)
this.tracer
.setCheckpoint(['direction:in', `topic:${streamName}`, 'type:kinesis'], span, payloadSize)
}
})
}
// AWS-SDK will base64-encode kinesis payloads

@@ -37,38 +145,72 @@ // or will accept an already b64 encoded payload

requestInject (span, request) {
const operation = request.operation
if (operation === 'putRecord' || operation === 'putRecords') {
if (!request.params) {
const { operation, params } = request
if (!params) return
let stream
switch (operation) {
case 'putRecord':
stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
this.injectToMessage(span, params, stream, true)
break
case 'putRecords':
stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
for (let i = 0; i < params.Records.length; i++) {
this.injectToMessage(
span,
params.Records[i],
stream,
i === 0 || (this.config.batchPropagationEnabled)
)
}
}
}
injectToMessage (span, params, stream, injectTraceContext) {
if (!params) {
return
}
let parsedData
if (injectTraceContext || this.config.dsmEnabled) {
parsedData = this._tryParse(params.Data)
if (!parsedData) {
log.error('Unable to parse payload, unable to pass trace context or set DSM checkpoint (if enabled)')
return
}
}
const traceData = {}
this.tracer.inject(span, 'text_map', traceData)
let injectPath
if (request.params.Records && request.params.Records.length > 0) {
injectPath = request.params.Records[0]
} else if (request.params.Data) {
injectPath = request.params
} else {
log.error('No valid payload passed, unable to pass trace context')
const ddInfo = {}
// for now, we only want to inject to the first message; this may change for batches in the future
if (injectTraceContext) { this.tracer.inject(span, 'text_map', ddInfo) }
// set DSM hash if enabled
if (this.config.dsmEnabled) {
parsedData._datadog = ddInfo
const dataStreamsContext = this.setDSMCheckpoint(span, parsedData, stream)
DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
}
if (Object.keys(ddInfo).length !== 0) {
parsedData._datadog = ddInfo
const finalData = Buffer.from(JSON.stringify(parsedData))
const byteSize = finalData.length
// Kinesis max payload size is 1MB
// So we must ensure adding DD context won't go over that (512b is an estimate)
if (byteSize >= 1048576) {
log.info('Payload size too large to pass context')
return
}
const parsedData = this._tryParse(injectPath.Data)
if (parsedData) {
parsedData._datadog = traceData
const finalData = Buffer.from(JSON.stringify(parsedData))
const byteSize = finalData.length
// Kinesis max payload size is 1MB
// So we must ensure adding DD context won't go over that (512b is an estimate)
if (byteSize >= 1048576) {
log.info('Payload size too large to pass context')
return
}
injectPath.Data = finalData
} else {
log.error('Unable to parse payload, unable to pass trace context')
}
params.Data = finalData
}
}
setDSMCheckpoint (span, parsedData, stream) {
// get payload size of request data
const payloadSize = Buffer.from(JSON.stringify(parsedData)).byteLength
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${stream}`, 'type:kinesis'], span, payloadSize)
return dataStreamsContext
}
}
module.exports = Kinesis

@@ -16,3 +16,3 @@ 'use strict'

'resource.name': `${operation} ${params.FunctionName}`,
'functionname': params.FunctionName,
functionname: params.FunctionName,
'aws.lambda': params.FunctionName

@@ -19,0 +19,0 @@ })

@@ -16,3 +16,3 @@ 'use strict'

'aws.redshift.cluster_identifier': params.ClusterIdentifier,
'clusteridentifier': params.ClusterIdentifier
clusteridentifier: params.ClusterIdentifier
})

@@ -19,0 +19,0 @@ }

@@ -17,3 +17,3 @@ 'use strict'

'aws.s3.bucket_name': params.Bucket,
'bucketname': params.Bucket
bucketname: params.Bucket
})

@@ -20,0 +20,0 @@ }

'use strict'
const { getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')
const log = require('../../../dd-trace/src/log')

@@ -14,2 +16,3 @@ const BaseAwsSdkPlugin = require('../base')

const TopicArn = params.TopicArn || response.data.TopicArn
// Split the ARN into its parts

@@ -24,3 +27,3 @@ // ex.'arn:aws:sns:us-east-1:123456789012:my-topic'

'aws.sns.topic_arn': TopicArn,
'topicname': topicName
topicname: topicName
}

@@ -57,7 +60,12 @@

case 'publish':
this._injectMessageAttributes(span, params)
this.injectToMessage(span, params, params.TopicArn, true)
break
case 'publishBatch':
if (params.PublishBatchRequestEntries && params.PublishBatchRequestEntries.length > 0) {
this._injectMessageAttributes(span, params.PublishBatchRequestEntries[0])
for (let i = 0; i < params.PublishBatchRequestEntries.length; i++) {
this.injectToMessage(
span,
params.PublishBatchRequestEntries[i],
params.TopicArn,
i === 0 || (this.config.batchPropagationEnabled)
)
}

@@ -68,3 +76,3 @@ break

_injectMessageAttributes (span, params) {
injectToMessage (span, params, topicArn, injectTraceContext) {
if (!params.MessageAttributes) {

@@ -77,11 +85,46 @@ params.MessageAttributes = {}

}
const ddInfo = {}
this.tracer.inject(span, 'text_map', ddInfo)
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: Buffer.from(JSON.stringify(ddInfo)) // BINARY types are automatically base64 encoded
// for now, we only want to inject to the first message; this may change for batches in the future
if (injectTraceContext) {
this.tracer.inject(span, 'text_map', ddInfo)
// add ddInfo before checking DSM so we can include DD attributes in payload size
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: ddInfo
}
}
if (this.config.dsmEnabled) {
if (!params.MessageAttributes._datadog) {
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: ddInfo
}
}
const dataStreamsContext = this.setDSMCheckpoint(span, params, topicArn)
DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
}
if (Object.keys(ddInfo).length !== 0) {
// BINARY types are automatically base64 encoded
params.MessageAttributes._datadog.BinaryValue = Buffer.from(JSON.stringify(ddInfo))
} else if (params.MessageAttributes._datadog) {
// let's avoid adding any additional information to the payload if we failed to inject
delete params.MessageAttributes._datadog
}
}
setDSMCheckpoint (span, params, topicArn) {
// only set a checkpoint if publishing to a topic
if (topicArn) {
const payloadSize = getHeadersSize(params)
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${topicArn}`, 'type:sns'], span, payloadSize)
return dataStreamsContext
}
}
}
module.exports = Sns
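// Editor's illustrative sketch (not part of the diff): SNS carries the trace
// context as a Binary message attribute; the SDK base64-encodes the Buffer on
// send, e.g.:
// params.MessageAttributes._datadog = {
//   DataType: 'Binary',
//   BinaryValue: Buffer.from(JSON.stringify({ 'x-datadog-trace-id': '123' }))
// }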

@@ -6,2 +6,4 @@ 'use strict'

const { storage } = require('../../../datadog-core')
const { getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')

@@ -23,7 +25,9 @@ class Sqs extends BaseAwsSdkPlugin {

const plugin = this
const maybeChildOf = this.responseExtract(request.params, request.operation, response)
if (maybeChildOf) {
const contextExtraction = this.responseExtract(request.params, request.operation, response)
let span
let parsedMessageAttributes = null
if (contextExtraction && contextExtraction.datadogContext) {
obj.needsFinish = true
const options = {
childOf: maybeChildOf,
childOf: contextExtraction.datadogContext,
tags: Object.assign(

@@ -35,5 +39,11 @@ {},

}
const span = plugin.tracer.startSpan('aws.response', options)
parsedMessageAttributes = contextExtraction.parsedAttributes
span = plugin.tracer.startSpan('aws.response', options)
this.enter(span, store)
}
// extract DSM context after, as we might not have a parent-child relationship but may still have a DSM context
this.responseExtractDSMContext(
request.operation, request.params, response, span || null, { parsedMessageAttributes }
)
})

@@ -98,3 +108,3 @@

'aws.sqs.queue_name': params.QueueName || params.QueueUrl,
'queuename': queueName
queuename: queueName
})

@@ -140,9 +150,19 @@

const parsedAttributes = this.parseDatadogAttributes(datadogAttribute)
if (parsedAttributes) {
return {
datadogContext: this.tracer.extract('text_map', parsedAttributes),
parsedAttributes
}
}
}
parseDatadogAttributes (attributes) {
try {
if (datadogAttribute.StringValue) {
const textMap = datadogAttribute.StringValue
return this.tracer.extract('text_map', JSON.parse(textMap))
} else if (datadogAttribute.Type === 'Binary') {
const buffer = Buffer.from(datadogAttribute.Value, 'base64')
return this.tracer.extract('text_map', JSON.parse(buffer))
if (attributes.StringValue) {
const textMap = attributes.StringValue
return JSON.parse(textMap)
} else if (attributes.Type === 'Binary' || attributes.DataType === 'Binary') {
const buffer = Buffer.from(attributes.Value ?? attributes.BinaryValue, 'base64')
return JSON.parse(buffer)
}

@@ -154,17 +174,91 @@ } catch (e) {

requestInject (span, request) {
const operation = request.operation
if (operation === 'sendMessage') {
if (!request.params) {
request.params = {}
responseExtractDSMContext (operation, params, response, span, kwargs = {}) {
let { parsedAttributes } = kwargs
if (!this.config.dsmEnabled) return
if (operation !== 'receiveMessage') return
if (!response || !response.Messages || !response.Messages[0]) return
// we only want to set the payloadSize on the span if we have one message
span = response.Messages.length > 1 ? null : span
response.Messages.forEach(message => {
// we may have already parsed the message attributes when extracting trace context
if (!parsedAttributes) {
if (message.Body) {
try {
const body = JSON.parse(message.Body)
// SNS to SQS
if (body.Type === 'Notification') {
message = body
}
} catch (e) {
// SQS to SQS
}
}
if (!parsedAttributes && message.MessageAttributes && message.MessageAttributes._datadog) {
parsedAttributes = this.parseDatadogAttributes(message.MessageAttributes._datadog)
}
}
if (!request.params.MessageAttributes) {
request.params.MessageAttributes = {}
} else if (Object.keys(request.params.MessageAttributes).length >= 10) { // SQS quota
// TODO: add test when the test suite is fixed
return
if (parsedAttributes && DsmPathwayCodec.contextExists(parsedAttributes)) {
const payloadSize = getHeadersSize({
Body: message.Body,
MessageAttributes: message.MessageAttributes
})
const queue = params.QueueUrl.split('/').pop()
this.tracer.decodeDataStreamsContext(parsedAttributes)
this.tracer
.setCheckpoint(['direction:in', `topic:${queue}`, 'type:sqs'], span, payloadSize)
}
const ddInfo = {}
})
}
requestInject (span, request) {
const { operation, params } = request
if (!params) return
switch (operation) {
case 'sendMessage':
this.injectToMessage(span, params, params.QueueUrl, true)
break
case 'sendMessageBatch':
for (let i = 0; i < params.Entries.length; i++) {
this.injectToMessage(
span,
params.Entries[i],
params.QueueUrl,
i === 0 || (this.config.batchPropagationEnabled)
)
}
break
case 'receiveMessage':
if (!params.MessageAttributeNames) {
params.MessageAttributeNames = ['_datadog']
} else if (
!params.MessageAttributeNames.includes('_datadog') &&
!params.MessageAttributeNames.includes('.*') &&
!params.MessageAttributeNames.includes('All')
) {
params.MessageAttributeNames.push('_datadog')
}
break
}
}
injectToMessage (span, params, queueUrl, injectTraceContext) {
if (!params) {
params = {}
}
if (!params.MessageAttributes) {
params.MessageAttributes = {}
} else if (Object.keys(params.MessageAttributes).length >= 10) { // SQS quota
// TODO: add test when the test suite is fixed
return
}
const ddInfo = {}
// for now, we only want to inject to the first message; this may change for batches in the future
if (injectTraceContext) {
this.tracer.inject(span, 'text_map', ddInfo)
request.params.MessageAttributes._datadog = {
params.MessageAttributes._datadog = {
DataType: 'String',

@@ -174,5 +268,36 @@ StringValue: JSON.stringify(ddInfo)

}
if (this.config.dsmEnabled) {
if (!params.MessageAttributes._datadog) {
params.MessageAttributes._datadog = {
DataType: 'String',
StringValue: JSON.stringify(ddInfo)
}
}
const dataStreamsContext = this.setDSMCheckpoint(span, params, queueUrl)
if (dataStreamsContext) {
DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
params.MessageAttributes._datadog.StringValue = JSON.stringify(ddInfo)
}
}
if (params.MessageAttributes._datadog && Object.keys(ddInfo).length === 0) {
// let's avoid adding any additional information to the payload if we failed to inject
delete params.MessageAttributes._datadog
}
}
setDSMCheckpoint (span, params, queueUrl) {
const payloadSize = getHeadersSize({
Body: params.MessageBody,
MessageAttributes: params.MessageAttributes
})
const queue = queueUrl.split('/').pop()
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${queue}`, 'type:sqs'], span, payloadSize)
return dataStreamsContext
}
}
module.exports = Sqs
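// Editor's illustrative sketch (not part of the diff): the DSM checkpoint's
// topic tag is just the last path segment of the queue URL:
// 'https://sqs.us-east-1.amazonaws.com/123456789012/my-queue'.split('/').pop()
// // -> 'my-queue', giving tags ['direction:out', 'topic:my-queue', 'type:sqs']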

@@ -19,3 +19,3 @@ 'use strict'

'db.type': 'couchbase',
'component': 'couchbase',
component: 'couchbase',
'resource.name': `couchbase.${operation}`,

@@ -65,2 +65,3 @@ 'span.kind': this.constructor.kind,

}
_addCommandSubs (name) {

@@ -67,0 +68,0 @@ this.addSubs(name, ({ bucket, collection, seedNodes }) => {

@@ -17,3 +17,14 @@ 'use strict'

TEST_CODE_OWNERS,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE,
TEST_EARLY_FLAKE_ENABLED,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_SUITE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_MODULE_ID,
TEST_SUITE,
CUCUMBER_IS_PARALLEL
} = require('../../dd-trace/src/plugins/util/test')

@@ -30,5 +41,24 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
function getTestSuiteTags (testSuiteSpan) {
const suiteTags = {
[TEST_SUITE_ID]: testSuiteSpan.context().toSpanId(),
[TEST_SESSION_ID]: testSuiteSpan.context().toTraceId(),
[TEST_COMMAND]: testSuiteSpan.context()._tags[TEST_COMMAND],
[TEST_MODULE]: 'cucumber'
}
if (testSuiteSpan.context()._parentId) {
suiteTags[TEST_MODULE_ID] = testSuiteSpan.context()._parentId.toString(10)
}
return suiteTags
}
class CucumberPlugin extends CiPlugin {

@@ -44,2 +74,4 @@ static get id () {

this.testSuiteSpanByPath = {}
this.addSub('ci:cucumber:session:finish', ({

@@ -51,5 +83,7 @@ status,

hasUnskippableSuites,
hasForcedToRunSuites
hasForcedToRunSuites,
isEarlyFlakeDetectionEnabled,
isParallel
}) => {
const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {}
const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}
addIntelligentTestRunnerSpanTags(

@@ -69,2 +103,8 @@ this.testSessionSpan,

)
if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true')
}
if (isParallel) {
this.testSessionSpan.setTag(CUCUMBER_IS_PARALLEL, 'true')
}

@@ -78,4 +118,5 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
this.itrConfig = null
this.libraryConfig = null
this.tracer._exporter.flush()

@@ -102,3 +143,3 @@ })

}
this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
const testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
childOf: this.testModuleSpan,

@@ -111,4 +152,6 @@ tags: {

})
this.testSuiteSpanByPath[testSuitePath] = testSuiteSpan
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
if (this.itrConfig?.isCodeCoverageEnabled) {
if (this.libraryConfig?.isCodeCoverageEnabled) {
this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' })

@@ -118,10 +161,11 @@ }

this.addSub('ci:cucumber:test-suite:finish', status => {
this.testSuiteSpan.setTag(TEST_STATUS, status)
this.testSuiteSpan.finish()
this.addSub('ci:cucumber:test-suite:finish', ({ status, testSuitePath }) => {
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]
testSuiteSpan.setTag(TEST_STATUS, status)
testSuiteSpan.finish()
this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
})
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => {
if (!this.itrConfig?.isCodeCoverageEnabled) {
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile, testSuitePath }) => {
if (!this.libraryConfig?.isCodeCoverageEnabled) {
return

@@ -132,5 +176,6 @@ }

}
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]
const relativeCoverageFiles = [...coverageFiles, suiteFile]
.map(filename => getTestSuitePath(filename, this.sourceRoot))
.map(filename => getTestSuitePath(filename, this.repositoryRoot))

@@ -140,4 +185,4 @@ this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length)

const formattedCoverage = {
sessionId: this.testSuiteSpan.context()._traceId,
suiteId: this.testSuiteSpan.context()._spanId,
sessionId: testSuiteSpan.context()._traceId,
suiteId: testSuiteSpan.context()._spanId,
files: relativeCoverageFiles

@@ -150,10 +195,31 @@ }

this.addSub('ci:cucumber:test:start', ({ testName, fullTestSuite, testSourceLine }) => {
this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine, isParallel }) => {
const store = storage.getStore()
const testSuite = getTestSuitePath(fullTestSuite, this.sourceRoot)
const testSpan = this.startTestSpan(testName, testSuite, testSourceLine)
const testSuite = getTestSuitePath(testFileAbsolutePath, this.sourceRoot)
const testSourceFile = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot)
const extraTags = {
[TEST_SOURCE_START]: testSourceLine,
[TEST_SOURCE_FILE]: testSourceFile
}
if (isParallel) {
extraTags[CUCUMBER_IS_PARALLEL] = 'true'
}
const testSpan = this.startTestSpan(testName, testSuite, extraTags)
this.enter(testSpan, store)
})
this.addSub('ci:cucumber:test:retry', (isFlakyRetry) => {
const store = storage.getStore()
const span = store.span
if (isFlakyRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.setTag(TEST_STATUS, 'fail')
span.finish()
finishAllTraceSpans(span)
})
this.addSub('ci:cucumber:test-step:start', ({ resource }) => {

@@ -173,3 +239,41 @@ const store = storage.getStore()

this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage }) => {
this.addSub('ci:cucumber:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => ({
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}))
)
// We have to update the test session, test module and test suite ids
// before we export them in the main process
formattedTraces.forEach(trace => {
trace.forEach(span => {
if (span.name === 'cucumber.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
const testSuiteTags = getTestSuiteTags(testSuiteSpan)
span.meta = {
...span.meta,
...testSuiteTags
}
}
})
this.tracer._exporter.export(trace)
})
})
this.addSub('ci:cucumber:test:finish', ({
isStep,
status,
skipReason,
errorMessage,
isNew,
isEfdRetry,
isFlakyRetry
}) => {
const span = storage.getStore().span

@@ -180,2 +284,9 @@ const statusTag = isStep ? 'step.status' : TEST_STATUS

if (isNew) {
span.setTag(TEST_IS_NEW, 'true')
if (isEfdRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
}
if (skipReason) {

@@ -189,10 +300,24 @@ span.setTag(TEST_SKIP_REASON, skipReason)

if (isFlakyRetry > 0) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.finish()
if (!isStep) {
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew,
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
finishAllTraceSpans(span)
// If it's a worker, flushing is cheap, as it's just sending data to the main process
if (isCucumberWorker) {
this.tracer._exporter.flush()
}
}

@@ -209,8 +334,9 @@ })

startTestSpan (testName, testSuite, testSourceLine) {
startTestSpan (testName, testSuite, extraTags) {
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
return super.startTestSpan(
testName,
testSuite,
this.testSuiteSpan,
{ [TEST_SOURCE_START]: testSourceLine }
testSuiteSpan,
extraTags
)

@@ -217,0 +343,0 @@ }

@@ -1,152 +0,4 @@

const {
TEST_STATUS,
TEST_IS_RUM_ACTIVE,
TEST_CODE_OWNERS,
getTestEnvironmentMetadata,
CI_APP_ORIGIN,
getTestParentSpan,
getCodeOwnersFileEntries,
getCodeOwnersForFilename,
getTestCommonTags,
getTestSessionCommonTags,
getTestModuleCommonTags,
getTestSuiteCommonTags,
TEST_SUITE_ID,
TEST_MODULE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_SOURCE_START,
finishAllTraceSpans,
getCoveredFilenamesFromCoverage,
getTestSuitePath,
addIntelligentTestRunnerSpanTags,
TEST_SKIPPED_BY_ITR,
TEST_ITR_UNSKIPPABLE,
TEST_ITR_FORCED_RUN,
ITR_CORRELATION_ID
} = require('../../dd-trace/src/plugins/util/test')
const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants')
const log = require('../../dd-trace/src/log')
const NoopTracer = require('../../dd-trace/src/noop/tracer')
const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util')
const {
TELEMETRY_EVENT_CREATED,
TELEMETRY_EVENT_FINISHED,
TELEMETRY_ITR_FORCED_TO_RUN,
TELEMETRY_CODE_COVERAGE_EMPTY,
TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES,
incrementCountMetric,
distributionMetric
} = require('../../dd-trace/src/ci-visibility/telemetry')
const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry')
const {
GIT_REPOSITORY_URL,
GIT_COMMIT_SHA,
GIT_BRANCH,
CI_PROVIDER_NAME
} = require('../../dd-trace/src/plugins/util/tags')
const {
OS_VERSION,
OS_PLATFORM,
OS_ARCHITECTURE,
RUNTIME_NAME,
RUNTIME_VERSION
} = require('../../dd-trace/src/plugins/util/env')
const cypressPlugin = require('./cypress-plugin')
const TEST_FRAMEWORK_NAME = 'cypress'
const CYPRESS_STATUS_TO_TEST_STATUS = {
passed: 'pass',
failed: 'fail',
pending: 'skip',
skipped: 'skip'
}
function getTestSpanMetadata (tracer, testName, testSuite, cypressConfig) {
const childOf = getTestParentSpan(tracer)
const commonTags = getTestCommonTags(testName, testSuite, cypressConfig.version, TEST_FRAMEWORK_NAME)
return {
childOf,
...commonTags
}
}
function getCypressVersion (details) {
if (details && details.cypressVersion) {
return details.cypressVersion
}
if (details && details.config && details.config.version) {
return details.config.version
}
return ''
}
function getRootDir (details) {
if (details && details.config) {
return details.config.projectRoot || details.config.repoRoot || process.cwd()
}
return process.cwd()
}
function getCypressCommand (details) {
if (!details) {
return TEST_FRAMEWORK_NAME
}
return `${TEST_FRAMEWORK_NAME} ${details.specPattern || ''}`
}
function getSessionStatus (summary) {
if (summary.totalFailed !== undefined && summary.totalFailed > 0) {
return 'fail'
}
if (summary.totalSkipped !== undefined && summary.totalSkipped === summary.totalTests) {
return 'skip'
}
return 'pass'
}
function getSuiteStatus (suiteStats) {
if (suiteStats.failures !== undefined && suiteStats.failures > 0) {
return 'fail'
}
if (suiteStats.tests !== undefined && suiteStats.tests === suiteStats.pending) {
return 'skip'
}
return 'pass'
}
function getItrConfig (tracer, testConfiguration) {
return new Promise(resolve => {
if (!tracer._tracer._exporter || !tracer._tracer._exporter.getItrConfiguration) {
return resolve({ err: new Error('CI Visibility was not initialized correctly') })
}
tracer._tracer._exporter.getItrConfiguration(testConfiguration, (err, itrConfig) => {
resolve({ err, itrConfig })
})
})
}
function getSkippableTests (isSuitesSkippingEnabled, tracer, testConfiguration) {
if (!isSuitesSkippingEnabled) {
return Promise.resolve({ skippableTests: [] })
}
return new Promise(resolve => {
if (!tracer._tracer._exporter || !tracer._tracer._exporter.getItrConfiguration) {
return resolve({ err: new Error('CI Visibility was not initialized correctly') })
}
tracer._tracer._exporter.getSkippableSuites(testConfiguration, (err, skippableTests, correlationId) => {
resolve({
err,
skippableTests,
correlationId
})
})
})
}
const noopTask = {

@@ -168,4 +20,2 @@ 'dd:testSuiteStart': () => {

module.exports = (on, config) => {
let isTestsSkipped = false
const skippedTests = []
const tracer = require('../../dd-trace')

@@ -176,375 +26,12 @@

// We still need to register these tasks or the support file will fail
return on('task', noopTask)
on('task', noopTask)
return config
}
const testEnvironmentMetadata = getTestEnvironmentMetadata(TEST_FRAMEWORK_NAME)
on('before:run', cypressPlugin.beforeRun.bind(cypressPlugin))
on('after:spec', cypressPlugin.afterSpec.bind(cypressPlugin))
on('after:run', cypressPlugin.afterRun.bind(cypressPlugin))
on('task', cypressPlugin.getTasks())
const {
[GIT_REPOSITORY_URL]: repositoryUrl,
[GIT_COMMIT_SHA]: sha,
[OS_VERSION]: osVersion,
[OS_PLATFORM]: osPlatform,
[OS_ARCHITECTURE]: osArchitecture,
[RUNTIME_NAME]: runtimeName,
[RUNTIME_VERSION]: runtimeVersion,
[GIT_BRANCH]: branch,
[CI_PROVIDER_NAME]: ciProviderName
} = testEnvironmentMetadata
const isUnsupportedCIProvider = !ciProviderName
const finishedTestsByFile = {}
const testConfiguration = {
repositoryUrl,
sha,
osVersion,
osPlatform,
osArchitecture,
runtimeName,
runtimeVersion,
branch,
testLevel: 'test'
}
const codeOwnersEntries = getCodeOwnersFileEntries()
let activeSpan = null
let testSessionSpan = null
let testModuleSpan = null
let testSuiteSpan = null
let command = null
let frameworkVersion
let rootDir
let isSuitesSkippingEnabled = false
let isCodeCoverageEnabled = false
let testsToSkip = []
let itrCorrelationId = ''
const unskippableSuites = []
let hasForcedToRunSuites = false
let hasUnskippableSuites = false
function ciVisEvent (name, testLevel, tags = {}) {
incrementCountMetric(name, {
testLevel,
testFramework: 'cypress',
isUnsupportedCIProvider,
...tags
})
}
function getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) {
const testSuiteTags = {
[TEST_COMMAND]: command,
[TEST_MODULE]: TEST_FRAMEWORK_NAME
}
if (testSuiteSpan) {
testSuiteTags[TEST_SUITE_ID] = testSuiteSpan.context().toSpanId()
}
if (testSessionSpan && testModuleSpan) {
testSuiteTags[TEST_SESSION_ID] = testSessionSpan.context().toTraceId()
testSuiteTags[TEST_MODULE_ID] = testModuleSpan.context().toSpanId()
}
const {
childOf,
resource,
...testSpanMetadata
} = getTestSpanMetadata(tracer, testName, testSuite, config)
const codeOwners = getCodeOwnersForFilename(testSuite, codeOwnersEntries)
if (codeOwners) {
testSpanMetadata[TEST_CODE_OWNERS] = codeOwners
}
if (isUnskippable) {
hasUnskippableSuites = true
incrementCountMetric(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' })
testSpanMetadata[TEST_ITR_UNSKIPPABLE] = 'true'
}
if (isForcedToRun) {
hasForcedToRunSuites = true
incrementCountMetric(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' })
testSpanMetadata[TEST_ITR_FORCED_RUN] = 'true'
}
ciVisEvent(TELEMETRY_EVENT_CREATED, 'test', { hasCodeOwners: !!codeOwners })
return tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test`, {
childOf,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
[ORIGIN_KEY]: CI_APP_ORIGIN,
...testSpanMetadata,
...testEnvironmentMetadata,
...testSuiteTags
}
})
}
on('before:run', (details) => {
return getItrConfig(tracer, testConfiguration).then(({ err, itrConfig }) => {
if (err) {
log.error(err)
} else {
isSuitesSkippingEnabled = itrConfig.isSuitesSkippingEnabled
isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled
}
return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration)
.then(({ err, skippableTests, correlationId }) => {
if (err) {
log.error(err)
} else {
testsToSkip = skippableTests || []
itrCorrelationId = correlationId
}
// `details.specs` are test files
details.specs.forEach(({ absolute, relative }) => {
const isUnskippableSuite = isMarkedAsUnskippable({ path: absolute })
if (isUnskippableSuite) {
unskippableSuites.push(relative)
}
})
const childOf = getTestParentSpan(tracer)
rootDir = getRootDir(details)
command = getCypressCommand(details)
frameworkVersion = getCypressVersion(details)
const testSessionSpanMetadata = getTestSessionCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME)
const testModuleSpanMetadata = getTestModuleCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME)
testSessionSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, {
childOf,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testSessionSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'session')
testModuleSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_module`, {
childOf: testSessionSpan,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testModuleSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'module')
return details
})
})
})
on('after:spec', (spec, { tests, stats }) => {
const cypressTests = tests || []
const finishedTests = finishedTestsByFile[spec.relative] || []
// Get tests that didn't go through `dd:afterEach`
// and create a skipped test span for each of them
cypressTests.filter(({ title }) => {
const cypressTestName = title.join(' ')
const isTestFinished = finishedTests.find(({ testName }) => cypressTestName === testName)
return !isTestFinished
}).forEach(({ title }) => {
const cypressTestName = title.join(' ')
const isSkippedByItr = testsToSkip.find(test =>
cypressTestName === test.name && spec.relative === test.suite
)
const skippedTestSpan = getTestSpan(cypressTestName, spec.relative)
skippedTestSpan.setTag(TEST_STATUS, 'skip')
if (isSkippedByItr) {
skippedTestSpan.setTag(TEST_SKIPPED_BY_ITR, 'true')
}
if (itrCorrelationId) {
skippedTestSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId)
}
skippedTestSpan.finish()
})
// Make sure that reported test statuses are the same as Cypress reports.
// This is not always the case, such as when an `after` hook fails:
// Cypress will report the last run test as failed, but we don't know that yet at `dd:afterEach`
let latestError
finishedTests.forEach((finishedTest) => {
const cypressTest = cypressTests.find(test => test.title.join(' ') === finishedTest.testName)
if (!cypressTest) {
return
}
if (cypressTest.displayError) {
latestError = new Error(cypressTest.displayError)
}
const cypressTestStatus = CYPRESS_STATUS_TO_TEST_STATUS[cypressTest.state]
// update test status
if (cypressTestStatus !== finishedTest.testStatus) {
finishedTest.testSpan.setTag(TEST_STATUS, cypressTestStatus)
finishedTest.testSpan.setTag('error', latestError)
}
if (itrCorrelationId) {
finishedTest.testSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId)
}
finishedTest.testSpan.finish(finishedTest.finishTime)
})
if (testSuiteSpan) {
const status = getSuiteStatus(stats)
testSuiteSpan.setTag(TEST_STATUS, status)
if (latestError) {
testSuiteSpan.setTag('error', latestError)
}
testSuiteSpan.finish()
testSuiteSpan = null
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
}
})
on('after:run', (suiteStats) => {
if (testSessionSpan && testModuleSpan) {
const testStatus = getSessionStatus(suiteStats)
testModuleSpan.setTag(TEST_STATUS, testStatus)
testSessionSpan.setTag(TEST_STATUS, testStatus)
addIntelligentTestRunnerSpanTags(
testSessionSpan,
testModuleSpan,
{
isSuitesSkipped: isTestsSkipped,
isSuitesSkippingEnabled,
isCodeCoverageEnabled,
skippingType: 'test',
skippingCount: skippedTests.length,
hasForcedToRunSuites,
hasUnskippableSuites
}
)
testModuleSpan.finish()
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')
testSessionSpan.finish()
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session')
finishAllTraceSpans(testSessionSpan)
}
return new Promise(resolve => {
const exporter = tracer._tracer._exporter
if (!exporter) {
return resolve(null)
}
if (exporter.flush) {
exporter.flush(() => {
appClosingTelemetry()
resolve(null)
})
} else if (exporter._writer) {
exporter._writer.flush(() => {
appClosingTelemetry()
resolve(null)
})
}
})
})
on('task', {
'dd:testSuiteStart': (suite) => {
if (testSuiteSpan) {
return null
}
const testSuiteSpanMetadata = getTestSuiteCommonTags(command, frameworkVersion, suite, TEST_FRAMEWORK_NAME)
testSuiteSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_suite`, {
childOf: testModuleSpan,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testSuiteSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
return null
},
'dd:beforeEach': (test) => {
const { testName, testSuite } = test
const shouldSkip = !!testsToSkip.find(test => {
return testName === test.name && testSuite === test.suite
})
const isUnskippable = unskippableSuites.includes(testSuite)
const isForcedToRun = shouldSkip && isUnskippable
// skip test
if (shouldSkip && !isUnskippable) {
skippedTests.push(test)
isTestsSkipped = true
return { shouldSkip: true }
}
if (!activeSpan) {
activeSpan = getTestSpan(testName, testSuite, isUnskippable, isForcedToRun)
}
return activeSpan ? { traceId: activeSpan.context().toTraceId() } : {}
},
'dd:afterEach': ({ test, coverage }) => {
const { state, error, isRUMActive, testSourceLine, testSuite, testName } = test
if (activeSpan) {
if (coverage && isCodeCoverageEnabled && tracer._tracer._exporter && tracer._tracer._exporter.exportCoverage) {
const coverageFiles = getCoveredFilenamesFromCoverage(coverage)
const relativeCoverageFiles = coverageFiles.map(file => getTestSuitePath(file, rootDir))
if (!relativeCoverageFiles.length) {
incrementCountMetric(TELEMETRY_CODE_COVERAGE_EMPTY)
}
distributionMetric(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length)
const { _traceId, _spanId } = testSuiteSpan.context()
const formattedCoverage = {
sessionId: _traceId,
suiteId: _spanId,
testId: activeSpan.context()._spanId,
files: relativeCoverageFiles
}
tracer._tracer._exporter.exportCoverage(formattedCoverage)
}
const testStatus = CYPRESS_STATUS_TO_TEST_STATUS[state]
activeSpan.setTag(TEST_STATUS, testStatus)
if (error) {
activeSpan.setTag('error', error)
}
if (isRUMActive) {
activeSpan.setTag(TEST_IS_RUM_ACTIVE, 'true')
}
if (testSourceLine) {
activeSpan.setTag(TEST_SOURCE_START, testSourceLine)
}
const finishedTest = {
testName,
testStatus,
finishTime: activeSpan._getTime(), // we store the finish time here
testSpan: activeSpan
}
if (finishedTestsByFile[testSuite]) {
finishedTestsByFile[testSuite].push(finishedTest)
} else {
finishedTestsByFile[testSuite] = [finishedTest]
}
// test spans are finished at after:spec
}
activeSpan = null
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test')
return null
},
'dd:addTags': (tags) => {
if (activeSpan) {
activeSpan.addTags(tags)
}
return null
}
})
return cypressPlugin.init(tracer, config)
}
/* eslint-disable */
let isEarlyFlakeDetectionEnabled = false
let knownTestsForSuite = []
let suiteTests = []
let earlyFlakeDetectionNumRetries = 0
// If the test is using multi-domain support with cy.origin, trying to access
// window properties will result in a cross-origin error.
function safeGetRum (window) {
try {
return window.DD_RUM
} catch (e) {
return null
}
}
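// Usage sketch (assumes the Cypress `cy` global): route any cross-origin
// window access through safeGetRum instead of touching win.DD_RUM directly.
// cy.window().then(win => {
//   if (safeGetRum(win)) {
//     // RUM is reachable from this origin
//   }
// })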
function isNewTest (test) {
return !knownTestsForSuite.includes(test.fullTitle())
}
function retryTest (test, suiteTests) {
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
const clonedTest = test.clone()
// TODO: signal in framework logs that this is a retry.
// TODO: Change it so these tests are allowed to fail.
// TODO: figure out if reported duration is skewed.
suiteTests.unshift(clonedTest)
clonedTest._ddIsNew = true
clonedTest._ddIsEfdRetry = true
}
}
const oldRunTests = Cypress.mocha.getRunner().runTests
Cypress.mocha.getRunner().runTests = function (suite, fn) {
if (!isEarlyFlakeDetectionEnabled) {
return oldRunTests.apply(this, arguments)
}
// We copy the new tests at the beginning of the suite run (runTests), so that they're run
// multiple times.
suite.tests.forEach(test => {
if (!test._ddIsNew && !test.isPending() && isNewTest(test)) {
test._ddIsNew = true
retryTest(test, suite.tests)
}
})
return oldRunTests.apply(this, [suite, fn])
}
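// Worked example of the EFD queueing above, assuming
// earlyFlakeDetectionNumRetries = 2: a test missing from knownTestsForSuite is
// unshifted twice, so it executes 3 times in total (the original run plus two
// clones tagged with _ddIsNew and _ddIsEfdRetry).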
beforeEach(function () {

@@ -14,4 +63,10 @@ cy.task('dd:beforeEach', {

before(() => {
cy.task('dd:testSuiteStart', Cypress.mocha.getRootSuite().file)
before(function () {
cy.task('dd:testSuiteStart', Cypress.mocha.getRootSuite().file).then((suiteConfig) => {
if (suiteConfig) {
isEarlyFlakeDetectionEnabled = suiteConfig.isEarlyFlakeDetectionEnabled
knownTestsForSuite = suiteConfig.knownTestsForSuite
earlyFlakeDetectionNumRetries = suiteConfig.earlyFlakeDetectionNumRetries
}
})
})

@@ -21,3 +76,3 @@

cy.window().then(win => {
if (win.DD_RUM) {
if (safeGetRum(win)) {
win.dispatchEvent(new Event('beforeunload'))

@@ -29,3 +84,3 @@ }

afterEach(() => {
afterEach(function () {
cy.window().then(win => {

@@ -38,2 +93,4 @@ const currentTest = Cypress.mocha.getRunner().suite.ctx.currentTest

error: currentTest.err,
isNew: currentTest._ddIsNew,
isEfdRetry: currentTest._ddIsEfdRetry
}

@@ -44,7 +101,13 @@ try {

if (win.DD_RUM) {
if (safeGetRum(win)) {
testInfo.isRUMActive = true
}
cy.task('dd:afterEach', { test: testInfo, coverage: win.__coverage__ })
let coverage
try {
coverage = win.__coverage__
} catch (e) {
// ignore error and continue
}
cy.task('dd:afterEach', { test: testInfo, coverage })
})
})

@@ -7,10 +7,6 @@ 'use strict'

static get id () { return 'fetch' }
static get prefix () { return `apm:fetch:request` }
static get prefix () { return 'tracing:apm:fetch:request' }
addTraceSub (eventName, handler) {
this.addSub(`apm:${this.constructor.id}:${this.operation}:${eventName}`, handler)
}
bindStart (message) {
const req = message.req
bindStart (ctx) {
const req = ctx.req
const options = new URL(req.url)

@@ -21,13 +17,26 @@ const headers = options.headers = Object.fromEntries(req.headers.entries())

message.args = { options }
ctx.args = { options }
const store = super.bindStart(message)
const store = super.bindStart(ctx)
message.headers = headers
message.req = new globalThis.Request(req, { headers })
for (const name in headers) {
if (!req.headers.has(name)) {
req.headers.set(name, headers[name])
}
}
return store
}
error (ctx) {
if (ctx.error.name === 'AbortError') return
return super.error(ctx)
}
asyncEnd (ctx) {
ctx.res = ctx.result
return this.finish(ctx)
}
}
module.exports = FetchPlugin
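// Standalone sketch of the header back-fill loop above (assumes Node 18+,
// where Request is global): headers are mirrored into a plain object, the
// tracer mutates that object, and only names missing from the original
// Request are written back. The injected header below is hypothetical.
const sketchReq = new globalThis.Request('https://example.com', { headers: { accept: 'text/html' } })
const sketchHeaders = Object.fromEntries(sketchReq.headers.entries())
sketchHeaders['x-datadog-trace-id'] = '123' // stands in for tracer injection
for (const name in sketchHeaders) {
  if (!sketchReq.headers.has(name)) sketchReq.headers.set(name, sketchHeaders[name])
}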

@@ -32,3 +32,3 @@ 'use strict'

meta: {
'file.descriptor': (typeof fd === 'object' || typeof fd === 'number') ? fd.toString() : '',
'file.descriptor': ((fd !== null && typeof fd === 'object') || typeof fd === 'number') ? fd.toString() : '',
'file.dest': params.dest || params.newPath || (params.target && params.path),

@@ -35,0 +35,0 @@ 'file.flag': String(flag || defaultFlag || ''),

'use strict'
const pick = require('../../datadog-core/src/utils/src/pick')
const CompositePlugin = require('../../dd-trace/src/plugins/composite')

@@ -66,8 +67,2 @@ const log = require('../../dd-trace/src/log')

// non-lodash pick
function pick (obj, selectors) {
return Object.fromEntries(Object.entries(obj).filter(([key]) => selectors.includes(key)))
}
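// Behaviour sketch of the pick helper above:
// pick({ a: 1, b: 2, c: 3 }, ['a', 'c']) -> { a: 1, c: 3 }
// pick({ a: 1 }, ['a', 'missing'])       -> { a: 1 } (absent keys are skipped)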
module.exports = GraphQLPlugin

@@ -83,2 +83,6 @@ 'use strict'

}
finish (finishTime) {
this.activeSpan.finish(finishTime)
}
}

@@ -126,11 +130,13 @@

const directives = info.fieldNodes[0].directives
for (const directive of directives) {
const argList = {}
for (const argument of directive['arguments']) {
argList[argument.name.value] = argument.value.value
}
const directives = info.fieldNodes?.[0]?.directives
if (Array.isArray(directives)) {
for (const directive of directives) {
const argList = {}
for (const argument of directive.arguments) {
argList[argument.name.value] = argument.value.value
}
if (Object.keys(argList).length) {
resolverVars[directive.name.value] = argList
if (Object.keys(argList).length) {
resolverVars[directive.name.value] = argList
}
}

@@ -137,0 +143,0 @@ }

@@ -11,3 +11,3 @@ 'use strict'

static get operation () { return 'client:request' }
static get prefix () { return `apm:grpc:client:request` }
static get prefix () { return 'apm:grpc:client:request' }
static get peerServicePrecursors () { return ['rpc.service'] }

@@ -34,3 +34,3 @@

meta: {
'component': 'grpc',
component: 'grpc',
'grpc.method.kind': method.kind,

@@ -46,3 +46,2 @@ 'grpc.method.path': method.path,

}, false)
// needed as precursor for peer.service

@@ -74,3 +73,3 @@ if (method.service && method.package) {

finish ({ span, result }) {
finish ({ span, result, peer }) {
if (!span) return

@@ -87,2 +86,17 @@

if (peer) {
// The only scheme we want to support here is ipv[46]:port, although
// more are supported by the library
// https://github.com/grpc/grpc/blob/v1.60.0/doc/naming.md
const parts = peer.split(':')
if (parts[parts.length - 1].match(/^\d+/)) {
const port = parts[parts.length - 1]
const ip = parts.slice(0, -1).join(':')
span.setTag('network.destination.ip', ip)
span.setTag('network.destination.port', port)
} else {
span.setTag('network.destination.ip', peer)
}
}
this.tagPeerService(span)

@@ -89,0 +103,0 @@ span.finish()
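// Standalone sketch (not part of the plugin) mirroring the peer parsing above
// for the ipv4/ipv6 forms the gRPC library reports:
function splitPeerSketch (peer) {
  const parts = peer.split(':')
  if (parts[parts.length - 1].match(/^\d+/)) {
    return { ip: parts.slice(0, -1).join(':'), port: parts[parts.length - 1] }
  }
  return { ip: peer }
}
// splitPeerSketch('10.4.2.1:50051') -> { ip: '10.4.2.1', port: '50051' }
// splitPeerSketch('::1:50051')      -> { ip: '::1', port: '50051' }
// splitPeerSketch('unix:/tmp/sock') -> { ip: 'unix:/tmp/sock' }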

@@ -11,3 +11,3 @@ 'use strict'

static get operation () { return 'server:request' }
static get prefix () { return `apm:grpc:server:request` }
static get prefix () { return 'apm:grpc:server:request' }

@@ -43,3 +43,3 @@ constructor (...args) {

meta: {
'component': 'grpc',
component: 'grpc',
'grpc.method.kind': method.kind,

@@ -46,0 +46,0 @@ 'grpc.method.path': method.path,

'use strict'
const pick = require('lodash.pick')
const pick = require('../../datadog-core/src/utils/src/pick')
const log = require('../../dd-trace/src/log')

@@ -5,0 +5,0 @@

@@ -20,3 +20,3 @@ 'use strict'

static get id () { return 'http' }
static get prefix () { return `apm:http:client:request` }
static get prefix () { return 'apm:http:client:request' }

@@ -126,3 +126,3 @@ bindStart (message) {

// 2. no invocation of `req.setTimeout`
if (!args.options.agent?.options.timeout && !customRequestTimeout) return
if (!args.options.agent?.options?.timeout && !customRequestTimeout) return

@@ -232,3 +232,3 @@ span.setTag('error', 1)

if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) {
if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) {
return true

@@ -235,0 +235,0 @@ }

@@ -28,3 +28,3 @@ 'use strict'

static get id () { return 'http2' }
static get prefix () { return `apm:http2:client:request` }
static get prefix () { return 'apm:http2:client:request' }

@@ -126,3 +126,4 @@ bindStart (message) {

let port = '' + (authority.port !== ''
? authority.port : (authority.protocol === 'http:' ? 80 : 443))
? authority.port
: (authority.protocol === 'http:' ? 80 : 443))
let host = authority.hostname || authority.host || 'localhost'

@@ -149,3 +150,3 @@

if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) {
if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) {
return true

@@ -152,0 +153,0 @@ }

@@ -17,3 +17,11 @@ const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')

TEST_CODE_OWNERS,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED,
TEST_EARLY_FLAKE_ABORT_REASON,
JEST_DISPLAY_NAME,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER
} = require('../../dd-trace/src/plugins/util/test')

@@ -30,3 +38,4 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')

@@ -51,2 +60,3 @@

}
getForcedToRunSuites (forcedToRunSuitesList) {

@@ -87,3 +97,6 @@ if (!this.forcedToRunSuites) {

hasForcedToRunSuites,
error
error,
isEarlyFlakeDetectionEnabled,
isEarlyFlakeDetectionFaulty,
onDone
}) => {

@@ -113,2 +126,9 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true')
}
if (isEarlyFlakeDetectionFaulty) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ABORT_REASON, 'faulty')
}
this.testModuleSpan.finish()

@@ -119,3 +139,10 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')

finishAllTraceSpans(this.testSessionSpan)
this.tracer._exporter.flush()
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
this.tracer._exporter.flush(() => {
if (onDone) {
onDone()
}
})
})

@@ -125,3 +152,3 @@

// This subscriber changes the configuration objects from jest to inject the trace id
// of the test session to the processes that run the test suites.
// of the test session to the processes that run the test suites, and other data.
this.addSub('ci:jest:session:configuration', configs => {

@@ -133,6 +160,10 @@ configs.forEach(config => {

config._ddItrCorrelationId = this.itrCorrelationId
config._ddIsEarlyFlakeDetectionEnabled = !!this.libraryConfig?.isEarlyFlakeDetectionEnabled
config._ddEarlyFlakeDetectionNumRetries = this.libraryConfig?.earlyFlakeDetectionNumRetries ?? 0
config._ddRepositoryRoot = this.repositoryRoot
config._ddIsFlakyTestRetriesEnabled = this.libraryConfig?.isFlakyTestRetriesEnabled ?? false
})
})
this.addSub('ci:jest:test-suite:start', ({ testSuite, testEnvironmentOptions, frameworkVersion }) => {
this.addSub('ci:jest:test-suite:start', ({ testSuite, testEnvironmentOptions, frameworkVersion, displayName }) => {
const {

@@ -172,2 +203,5 @@ _ddTestSessionId: testSessionId,

}
if (displayName) {
testSuiteMetadata[JEST_DISPLAY_NAME] = displayName
}

@@ -235,3 +269,3 @@ this.testSuiteSpan = this.tracer.startSpan('jest.test_suite', {

/**
* This can't use `this.itrConfig` like `ci:mocha:test-suite:code-coverage`
* This can't use `this.libraryConfig` like `ci:mocha:test-suite:code-coverage`
* because this subscription happens in a different process from the one

@@ -271,8 +305,16 @@ * fetching the ITR config.

}
span.finish()
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)

@@ -300,3 +342,15 @@ })

startTestSpan (test) {
const { suite, name, runner, testParameters, frameworkVersion, testStartLine } = test
const {
suite,
name,
runner,
displayName,
testParameters,
frameworkVersion,
testStartLine,
testSourceFile,
isNew,
isEfdRetry,
isJestRetry
} = test

@@ -311,3 +365,20 @@ const extraTags = {

}
// If for whatever reason we don't have the source file, we'll fall back to the suite name
extraTags[TEST_SOURCE_FILE] = testSourceFile || suite
if (displayName) {
extraTags[JEST_DISPLAY_NAME] = displayName
}
if (isNew) {
extraTags[TEST_IS_NEW] = 'true'
if (isEfdRetry) {
extraTags[TEST_IS_RETRY] = 'true'
}
}
if (isJestRetry) {
extraTags[TEST_IS_RETRY] = 'true'
}
return super.startTestSpan(name, suite, this.testSuiteSpan, extraTags)

@@ -314,0 +385,0 @@ }

'use strict'
const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const dc = require('dc-polyfill')
const { getMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer')
const afterStartCh = dc.channel('dd-trace:kafkajs:consumer:afterStart')
const beforeFinishCh = dc.channel('dd-trace:kafkajs:consumer:beforeFinish')
class KafkajsConsumerPlugin extends ConsumerPlugin {

@@ -68,3 +73,3 @@ static get id () { return 'kafkajs' }

meta: {
'component': 'kafkajs',
component: 'kafkajs',
'kafka.topic': topic,

@@ -77,9 +82,21 @@ 'kafka.message.offset': message.offset

})
if (this.config.dsmEnabled) {
if (this.config.dsmEnabled && message?.headers && DsmPathwayCodec.contextExists(message.headers)) {
const payloadSize = getMessageSize(message)
this.tracer.decodeDataStreamsContext(message.headers[CONTEXT_PROPAGATION_KEY])
this.tracer.decodeDataStreamsContext(message.headers)
this.tracer
.setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], span, payloadSize)
}
if (afterStartCh.hasSubscribers) {
afterStartCh.publish({ topic, partition, message, groupId })
}
}
finish () {
if (beforeFinishCh.hasSubscribers) {
beforeFinishCh.publish()
}
super.finish()
}
}

@@ -86,0 +103,0 @@

'use strict'
const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway')
const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getMessageSize } = require('../../dd-trace/src/datastreams/processor')

@@ -70,7 +70,6 @@ const BOOTSTRAP_SERVERS_KEY = 'messaging.kafka.bootstrap.servers'

start ({ topic, messages, bootstrapServers }) {
let pathwayCtx
const span = this.startSpan({
resource: topic,
meta: {
'component': 'kafkajs',
component: 'kafkajs',
'kafka.topic': topic

@@ -86,3 +85,3 @@ },

for (const message of messages) {
if (typeof message === 'object') {
if (message !== null && typeof message === 'object') {
this.tracer.inject(span, 'text_map', message.headers)

@@ -93,4 +92,3 @@ if (this.config.dsmEnabled) {

.setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], span, payloadSize)
pathwayCtx = encodePathwayContext(dataStreamsContext)
message.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx
DsmPathwayCodec.encode(dataStreamsContext, message.headers)
}

@@ -97,0 +95,0 @@ }
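// Round-trip sketch of the DSM pathway codec as used by these two plugins
// (only the calls shown in the diffs above; the pairing is illustrative):
// producer: DsmPathwayCodec.encode(dataStreamsContext, message.headers)
// consumer: DsmPathwayCodec.contextExists(message.headers) gates the
//           checkpoint, then tracer.decodeDataStreamsContext(message.headers)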

@@ -18,3 +18,17 @@ 'use strict'

TEST_CODE_OWNERS,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE,
removeEfdStringFromTestName,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED,
TEST_SESSION_ID,
TEST_MODULE_ID,
TEST_MODULE,
TEST_SUITE_ID,
TEST_COMMAND,
TEST_SUITE,
MOCHA_IS_PARALLEL,
TEST_IS_RUM_ACTIVE,
TEST_BROWSER_DRIVER
} = require('../../dd-trace/src/plugins/util/test')

@@ -30,5 +44,22 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES
TELEMETRY_CODE_COVERAGE_NUM_FILES,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const log = require('../../dd-trace/src/log')
function getTestSuiteLevelVisibilityTags (testSuiteSpan) {
const testSuiteSpanContext = testSuiteSpan.context()
const suiteTags = {
[TEST_SUITE_ID]: testSuiteSpanContext.toSpanId(),
[TEST_SESSION_ID]: testSuiteSpanContext.toTraceId(),
[TEST_COMMAND]: testSuiteSpanContext._tags[TEST_COMMAND],
[TEST_MODULE]: 'mocha'
}
if (testSuiteSpanContext._parentId) {
suiteTags[TEST_MODULE_ID] = testSuiteSpanContext._parentId.toString(10)
}
return suiteTags
}
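// Illustrative shape of the tags returned above (keys are the imported
// constants; id values are hypothetical):
// { [TEST_SUITE_ID]: '<suite span id>', [TEST_SESSION_ID]: '<trace id>',
//   [TEST_COMMAND]: 'mocha ./test', [TEST_MODULE]: 'mocha',
//   [TEST_MODULE_ID]: '<parent span id>' } // module id only when a parent exists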
class MochaPlugin extends CiPlugin {

@@ -43,10 +74,11 @@ static get id () {

this._testSuites = new Map()
this._testNameToParams = {}
this._testTitleToParams = {}
this.sourceRoot = process.cwd()
this.addSub('ci:mocha:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => {
if (!this.itrConfig || !this.itrConfig.isCodeCoverageEnabled) {
if (!this.libraryConfig?.isCodeCoverageEnabled) {
return
}
const testSuiteSpan = this._testSuites.get(suiteFile)
const testSuite = getTestSuitePath(suiteFile, this.sourceRoot)
const testSuiteSpan = this._testSuites.get(testSuite)

@@ -74,3 +106,3 @@ if (!coverageFiles.length) {

this.addSub('ci:mocha:test-suite:start', ({
testSuite,
testSuiteAbsolutePath,
isUnskippable,

@@ -80,7 +112,11 @@ isForcedToRun,

}) => {
const store = storage.getStore()
// If the test module span is undefined, the plugin has not been initialized correctly and we bail out
if (!this.testModuleSpan) {
return
}
const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.sourceRoot)
const testSuiteMetadata = getTestSuiteCommonTags(
this.command,
this.frameworkVersion,
getTestSuitePath(testSuite, this.sourceRoot),
testSuite,
'mocha'

@@ -106,3 +142,3 @@ )

this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
if (this.itrConfig?.isCodeCoverageEnabled) {
if (this.libraryConfig?.isCodeCoverageEnabled) {
this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' })

@@ -113,2 +149,3 @@ }

}
const store = storage.getStore()
this.enter(testSuiteSpan, store)

@@ -140,5 +177,5 @@ this._testSuites.set(testSuite, testSuiteSpan)

this.addSub('ci:mocha:test:start', ({ test, testStartLine }) => {
this.addSub('ci:mocha:test:start', (testInfo) => {
const store = storage.getStore()
const span = this.startTestSpan(test, testStartLine)
const span = this.startTestSpan(testInfo)

@@ -148,16 +185,29 @@ this.enter(span, store)

this.addSub('ci:mocha:test:finish', (status) => {
this.addSub('ci:mocha:worker:finish', () => {
this.tracer._exporter.flush()
})
this.addSub('ci:mocha:test:finish', ({ status, hasBeenRetried }) => {
const store = storage.getStore()
const span = store?.span
if (store && store.span) {
const span = store.span
if (span) {
span.setTag(TEST_STATUS, status)
if (hasBeenRetried) {
span.setTag(TEST_IS_RETRY, 'true')
}
span.finish()
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)

@@ -167,3 +217,3 @@ }

this.addSub('ci:mocha:test:skip', (test) => {
this.addSub('ci:mocha:test:skip', (testInfo) => {
const store = storage.getStore()

@@ -173,3 +223,3 @@ // skipped through it.skip, so the span is not created yet

if (!store) {
const testSpan = this.startTestSpan(test)
const testSpan = this.startTestSpan(testInfo)
this.enter(testSpan, store)

@@ -181,4 +231,4 @@ }

const store = storage.getStore()
if (err && store && store.span) {
const span = store.span
const span = store?.span
if (err && span) {
if (err.constructor.name === 'Pending' && !this.forbidPending) {

@@ -193,6 +243,32 @@ span.setTag(TEST_STATUS, 'skip')

this.addSub('ci:mocha:test:parameterize', ({ name, params }) => {
this._testNameToParams[name] = params
this.addSub('ci:mocha:test:retry', (isFirstAttempt) => {
const store = storage.getStore()
const span = store?.span
if (span) {
span.setTag(TEST_STATUS, 'fail')
if (!isFirstAttempt) {
span.setTag(TEST_IS_RETRY, 'true')
}
const spanTags = span.context()._tags
this.telemetry.ciVisEvent(
TELEMETRY_EVENT_FINISHED,
'test',
{
hasCodeOwners: !!spanTags[TEST_CODE_OWNERS],
isNew: spanTags[TEST_IS_NEW] === 'true',
isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true',
browserDriver: spanTags[TEST_BROWSER_DRIVER]
}
)
span.finish()
finishAllTraceSpans(span)
}
})
this.addSub('ci:mocha:test:parameterize', ({ title, params }) => {
this._testTitleToParams[title] = params
})
this.addSub('ci:mocha:session:finish', ({

@@ -205,6 +281,8 @@ status,

hasUnskippableSuites,
error
error,
isEarlyFlakeDetectionEnabled,
isParallel
}) => {
if (this.testSessionSpan) {
const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {}
const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}
this.testSessionSpan.setTag(TEST_STATUS, status)

@@ -218,2 +296,6 @@ this.testModuleSpan.setTag(TEST_STATUS, status)

if (isParallel) {
this.testSessionSpan.setTag(MOCHA_IS_PARALLEL, 'true')
}
addIntelligentTestRunnerSpanTags(

@@ -234,2 +316,6 @@ this.testSessionSpan,

if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true')
}
this.testModuleSpan.finish()

@@ -240,14 +326,54 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
}
this.itrConfig = null
this.libraryConfig = null
this.tracer._exporter.flush()
})
this.addSub('ci:mocha:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => {
const formattedSpan = {
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}
if (formattedSpan.name === 'mocha.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this._testSuites.get(testSuite)
if (!testSuiteSpan) {
log.warn(`Test suite span not found for test span with test suite ${testSuite}`)
return formattedSpan
}
const suiteTags = getTestSuiteLevelVisibilityTags(testSuiteSpan)
formattedSpan.meta = {
...formattedSpan.meta,
...suiteTags
}
}
return formattedSpan
})
)
formattedTraces.forEach(trace => {
this.tracer._exporter.export(trace)
})
})
}
startTestSpan (test, testStartLine) {
const testName = test.fullTitle()
const { file: testSuiteAbsolutePath, title } = test
startTestSpan (testInfo) {
const {
testSuiteAbsolutePath,
title,
isNew,
isEfdRetry,
testStartLine,
isParallel
} = testInfo
const testName = removeEfdStringFromTestName(testInfo.testName)
const extraTags = {}
const testParametersString = getTestParametersString(this._testNameToParams, title)
const testParametersString = getTestParametersString(this._testTitleToParams, title)
if (testParametersString) {

@@ -261,5 +387,22 @@ extraTags[TEST_PARAMETERS] = testParametersString

if (isParallel) {
extraTags[MOCHA_IS_PARALLEL] = 'true'
}
const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.sourceRoot)
const testSuiteSpan = this._testSuites.get(testSuiteAbsolutePath)
const testSuiteSpan = this._testSuites.get(testSuite)
if (this.repositoryRoot !== this.sourceRoot && !!this.repositoryRoot) {
extraTags[TEST_SOURCE_FILE] = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot)
} else {
extraTags[TEST_SOURCE_FILE] = testSuite
}
if (isNew) {
extraTags[TEST_IS_NEW] = 'true'
if (isEfdRetry) {
extraTags[TEST_IS_RETRY] = 'true'
}
}
return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags)

@@ -266,0 +409,0 @@ }

@@ -118,3 +118,3 @@ 'use strict'

function isObject (val) {
return typeof val === 'object' && val !== null && !(val instanceof Array)
return val !== null && typeof val === 'object' && !Array.isArray(val)
}
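// Behaviour of the rewritten check above:
// isObject({ a: 1 }) -> true
// isObject([1, 2])   -> false (arrays are excluded)
// isObject(null)     -> false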

@@ -121,0 +121,0 @@

@@ -9,3 +9,3 @@ 'use strict'

const errorPages = ['/404', '/500', '/_error', '/_not-found']
const errorPages = ['/404', '/500', '/_error', '/_not-found', '/_not-found/page']

@@ -62,3 +62,3 @@ class NextPlugin extends ServerPlugin {

const span = store.span
const error = span.context()._tags['error']
const error = span.context()._tags.error
const requestError = req.error || nextRequest.error

@@ -125,3 +125,2 @@

})
web.setRoute(req, page)

@@ -128,0 +127,0 @@ }

@@ -10,2 +10,3 @@ 'use strict'

const { MEASURED } = require('../../../ext/tags')
const { estimateTokens } = require('./token-estimator')

@@ -19,2 +20,13 @@ // String#replaceAll unavailable on Node.js@v14 (dd-trace@<=v3)

function safeRequire (path) {
try {
// eslint-disable-next-line import/no-extraneous-dependencies
return require(path)
} catch {
return null
}
}
const encodingForModel = safeRequire('tiktoken')?.encoding_for_model
class OpenApiPlugin extends TracingPlugin {

@@ -24,2 +36,5 @@ static get id () { return 'openai' }

static get system () { return 'openai' }
static get prefix () {
return 'tracing:apm:openai:request'
}

@@ -49,4 +64,6 @@ constructor (...args) {

start ({ methodName, args, basePath, apiKey }) {
bindStart (ctx) {
const { methodName, args, basePath, apiKey } = ctx
const payload = normalizeRequestPayload(methodName, args)
const store = storage.getStore() || {}

@@ -82,7 +99,5 @@ const span = this.startSpan('openai.request', {

}
})
}, false)
const fullStore = storage.getStore() || {} // certain request body fields are later used for logs
const store = Object.create(null)
fullStore.openai = store // namespacing these fields
const openaiStore = Object.create(null)

@@ -92,8 +107,8 @@ const tags = {} // The remaining tags are added one at a time

// createChatCompletion, createCompletion, createImage, createImageEdit, createTranscription, createTranslation
if ('prompt' in payload) {
if (payload.prompt) {
const prompt = payload.prompt
store.prompt = prompt
openaiStore.prompt = prompt
if (typeof prompt === 'string' || (Array.isArray(prompt) && typeof prompt[0] === 'number')) {
// This is a single prompt, either String or [Number]
tags[`openai.request.prompt`] = normalizeStringOrTokenArray(prompt, true)
tags['openai.request.prompt'] = normalizeStringOrTokenArray(prompt, true)
} else if (Array.isArray(prompt)) {

@@ -108,10 +123,10 @@ // This is multiple prompts, either [String] or [[Number]]

// createEdit, createEmbedding, createModeration
if ('input' in payload) {
if (payload.input) {
const normalized = normalizeStringOrTokenArray(payload.input, false)
tags[`openai.request.input`] = truncateText(normalized)
store.input = normalized
tags['openai.request.input'] = truncateText(normalized)
openaiStore.input = normalized
}
// createChatCompletion, createCompletion
if (typeof payload.logit_bias === 'object' && payload.logit_bias) {
if (payload.logit_bias !== null && typeof payload.logit_bias === 'object') {
for (const [tokenId, bias] of Object.entries(payload.logit_bias)) {

@@ -122,4 +137,10 @@ tags[`openai.request.logit_bias.${tokenId}`] = bias

if (payload.stream) {
tags['openai.request.stream'] = payload.stream
}
switch (methodName) {
case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
createFineTuneRequestExtraction(tags, payload)

@@ -129,13 +150,19 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
commonCreateImageRequestExtraction(tags, payload, store)
case 'images.createVariation':
commonCreateImageRequestExtraction(tags, payload, openaiStore)
break
case 'createChatCompletion':
createChatCompletionRequestExtraction(tags, payload, store)
case 'chat.completions.create':
createChatCompletionRequestExtraction(tags, payload, openaiStore)
break
case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
commonFileRequestExtraction(tags, payload)

@@ -145,7 +172,10 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
commonCreateAudioRequestExtraction(tags, payload, store)
case 'audio.translations.create':
commonCreateAudioRequestExtraction(tags, payload, openaiStore)
break
case 'retrieveModel':
case 'models.retrieve':
retrieveModelRequestExtraction(tags, payload)

@@ -155,5 +185,12 @@ break

case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonLookupFineTuneRequestExtraction(tags, payload)

@@ -163,3 +200,4 @@ break

case 'createEdit':
createEditRequestExtraction(tags, payload, store)
case 'edits.create':
createEditRequestExtraction(tags, payload, openaiStore)
break

@@ -169,6 +207,28 @@ }

span.addTags(tags)
ctx.currentStore = { ...store, span, openai: openaiStore }
return ctx.currentStore
}
finish ({ headers, body, method, path }) {
const span = this.activeSpan
asyncEnd (ctx) {
const { result } = ctx
const store = ctx.currentStore
const span = store?.span
if (!span) return
const error = !!span.context()._tags.error
let headers, body, method, path
if (!error) {
headers = result.headers
body = result.data
method = result.request.method
path = result.request.path
}
if (!error && headers?.constructor.name === 'Headers') {
headers = Object.fromEntries(headers)
}
const methodName = span._spanContext._tags['resource.name']

@@ -178,103 +238,195 @@

const fullStore = storage.getStore()
const store = fullStore.openai
const openaiStore = store.openai
if (!error && (path?.startsWith('https://') || path?.startsWith('http://'))) {
// basic check for whether the path was set as a full URL
// not using a full regex as it will likely be "https://api.openai.com/..."
path = new URL(path).pathname
}
const endpoint = lookupOperationEndpoint(methodName, path)
const tags = {
'openai.request.endpoint': endpoint,
'openai.request.method': method,
const tags = error
? {}
: {
'openai.request.endpoint': endpoint,
'openai.request.method': method.toUpperCase(),
'openai.organization.id': body.organization_id, // only available in fine-tunes endpoints
'openai.organization.name': headers['openai-organization'],
'openai.organization.id': body.organization_id, // only available in fine-tunes endpoints
'openai.organization.name': headers['openai-organization'],
'openai.response.model': headers['openai-model'] || body.model, // specific model, often undefined
'openai.response.id': body.id, // common creation value, numeric epoch
'openai.response.deleted': body.deleted, // common boolean field in delete responses
'openai.response.model': headers['openai-model'] || body.model, // specific model, often undefined
'openai.response.id': body.id, // common creation value, numeric epoch
'openai.response.deleted': body.deleted, // common boolean field in delete responses
// The OpenAI API appears to use both created and created_at in different places
// Here we're consciously choosing to surface this inconsistency instead of normalizing
'openai.response.created': body.created,
'openai.response.created_at': body.created_at
}
// The OpenAI API appears to use both created and created_at in different places
// Here we're consciously choosing to surface this inconsistency instead of normalizing
'openai.response.created': body.created,
'openai.response.created_at': body.created_at
}
responseDataExtractionByMethod(methodName, tags, body, store)
responseDataExtractionByMethod(methodName, tags, body, openaiStore)
span.addTags(tags)
super.finish()
this.sendLog(methodName, span, tags, store, false)
this.sendMetrics(headers, body, endpoint, span._duration)
span.finish()
this.sendLog(methodName, span, tags, openaiStore, error)
this.sendMetrics(headers, body, endpoint, span._duration, error, tags)
}
error (...args) {
super.error(...args)
sendMetrics (headers, body, endpoint, duration, error, spanTags) {
const tags = [`error:${Number(!!error)}`]
if (error) {
this.metrics.increment('openai.request.error', 1, tags)
} else {
tags.push(`org:${headers['openai-organization']}`)
tags.push(`endpoint:${endpoint}`) // just "/v1/models", no method
tags.push(`model:${headers['openai-model'] || body.model}`)
}
const span = this.activeSpan
const methodName = span._spanContext._tags['resource.name']
this.metrics.distribution('openai.request.duration', duration * 1000, tags)
const fullStore = storage.getStore()
const store = fullStore.openai
const promptTokens = spanTags['openai.response.usage.prompt_tokens']
const promptTokensEstimated = spanTags['openai.response.usage.prompt_tokens_estimated']
// We don't know most of the information about the request when it fails
const completionTokens = spanTags['openai.response.usage.completion_tokens']
const completionTokensEstimated = spanTags['openai.response.usage.completion_tokens_estimated']
const tags = [`error:1`]
this.metrics.distribution('openai.request.duration', span._duration * 1000, tags)
this.metrics.increment('openai.request.error', 1, tags)
if (!error) {
if (promptTokensEstimated) {
this.metrics.distribution(
'openai.tokens.prompt', promptTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.prompt', promptTokens, tags)
}
if (completionTokensEstimated) {
this.metrics.distribution(
'openai.tokens.completion', completionTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.completion', completionTokens, tags)
}
this.sendLog(methodName, span, {}, store, true)
}
if (promptTokensEstimated || completionTokensEstimated) {
this.metrics.distribution(
'openai.tokens.total', promptTokens + completionTokens, [...tags, 'openai.estimated:true'])
} else {
this.metrics.distribution('openai.tokens.total', promptTokens + completionTokens, tags)
}
}
sendMetrics (headers, body, endpoint, duration) {
const tags = [
`org:${headers['openai-organization']}`,
`endpoint:${endpoint}`, // just "/v1/models", no method
`model:${headers['openai-model']}`,
`error:0`
]
if (headers) {
if (headers['x-ratelimit-limit-requests']) {
this.metrics.gauge('openai.ratelimit.requests', Number(headers['x-ratelimit-limit-requests']), tags)
}
this.metrics.distribution('openai.request.duration', duration * 1000, tags)
if (headers['x-ratelimit-remaining-requests']) {
this.metrics.gauge(
'openai.ratelimit.remaining.requests', Number(headers['x-ratelimit-remaining-requests']), tags
)
}
if (body && ('usage' in body)) {
const promptTokens = body.usage.prompt_tokens
const completionTokens = body.usage.completion_tokens
this.metrics.distribution('openai.tokens.prompt', promptTokens, tags)
this.metrics.distribution('openai.tokens.completion', completionTokens, tags)
this.metrics.distribution('openai.tokens.total', promptTokens + completionTokens, tags)
if (headers['x-ratelimit-limit-tokens']) {
this.metrics.gauge('openai.ratelimit.tokens', Number(headers['x-ratelimit-limit-tokens']), tags)
}
if (headers['x-ratelimit-remaining-tokens']) {
this.metrics.gauge('openai.ratelimit.remaining.tokens', Number(headers['x-ratelimit-remaining-tokens']), tags)
}
}
}
if ('x-ratelimit-limit-requests' in headers) {
this.metrics.gauge('openai.ratelimit.requests', Number(headers['x-ratelimit-limit-requests']), tags)
sendLog (methodName, span, tags, openaiStore, error) {
if (!openaiStore) return
if (!Object.keys(openaiStore).length) return
if (!this.sampler.isSampled()) return
const log = {
status: error ? 'error' : 'info',
message: `sampled ${methodName}`,
...openaiStore
}
if ('x-ratelimit-remaining-requests' in headers) {
this.metrics.gauge('openai.ratelimit.remaining.requests', Number(headers['x-ratelimit-remaining-requests']), tags)
this.logger.log(log, span, tags)
}
}
function countPromptTokens (methodName, payload, model) {
let promptTokens = 0
let promptEstimated = false
if (methodName === 'chat.completions.create') {
const messages = payload.messages
for (const message of messages) {
const content = message.content
if (typeof content === 'string') {
const { tokens, estimated } = countTokens(content, model)
promptTokens += tokens
promptEstimated = estimated
} else if (Array.isArray(content)) {
for (const c of content) {
if (c.type === 'text') {
const { tokens, estimated } = countTokens(c.text, model)
promptTokens += tokens
promptEstimated = estimated
}
// unsupported token computation for image_url
// as even though the URL is a string, its true token count
// is based on the image itself, something onerous to do client-side
}
}
}
} else if (methodName === 'completions.create') {
let prompt = payload.prompt
if (!Array.isArray(prompt)) prompt = [prompt]
if ('x-ratelimit-limit-tokens' in headers) {
this.metrics.gauge('openai.ratelimit.tokens', Number(headers['x-ratelimit-limit-tokens']), tags)
for (const p of prompt) {
const { tokens, estimated } = countTokens(p, model)
promptTokens += tokens
promptEstimated = estimated
}
}
if ('x-ratelimit-remaining-tokens' in headers) {
this.metrics.gauge('openai.ratelimit.remaining.tokens', Number(headers['x-ratelimit-remaining-tokens']), tags)
return { promptTokens, promptEstimated }
}
function countCompletionTokens (body, model) {
let completionTokens = 0
let completionEstimated = false
if (body?.choices) {
for (const choice of body.choices) {
const message = choice.message || choice.delta // delta for streamed responses
const text = choice.text
const content = text || message?.content
const { tokens, estimated } = countTokens(content, model)
completionTokens += tokens
completionEstimated = estimated
}
}
sendLog (methodName, span, tags, store, error) {
if (!Object.keys(store).length) return
if (!this.sampler.isSampled()) return
return { completionTokens, completionEstimated }
}
const log = {
status: error ? 'error' : 'info',
message: `sampled ${methodName}`,
...store
function countTokens (content, model) {
if (encodingForModel) {
try {
// try using tiktoken if it was available
const encoder = encodingForModel(model)
const tokens = encoder.encode(content).length
encoder.free()
return { tokens, estimated: false }
} catch {
// possible errors from tiktoken:
// * model not available for token counts
// * issue encoding content
}
}
this.logger.log(log, span, tags)
return {
tokens: estimateTokens(content),
estimated: true
}
}
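// Usage sketch (the return shape depends on whether tiktoken resolved above):
// countTokens('hello world', 'gpt-3.5-turbo')
//   -> { tokens: 2, estimated: false }                            // tiktoken
//   -> { tokens: estimateTokens('hello world'), estimated: true } // fallback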
function createEditRequestExtraction (tags, payload, store) {
function createEditRequestExtraction (tags, payload, openaiStore) {
const instruction = payload.instruction
tags['openai.request.instruction'] = instruction
store.instruction = instruction
openaiStore.instruction = instruction
}

@@ -286,28 +438,30 @@

function createChatCompletionRequestExtraction (tags, payload, store) {
if (!defensiveArrayLength(payload.messages)) return
function createChatCompletionRequestExtraction (tags, payload, openaiStore) {
const messages = payload.messages
if (!defensiveArrayLength(messages)) return
store.messages = payload.messages
openaiStore.messages = payload.messages
for (let i = 0; i < payload.messages.length; i++) {
const message = payload.messages[i]
tags[`openai.request.${i}.content`] = truncateText(message.content)
tags[`openai.request.${i}.role`] = message.role
tags[`openai.request.${i}.name`] = message.name
tags[`openai.request.${i}.finish_reason`] = message.finish_reason
tagChatCompletionRequestContent(message.content, i, tags)
tags[`openai.request.messages.${i}.role`] = message.role
tags[`openai.request.messages.${i}.name`] = message.name
tags[`openai.request.messages.${i}.finish_reason`] = message.finish_reason
}
}
function commonCreateImageRequestExtraction (tags, payload, store) {
function commonCreateImageRequestExtraction (tags, payload, openaiStore) {
// createImageEdit, createImageVariation
if (payload.file && typeof payload.file === 'object' && payload.file.path) {
const file = path.basename(payload.file.path)
const img = payload.file || payload.image
if (img !== null && typeof img === 'object' && img.path) {
const file = path.basename(img.path)
tags['openai.request.image'] = file
store.file = file
openaiStore.file = file
}
// createImageEdit
if (payload.mask && typeof payload.mask === 'object' && payload.mask.path) {
if (payload.mask !== null && typeof payload.mask === 'object' && payload.mask.path) {
const mask = path.basename(payload.mask.path)
tags['openai.request.mask'] = mask
store.mask = mask
openaiStore.mask = mask
}

@@ -320,5 +474,6 @@

function responseDataExtractionByMethod (methodName, tags, body, store) {
function responseDataExtractionByMethod (methodName, tags, body, openaiStore) {
switch (methodName) {
case 'createModeration':
case 'moderations.create':
createModerationResponseExtraction(tags, body)

@@ -328,10 +483,18 @@ break

case 'createCompletion':
case 'completions.create':
case 'createChatCompletion':
case 'chat.completions.create':
case 'createEdit':
commonCreateResponseExtraction(tags, body, store)
case 'edits.create':
commonCreateResponseExtraction(tags, body, openaiStore, methodName)
break
case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
commonListCountResponseExtraction(tags, body)

@@ -341,7 +504,10 @@ break

case 'createEmbedding':
createEmbeddingResponseExtraction(tags, body)
case 'embeddings.create':
createEmbeddingResponseExtraction(tags, body, openaiStore)
break
case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
createRetrieveFileResponseExtraction(tags, body)

@@ -351,2 +517,3 @@ break

case 'deleteFile':
case 'files.del':
deleteFileResponseExtraction(tags, body)

@@ -356,2 +523,4 @@ break

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
downloadFileResponseExtraction(tags, body)

@@ -361,4 +530,10 @@ break

case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonFineTuneResponseExtraction(tags, body)

@@ -368,3 +543,5 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
case 'audio.translations.create':
createAudioResponseExtraction(tags, body)

@@ -374,4 +551,7 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
case 'images.createVariation':
commonImageResponseExtraction(tags, body)

@@ -381,2 +561,3 @@ break

case 'listModels':
case 'models.list':
listModelsResponseExtraction(tags, body)

@@ -386,2 +567,3 @@ break

case 'retrieveModel':
case 'models.retrieve':
retrieveModelResponseExtraction(tags, body)

@@ -459,11 +641,15 @@ break

tags['openai.response.fine_tuned_model'] = body.fine_tuned_model
if (body.hyperparams) {
tags['openai.response.hyperparams.n_epochs'] = body.hyperparams.n_epochs
tags['openai.response.hyperparams.batch_size'] = body.hyperparams.batch_size
tags['openai.response.hyperparams.prompt_loss_weight'] = body.hyperparams.prompt_loss_weight
tags['openai.response.hyperparams.learning_rate_multiplier'] = body.hyperparams.learning_rate_multiplier
const hyperparams = body.hyperparams || body.hyperparameters
const hyperparamsKey = body.hyperparams ? 'hyperparams' : 'hyperparameters'
if (hyperparams) {
tags[`openai.response.${hyperparamsKey}.n_epochs`] = hyperparams.n_epochs
tags[`openai.response.${hyperparamsKey}.batch_size`] = hyperparams.batch_size
tags[`openai.response.${hyperparamsKey}.prompt_loss_weight`] = hyperparams.prompt_loss_weight
tags[`openai.response.${hyperparamsKey}.learning_rate_multiplier`] = hyperparams.learning_rate_multiplier
}
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files)
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files || body.training_file)
tags['openai.response.result_files_count'] = defensiveArrayLength(body.result_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files || body.validation_file)
tags['openai.response.updated_at'] = body.updated_at

@@ -483,10 +669,10 @@ tags['openai.response.status'] = body.status

function commonCreateAudioRequestExtraction (tags, body, store) {
function commonCreateAudioRequestExtraction (tags, body, openaiStore) {
tags['openai.request.response_format'] = body.response_format
tags['openai.request.language'] = body.language
if (body.file && typeof body.file === 'object' && body.file.path) {
if (body.file !== null && typeof body.file === 'object' && body.file.path) {
const filename = path.basename(body.file.path)
tags['openai.request.filename'] = filename
store.file = filename
openaiStore.file = filename
}

@@ -501,3 +687,3 @@ }

// This is a best effort attempt to extract the filename during the request
if (body.file && typeof body.file === 'object' && body.file.path) {
if (body.file !== null && typeof body.file === 'object' && body.file.path) {
tags['openai.request.filename'] = path.basename(body.file.path)

@@ -515,4 +701,4 @@ }

function createEmbeddingResponseExtraction (tags, body) {
usageExtraction(tags, body)
function createEmbeddingResponseExtraction (tags, body, openaiStore) {
usageExtraction(tags, body, openaiStore)

@@ -551,4 +737,4 @@ if (!body.data) return

// createCompletion, createChatCompletion, createEdit
function commonCreateResponseExtraction (tags, body, store) {
usageExtraction(tags, body)
function commonCreateResponseExtraction (tags, body, openaiStore, methodName) {
usageExtraction(tags, body, methodName, openaiStore)

@@ -559,16 +745,31 @@ if (!body.choices) return

store.choices = body.choices
openaiStore.choices = body.choices
for (let i = 0; i < body.choices.length; i++) {
const choice = body.choices[i]
tags[`openai.response.choices.${i}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${i}.logprobs`] = ('logprobs' in choice) ? 'returned' : undefined
tags[`openai.response.choices.${i}.text`] = truncateText(choice.text)
for (let choiceIdx = 0; choiceIdx < body.choices.length; choiceIdx++) {
const choice = body.choices[choiceIdx]
// logprobs can be null and we still want to tag it as 'returned' even when set to 'null'
const specifiesLogProb = Object.keys(choice).indexOf('logprobs') !== -1
tags[`openai.response.choices.${choiceIdx}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${choiceIdx}.logprobs`] = specifiesLogProb ? 'returned' : undefined
tags[`openai.response.choices.${choiceIdx}.text`] = truncateText(choice.text)
// createChatCompletion only
if ('message' in choice) {
const message = choice.message
tags[`openai.response.choices.${i}.message.role`] = message.role
tags[`openai.response.choices.${i}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${i}.message.name`] = truncateText(message.name)
const message = choice.message || choice.delta // delta for streamed responses
if (message) {
tags[`openai.response.choices.${choiceIdx}.message.role`] = message.role
tags[`openai.response.choices.${choiceIdx}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${choiceIdx}.message.name`] = truncateText(message.name)
if (message.tool_calls) {
const toolCalls = message.tool_calls
for (let toolIdx = 0; toolIdx < toolCalls.length; toolIdx++) {
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.function.name`] =
toolCalls[toolIdx].function.name
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.function.arguments`] =
toolCalls[toolIdx].function.arguments
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.id`] =
toolCalls[toolIdx].id
}
}
}

@@ -579,7 +780,36 @@ }

// createCompletion, createChatCompletion, createEdit, createEmbedding
function usageExtraction (tags, body) {
if (typeof body.usage !== 'object' || !body.usage) return
tags['openai.response.usage.prompt_tokens'] = body.usage.prompt_tokens
tags['openai.response.usage.completion_tokens'] = body.usage.completion_tokens
tags['openai.response.usage.total_tokens'] = body.usage.total_tokens
function usageExtraction (tags, body, methodName, openaiStore) {
let promptTokens = 0
let completionTokens = 0
let totalTokens = 0
if (body && body.usage) {
promptTokens = body.usage.prompt_tokens
completionTokens = body.usage.completion_tokens
totalTokens = body.usage.total_tokens
} else if (body.model && ['chat.completions.create', 'completions.create'].includes(methodName)) {
// estimate tokens based on method name for completions and chat completions
const { model } = body
let promptEstimated = false
let completionEstimated = false
// prompt tokens
const payload = openaiStore
const promptTokensCount = countPromptTokens(methodName, payload, model)
promptTokens = promptTokensCount.promptTokens
promptEstimated = promptTokensCount.promptEstimated
// completion tokens
const completionTokensCount = countCompletionTokens(body, model)
completionTokens = completionTokensCount.completionTokens
completionEstimated = completionTokensCount.completionEstimated
// total tokens
totalTokens = promptTokens + completionTokens
if (promptEstimated) tags['openai.response.usage.prompt_tokens_estimated'] = true
if (completionEstimated) tags['openai.response.usage.completion_tokens_estimated'] = true
}
if (promptTokens) tags['openai.response.usage.prompt_tokens'] = promptTokens
if (completionTokens) tags['openai.response.usage.completion_tokens'] = completionTokens
if (totalTokens) tags['openai.response.usage.total_tokens'] = totalTokens
}
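// Worked example of the estimation path above (illustrative numbers): a
// streamed chat.completions.create response with no body.usage and estimated
// counts of 12 prompt / 30 completion tokens yields:
//   openai.response.usage.prompt_tokens = 12
//   openai.response.usage.completion_tokens = 30
//   openai.response.usage.total_tokens = 42
//   openai.response.usage.prompt_tokens_estimated = true
//   openai.response.usage.completion_tokens_estimated = true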

@@ -596,2 +826,3 @@

if (!text) return
if (typeof text !== 'string' || !text || (typeof text === 'string' && text.length === 0)) return

@@ -609,2 +840,24 @@ text = text

function tagChatCompletionRequestContent (contents, messageIdx, tags) {
if (typeof contents === 'string') {
tags[`openai.request.messages.${messageIdx}.content`] = contents
} else if (Array.isArray(contents)) {
// content can also be an array of objects
// which represent text input or image url
for (const contentIdx in contents) {
const content = contents[contentIdx]
const type = content.type
tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.type`] = content.type
if (type === 'text') {
tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.text`] = truncateText(content.text)
} else if (type === 'image_url') {
tags[`openai.request.messages.${messageIdx}.content.${contentIdx}.image_url.url`] =
truncateText(content.image_url.url)
}
// unsupported type otherwise, won't be tagged
}
}
}
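
For example, a multimodal chat message is flattened into indexed tags (hypothetical input; `truncateText` shortens long strings):

const tags = {}
tagChatCompletionRequestContent([
  { type: 'text', text: 'What is in this image?' },
  { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } }
], 0, tags)
// tags['openai.request.messages.0.content.0.type']          -> 'text'
// tags['openai.request.messages.0.content.0.text']          -> 'What is in this image?'
// tags['openai.request.messages.0.content.1.type']          -> 'image_url'
// tags['openai.request.messages.0.content.1.image_url.url'] -> 'https://example.com/cat.png'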
// The server almost always responds with JSON

@@ -614,6 +867,19 @@ function coerceResponseBody (body, methodName) {

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file: body }
}
return typeof body === 'object' ? body : {}
const type = typeof body
if (type === 'string') {
try {
return JSON.parse(body)
} catch {
return body
}
} else if (type === 'object') {
return body
} else {
return {}
}
}
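
The net effect: string bodies are parsed as JSON when possible, raw file downloads are wrapped, and anything else degrades to an empty object. Behaviour sketch (inputs hypothetical):

const fileBuffer = Buffer.from('a,b\n1,2') // a downloaded file body
coerceResponseBody('{"id":"cmpl-123"}', 'completions.create') // -> { id: 'cmpl-123' }
coerceResponseBody('not json', 'completions.create')          // -> 'not json' (parse failed, raw string kept)
coerceResponseBody(fileBuffer, 'files.content')               // -> { file: fileBuffer }
coerceResponseBody(undefined, 'models.list')                  // -> {} (non-object fallback)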

@@ -625,20 +891,35 @@

case 'deleteModel':
case 'models.del':
case 'retrieveModel':
case 'models.retrieve':
return '/v1/models/*'
case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
return '/v1/files/*'
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return '/v1/files/*/content'
case 'retrieveFineTune':
case 'fine-tune.retrieve':
return '/v1/fine-tunes/*'
case 'fine_tuning.jobs.retrieve':
return '/v1/fine_tuning/jobs/*'
case 'listFineTuneEvents':
case 'fine-tune.listEvents':
return '/v1/fine-tunes/*/events'
case 'fine_tuning.jobs.listEvents':
return '/v1/fine_tuning/jobs/*/events'
case 'cancelFineTune':
case 'fine-tune.cancel':
return '/v1/fine-tunes/*/cancel'
case 'fine_tuning.jobs.cancel':
return '/v1/fine_tuning/jobs/*/cancel'
}

@@ -657,4 +938,8 @@

case 'listModels':
case 'models.list':
case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
// no argument

@@ -664,2 +949,3 @@ return {}

case 'retrieveModel':
case 'models.retrieve':
return { id: args[0] }

@@ -674,7 +960,13 @@

case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file_id: args[0] }
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
return {

@@ -686,4 +978,9 @@ fine_tune_id: args[0],

case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
return { fine_tune_id: args[0] }

@@ -745,5 +1042,14 @@

function defensiveArrayLength (maybeArray) {
return Array.isArray(maybeArray) ? maybeArray.length : undefined
if (maybeArray) {
if (Array.isArray(maybeArray)) {
return maybeArray.length
} else {
      // case of a singular item (i.e. body.training_file vs body.training_files)
return 1
}
}
return undefined
}
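
`defensiveArrayLength` now also counts a singular item as 1, so cardinality tags stay meaningful whether the request used `training_file` or `training_files`:

defensiveArrayLength(['file-1', 'file-2']) // -> 2
defensiveArrayLength('file-1')             // -> 1 (singular item)
defensiveArrayLength(undefined)            // -> undefined (nothing to count)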
module.exports = OpenApiPlugin
'use strict'
const { DogStatsDClient, NoopDogStatsDClient } = require('../../dd-trace/src/dogstatsd')
const { DogStatsDClient } = require('../../dd-trace/src/dogstatsd')
const NoopDogStatsDClient = require('../../dd-trace/src/noop/dogstatsd')
const { ExternalLogger, NoopExternalLogger } = require('../../dd-trace/src/external-logger/src')

@@ -5,0 +6,0 @@

@@ -12,3 +12,9 @@ 'use strict'

TEST_SOURCE_START,
TEST_CODE_OWNERS
TEST_CODE_OWNERS,
TEST_SOURCE_FILE,
TEST_CONFIGURATION_BROWSER_NAME,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED,
TELEMETRY_TEST_SESSION
} = require('../../dd-trace/src/plugins/util/test')

@@ -32,7 +38,23 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

this._testSuites = new Map()
this.numFailedTests = 0
this.numFailedSuites = 0
this.addSub('ci:playwright:session:finish', ({ status, onDone }) => {
this.addSub('ci:playwright:session:finish', ({ status, isEarlyFlakeDetectionEnabled, onDone }) => {
this.testModuleSpan.setTag(TEST_STATUS, status)
this.testSessionSpan.setTag(TEST_STATUS, status)
if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_ENABLED, 'true')
}
if (this.numFailedSuites > 0) {
let errorMessage = `Test suites failed: ${this.numFailedSuites}.`
if (this.numFailedTests > 0) {
errorMessage += ` Tests failed: ${this.numFailedTests}`
}
const error = new Error(errorMessage)
this.testModuleSpan.setTag('error', error)
this.testSessionSpan.setTag('error', error)
}
this.testModuleSpan.finish()

@@ -43,4 +65,6 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')

finishAllTraceSpans(this.testSessionSpan)
this.telemetry.count(TELEMETRY_TEST_SESSION, { provider: this.ciProviderName })
appClosingTelemetry()
this.tracer._exporter.flush(onDone)
this.numFailedTests = 0
})

@@ -73,7 +97,17 @@

this.addSub('ci:playwright:test-suite:finish', (status) => {
this.addSub('ci:playwright:test-suite:finish', ({ status, error }) => {
const store = storage.getStore()
const span = store && store.span
if (!span) return
span.setTag(TEST_STATUS, status)
if (error) {
span.setTag('error', error)
span.setTag(TEST_STATUS, 'fail')
} else {
span.setTag(TEST_STATUS, status)
}
if (status === 'fail' || error) {
this.numFailedSuites++
}
span.finish()

@@ -83,10 +117,11 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')

this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine }) => {
this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine, browserName }) => {
const store = storage.getStore()
const testSuite = getTestSuitePath(testSuiteAbsolutePath, this.rootDir)
const span = this.startTestSpan(testName, testSuite, testSourceLine)
const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot)
const span = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine, browserName)
this.enter(span, store)
})
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags }) => {
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags, isNew, isEfdRetry, isRetry }) => {
const store = storage.getStore()

@@ -104,2 +139,11 @@ const span = store && store.span

}
if (isNew) {
span.setTag(TEST_IS_NEW, 'true')
if (isEfdRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
}
if (isRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}

@@ -120,6 +164,12 @@ steps.forEach(step => {

}
stepSpan.finish(stepStartTime + step.duration)
let stepDuration = step.duration
if (stepDuration <= 0 || isNaN(stepDuration)) {
stepDuration = 0
}
stepSpan.finish(stepStartTime + stepDuration)
})
span.finish()
if (testStatus === 'fail') {
this.numFailedTests++
}

@@ -129,4 +179,9 @@ this.telemetry.ciVisEvent(

'test',
{ hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] }
{
hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS],
isNew,
browserDriver: 'playwright'
}
)
span.finish()

@@ -137,5 +192,16 @@ finishAllTraceSpans(span)

startTestSpan (testName, testSuite, testSourceLine) {
startTestSpan (testName, testSuite, testSourceFile, testSourceLine, browserName) {
const testSuiteSpan = this._testSuites.get(testSuite)
return super.startTestSpan(testName, testSuite, testSuiteSpan, { [TEST_SOURCE_START]: testSourceLine })
const extraTags = {
[TEST_SOURCE_START]: testSourceLine
}
if (testSourceFile) {
      extraTags[TEST_SOURCE_FILE] = testSourceFile
}
if (browserName) {
extraTags[TEST_CONFIGURATION_BROWSER_NAME] = browserName
}
return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags)
}

@@ -142,0 +208,0 @@ }

@@ -5,2 +5,4 @@ 'use strict'

const { storage } = require('../../datadog-core')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')

@@ -23,3 +25,3 @@ class RheaConsumerPlugin extends ConsumerPlugin {

this.startSpan({
const span = this.startSpan({
childOf,

@@ -29,3 +31,3 @@ resource: name,

meta: {
'component': 'rhea',
component: 'rhea',
'amqp.link.source.address': name,

@@ -35,2 +37,15 @@ 'amqp.link.role': 'receiver'

})
if (
this.config.dsmEnabled &&
msgObj?.message?.delivery_annotations &&
DsmPathwayCodec.contextExists(msgObj.message.delivery_annotations)
) {
const payloadSize = getAmqpMessageSize(
{ headers: msgObj.message.delivery_annotations, content: msgObj.message.body }
)
this.tracer.decodeDataStreamsContext(msgObj.message.delivery_annotations)
this.tracer
.setCheckpoint(['direction:in', `topic:${name}`, 'type:rabbitmq'], span, payloadSize)
}
}

@@ -37,0 +52,0 @@ }

@@ -5,2 +5,4 @@ 'use strict'

const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')

@@ -21,3 +23,3 @@ class RheaProducerPlugin extends ProducerPlugin {

meta: {
'component': 'rhea',
component: 'rhea',
'amqp.link.target.address': name,

@@ -41,2 +43,10 @@ 'amqp.link.role': 'sender',

tracer.inject(span, 'text_map', msg.delivery_annotations)
if (tracer._config.dsmEnabled) {
const targetName = span.context()._tags['amqp.link.target.address']
const payloadSize = getAmqpMessageSize({ content: msg.body, headers: msg.delivery_annotations })
const dataStreamsContext = tracer
.setCheckpoint(['direction:out', `exchange:${targetName}`, 'type:rabbitmq'], span, payloadSize)
DsmPathwayCodec.encode(dataStreamsContext, msg.delivery_annotations)
}
}
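
Producer and consumer form a round trip: the producer sets an outbound checkpoint and encodes the pathway context into the AMQP delivery annotations, and the consumer patch above only sets its inbound checkpoint once `DsmPathwayCodec.contextExists` finds that context. Condensed sketch, using only names from the patches above:

const msg = { body: 'hello', delivery_annotations: {} }
// producer side, after starting the span:
//   const ctx = tracer.setCheckpoint(['direction:out', 'exchange:orders', 'type:rabbitmq'], span, size)
//   DsmPathwayCodec.encode(ctx, msg.delivery_annotations)
// consumer side, when the delivery arrives:
//   if (DsmPathwayCodec.contextExists(msg.delivery_annotations)) {
//     tracer.decodeDataStreamsContext(msg.delivery_annotations)
//     tracer.setCheckpoint(['direction:in', 'topic:orders', 'type:rabbitmq'], span, size)
//   }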

@@ -43,0 +53,0 @@ }

@@ -74,3 +74,3 @@ 'use strict'

this.addSub(`apm:http:server:request:finish`, ({ req }) => {
this.addSub('apm:http:server:request:finish', ({ req }) => {
const context = this._contexts.get(req)

@@ -77,0 +77,0 @@

@@ -57,5 +57,5 @@ 'use strict'

function isObject (val) {
return typeof val === 'object' && val !== null && !(val instanceof Array)
return val !== null && typeof val === 'object' && !Array.isArray(val)
}
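
`Array.isArray` is the safer test here: it also rejects arrays created in another realm (e.g. a `vm` context), which `instanceof Array` misses, while the `!== null` guard covers `typeof null === 'object'`:

isObject({ doc: 1 }) // -> true
isObject([1, 2, 3])  // -> false (arrays excluded, even cross-realm ones)
isObject(null)       // -> false (typeof null is 'object', hence the explicit guard)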
module.exports = SharedbPlugin

@@ -19,3 +19,3 @@ 'use strict'

'db.type': 'mssql',
'component': 'tedious',
component: 'tedious',
'out.host': connectionConfig.server,

@@ -22,0 +22,0 @@ [CLIENT_PORT_KEY]: connectionConfig.options.port,

@@ -7,3 +7,3 @@ 'use strict'

sample (span, measured, measuredByDefault) {
if (typeof measured === 'object') {
if (measured !== null && typeof measured === 'object') {
this.sample(span, measured[span.context()._name], measuredByDefault)

@@ -10,0 +10,0 @@ } else if (measured !== undefined) {

@@ -18,6 +18,10 @@ 'use strict'

HTTP_INCOMING_RESPONSE_BODY: 'server.response.body',
HTTP_CLIENT_IP: 'http.client_ip',
USER_ID: 'usr.id',
WAF_CONTEXT_PROCESSOR: 'waf.context.processor'
WAF_CONTEXT_PROCESSOR: 'waf.context.processor',
HTTP_OUTGOING_URL: 'server.io.net.url'
}

@@ -8,2 +8,4 @@ 'use strict'

const sampledRequests = new WeakSet()
function configure ({ apiSecurity }) {

@@ -36,3 +38,3 @@ enabled = apiSecurity.enabled

function sampleRequest () {
function sampleRequest (req) {
if (!enabled || !requestSampling) {

@@ -42,5 +44,15 @@ return false

return Math.random() <= requestSampling
const shouldSample = Math.random() <= requestSampling
if (shouldSample) {
sampledRequests.add(req)
}
return shouldSample
}
function isSampled (req) {
return sampledRequests.has(req)
}
module.exports = {

@@ -50,3 +62,4 @@ configure,

setRequestSampling,
sampleRequest
sampleRequest,
isSampled
}
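
Recording sampled requests in a `WeakSet` lets later stages ask `isSampled(req)` with no explicit cleanup: entries vanish when the request object is garbage-collected. Usage sketch, assuming API security has been enabled via `configure` and that `setRequestSampling` takes a rate:

const req = {} // stands in for an http.IncomingMessage
setRequestSampling(0.1) // keep roughly 10% of requests
if (sampleRequest(req)) {
  // ... extract API-security schemas for this request ...
}
isSampled(req) // -> true only if sampleRequest admitted this exact request object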

@@ -11,4 +11,5 @@ 'use strict'

let templateGraphqlJson = blockedTemplates.graphqlJson
let blockingConfiguration
const responseBlockedSet = new WeakSet()
const specificBlockingTypes = {

@@ -26,4 +27,4 @@ GRAPHQL: 'graphql'

function getBlockWithRedirectData (rootSpan) {
let statusCode = blockingConfiguration.parameters.status_code
function getBlockWithRedirectData (rootSpan, actionParameters) {
let statusCode = actionParameters.status_code
if (!statusCode || statusCode < 300 || statusCode >= 400) {

@@ -33,3 +34,3 @@ statusCode = 303

const headers = {
'Location': blockingConfiguration.parameters.location
Location: actionParameters.location
}

@@ -54,6 +55,5 @@

function getBlockWithContentData (req, specificType, rootSpan) {
function getBlockWithContentData (req, specificType, rootSpan, actionParameters) {
let type
let body
let statusCode

@@ -71,3 +71,3 @@ const specificBlockingType = specificType || detectedSpecificEndpoints[getSpecificKey(req.method, req.url)]

if (!blockingConfiguration || blockingConfiguration.parameters.type === 'auto') {
if (!actionParameters || actionParameters.type === 'auto') {
if (accept?.includes('text/html') && !accept.includes('application/json')) {

@@ -81,3 +81,3 @@ type = 'text/html; charset=utf-8'

} else {
if (blockingConfiguration.parameters.type === 'html') {
if (actionParameters.type === 'html') {
type = 'text/html; charset=utf-8'

@@ -92,7 +92,3 @@ body = templateHtml

if (blockingConfiguration?.type === 'block_request' && blockingConfiguration.parameters.status_code) {
statusCode = blockingConfiguration.parameters.status_code
} else {
statusCode = 403
}
const statusCode = actionParameters?.status_code || 403

@@ -111,11 +107,11 @@ const headers = {

function getBlockingData (req, specificType, rootSpan) {
if (blockingConfiguration?.type === 'redirect_request' && blockingConfiguration.parameters.location) {
return getBlockWithRedirectData(rootSpan)
function getBlockingData (req, specificType, rootSpan, actionParameters) {
if (actionParameters?.location) {
return getBlockWithRedirectData(rootSpan, actionParameters)
} else {
return getBlockWithContentData(req, specificType, rootSpan)
return getBlockWithContentData(req, specificType, rootSpan, actionParameters)
}
}
function block (req, res, rootSpan, abortController, type) {
function block (req, res, rootSpan, abortController, actionParameters) {
if (res.headersSent) {

@@ -126,9 +122,19 @@ log.warn('Cannot send blocking response when headers have already been sent')

const { body, headers, statusCode } = getBlockingData(req, type, rootSpan)
const { body, headers, statusCode } = getBlockingData(req, null, rootSpan, actionParameters)
for (const headerName of res.getHeaderNames()) {
res.removeHeader(headerName)
}
res.writeHead(statusCode, headers).end(body)
responseBlockedSet.add(res)
abortController?.abort()
}
function getBlockingAction (actions) {
return actions?.block_request || actions?.redirect_request
}
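
`getBlockingAction` picks whichever blocking action the WAF returned, and its parameters then steer `getBlockingData`: a `location` parameter selects the redirect path, anything else falls through to the content response with `status_code` defaulting to 403. For instance (hypothetical WAF results):

getBlockingAction({ block_request: { status_code: 401, type: 'json' } })
// -> { status_code: 401, type: 'json' } (content response)
getBlockingAction({ redirect_request: { status_code: 301, location: '/denied' } })
// -> { status_code: 301, location: '/denied' } (redirect, because `location` is set)
getBlockingAction({}) // -> undefined (nothing to block)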
function setTemplates (config) {

@@ -154,4 +160,4 @@ if (config.appsec.blockedTemplateHtml) {

function updateBlockingConfiguration (newBlockingConfiguration) {
blockingConfiguration = newBlockingConfiguration
function isBlocked (res) {
return responseBlockedSet.has(res)
}

@@ -164,4 +170,5 @@

getBlockingData,
getBlockingAction,
setTemplates,
updateBlockingConfiguration
isBlocked
}

@@ -19,3 +19,9 @@ 'use strict'

nextBodyParsed: dc.channel('apm:next:body-parsed'),
nextQueryParsed: dc.channel('apm:next:query-parsed')
nextQueryParsed: dc.channel('apm:next:query-parsed'),
responseBody: dc.channel('datadog:express:response:json:start'),
responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'),
httpClientRequestStart: dc.channel('apm:http:client:request:start'),
responseSetHeader: dc.channel('datadog:http:server:response:set-header:start'),
setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start')
}
'use strict'
const { storage } = require('../../../datadog-core')
const { addSpecificEndpoint, specificBlockingTypes, getBlockingData } = require('./blocking')
const {
addSpecificEndpoint,
specificBlockingTypes,
getBlockingData,
getBlockingAction
} = require('./blocking')
const waf = require('./waf')

@@ -35,6 +40,8 @@ const addresses = require('./addresses')

const actions = waf.run({ ephemeral: { [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo } }, req)
if (actions?.includes('block')) {
const blockingAction = getBlockingAction(actions)
if (blockingAction) {
const requestData = graphqlRequestData.get(req)
if (requestData?.isInGraphqlRequest) {
requestData.blocked = true
requestData.wafAction = blockingAction
context?.abortController?.abort()

@@ -91,3 +98,3 @@ }

const blockingData = getBlockingData(req, specificBlockingTypes.GRAPHQL, rootSpan)
const blockingData = getBlockingData(req, specificBlockingTypes.GRAPHQL, rootSpan, requestData.wafAction)
abortData.statusCode = blockingData.statusCode

@@ -94,0 +101,0 @@ abortData.headers = blockingData.headers

'use strict'
module.exports = {
'COMMAND_INJECTION_ANALYZER': require('./command-injection-analyzer'),
'HARCODED_SECRET_ANALYZER': require('./hardcoded-secret-analyzer'),
'HEADER_INJECTION_ANALYZER': require('./header-injection-analyzer'),
'HSTS_HEADER_MISSING_ANALYZER': require('./hsts-header-missing-analyzer'),
'INSECURE_COOKIE_ANALYZER': require('./insecure-cookie-analyzer'),
'LDAP_ANALYZER': require('./ldap-injection-analyzer'),
'NO_HTTPONLY_COOKIE_ANALYZER': require('./no-httponly-cookie-analyzer'),
'NO_SAMESITE_COOKIE_ANALYZER': require('./no-samesite-cookie-analyzer'),
'NOSQL_MONGODB_INJECTION': require('./nosql-injection-mongodb-analyzer'),
'PATH_TRAVERSAL_ANALYZER': require('./path-traversal-analyzer'),
'SQL_INJECTION_ANALYZER': require('./sql-injection-analyzer'),
'SSRF': require('./ssrf-analyzer'),
'UNVALIDATED_REDIRECT_ANALYZER': require('./unvalidated-redirect-analyzer'),
'WEAK_CIPHER_ANALYZER': require('./weak-cipher-analyzer'),
'WEAK_HASH_ANALYZER': require('./weak-hash-analyzer'),
'WEAK_RANDOMNESS_ANALYZER': require('./weak-randomness-analyzer'),
'XCONTENTTYPE_HEADER_MISSING_ANALYZER': require('./xcontenttype-header-missing-analyzer')
CODE_INJECTION_ANALYZER: require('./code-injection-analyzer'),
COMMAND_INJECTION_ANALYZER: require('./command-injection-analyzer'),
HARCODED_PASSWORD_ANALYZER: require('./hardcoded-password-analyzer'),
HARCODED_SECRET_ANALYZER: require('./hardcoded-secret-analyzer'),
HEADER_INJECTION_ANALYZER: require('./header-injection-analyzer'),
HSTS_HEADER_MISSING_ANALYZER: require('./hsts-header-missing-analyzer'),
INSECURE_COOKIE_ANALYZER: require('./insecure-cookie-analyzer'),
LDAP_ANALYZER: require('./ldap-injection-analyzer'),
NO_HTTPONLY_COOKIE_ANALYZER: require('./no-httponly-cookie-analyzer'),
NO_SAMESITE_COOKIE_ANALYZER: require('./no-samesite-cookie-analyzer'),
NOSQL_MONGODB_INJECTION: require('./nosql-injection-mongodb-analyzer'),
PATH_TRAVERSAL_ANALYZER: require('./path-traversal-analyzer'),
SQL_INJECTION_ANALYZER: require('./sql-injection-analyzer'),
SSRF: require('./ssrf-analyzer'),
UNVALIDATED_REDIRECT_ANALYZER: require('./unvalidated-redirect-analyzer'),
WEAK_CIPHER_ANALYZER: require('./weak-cipher-analyzer'),
WEAK_HASH_ANALYZER: require('./weak-hash-analyzer'),
WEAK_RANDOMNESS_ANALYZER: require('./weak-randomness-analyzer'),
XCONTENTTYPE_HEADER_MISSING_ANALYZER: require('./xcontenttype-header-missing-analyzer')
}

@@ -11,3 +11,3 @@ 'use strict'

onConfigure () {
this.addSub('datadog:child_process:execution:start', ({ command }) => this.analyze(command))
this.addSub('tracing:datadog:child_process:execution:start', ({ command }) => this.analyze(command))
}

@@ -14,0 +14,0 @@ }

@@ -37,2 +37,3 @@ 'use strict'

}
_checkOCE (context, value) {

@@ -39,0 +40,0 @@ if (value && value.location) {

'use strict'
const Analyzer = require('./vulnerability-analyzer')
const { HARDCODED_SECRET } = require('../vulnerabilities')
const { getRelativePath } = require('../path-line')
const HardcodedBaseAnalyzer = require('./hardcoded-base-analyzer')
const { ValueOnly } = require('./hardcoded-rule-type')
const secretRules = require('./hardcoded-secrets-rules')
const allRules = require('./hardcoded-secret-rules')
class HardcodedSecretAnalyzer extends Analyzer {
class HardcodedSecretAnalyzer extends HardcodedBaseAnalyzer {
constructor () {
super(HARDCODED_SECRET)
super(HARDCODED_SECRET, allRules, allRules.filter(rule => rule.type === ValueOnly))
}
onConfigure () {
this.addSub('datadog:secrets:result', (secrets) => { this.analyze(secrets) })
}
analyze (secrets) {
if (!secrets?.file || !secrets.literals) return
const matches = secrets.literals
.filter(literal => literal.value && literal.locations?.length)
.map(literal => {
const match = secretRules.find(rule => literal.value.match(rule.regex))
return match ? { locations: literal.locations, ruleId: match.id } : undefined
})
.filter(match => !!match)
if (matches.length) {
const file = getRelativePath(secrets.file)
matches.forEach(match => {
match.locations
.filter(location => location.line)
.forEach(location => this._report({
file,
line: location.line,
column: location.column,
data: match.ruleId
}))
})
}
}
_getEvidence (value) {
return { value: `${value.data}` }
}
_getLocation (value) {
return {
path: value.file,
line: value.line,
column: value.column,
isInternal: false
}
}
}
module.exports = new HardcodedSecretAnalyzer()
/* eslint-disable max-len */
'use strict'
const { ValueOnly, NameAndValue } = require('./hardcoded-rule-type')
module.exports = [
{
'id': 'adobe-client-secret',
'regex': /\b((p8e-)[a-z0-9]{32})(?:['"\s\x60;]|$)/i
id: 'adafruit-api-key',
regex: /(?:adafruit)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9_-]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'age-secret-key',
'regex': /AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}/
id: 'adobe-client-id',
regex: /(?:adobe)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'alibaba-access-key-id',
'regex': /\b((LTAI)[a-z0-9]{20})(?:['"\s\x60;]|$)/i
id: 'adobe-client-secret',
regex: /\b((p8e-)[a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'authress-service-client-access-key',
'regex': /\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc[_-][a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['"\s\x60;]|$)/i
id: 'age-secret-key',
regex: /AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}/,
type: ValueOnly
},
{
'id': 'aws-access-token',
'regex': /\b((A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})(?:['"\s\x60;]|$)/
id: 'airtable-api-key',
regex: /(?:airtable)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{17})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'clojars-api-token',
'regex': /(CLOJARS_)[a-z0-9]{60}/i
id: 'algolia-api-key',
regex: /(?:algolia)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'databricks-api-token',
'regex': /\b(dapi[a-h0-9]{32})(?:['"\s\x60;]|$)/i
id: 'alibaba-access-key-id',
regex: /\b((LTAI)[a-z0-9]{20})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'digitalocean-access-token',
'regex': /\b(doo_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'asana-client-id',
regex: /(?:asana)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9]{16})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'digitalocean-pat',
'regex': /\b(dop_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'asana-client-secret',
regex: /(?:asana)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'digitalocean-refresh-token',
'regex': /\b(dor_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'atlassian-api-token',
regex: /(?:atlassian|confluence|jira)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{24})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'doppler-api-token',
'regex': /(dp\.pt\.)[a-z0-9]{43}/i
id: 'authress-service-client-access-key',
regex: /\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc[_-][a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'duffel-api-token',
'regex': /duffel_(test|live)_[a-z0-9_\-=]{43}/i
id: 'aws-access-token',
regex: /\b((A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})(?:['"\s\x60;]|$)/,
type: ValueOnly
},
{
'id': 'dynatrace-api-token',
'regex': /dt0c01\.[a-z0-9]{24}\.[a-z0-9]{64}/i
id: 'beamer-api-token',
regex: /(?:beamer)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(b_[a-z0-9=_-]{44})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'easypost-api-token',
'regex': /\bEZAK[a-z0-9]{54}/i
id: 'bitbucket-client-id',
regex: /(?:bitbucket)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'flutterwave-public-key',
'regex': /FLWPUBK_TEST-[a-h0-9]{32}-X/i
id: 'bitbucket-client-secret',
regex: /(?:bitbucket)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'frameio-api-token',
'regex': /fio-u-[a-z0-9\-_=]{64}/i
id: 'bittrex-access-key',
regex: /(?:bittrex)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'gcp-api-key',
'regex': /\b(AIza[0-9a-z\-_]{35})(?:['"\s\x60;]|$)/i
id: 'clojars-api-token',
regex: /(CLOJARS_)[a-z0-9]{60}/i,
type: ValueOnly
},
{
'id': 'github-app-token',
'regex': /(ghu|ghs)_[0-9a-zA-Z]{36}/
id: 'codecov-access-token',
regex: /(?:codecov)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'github-fine-grained-pat',
'regex': /github_pat_[0-9a-zA-Z_]{82}/
id: 'coinbase-access-token',
regex: /(?:coinbase)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9_-]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'github-oauth',
'regex': /gho_[0-9a-zA-Z]{36}/
id: 'confluent-access-token',
regex: /(?:confluent)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{16})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'github-pat',
'regex': /ghp_[0-9a-zA-Z]{36}/
id: 'confluent-secret-key',
regex: /(?:confluent)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'gitlab-pat',
'regex': /glpat-[0-9a-zA-Z\-_]{20}/
id: 'contentful-delivery-api-token',
regex: /(?:contentful)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{43})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'gitlab-ptt',
'regex': /glptt-[0-9a-f]{40}/
id: 'databricks-api-token',
regex: /\b(dapi[a-h0-9]{32})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'gitlab-rrt',
'regex': /GR1348941[0-9a-zA-Z\-_]{20}/
id: 'datadog-access-token',
regex: /(?:datadog)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'grafana-api-key',
'regex': /\b(eyJrIjoi[a-z0-9]{70,400}={0,2})(?:['"\s\x60;]|$)/i
id: 'defined-networking-api-token',
regex: /(?:dnkey)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(dnkey-[a-z0-9=_-]{26}-[a-z0-9=_-]{52})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'grafana-cloud-api-token',
'regex': /\b(glc_[a-z0-9+/]{32,400}={0,2})(?:['"\s\x60;]|$)/i
id: 'digitalocean-access-token',
regex: /\b(doo_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'grafana-service-account-token',
'regex': /\b(glsa_[a-z0-9]{32}_[a-f0-9]{8})(?:['"\s\x60;]|$)/i
id: 'digitalocean-pat',
regex: /\b(dop_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'hashicorp-tf-api-token',
'regex': /[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}/i
id: 'digitalocean-refresh-token',
regex: /\b(dor_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'jwt',
'regex': /\b(ey[a-zA-Z0-9]{17,}\.ey[a-zA-Z0-9/_-]{17,}\.(?:[a-zA-Z0-9/_-]{10,}={0,2})?)(?:['"\s\x60;]|$)/
id: 'discord-api-token',
regex: /(?:discord)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'linear-api-key',
'regex': /lin_api_[a-z0-9]{40}/i
id: 'discord-client-id',
regex: /(?:discord)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9]{18})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'npm-access-token',
'regex': /\b(npm_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
id: 'discord-client-secret',
regex: /(?:discord)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'openai-api-key',
'regex': /\b(sk-[a-z0-9]{20}T3BlbkFJ[a-z0-9]{20})(?:['"\s\x60;]|$)/i
id: 'doppler-api-token',
regex: /(dp\.pt\.)[a-z0-9]{43}/i,
type: ValueOnly
},
{
'id': 'planetscale-api-token',
'regex': /\b(pscale_tkn_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'droneci-access-token',
regex: /(?:droneci)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'planetscale-oauth-token',
'regex': /\b(pscale_oauth_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'dropbox-api-token',
regex: /(?:dropbox)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{15})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'planetscale-password',
'regex': /\b(pscale_pw_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'dropbox-long-lived-api-token',
regex: /(?:dropbox)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{11}(AAAAAAAAAA)[a-z0-9\-_=]{43})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'postman-api-token',
'regex': /\b(PMAK-[a-f0-9]{24}-[a-f0-9]{34})(?:['"\s\x60;]|$)/i
id: 'dropbox-short-lived-api-token',
regex: /(?:dropbox)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(sl\.[a-z0-9\-=_]{135})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'prefect-api-token',
'regex': /\b(pnu_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
id: 'duffel-api-token',
regex: /duffel_(test|live)_[a-z0-9_\-=]{43}/i,
type: ValueOnly
},
{
'id': 'private-key',
'regex': /-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S]*KEY( BLOCK)?----/i
id: 'dynatrace-api-token',
regex: /dt0c01\.[a-z0-9]{24}\.[a-z0-9]{64}/i,
type: ValueOnly
},
{
'id': 'pulumi-api-token',
'regex': /\b(pul-[a-f0-9]{40})(?:['"\s\x60;]|$)/i
id: 'easypost-api-token',
regex: /\bEZAK[a-z0-9]{54}/i,
type: ValueOnly
},
{
'id': 'pypi-upload-token',
'regex': /pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}/
id: 'etsy-access-token',
regex: /(?:etsy)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{24})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'readme-api-token',
'regex': /\b(rdme_[a-z0-9]{70})(?:['"\s\x60;]|$)/i
id: 'facebook',
regex: /(?:facebook)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'rubygems-api-token',
'regex': /\b(rubygems_[a-f0-9]{48})(?:['"\s\x60;]|$)/i
id: 'fastly-api-token',
regex: /(?:fastly)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'scalingo-api-token',
'regex': /tk-us-[a-zA-Z0-9-_]{48}/
id: 'finicity-api-token',
regex: /(?:finicity)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'sendgrid-api-token',
'regex': /\b(SG\.[a-z0-9=_\-.]{66})(?:['"\s\x60;]|$)/i
id: 'finicity-client-secret',
regex: /(?:finicity)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{20})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'sendinblue-api-token',
'regex': /\b(xkeysib-[a-f0-9]{64}-[a-z0-9]{16})(?:['"\s\x60;]|$)/i
id: 'finnhub-access-token',
regex: /(?:finnhub)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{20})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'shippo-api-token',
'regex': /\b(shippo_(live|test)_[a-f0-9]{40})(?:['"\s\x60;]|$)/i
id: 'flickr-access-token',
regex: /(?:flickr)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'shopify-access-token',
'regex': /shpat_[a-fA-F0-9]{32}/
id: 'flutterwave-public-key',
regex: /FLWPUBK_TEST-[a-h0-9]{32}-X/i,
type: ValueOnly
},
{
'id': 'shopify-custom-access-token',
'regex': /shpca_[a-fA-F0-9]{32}/
id: 'frameio-api-token',
regex: /fio-u-[a-z0-9\-_=]{64}/i,
type: ValueOnly
},
{
'id': 'shopify-private-app-access-token',
'regex': /shppa_[a-fA-F0-9]{32}/
id: 'freshbooks-access-token',
regex: /(?:freshbooks)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'shopify-shared-secret',
'regex': /shpss_[a-fA-F0-9]{32}/
id: 'gcp-api-key',
regex: /\b(AIza[0-9a-z\-_]{35})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'slack-app-token',
'regex': /(xapp-\d-[A-Z0-9]+-\d+-[a-z0-9]+)/i
id: 'github-app-token',
regex: /(ghu|ghs)_[0-9a-zA-Z]{36}/,
type: ValueOnly
},
{
'id': 'slack-bot-token',
'regex': /(xoxb-[0-9]{10,13}-[0-9]{10,13}[a-zA-Z0-9-]*)/
id: 'github-fine-grained-pat',
regex: /github_pat_[0-9a-zA-Z_]{82}/,
type: ValueOnly
},
{
'id': 'slack-config-access-token',
'regex': /(xoxe.xox[bp]-\d-[A-Z0-9]{163,166})/i
id: 'github-oauth',
regex: /gho_[0-9a-zA-Z]{36}/,
type: ValueOnly
},
{
'id': 'slack-config-refresh-token',
'regex': /(xoxe-\d-[A-Z0-9]{146})/i
id: 'github-pat',
regex: /ghp_[0-9a-zA-Z]{36}/,
type: ValueOnly
},
{
'id': 'slack-legacy-bot-token',
'regex': /(xoxb-[0-9]{8,14}-[a-zA-Z0-9]{18,26})/
id: 'gitlab-pat',
regex: /glpat-[0-9a-zA-Z\-_]{20}/,
type: ValueOnly
},
{
'id': 'slack-legacy-token',
'regex': /(xox[os]-\d+-\d+-\d+-[a-fA-F\d]+)/
id: 'gitlab-ptt',
regex: /glptt-[0-9a-f]{40}/,
type: ValueOnly
},
{
'id': 'slack-legacy-workspace-token',
'regex': /(xox[ar]-(?:\d-)?[0-9a-zA-Z]{8,48})/
id: 'gitlab-rrt',
regex: /GR1348941[0-9a-zA-Z\-_]{20}/,
type: ValueOnly
},
{
'id': 'slack-user-token',
'regex': /(xox[pe](?:-[0-9]{10,13}){3}-[a-zA-Z0-9-]{28,34})/
id: 'gitter-access-token',
regex: /(?:gitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9_-]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'slack-webhook-url',
'regex': /(https?:\/\/)?hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+/]{43,46}/
id: 'gocardless-api-token',
regex: /(?:gocardless)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(live_[a-z0-9\-_=]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'square-access-token',
'regex': /\b(sq0atp-[0-9a-z\-_]{22})(?:['"\s\x60;]|$)/i
id: 'grafana-api-key',
regex: /\b(eyJrIjoi[a-z0-9]{70,400}={0,2})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'square-secret',
'regex': /\b(sq0csp-[0-9a-z\-_]{43})(?:['"\s\x60;]|$)/i
id: 'grafana-cloud-api-token',
regex: /\b(glc_[a-z0-9+/]{32,400}={0,2})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'stripe-access-token',
'regex': /(sk|pk)_(test|live)_[0-9a-z]{10,32}/i
id: 'grafana-service-account-token',
regex: /\b(glsa_[a-z0-9]{32}_[a-f0-9]{8})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
'id': 'telegram-bot-api-token',
'regex': /(?:^|[^0-9])([0-9]{5,16}:A[a-z0-9_-]{34})(?:$|[^a-z0-9_-])/i
id: 'hashicorp-tf-api-token',
regex: /[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}/i,
type: ValueOnly
},
{
'id': 'twilio-api-key',
'regex': /SK[0-9a-fA-F]{32}/
id: 'heroku-api-key',
regex: /(?:heroku)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'vault-batch-token',
'regex': /\b(hvb\.[a-z0-9_-]{138,212})(?:['"\s\x60;]|$)/i
id: 'hubspot-api-key',
regex: /(?:hubspot)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
'id': 'vault-service-token',
'regex': /\b(hvs\.[a-z0-9_-]{90,100})(?:['"\s\x60;]|$)/i
id: 'intercom-api-key',
regex: /(?:intercom)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{60})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'jfrog-api-key',
regex: /(?:jfrog|artifactory|bintray|xray)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{73})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'jwt',
regex: /\b(ey[a-zA-Z0-9]{17,}\.ey[a-zA-Z0-9/_-]{17,}\.(?:[a-zA-Z0-9/_-]{10,}={0,2})?)(?:['"\s\x60;]|$)/,
type: ValueOnly
},
{
id: 'kraken-access-token',
regex: /(?:kraken)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9/=_+-]{80,90})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'kucoin-access-token',
regex: /(?:kucoin)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{24})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'launchdarkly-access-token',
regex: /(?:launchdarkly)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'linear-api-key',
regex: /lin_api_[a-z0-9]{40}/i,
type: ValueOnly
},
{
id: 'linkedin-client-secret',
regex: /(?:linkedin|linked-in)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{16})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'lob-pub-api-key',
regex: /(?:lob)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}((test|live)_pub_[a-f0-9]{31})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mailchimp-api-key',
regex: /(?:mailchimp)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{32}-us20)(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mailgun-private-api-token',
regex: /(?:mailgun)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(key-[a-f0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mailgun-pub-key',
regex: /(?:mailgun)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(pubkey-[a-f0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mailgun-signing-key',
regex: /(?:mailgun)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-h0-9]{32}-[a-h0-9]{8}-[a-h0-9]{8})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mapbox-api-token',
regex: /(?:mapbox)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(pk\.[a-z0-9]{60}\.[a-z0-9]{22})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'mattermost-access-token',
regex: /(?:mattermost)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{26})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'messagebird-api-token',
regex: /(?:messagebird|message-bird|message_bird)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{25})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'netlify-access-token',
regex: /(?:netlify)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{40,46})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'new-relic-browser-api-token',
regex: /(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(NRJS-[a-f0-9]{19})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'new-relic-user-api-id',
regex: /(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'new-relic-user-api-key',
regex: /(?:new-relic|newrelic|new_relic)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(NRAK-[a-z0-9]{27})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'npm-access-token',
regex: /\b(npm_[a-z0-9]{36})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'nytimes-access-token',
regex: /(?:nytimes|new-york-times,|newyorktimes)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'okta-access-token',
regex: /(?:okta)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9=_-]{42})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'openai-api-key',
regex: /\b(sk-[a-z0-9]{20}T3BlbkFJ[a-z0-9]{20})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'plaid-api-token',
regex: /(?:plaid)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(access-(?:sandbox|development|production)-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'plaid-client-id',
regex: /(?:plaid)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{24})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'plaid-secret-key',
regex: /(?:plaid)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{30})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'planetscale-api-token',
regex: /\b(pscale_tkn_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'planetscale-oauth-token',
regex: /\b(pscale_oauth_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'planetscale-password',
regex: /\b(pscale_pw_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'postman-api-token',
regex: /\b(PMAK-[a-f0-9]{24}-[a-f0-9]{34})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'prefect-api-token',
regex: /\b(pnu_[a-z0-9]{36})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'private-key',
regex: /-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S]*KEY( BLOCK)?----/i,
type: ValueOnly
},
{
id: 'pulumi-api-token',
regex: /\b(pul-[a-f0-9]{40})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'pypi-upload-token',
regex: /pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}/,
type: ValueOnly
},
{
id: 'rapidapi-access-token',
regex: /(?:rapidapi)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9_-]{50})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'readme-api-token',
regex: /\b(rdme_[a-z0-9]{70})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'rubygems-api-token',
regex: /\b(rubygems_[a-f0-9]{48})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'scalingo-api-token',
regex: /tk-us-[a-zA-Z0-9-_]{48}/,
type: ValueOnly
},
{
id: 'sendbird-access-id',
regex: /(?:sendbird)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'sendbird-access-token',
regex: /(?:sendbird)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'sendgrid-api-token',
regex: /\b(SG\.[a-z0-9=_\-.]{66})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'sendinblue-api-token',
regex: /\b(xkeysib-[a-f0-9]{64}-[a-z0-9]{16})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'sentry-access-token',
regex: /(?:sentry)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'shippo-api-token',
regex: /\b(shippo_(live|test)_[a-f0-9]{40})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'shopify-access-token',
regex: /shpat_[a-fA-F0-9]{32}/,
type: ValueOnly
},
{
id: 'shopify-custom-access-token',
regex: /shpca_[a-fA-F0-9]{32}/,
type: ValueOnly
},
{
id: 'shopify-private-app-access-token',
regex: /shppa_[a-fA-F0-9]{32}/,
type: ValueOnly
},
{
id: 'shopify-shared-secret',
regex: /shpss_[a-fA-F0-9]{32}/,
type: ValueOnly
},
{
id: 'sidekiq-secret',
regex: /(?:BUNDLE_ENTERPRISE__CONTRIBSYS__COM|BUNDLE_GEMS__CONTRIBSYS__COM)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-f0-9]{8}:[a-f0-9]{8})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'slack-app-token',
regex: /(xapp-\d-[A-Z0-9]+-\d+-[a-z0-9]+)/i,
type: ValueOnly
},
{
id: 'slack-bot-token',
regex: /(xoxb-[0-9]{10,13}-[0-9]{10,13}[a-zA-Z0-9-]*)/,
type: ValueOnly
},
{
id: 'slack-config-access-token',
regex: /(xoxe.xox[bp]-\d-[A-Z0-9]{163,166})/i,
type: ValueOnly
},
{
id: 'slack-config-refresh-token',
regex: /(xoxe-\d-[A-Z0-9]{146})/i,
type: ValueOnly
},
{
id: 'slack-legacy-bot-token',
regex: /(xoxb-[0-9]{8,14}-[a-zA-Z0-9]{18,26})/,
type: ValueOnly
},
{
id: 'slack-legacy-token',
regex: /(xox[os]-\d+-\d+-\d+-[a-fA-F\d]+)/,
type: ValueOnly
},
{
id: 'slack-legacy-workspace-token',
regex: /(xox[ar]-(?:\d-)?[0-9a-zA-Z]{8,48})/,
type: ValueOnly
},
{
id: 'slack-user-token',
regex: /(xox[pe](?:-[0-9]{10,13}){3}-[a-zA-Z0-9-]{28,34})/,
type: ValueOnly
},
{
id: 'slack-webhook-url',
regex: /(https?:\/\/)?hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+/]{43,46}/,
type: ValueOnly
},
{
id: 'snyk-api-token',
regex: /(?:snyk)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'square-access-token',
regex: /\b(sq0atp-[0-9a-z\-_]{22})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'square-secret',
regex: /\b(sq0csp-[0-9a-z\-_]{43})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'squarespace-access-token',
regex: /(?:squarespace)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'stripe-access-token',
regex: /(sk|pk)_(test|live)_[0-9a-z]{10,32}/i,
type: ValueOnly
},
{
id: 'sumologic-access-token',
regex: /(?:sumo)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{64})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'telegram-bot-api-token',
regex: /(?:^|[^0-9])([0-9]{5,16}:A[a-z0-9_-]{34})(?:$|[^a-z0-9_-])/i,
type: ValueOnly
},
{
id: 'travisci-access-token',
regex: /(?:travis)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{22})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'trello-access-token',
regex: /(?:trello)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z-0-9]{32})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twilio-api-key',
regex: /SK[0-9a-fA-F]{32}/,
type: ValueOnly
},
{
id: 'twitch-api-token',
regex: /(?:twitch)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{30})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twitter-access-secret',
regex: /(?:twitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{45})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twitter-access-token',
regex: /(?:twitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([0-9]{15,25}-[a-z0-9]{20,40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twitter-api-key',
regex: /(?:twitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{25})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twitter-api-secret',
regex: /(?:twitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{50})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'twitter-bearer-token',
regex: /(?:twitter)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(A{22}[a-z0-9%]{80,100})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'typeform-api-token',
regex: /(?:typeform)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(tfp_[a-z0-9\-_.=]{59})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'vault-batch-token',
regex: /\b(hvb\.[a-z0-9_-]{138,212})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'vault-service-token',
regex: /\b(hvs\.[a-z0-9_-]{90,100})(?:['"\s\x60;]|$)/i,
type: ValueOnly
},
{
id: 'yandex-access-token',
regex: /(?:yandex)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(t1\.[A-Z0-9a-z_-]+[=]{0,2}\.[A-Z0-9a-z_-]{86}[=]{0,2})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'yandex-api-key',
regex: /(?:yandex)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(AQVN[a-z0-9_-]{35,38})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'yandex-aws-access-token',
regex: /(?:yandex)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}(YC[a-z0-9_-]{38})(?:['"\s\x60;]|$)/i,
type: NameAndValue
},
{
id: 'zendesk-secret-key',
regex: /(?:zendesk)(?:[0-9a-z\-_\t.]{0,20})(?:[\s|']|[\s|""]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|""|\s|=|\x60){0,5}([a-z0-9]{40})(?:['"\s\x60;]|$)/i,
type: NameAndValue
}
]
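
Each rule now declares whether its pattern is distinctive enough to fire on a bare literal (`ValueOnly`, e.g. the `ghp_` GitHub prefix) or whether it needs the service name captured next to the value to avoid false positives (`NameAndValue`). A simplified matcher sketch (module path taken from the import above):

const rules = require('./hardcoded-secret-rules')
const findRule = (literal) => rules.find(rule => rule.regex.test(literal))

findRule('ghp_' + 'a'.repeat(36))       // -> github-pat (ValueOnly: the value alone suffices)
findRule('datadog = ' + 'a'.repeat(40)) // -> datadog-access-token (NameAndValue: name required)
findRule('a'.repeat(40))                // -> undefined (40 plain chars alone prove nothing)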

@@ -12,2 +12,3 @@ 'use strict'

}
_isVulnerableFromRequestAndResponse (req, res) {

@@ -14,0 +15,0 @@ const headerValues = this._getHeaderValues(res, HSTS_HEADER_NAME)

@@ -12,7 +12,7 @@ 'use strict'

const EXCLUDED_PATHS_FROM_STACK = getNodeModulesPaths('mongodb', 'mongoose')
const EXCLUDED_PATHS_FROM_STACK = getNodeModulesPaths('mongodb', 'mongoose', 'mquery')
const MONGODB_NOSQL_SECURE_MARK = getNextSecureMark()
function iterateObjectStrings (target, fn, levelKeys = [], depth = 50, visited = new Set()) {
if (target && typeof target === 'object') {
function iterateObjectStrings (target, fn, levelKeys = [], depth = 20, visited = new Set()) {
if (target !== null && typeof target === 'object') {
Object.keys(target).forEach((key) => {

@@ -41,3 +41,3 @@ const nextLevelKeys = [...levelKeys, key]

this.addSub('datadog:mongodb:collection:filter:start', ({ filters }) => {
const onStart = ({ filters }) => {
const store = storage.getStore()

@@ -49,23 +49,28 @@ if (store && !store.nosqlAnalyzed && filters?.length) {

}
})
this.addSub('datadog:mongoose:model:filter:start', ({ filters }) => {
const store = storage.getStore()
if (!store) return
return store
}
if (filters?.length) {
filters.forEach(filter => {
this.analyze({ filter }, store)
})
const onStartAndEnterWithStore = (message) => {
const store = onStart(message || {})
if (store) {
storage.enterWith({ ...store, nosqlAnalyzed: true, nosqlParentStore: store })
}
}
storage.enterWith({ ...store, nosqlAnalyzed: true, mongooseParentStore: store })
})
this.addSub('datadog:mongoose:model:filter:finish', () => {
const onFinish = () => {
const store = storage.getStore()
if (store?.mongooseParentStore) {
storage.enterWith(store.mongooseParentStore)
if (store?.nosqlParentStore) {
storage.enterWith(store.nosqlParentStore)
}
})
}
this.addSub('datadog:mongodb:collection:filter:start', onStart)
this.addSub('datadog:mongoose:model:filter:start', onStartAndEnterWithStore)
this.addSub('datadog:mongoose:model:filter:finish', onFinish)
this.addSub('datadog:mquery:filter:prepare', onStart)
this.addSub('tracing:datadog:mquery:filter:start', onStartAndEnterWithStore)
this.addSub('tracing:datadog:mquery:filter:asyncEnd', onFinish)
}
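
The enter/restore pair keeps re-entrant filter events from being analyzed twice: the child store carries a `nosqlAnalyzed` flag plus a pointer back to its parent, and the finish handler swaps the parent back in. The same pattern in isolation, using Node's `AsyncLocalStorage` directly (a sketch, not the tracer's own storage wrapper):

const { AsyncLocalStorage } = require('async_hooks')
const storage = new AsyncLocalStorage()

storage.run({ reqId: 1 }, () => {
  const parent = storage.getStore()
  storage.enterWith({ ...parent, nosqlAnalyzed: true, nosqlParentStore: parent })
  storage.getStore().nosqlAnalyzed // -> true, so nested filter events are skipped

  const store = storage.getStore()
  if (store?.nosqlParentStore) {
    storage.enterWith(store.nosqlParentStore) // restore on finish
  }
  storage.getStore().nosqlAnalyzed // -> undefined again
})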

@@ -72,0 +77,0 @@

@@ -7,4 +7,2 @@ 'use strict'

const { storage } = require('../../../../../datadog-core')
const { getIastContext } = require('../iast-context')
const { addVulnerability } = require('../vulnerability-reporter')
const { getNodeModulesPaths } = require('../path-line')

@@ -20,5 +18,5 @@

onConfigure () {
this.addSub('apm:mysql:query:start', ({ sql }) => this.analyze(sql, 'MYSQL'))
this.addSub('apm:mysql2:query:start', ({ sql }) => this.analyze(sql, 'MYSQL'))
this.addSub('apm:pg:query:start', ({ query }) => this.analyze(query.text, 'POSTGRES'))
this.addSub('apm:mysql:query:start', ({ sql }) => this.analyze(sql, undefined, 'MYSQL'))
this.addSub('apm:mysql2:query:start', ({ sql }) => this.analyze(sql, undefined, 'MYSQL'))
this.addSub('apm:pg:query:start', ({ query }) => this.analyze(query.text, undefined, 'POSTGRES'))

@@ -47,3 +45,3 @@ this.addSub(

if (parentStore) {
this.analyze(query, dialect, parentStore)
this.analyze(query, parentStore, dialect)

@@ -66,28 +64,9 @@ storage.enterWith({ ...parentStore, sqlAnalyzed: true, sqlParentStore: parentStore })

analyze (value, dialect, store = storage.getStore()) {
analyze (value, store, dialect) {
store = store || storage.getStore()
if (!(store && store.sqlAnalyzed)) {
const iastContext = getIastContext(store)
if (this._isInvalidContext(store, iastContext)) return
this._reportIfVulnerable(value, iastContext, dialect)
super.analyze(value, store, dialect)
}
}
_reportIfVulnerable (value, context, dialect) {
if (this._isVulnerable(value, context) && this._checkOCE(context)) {
this._report(value, context, dialect)
return true
}
return false
}
_report (value, context, dialect) {
const evidence = this._getEvidence(value, context, dialect)
const location = this._getLocation()
if (!this._isExcluded(location)) {
const spanId = context && context.rootSpan && context.rootSpan.context().toSpanId()
const vulnerability = this._createVulnerability(this._type, evidence, spanId, location)
addVulnerability(context, vulnerability)
}
}
_getExcludedPaths () {

@@ -94,0 +73,0 @@ return EXCLUDED_PATHS

@@ -25,4 +25,8 @@ 'use strict'

_report (value, context) {
const evidence = this._getEvidence(value, context)
_report (value, context, meta) {
const evidence = this._getEvidence(value, context, meta)
this._reportEvidence(value, context, evidence)
}
_reportEvidence (value, context, evidence) {
const location = this._getLocation(value)

@@ -37,5 +41,5 @@ if (!this._isExcluded(location)) {

_reportIfVulnerable (value, context) {
_reportIfVulnerable (value, context, meta) {
if (this._isVulnerable(value, context) && this._checkOCE(context, value)) {
this._report(value, context)
this._report(value, context, meta)
return true

@@ -76,7 +80,7 @@ }

analyze (value, store = storage.getStore()) {
analyze (value, store = storage.getStore(), meta) {
const iastContext = getIastContext(store)
if (this._isInvalidContext(store, iastContext)) return
this._reportIfVulnerable(value, iastContext)
this._reportIfVulnerable(value, iastContext, meta)
}

@@ -83,0 +87,0 @@

@@ -23,7 +23,11 @@ 'use strict'

'sqreen/lib/package-reader/index.js',
'ws/lib/websocket-server.js'
'ws/lib/websocket-server.js',
'google-gax/build/src/grpc.js',
'cookie-signature/index.js'
)
const EXCLUDED_PATHS_FROM_STACK = [
path.join('node_modules', 'object-hash', path.sep)
path.join('node_modules', 'object-hash', path.sep),
path.join('node_modules', 'aws-sdk', 'lib', 'util.js'),
path.join('node_modules', 'keygrip', path.sep)
]

@@ -47,2 +51,4 @@ class WeakHashAnalyzer extends Analyzer {

_isExcluded (location) {
if (!location) return false
return EXCLUDED_LOCATIONS.some(excludedLocation => {

@@ -49,0 +55,0 @@ return location.path.includes(excludedLocation)

@@ -5,31 +5,5 @@ 'use strict'

const log = require('../../log')
const { calculateDDBasePath } = require('../../util')
const telemetryLog = dc.channel('datadog:telemetry:log')
const ddBasePath = calculateDDBasePath(__dirname)
const EOL = '\n'
const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
function sanitize (logEntry, stack) {
if (!stack) return logEntry
let stackLines = stack.split(EOL)
const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX))
const isDDCode = firstIndex > -1 && stackLines[firstIndex].includes(ddBasePath)
stackLines = stackLines
.filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath))
.map(line => line.replace(ddBasePath, ''))
logEntry.stack_trace = stackLines.join(EOL)
if (!isDDCode) {
logEntry.message = 'omitted'
}
return logEntry
}
function getTelemetryLog (data, level) {

@@ -46,14 +20,9 @@ try {

let logEntry = {
const logEntry = {
message,
level
}
if (data.stack) {
logEntry = sanitize(logEntry, data.stack)
if (logEntry.stack_trace === '') {
return
}
logEntry.stack_trace = data.stack
}
return logEntry

@@ -114,3 +83,4 @@ } catch (e) {

this.error(data)
return this.publish(data, 'ERROR')
// publish is done automatically by log.error()
return this
}

@@ -117,0 +87,0 @@ }

@@ -8,3 +8,4 @@ 'use strict'

const iastTelemetry = require('./telemetry')
const { getInstrumentedMetric, getExecutedMetric, TagKey, EXECUTED_SOURCE } = require('./telemetry/iast-metric')
const { getInstrumentedMetric, getExecutedMetric, TagKey, EXECUTED_SOURCE, formatTags } =
require('./telemetry/iast-metric')
const { storage } = require('../../../../datadog-core')

@@ -24,9 +25,12 @@ const { getIastContext } = require('./iast-context')

class IastPluginSubscription {
constructor (moduleName, channelName, tag, tagKey = TagKey.VULNERABILITY_TYPE) {
constructor (moduleName, channelName, tagValues, tagKey = TagKey.VULNERABILITY_TYPE) {
this.moduleName = moduleName
this.channelName = channelName
this.tag = tag
this.tagKey = tagKey
this.executedMetric = getExecutedMetric(this.tagKey)
this.instrumentedMetric = getInstrumentedMetric(this.tagKey)
tagValues = Array.isArray(tagValues) ? tagValues : [tagValues]
this.tags = formatTags(tagValues, tagKey)
this.executedMetric = getExecutedMetric(tagKey)
this.instrumentedMetric = getInstrumentedMetric(tagKey)
this.moduleInstrumented = false

@@ -36,10 +40,11 @@ }

increaseInstrumented () {
if (this.moduleInstrumented) return
if (!this.moduleInstrumented) {
this.moduleInstrumented = true
this.moduleInstrumented = true
this.instrumentedMetric.inc(this.tag)
this.tags.forEach(tag => this.instrumentedMetric.inc(undefined, tag))
}
}
increaseExecuted (iastContext) {
this.executedMetric.inc(this.tag, iastContext)
this.tags.forEach(tag => this.executedMetric.inc(iastContext, tag))
}

@@ -82,6 +87,12 @@

_execHandlerAndIncMetric ({ handler, metric, tag, iastContext = getIastContext(storage.getStore()) }) {
_execHandlerAndIncMetric ({ handler, metric, tags, iastContext = getIastContext(storage.getStore()) }) {
try {
const result = handler()
iastTelemetry.isEnabled() && metric.inc(tag, iastContext)
if (iastTelemetry.isEnabled()) {
if (Array.isArray(tags)) {
tags.forEach(tag => metric.inc(iastContext, tag))
} else {
metric.inc(iastContext, tags)
}
}
return result

@@ -108,2 +119,10 @@ } catch (e) {

enable () {
this.configure(true)
}
disable () {
this.configure(false)
}
onConfigure () {}

@@ -135,6 +154,9 @@

if (!moduleName) {
const firstSep = channelName.indexOf(':')
let firstSep = channelName.indexOf(':')
if (firstSep === -1) {
moduleName = channelName
} else {
if (channelName.startsWith('tracing:')) {
firstSep = channelName.indexOf(':', 'tracing:'.length + 1)
}
const lastSep = channelName.indexOf(':', firstSep + 1)

@@ -141,0 +163,0 @@ moduleName = channelName.substring(firstSep + 1, lastSep !== -1 ? lastSep : channelName.length)

@@ -24,4 +24,8 @@ const vulnerabilityReporter = require('./vulnerability-reporter')

let isEnabled = false
function enable (config, _tracer) {
iastTelemetry.configure(config, config.iast && config.iast.telemetryVerbosity)
if (isEnabled) return
iastTelemetry.configure(config, config.iast?.telemetryVerbosity)
enableAllAnalyzers(config)

@@ -34,5 +38,11 @@ enableTaintTracking(config.iast, iastTelemetry.verbosity)

vulnerabilityReporter.start(config, _tracer)
isEnabled = true
}
function disable () {
if (!isEnabled) return
isEnabled = false
iastTelemetry.stop()

@@ -48,3 +58,3 @@ disableAllAnalyzers()

function onIncomingHttpRequestStart (data) {
if (data && data.req) {
if (data?.req) {
const store = storage.getStore()

@@ -74,7 +84,7 @@ if (store) {

function onIncomingHttpRequestEnd (data) {
if (data && data.req) {
if (data?.req) {
const store = storage.getStore()
const topContext = web.getContext(data.req)
const iastContext = iastContextFunctions.getIastContext(store, topContext)
if (iastContext && iastContext.rootSpan) {
if (iastContext?.rootSpan) {
iastResponseEnd.publish(data)

@@ -81,0 +91,0 @@

@@ -55,5 +55,6 @@ 'use strict'

function acquireRequest (rootSpan) {
if (availableRequest > 0) {
if (availableRequest > 0 && rootSpan) {
const sampling = config && typeof config.requestSampling === 'number'
? config.requestSampling : 30
? config.requestSampling
: 30
if (rootSpan.context().toSpanId().slice(-2) <= sampling) {

@@ -60,0 +61,0 @@ availableRequest--

@@ -6,2 +6,3 @@ 'use strict'

const { calculateDDBasePath } = require('../../util')
const { getCallSiteList } = require('../stack_trace')
const pathLine = {

@@ -28,17 +29,2 @@ getFirstNonDDPathAndLine,

function getCallSiteInfo () {
const previousPrepareStackTrace = Error.prepareStackTrace
const previousStackTraceLimit = Error.stackTraceLimit
let callsiteList
Error.stackTraceLimit = 100
Error.prepareStackTrace = function (_, callsites) {
callsiteList = callsites
}
const e = new Error()
e.stack
Error.prepareStackTrace = previousPrepareStackTrace
Error.stackTraceLimit = previousStackTraceLimit
return callsiteList
}
function getFirstNonDDPathAndLineFromCallsites (callsites, externallyExcludedPaths) {

@@ -93,3 +79,3 @@ if (callsites) {

function getFirstNonDDPathAndLine (externallyExcludedPaths) {
return getFirstNonDDPathAndLineFromCallsites(getCallSiteInfo(), externallyExcludedPaths)
return getFirstNonDDPathAndLineFromCallsites(getCallSiteList(), externallyExcludedPaths)
}

@@ -96,0 +82,0 @@
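
// Illustrative sketch, assuming getCallSiteList() returns the same V8
// CallSite objects the removed getCallSiteInfo captured. This is how a
// consumer would find the first stack frame outside the tracer's code base:
function firstNonDDFrame (callSites, ddBasePath) {
  const site = callSites.find(cs => {
    const file = cs.getFileName()
    return file && !file.includes(ddBasePath)
  })
  return site ? { path: site.getFileName(), line: site.getLineNumber() } : undefined
}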

@@ -5,2 +5,4 @@ 'use strict'

{ src: 'concat' },
{ src: 'join' },
{ src: 'parse' },
{ src: 'plusOperator', operator: true },

@@ -12,5 +14,8 @@ { src: 'random' },

{ src: 'substring' },
{ src: 'toLowerCase', dst: 'stringCase' },
{ src: 'toUpperCase', dst: 'stringCase' },
{ src: 'trim' },
{ src: 'trimEnd' },
{ src: 'trimStart', dst: 'trim' }
{ src: 'trimStart', dst: 'trim' },
{ src: 'eval', allowedWithoutCallee: true }
]

@@ -17,0 +22,0 @@

@@ -13,3 +13,6 @@ 'use strict'

const taintTrackingPlugin = require('./plugin')
const kafkaConsumerPlugin = require('./plugins/kafka')
const kafkaContextPlugin = require('../context/kafka-ctx-plugin')
module.exports = {

@@ -20,2 +23,6 @@ enableTaintTracking (config, telemetryVerbosity) {

taintTrackingPlugin.enable()
kafkaContextPlugin.enable()
kafkaConsumerPlugin.enable()
setMaxTransactions(config.maxConcurrentRequests)

@@ -27,7 +34,10 @@ },

taintTrackingPlugin.disable()
kafkaContextPlugin.disable()
kafkaConsumerPlugin.disable()
},
setMaxTransactions: setMaxTransactions,
createTransaction: createTransaction,
removeTransaction: removeTransaction,
setMaxTransactions,
createTransaction,
removeTransaction,
taintTrackingPlugin
}
'use strict'
const dc = require('dc-polyfill')
const TaintedUtils = require('@datadog/native-iast-taint-tracking')
const { IAST_TRANSACTION_ID } = require('../iast-context')
const iastLog = require('../iast-log')
const iastTelemetry = require('../telemetry')
const { REQUEST_TAINTED } = require('../telemetry/iast-metric')
const { isInfoAllowed } = require('../telemetry/verbosity')
const { getTaintTrackingImpl, getTaintTrackingNoop } = require('./taint-tracking-impl')
const {
getTaintTrackingImpl,
getTaintTrackingNoop,
lodashTaintTrackingHandler
} = require('./taint-tracking-impl')
const { taintObject } = require('./operations-taint-object')
const lodashOperationCh = dc.channel('datadog:lodash:operation')
function createTransaction (id, iastContext) {

@@ -22,3 +29,3 @@ if (id && iastContext) {

if (metrics?.requestCount) {
REQUEST_TAINTED.add(metrics.requestCount, null, iastContext)
REQUEST_TAINTED.inc(iastContext, metrics.requestCount)
}

@@ -38,3 +45,3 @@ }

function newTaintedString (iastContext, string, name, type) {
let result = string
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -49,46 +56,9 @@ if (transactionId) {

function taintObject (iastContext, object, type, keyTainting, keyType) {
let result = object
function newTaintedObject (iastContext, obj, name, type) {
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]
if (transactionId) {
const queue = [{ parent: null, property: null, value: object }]
const visited = new WeakSet()
while (queue.length > 0) {
const { parent, property, value, key } = queue.pop()
if (value === null) {
continue
}
try {
if (typeof value === 'string') {
const tainted = TaintedUtils.newTaintedString(transactionId, value, property, type)
if (!parent) {
result = tainted
} else {
if (keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = tainted
} else {
parent[key] = tainted
}
}
} else if (typeof value === 'object' && !visited.has(value)) {
visited.add(value)
const keys = Object.keys(value)
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
queue.push({ parent: value, property: property ? `${property}.${key}` : key, value: value[key], key })
}
if (parent && keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = value
}
}
} catch (e) {
iastLog.error(`Error visiting property : ${property}`).errorAndPublish(e)
}
}
result = TaintedUtils.newTaintedObject(transactionId, obj, name, type)
} else {
result = obj
}

@@ -99,3 +69,3 @@ return result

function isTainted (iastContext, string) {
let result = false
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -111,3 +81,3 @@ if (transactionId) {

function getRanges (iastContext, string) {
let result = []
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -137,2 +107,3 @@ if (transactionId) {

global._ddiast = getTaintTrackingImpl(telemetryVerbosity)
lodashOperationCh.subscribe(lodashTaintTrackingHandler)
}

@@ -142,2 +113,3 @@

global._ddiast = getTaintTrackingNoop()
lodashOperationCh.unsubscribe(lodashTaintTrackingHandler)
}

@@ -158,2 +130,3 @@

newTaintedString,
newTaintedObject,
taintObject,

@@ -160,0 +133,0 @@ isTainted,
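
// Illustrative usage sketch, not part of the diff: every taint operation is
// keyed on the transaction id carried by the IAST context, so callers can
// invoke them unconditionally - without an active transaction the input
// comes back unchanged and isTainted stays falsy. Values are hypothetical.
const { newTaintedString, isTainted } = require('./operations')

function taintIfActive (iastContext, value) {
  const tainted = newTaintedString(iastContext, value, 'example.param', 'http.request.parameter')
  return isTainted(iastContext, tainted) ? tainted : value
}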

@@ -6,3 +6,3 @@ 'use strict'

const { storage } = require('../../../../../datadog-core')
const { taintObject, newTaintedString } = require('./operations')
const { taintObject, newTaintedString, getRanges } = require('./operations')
const {

@@ -18,3 +18,7 @@ HTTP_REQUEST_BODY,

} = require('./source-types')
const { EXECUTED_SOURCE } = require('../telemetry/iast-metric')
const REQ_HEADER_TAGS = EXECUTED_SOURCE.formatTags(HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME)
const REQ_URI_TAGS = EXECUTED_SOURCE.formatTags(HTTP_REQUEST_URI)
class TaintTrackingPlugin extends SourceIastPlugin {

@@ -31,5 +35,5 @@ constructor () {

const iastContext = getIastContext(storage.getStore())
if (iastContext && iastContext['body'] !== req.body) {
if (iastContext && iastContext.body !== req.body) {
this._taintTrackingHandler(HTTP_REQUEST_BODY, req, 'body', iastContext)
iastContext['body'] = req.body
iastContext.body = req.body
}

@@ -47,7 +51,7 @@ }

({ req }) => {
if (req && req.body && typeof req.body === 'object') {
if (req && req.body !== null && typeof req.body === 'object') {
const iastContext = getIastContext(storage.getStore())
if (iastContext && iastContext['body'] !== req.body) {
if (iastContext && iastContext.body !== req.body) {
this._taintTrackingHandler(HTTP_REQUEST_BODY, req, 'body', iastContext)
iastContext['body'] = req.body
iastContext.body = req.body
}

@@ -66,3 +70,3 @@ }

({ req }) => {
if (req && req.params && typeof req.params === 'object') {
if (req && req.params !== null && typeof req.params === 'object') {
this._taintTrackingHandler(HTTP_REQUEST_PATH_PARAM, req, 'params')

@@ -73,2 +77,14 @@ }

this.addSub(
{ channelName: 'apm:graphql:resolve:start', tag: HTTP_REQUEST_BODY },
(data) => {
const iastContext = getIastContext(storage.getStore())
const source = data.context?.source
const ranges = source && getRanges(iastContext, source)
if (ranges?.length) {
this._taintTrackingHandler(ranges[0].iinfo.type, data.args, null, iastContext)
}
}
)
// this is a special case to increment the INSTRUMENTED_SOURCE metric for headers


@@ -88,9 +104,11 @@ this.addInstrumentedSource('http', [HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME])

const iastContext = getIastContext(storage.getStore())
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE, true, HTTP_REQUEST_COOKIE_NAME)
// Prevent tainting cookie names since it leads to tainting literal strings with the same value.
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE)
}
taintHeaders (headers, iastContext) {
// Prevent tainting header names since it leads to tainting literal strings with the same value.
this.execSource({
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE, true, HTTP_REQUEST_HEADER_NAME),
tag: [HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME],
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE),
tags: REQ_HEADER_TAGS,
iastContext

@@ -105,3 +123,3 @@ })

},
tag: [HTTP_REQUEST_URI],
tags: REQ_URI_TAGS,
iastContext

@@ -115,12 +133,4 @@ })

}
enable () {
this.configure(true)
}
disable () {
this.configure(false)
}
}
module.exports = new TaintTrackingPlugin()

@@ -17,3 +17,3 @@ 'use strict'

if (metrics && metrics.instrumentedPropagation) {
INSTRUMENTED_PROPAGATION.add(metrics.instrumentedPropagation)
INSTRUMENTED_PROPAGATION.inc(undefined, metrics.instrumentedPropagation)
}

@@ -20,0 +20,0 @@

@@ -15,2 +15,3 @@ 'use strict'

let getPrepareStackTrace
let kSymbolPrepareStackTrace

@@ -48,2 +49,3 @@ let getRewriterOriginalPathAndLineFromSourceMap = function (path, line, column) {

getPrepareStackTrace = iastRewriter.getPrepareStackTrace
kSymbolPrepareStackTrace = iastRewriter.kSymbolPrepareStackTrace

@@ -70,4 +72,5 @@ const chainSourceMap = isFlagPresent('--enable-source-maps')

let originalPrepareStackTrace = Error.prepareStackTrace
let originalPrepareStackTrace
function getPrepareStackTraceAccessor () {
originalPrepareStackTrace = Error.prepareStackTrace
let actual = getPrepareStackTrace(originalPrepareStackTrace)

@@ -127,3 +130,12 @@ return {

shimmer.unwrap(Module.prototype, '_compile')
Error.prepareStackTrace = originalPrepareStackTrace
if (!Error.prepareStackTrace?.[kSymbolPrepareStackTrace]) return
try {
delete Error.prepareStackTrace
Error.prepareStackTrace = originalPrepareStackTrace
} catch (e) {
iastLog.warn(e)
}
}

@@ -130,0 +142,0 @@

@@ -12,3 +12,5 @@ 'use strict'

HTTP_REQUEST_PATH_PARAM: 'http.request.path.parameter',
HTTP_REQUEST_URI: 'http.request.uri'
HTTP_REQUEST_URI: 'http.request.uri',
KAFKA_MESSAGE_KEY: 'kafka.message.key',
KAFKA_MESSAGE_VALUE: 'kafka.message.value'
}

@@ -10,5 +10,9 @@ 'use strict'

const { isDebugAllowed } = require('../telemetry/verbosity')
const { taintObject } = require('./operations-taint-object')
const mathRandomCallCh = dc.channel('datadog:random:call')
const evalCallCh = dc.channel('datadog:eval:call')
const JSON_VALUE = 'json.value'
function noop (res) { return res }

@@ -18,4 +22,7 @@ // NOTE: methods of this object must be synchronized with csi-methods.js file definitions!

const TaintTrackingNoop = {
concat: noop,
eval: noop,
join: noop,
parse: noop,
plusOperator: noop,
concat: noop,
random: noop,

@@ -26,2 +33,3 @@ replace: noop,

substring: noop,
stringCase: noop,
trim: noop,

@@ -32,3 +40,3 @@ trimEnd: noop

function getTransactionId (iastContext) {
return iastContext && iastContext[iastContextFunctions.IAST_TRANSACTION_ID]
return iastContext?.[iastContextFunctions.IAST_TRANSACTION_ID]
}

@@ -43,3 +51,3 @@

const iastContext = getContextDefault()
EXECUTED_PROPAGATION.inc(null, iastContext)
EXECUTED_PROPAGATION.inc(iastContext)
return iastContext

@@ -111,3 +119,3 @@ }

} catch (e) {
iastLog.error(`Error invoking CSI plusOperator`)
iastLog.error('Error invoking CSI plusOperator')
.errorAndPublish(e)

@@ -118,2 +126,9 @@ }

stringCase: getCsiFn(
(transactionId, res, target) => TaintedUtils.stringCase(transactionId, res, target),
getContext,
String.prototype.toLowerCase,
String.prototype.toUpperCase
),
trim: getCsiFn(

@@ -131,2 +146,50 @@ (transactionId, res, target) => TaintedUtils.trim(transactionId, res, target),

return res
},
eval: function (res, fn, target, script) {
// eslint-disable-next-line no-eval
if (evalCallCh.hasSubscribers && fn === globalThis.eval) {
evalCallCh.publish({ script })
}
return res
},
parse: function (res, fn, target, json) {
if (fn === JSON.parse) {
try {
const iastContext = getContext()
const transactionId = getTransactionId(iastContext)
if (transactionId) {
const ranges = TaintedUtils.getRanges(transactionId, json)
// TODO: first version.
// here we are losing the original source because taintObject always creates a new tainted value
if (ranges?.length > 0) {
const range = ranges.find(range => range.iinfo?.type)
res = taintObject(iastContext, res, range?.iinfo.type || JSON_VALUE)
}
}
} catch (e) {
iastLog.error(e)
}
}
return res
},
join: function (res, fn, target, separator) {
if (fn === Array.prototype.join) {
try {
const iastContext = getContext()
const transactionId = getTransactionId(iastContext)
if (transactionId) {
res = TaintedUtils.arrayJoin(transactionId, res, target, separator)
}
} catch (e) {
iastLog.error(e)
}
}
return res
}

@@ -160,5 +223,34 @@ }

const lodashFns = {
join: TaintedUtils.arrayJoin,
toLower: TaintedUtils.stringCase,
toUpper: TaintedUtils.stringCase,
trim: TaintedUtils.trim,
trimEnd: TaintedUtils.trimEnd,
trimStart: TaintedUtils.trim
}
function getLodashTaintedUtilFn (lodashFn) {
return lodashFns[lodashFn] || ((transactionId, result) => result)
}
function lodashTaintTrackingHandler (message) {
try {
if (!message.result) return
const context = getContextDefault()
const transactionId = getTransactionId(context)
if (transactionId) {
message.result = getLodashTaintedUtilFn(message.operation)(transactionId, message.result, ...message.arguments)
}
} catch (e) {
iastLog.error(`Error invoking CSI lodash ${message.operation}`)
.errorAndPublish(e)
}
}
module.exports = {
getTaintTrackingImpl,
getTaintTrackingNoop
getTaintTrackingNoop,
lodashTaintTrackingHandler
}
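
// Illustrative sketch of the publishing side, not part of the diff: the
// lodash instrumentation is assumed to publish its operation, arguments and
// result on the channel subscribed above; lodashTaintTrackingHandler then
// rewrites message.result in place so taint ranges survive the operation.
const dcExample = require('dc-polyfill')
const exampleCh = dcExample.channel('datadog:lodash:operation')

function publishLodashOperation (operation, args, result) {
  if (!exampleCh.hasSubscribers) return result
  const message = { operation, arguments: args, result }
  exampleCh.publish(message)
  return message.result // possibly re-tainted by the subscriber
}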

@@ -22,2 +22,15 @@ 'use strict'

function formatTags (tags, tagKey) {
return tags.map(tagValue => tagValue ? [`${tagKey}:${tagValue.toLowerCase()}`] : undefined)
}
function getNamespace (scope, context) {
let namespace = globalNamespace
if (scope === Scope.REQUEST) {
namespace = getNamespaceFromContext(context) || globalNamespace
}
return namespace
}
class IastMetric {

@@ -30,26 +43,22 @@ constructor (name, scope, tagKey) {

getNamespace (context) {
return getNamespaceFromContext(context) || globalNamespace
formatTags (...tags) {
return formatTags(tags, this.tagKey)
}
getTag (tagValue) {
return tagValue ? { [this.tagKey]: tagValue } : undefined
inc (context, tags, value = 1) {
const namespace = getNamespace(this.scope, context)
namespace.count(this.name, tags).inc(value)
}
}
addValue (value, tagValue, context) {
this.getNamespace(context)
.count(this.name, this.getTag(tagValue))
.inc(value)
}
class NoTaggedIastMetric extends IastMetric {
constructor (name, scope) {
super(name, scope)
add (value, tagValue, context) {
if (Array.isArray(tagValue)) {
tagValue.forEach(tag => this.addValue(value, tag, context))
} else {
this.addValue(value, tagValue, context)
}
this.tags = []
}
inc (tagValue, context) {
this.add(1, tagValue, context)
inc (context, value = 1) {
const namespace = getNamespace(this.scope, context)
namespace.count(this.name, this.tags).inc(value)
}

@@ -66,3 +75,3 @@ }

const INSTRUMENTED_PROPAGATION = new IastMetric('instrumented.propagation', Scope.GLOBAL)
const INSTRUMENTED_PROPAGATION = new NoTaggedIastMetric('instrumented.propagation', Scope.GLOBAL)
const INSTRUMENTED_SOURCE = new IastMetric('instrumented.source', Scope.GLOBAL, TagKey.SOURCE_TYPE)

@@ -74,11 +83,8 @@ const INSTRUMENTED_SINK = new IastMetric('instrumented.sink', Scope.GLOBAL, TagKey.VULNERABILITY_TYPE)

const REQUEST_TAINTED = new IastMetric('request.tainted', Scope.REQUEST)
const REQUEST_TAINTED = new NoTaggedIastMetric('request.tainted', Scope.REQUEST)
// DEBUG using metrics
const EXECUTED_PROPAGATION = new IastMetric('executed.propagation', Scope.REQUEST)
const EXECUTED_TAINTED = new IastMetric('executed.tainted', Scope.REQUEST)
const EXECUTED_PROPAGATION = new NoTaggedIastMetric('executed.propagation', Scope.REQUEST)
const EXECUTED_TAINTED = new NoTaggedIastMetric('executed.tainted', Scope.REQUEST)
// DEBUG using distribution endpoint
const INSTRUMENTATION_TIME = new IastMetric('instrumentation.time', Scope.GLOBAL)
module.exports = {

@@ -96,4 +102,2 @@ INSTRUMENTED_PROPAGATION,

INSTRUMENTATION_TIME,
PropagationType,

@@ -103,5 +107,8 @@ TagKey,

IastMetric,
NoTaggedIastMetric,
getExecutedMetric,
getInstrumentedMetric
getInstrumentedMetric,
formatTags
}
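
// Illustrative sketch, not part of the diff: formatTags pre-computes the
// array-of-arrays shape that namespace.count(name, tags) expects, so hot
// paths can reuse one stable reference instead of rebuilding tags per call.
formatTags(['COMMAND_INJECTION', undefined], 'vulnerability_type')
// => [ ['vulnerability_type:command_injection'], undefined ]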

@@ -5,4 +5,5 @@ 'use strict'

const { Namespace } = require('../../../telemetry/metrics')
const { addMetricsToSpan, filterTags } = require('./span-tags')
const { addMetricsToSpan } = require('./span-tags')
const { IAST_TRACE_METRIC_PREFIX } = require('../tags')
const iastLog = require('../iast-log')

@@ -14,3 +15,3 @@ const DD_IAST_METRICS_NAMESPACE = Symbol('_dd.iast.request.metrics.namespace')

const namespace = new Namespace('iast')
const namespace = new IastNamespace()
context[DD_IAST_METRICS_NAMESPACE] = namespace

@@ -21,3 +22,3 @@ return namespace

function getNamespaceFromContext (context) {
return context && context[DD_IAST_METRICS_NAMESPACE]
return context?.[DD_IAST_METRICS_NAMESPACE]
}

@@ -30,8 +31,7 @@

const metrics = [...namespace.metrics.values()]
namespace.metrics.clear()
addMetricsToSpan(rootSpan, [...namespace.metrics.values()], IAST_TRACE_METRIC_PREFIX)
addMetricsToSpan(rootSpan, metrics, IAST_TRACE_METRIC_PREFIX)
merge(namespace)
merge(metrics)
namespace.clear()
} catch (e) {

@@ -46,13 +46,12 @@ log.error(e)

function merge (metrics) {
metrics.forEach(metric => metric.points.forEach(point => {
globalNamespace
.count(metric.metric, getTagsObject(metric.tags))
.inc(point[1])
}))
}
function merge (namespace) {
for (const [metricName, metricsByTagMap] of namespace.iastMetrics) {
for (const [tags, metric] of metricsByTagMap) {
const { type, points } = metric
function getTagsObject (tags) {
if (tags && tags.length > 0) {
return filterTags(tags)
if (points?.length && type === 'count') {
const gMetric = globalNamespace.getMetric(metricName, tags)
points.forEach(point => gMetric.inc(point[1]))
}
}
}

@@ -62,9 +61,45 @@ }

class IastNamespace extends Namespace {
constructor () {
constructor (maxMetricTagsSize = 100) {
super('iast')
this.maxMetricTagsSize = maxMetricTagsSize
this.iastMetrics = new Map()
}
reset () {
getIastMetrics (name) {
let metrics = this.iastMetrics.get(name)
if (!metrics) {
metrics = new Map()
this.iastMetrics.set(name, metrics)
}
return metrics
}
getMetric (name, tags, type = 'count') {
const metrics = this.getIastMetrics(name)
let metric = metrics.get(tags)
if (!metric) {
metric = super[type](name, Array.isArray(tags) ? [...tags] : tags)
if (metrics.size === this.maxMetricTagsSize) {
metrics.clear()
iastLog.warnAndPublish(`Tags cache max size reached for metric ${name}`)
}
metrics.set(tags, metric)
}
return metric
}
count (name, tags) {
return this.getMetric(name, tags, 'count')
}
clear () {
this.iastMetrics.clear()
this.distributions.clear()
this.metrics.clear()
this.distributions.clear()
}

@@ -81,3 +116,5 @@ }

DD_IAST_METRICS_NAMESPACE
DD_IAST_METRICS_NAMESPACE,
IastNamespace
}
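
// Inference from the diff above, stated as a sketch: getMetric caches by the
// tags reference itself (the Map is keyed on the array object), which is why
// call sites pre-compute stable tag arrays such as REQ_HEADER_TAGS. A fresh
// array per request would miss the cache every time and eventually trigger
// the maxMetricTagsSize eviction and its warning.
const ns = new IastNamespace(100)
const TAGS = ['vulnerability_type:sql_injection'] // computed once, reused
ns.count('executed.sink', TAGS).inc(1)
ns.count('executed.sink', TAGS).inc(1) // cache hit: same metric instance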
'use strict'
function addMetricsToSpan (rootSpan, metrics, tagPrefix) {
if (!rootSpan || !rootSpan.addTags || !metrics) return
if (!rootSpan?.addTags || !metrics) return
const flattenMap = new Map()
metrics
.filter(data => data && data.metric)
.filter(data => data?.metric)
.forEach(data => {

@@ -30,3 +30,4 @@ const name = taggedMetricName(data)

function flatten (metricData) {
return metricData.points && metricData.points.map(point => point[1]).reduce((total, value) => total + value, 0)
const { points } = metricData
return points ? points.map(point => point[1]).reduce((total, value) => total + value, 0) : 0
}

@@ -36,4 +37,4 @@

const metric = data.metric
const tags = data.tags && filterTags(data.tags)
return !tags || !tags.length
const tags = filterTags(data.tags)
return !tags?.length
? metric

@@ -44,3 +45,3 @@ : `${metric}.${processTagValue(tags)}`

function filterTags (tags) {
return tags.filter(tag => !tag.startsWith('lib_language') && !tag.startsWith('version'))
return tags?.filter(tag => !tag.startsWith('version'))
}

@@ -47,0 +48,0 @@

@@ -71,3 +71,3 @@ 'use strict'

if (!pattern) {
pattern = patterns['ANSI']
pattern = patterns.ANSI
}

@@ -74,0 +74,0 @@ pattern.lastIndex = 0

@@ -8,3 +8,5 @@ 'use strict'

const codeInjectionSensitiveAnalyzer = require('./sensitive-analyzers/code-injection-sensitive-analyzer')
const commandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer')
const hardcodedPasswordAnalyzer = require('./sensitive-analyzers/hardcoded-password-analyzer')
const headerSensitiveAnalyzer = require('./sensitive-analyzers/header-sensitive-analyzer')

@@ -26,2 +28,3 @@ const jsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer')

this._sensitiveAnalyzers = new Map()
this._sensitiveAnalyzers.set(vulnerabilities.CODE_INJECTION, codeInjectionSensitiveAnalyzer)
this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, commandSensitiveAnalyzer)

@@ -36,2 +39,5 @@ this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, jsonSensitiveAnalyzer)

})
this._sensitiveAnalyzers.set(vulnerabilities.HARDCODED_PASSWORD, (evidence) => {
return hardcodedPasswordAnalyzer(evidence, this._valuePattern)
})
}

@@ -57,3 +63,5 @@

const sensitiveRanges = sensitiveAnalyzer(evidence)
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
if (evidence.ranges || sensitiveRanges?.length) {
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
}
}

@@ -74,3 +82,3 @@ return null

let nextTainted = ranges.shift()
let nextTainted = ranges?.shift()
let nextSensitive = sensitive.shift()

@@ -77,0 +85,0 @@

// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)'
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)'
// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,}))'
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})'

@@ -6,0 +6,0 @@ module.exports = {
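
// Illustrative sketch, not part of the diff, assuming the patterns are
// compiled case-insensitively: sample values the extended value pattern now
// redacts, including the newly added e-mail alternative.
const valueRe = new RegExp(DEFAULT_IAST_REDACTION_VALUE_PATTERN, 'i')
valueRe.test('bearer eyJhbGciOiJIUzI1NiJ9') // true - bearer token
valueRe.test('jane.doe@example.com') // true - matches the new e-mail branch
valueRe.test('hello world') // false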

@@ -46,2 +46,4 @@ 'use strict'

if (evidence.value == null) return { valueParts }
if (typeof evidence.value === 'object' && evidence.rangesToApply) {

@@ -53,2 +55,6 @@ const { value, ranges } = stringifyWithRanges(evidence.value, evidence.rangesToApply)

if (!evidence.ranges) {
return { value: evidence.value }
}
evidence.ranges.forEach((range, rangeIndex) => {

@@ -70,8 +76,4 @@ if (fromIndex < range.start) {

formatEvidence (type, evidence, sourcesIndexes, sources) {
if (!evidence.ranges && !evidence.rangesToApply) {
if (typeof evidence.value === 'undefined') {
return undefined
} else {
return { value: evidence.value }
}
if (evidence.value === undefined) {
return undefined
}

@@ -78,0 +80,0 @@

@@ -16,3 +16,3 @@ 'use strict'

function iterateObject (target, fn, levelKeys = [], depth = 50) {
function iterateObject (target, fn, levelKeys = [], depth = 10, visited = new Set()) {
Object.keys(target).forEach((key) => {

@@ -22,6 +22,9 @@ const nextLevelKeys = [...levelKeys, key]

fn(val, nextLevelKeys, target, key)
if (typeof val !== 'object' || !visited.has(val)) {
visited.add(val)
fn(val, nextLevelKeys, target, key)
if (val !== null && typeof val === 'object') {
iterateObject(val, fn, nextLevelKeys, depth - 1)
if (val !== null && typeof val === 'object' && depth > 0) {
iterateObject(val, fn, nextLevelKeys, depth - 1, visited)
}
}

@@ -28,0 +31,0 @@ })
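
// Illustrative sketch, not part of the diff: what the visited set and the
// lowered depth cap protect against - a self-referencing payload no longer
// recurses forever, and nesting stops after 10 levels.
const payload = { user: { name: 'a' } }
payload.self = payload // cycle
iterateObject(payload, (val, keys) => console.log(keys.join('.')))
// logs: user, user.name, self - then stops without re-entering the cycle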

module.exports = {
COMMAND_INJECTION: 'COMMAND_INJECTION',
CODE_INJECTION: 'CODE_INJECTION',
HARDCODED_PASSWORD: 'HARDCODED_PASSWORD',
HARDCODED_SECRET: 'HARDCODED_SECRET',

@@ -4,0 +6,0 @@ HEADER_INJECTION: 'HEADER_INJECTION',

@@ -7,2 +7,3 @@ 'use strict'

const { IAST_ENABLED_TAG_KEY, IAST_JSON_TAG_KEY } = require('./tags')
const standalone = require('../standalone')

@@ -61,2 +62,5 @@ const VULNERABILITIES_KEY = 'vulnerabilities'

span.addTags(tags)
standalone.sample(span)
if (!rootSpan) span.finish()

@@ -63,0 +67,0 @@ }

@@ -14,3 +14,6 @@ 'use strict'

nextBodyParsed,
nextQueryParsed
nextQueryParsed,
responseBody,
responseWriteHead,
responseSetHeader
} = require('./channels')

@@ -25,6 +28,7 @@ const waf = require('./waf')

const { HTTP_CLIENT_IP } = require('../../../../ext/tags')
const { block, setTemplates } = require('./blocking')
const { isBlocked, block, setTemplates, getBlockingAction } = require('./blocking')
const { passportTrackEvent } = require('./passport')
const { storage } = require('../../../datadog-core')
const graphql = require('./graphql')
const rasp = require('./rasp')

@@ -41,2 +45,6 @@ let isEnabled = false

if (_config.appsec.rasp.enabled) {
rasp.enable(_config)
}
setTemplates(_config)

@@ -59,2 +67,5 @@

cookieParser.subscribe(onRequestCookieParser)
responseBody.subscribe(onResponseBody)
responseWriteHead.subscribe(onResponseWriteHead)
responseSetHeader.subscribe(onResponseSetHeader)

@@ -100,3 +111,3 @@ if (_config.appsec.eventTracking.enabled) {

if (apiSecuritySampler.sampleRequest()) {
if (apiSecuritySampler.sampleRequest(req)) {
persistent[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true }

@@ -111,11 +122,4 @@ }

function incomingHttpEndTranslator ({ req, res }) {
// TODO: this doesn't support headers sent with res.writeHead()
const responseHeaders = Object.assign({}, res.getHeaders())
delete responseHeaders['set-cookie']
const persistent = {}
const persistent = {
[addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + res.statusCode,
[addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders
}
// we need to keep this to support other body parsers

@@ -128,3 +132,3 @@ // TODO: no need to analyze it if it was already done by the body-parser hook

// TODO: temporary express instrumentation, will use express plugin later
if (req.params && typeof req.params === 'object') {
if (req.params !== null && typeof req.params === 'object') {
persistent[addresses.HTTP_INCOMING_PARAMS] = req.params

@@ -134,11 +138,13 @@ }

// we need to keep this to support other cookie parsers
if (req.cookies && typeof req.cookies === 'object') {
if (req.cookies !== null && typeof req.cookies === 'object') {
persistent[addresses.HTTP_INCOMING_COOKIES] = req.cookies
}
if (req.query && typeof req.query === 'object') {
if (req.query !== null && typeof req.query === 'object') {
persistent[addresses.HTTP_INCOMING_QUERY] = req.query
}
waf.run({ persistent }, req)
if (Object.keys(persistent).length) {
waf.run({ persistent }, req)
}

@@ -205,2 +211,14 @@ waf.disposeContext(req)

function onResponseBody ({ req, body }) {
if (!body || typeof body !== 'object') return
if (!apiSecuritySampler.isSampled(req)) return
// we don't support blocking at this point, so no results needed
waf.run({
persistent: {
[addresses.HTTP_INCOMING_RESPONSE_BODY]: body
}
}, req)
}
function onPassportVerify ({ credentials, user }) {

@@ -218,7 +236,46 @@ const store = storage.getStore()

const responseAnalyzedSet = new WeakSet()
function onResponseWriteHead ({ req, res, abortController, statusCode, responseHeaders }) {
// avoid "write after end" error
if (isBlocked(res)) {
abortController?.abort()
return
}
// avoid double waf call
if (responseAnalyzedSet.has(res)) {
return
}
const rootSpan = web.root(req)
if (!rootSpan) return
responseHeaders = Object.assign({}, responseHeaders)
delete responseHeaders['set-cookie']
const results = waf.run({
persistent: {
[addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + statusCode,
[addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders
}
}, req)
responseAnalyzedSet.add(res)
handleResults(results, req, res, rootSpan, abortController)
}
function onResponseSetHeader ({ res, abortController }) {
if (isBlocked(res)) {
abortController?.abort()
}
}
function handleResults (actions, req, res, rootSpan, abortController) {
if (!actions || !req || !res || !rootSpan || !abortController) return
if (actions.includes('block')) {
block(req, res, rootSpan, abortController)
const blockingAction = getBlockingAction(actions)
if (blockingAction) {
block(req, res, rootSpan, abortController, blockingAction)
}

@@ -235,2 +292,3 @@ }

graphql.disable()
rasp.disable()

@@ -247,3 +305,6 @@ remoteConfig.disableWafUpdate()

if (cookieParser.hasSubscribers) cookieParser.unsubscribe(onRequestCookieParser)
if (responseBody.hasSubscribers) responseBody.unsubscribe(onResponseBody)
if (passportVerify.hasSubscribers) passportVerify.unsubscribe(onPassportVerify)
if (responseWriteHead.hasSubscribers) responseWriteHead.unsubscribe(onResponseWriteHead)
if (responseSetHeader.hasSubscribers) responseSetHeader.unsubscribe(onResponseSetHeader)
}

@@ -250,0 +311,0 @@
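
// Hypothetical instrumentation side, sketched for context (not part of the
// diff): how a res.writeHead wrapper could cooperate with onResponseWriteHead
// above. The wrapper publishes an AbortController; when the subscriber aborts
// (the request was already blocked), the original writeHead is skipped,
// avoiding the "write after end" error mentioned above.
function wrapWriteHead (res, originalWriteHead, channel, req) {
  return function writeHead (statusCode, ...args) {
    const abortController = new AbortController()
    channel.publish({ req, res, abortController, statusCode, responseHeaders: res.getHeaders() })
    if (abortController.signal.aborted) return res
    return originalWriteHead.call(this, statusCode, ...args)
  }
}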

@@ -16,3 +16,3 @@ 'use strict'

if (tags && typeof tags === 'object') {
if (tags !== null && typeof tags === 'object') {
called = Object.entries(tags).some(([key, value]) => regexSdkEvent.test(key) && value === 'true')

@@ -19,0 +19,0 @@ }

@@ -9,2 +9,3 @@ 'use strict'

ASM_REQUEST_BLOCKING: 1n << 5n,
ASM_RESPONSE_BLOCKING: 1n << 6n,
ASM_USER_BLOCKING: 1n << 7n,

@@ -18,3 +19,5 @@ ASM_CUSTOM_RULES: 1n << 8n,

APM_TRACING_HTTP_HEADER_TAGS: 1n << 14n,
APM_TRACING_CUSTOM_TAGS: 1n << 15n
APM_TRACING_CUSTOM_TAGS: 1n << 15n,
APM_TRACING_ENABLED: 1n << 19n,
APM_TRACING_SAMPLE_RULES: 1n << 29n
}
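
// Illustrative sketch, not part of the diff: each capability is a bit
// position in a single bigint mask, so flags compose with bitwise OR and are
// tested with AND before the mask is reported to the agent (serialization
// details omitted here).
const mask = (1n << 5n) | (1n << 6n) | (1n << 19n)
const hasResponseBlocking = (mask & (1n << 6n)) !== 0n // true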

@@ -11,3 +11,3 @@ 'use strict'

function enable (config) {
function enable (config, appsec) {
rc = new RemoteConfigManager(config)

@@ -18,2 +18,4 @@ rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_CUSTOM_TAGS, true)

rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_SAMPLE_RATE, true)
rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_ENABLED, true)
rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_SAMPLE_RULES, true)

@@ -35,3 +37,3 @@ const activation = Activation.fromConfig(config)

if (activation === Activation.ONECLICK) {
enableOrDisableAppsec(action, rcConfig, config)
enableOrDisableAppsec(action, rcConfig, config, appsec)
}

@@ -46,3 +48,3 @@

function enableOrDisableAppsec (action, rcConfig, config) {
function enableOrDisableAppsec (action, rcConfig, config, appsec) {
if (typeof rcConfig.asm?.enabled === 'boolean') {

@@ -58,5 +60,5 @@ let shouldEnable

if (shouldEnable) {
require('..').enable(config)
appsec.enable(config)
} else {
require('..').disable()
appsec.disable()
}

@@ -67,3 +69,3 @@ }

function enableWafUpdate (appsecConfig) {
if (rc && appsecConfig && !appsecConfig.customRulesProvided) {
if (rc && appsecConfig && !appsecConfig.rules) {
// dirty require to make startup faster for serverless

@@ -78,2 +80,3 @@ const RuleManager = require('../rule_manager')

rc.updateCapabilities(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, true)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true)

@@ -100,2 +103,3 @@ rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true)

rc.updateCapabilities(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_RESPONSE_BLOCKING, false)
rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false)

@@ -102,0 +106,0 @@ rc.updateCapabilities(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false)

@@ -10,6 +10,10 @@ 'use strict'

updateWafRequestsMetricTags,
updateRaspRequestsMetricTags,
incrementWafUpdatesMetric,
incrementWafRequestsMetric
incrementWafRequestsMetric,
getRequestMetrics
} = require('./telemetry')
const zlib = require('zlib')
const { MANUAL_KEEP } = require('../../../../ext/tags')
const standalone = require('./standalone')

@@ -21,21 +25,37 @@ // default limiter, configurable with setRateLimit()

// the following header lists are ordered the same way the spec orders them; functionally it doesn't matter, but it makes them easier to compare
const contentHeaderList = [
'content-length',
'content-type',
'content-encoding',
'content-language',
'content-length',
'content-type'
'content-language'
]
const REQUEST_HEADERS_MAP = mapHeaderAndTags([
'accept',
'accept-encoding',
'accept-language',
'host',
const EVENT_HEADERS_MAP = mapHeaderAndTags([
...ipHeaderList,
'forwarded',
'user-agent',
'via',
...contentHeaderList,
'host',
'accept-encoding',
'accept-language'
], 'http.request.headers.')
const identificationHeaders = [
'x-amzn-trace-id',
'cloudfront-viewer-ja3-fingerprint',
'cf-ray',
'x-cloud-trace-context',
'x-appgw-trace-id',
'x-sigsci-requestid',
'x-sigsci-tags',
'akamai-user-risk'
]
...ipHeaderList,
...contentHeaderList
// these request headers are always collected - this breaks the expected spec order
const REQUEST_HEADERS_MAP = mapHeaderAndTags([
'content-type',
'user-agent',
'accept',
...identificationHeaders
], 'http.request.headers.')

@@ -81,3 +101,3 @@

metricsQueue.set('manual.keep', 'true')
metricsQueue.set(MANUAL_KEEP, 'true')

@@ -87,4 +107,3 @@ incrementWafInitMetric(wafVersion, rulesVersion)

function reportMetrics (metrics) {
// TODO: metrics should be incremental, there already is an RFC to report metrics
function reportMetrics (metrics, raspRuleType) {
const store = storage.getStore()

@@ -94,15 +113,10 @@ const rootSpan = store?.req && web.root(store.req)

if (metrics.duration) {
rootSpan.setTag('_dd.appsec.waf.duration', metrics.duration)
}
if (metrics.durationExt) {
rootSpan.setTag('_dd.appsec.waf.duration_ext', metrics.durationExt)
}
if (metrics.rulesVersion) {
rootSpan.setTag('_dd.appsec.event_rules.version', metrics.rulesVersion)
}
updateWafRequestsMetricTags(metrics, store.req)
if (raspRuleType) {
updateRaspRequestsMetricTags(metrics, store.req, raspRuleType)
} else {
updateWafRequestsMetricTags(metrics, store.req)
}
}

@@ -118,8 +132,10 @@

const newTags = filterHeaders(req.headers, REQUEST_HEADERS_MAP)
const newTags = {
'appsec.event': 'true'
}
newTags['appsec.event'] = 'true'
if (limiter.isAllowed()) {
newTags[MANUAL_KEEP] = 'true'
if (limiter.isAllowed()) {
newTags['manual.keep'] = 'true' // TODO: figure out how to keep appsec traces with sampling revamp
standalone.sample(rootSpan)
}

@@ -141,7 +157,2 @@

const ua = newTags['http.request.headers.user-agent']
if (ua) {
newTags['http.useragent'] = ua
}
newTags['network.client.ip'] = req.socket.remoteAddress

@@ -176,12 +187,41 @@

standalone.sample(rootSpan)
metricsQueue.clear()
}
const metrics = getRequestMetrics(req)
if (metrics?.duration) {
rootSpan.setTag('_dd.appsec.waf.duration', metrics.duration)
}
if (metrics?.durationExt) {
rootSpan.setTag('_dd.appsec.waf.duration_ext', metrics.durationExt)
}
if (metrics?.raspDuration) {
rootSpan.setTag('_dd.appsec.rasp.duration', metrics.raspDuration)
}
if (metrics?.raspDurationExt) {
rootSpan.setTag('_dd.appsec.rasp.duration_ext', metrics.raspDurationExt)
}
if (metrics?.raspEvalCount) {
rootSpan.setTag('_dd.appsec.rasp.rule.eval', metrics.raspEvalCount)
}
incrementWafRequestsMetric(req)
if (!rootSpan.context()._tags['appsec.event']) return
// collect some headers even when no attack is detected
const mandatoryTags = filterHeaders(req.headers, REQUEST_HEADERS_MAP)
rootSpan.addTags(mandatoryTags)
const tags = rootSpan.context()._tags
if (!shouldCollectEventHeaders(tags)) return
const newTags = filterHeaders(res.getHeaders(), RESPONSE_HEADERS_MAP)
Object.assign(newTags, filterHeaders(req.headers, EVENT_HEADERS_MAP))
if (req.route && typeof req.route.path === 'string') {
if (tags['appsec.event'] === 'true' && typeof req.route?.path === 'string') {
newTags['http.endpoint'] = req.route.path

@@ -193,2 +233,16 @@ }

function shouldCollectEventHeaders (tags = {}) {
if (tags['appsec.event'] === 'true') {
return true
}
for (const tagName of Object.keys(tags)) {
if (tagName.startsWith('appsec.events.')) {
return true
}
}
return false
}
function setRateLimit (rateLimit) {

@@ -195,0 +249,0 @@ limiter = new Limiter(rateLimit)

@@ -6,3 +6,2 @@ 'use strict'

const { ACKNOWLEDGED, ERROR } = require('./remote_config/apply_states')
const blocking = require('./blocking')

@@ -24,6 +23,2 @@ let defaultRules

waf.init(defaultRules, config)
if (defaultRules.actions) {
blocking.updateBlockingConfiguration(defaultRules.actions.find(action => action.id === 'block'))
}
}

@@ -73,6 +68,6 @@

} else {
if (file && file.rules && file.rules.length) {
const { version, metadata, rules } = file
if (file?.rules?.length) {
const { version, metadata, rules, processors, scanners } = file
newRuleset = { version, metadata, rules }
newRuleset = { version, metadata, rules, processors, scanners }
newRulesetId = id

@@ -84,26 +79,19 @@ }

} else if (product === 'ASM') {
let batchConfiguration = false
if (file && file.rules_override && file.rules_override.length) {
batchConfiguration = true
if (file?.rules_override?.length) {
newRulesOverride.set(id, file.rules_override)
}
if (file && file.exclusions && file.exclusions.length) {
batchConfiguration = true
if (file?.exclusions?.length) {
newExclusions.set(id, file.exclusions)
}
if (file && file.custom_rules && file.custom_rules.length) {
batchConfiguration = true
if (file?.custom_rules?.length) {
newCustomRules.set(id, file.custom_rules)
}
if (file && file.actions && file.actions.length) {
if (file?.actions?.length) {
newActions.set(id, file.actions)
}
// "actions" data is managed by tracer and not by waf
if (batchConfiguration) {
batch.add(item)
}
batch.add(item)
}

@@ -119,3 +107,5 @@ }

newExclusions.modified ||
newCustomRules.modified) {
newCustomRules.modified ||
newActions.modified
) {
const payload = newRuleset || {}

@@ -135,2 +125,5 @@

}
if (newActions.modified) {
payload.actions = concatArrays(newActions)
}

@@ -155,2 +148,5 @@ try {

}
if (newActions.modified) {
appliedActions = newActions
}
} catch (err) {

@@ -166,7 +162,2 @@ newApplyState = ERROR

}
if (newActions.modified) {
blocking.updateBlockingConfiguration(concatArrays(newActions).find(action => action.id === 'block'))
appliedActions = newActions
}
}

@@ -253,3 +244,2 @@

waf.destroy()
blocking.updateBlockingConfiguration(undefined)

@@ -256,0 +246,0 @@ defaultRules = undefined

@@ -7,2 +7,3 @@ 'use strict'

const { setUserTags } = require('./set_user')
const standalone = require('../standalone')

@@ -77,2 +78,4 @@ function trackUserLoginSuccessEvent (tracer, user, metadata) {

rootSpan.addTags(tags)
standalone.sample(rootSpan)
}

@@ -79,0 +82,0 @@

@@ -6,3 +6,3 @@ 'use strict'

const { getRootSpan } = require('./utils')
const { block } = require('../blocking')
const { block, getBlockingAction } = require('../blocking')
const { storage } = require('../../../../datadog-core')

@@ -14,6 +14,3 @@ const { setUserTags } = require('./set_user')

const actions = waf.run({ persistent: { [USER_ID]: user.id } })
if (!actions) return false
return actions.includes('block')
return !!getBlockingAction(actions)
}

@@ -20,0 +17,0 @@

@@ -8,2 +8,3 @@ 'use strict'

const DD_TELEMETRY_WAF_RESULT_TAGS = Symbol('_dd.appsec.telemetry.waf.result.tags')
const DD_TELEMETRY_REQUEST_METRICS = Symbol('_dd.appsec.telemetry.request.metrics')

@@ -30,6 +31,18 @@ const tags = {

function newStore () {
return {
[DD_TELEMETRY_REQUEST_METRICS]: {
duration: 0,
durationExt: 0,
raspDuration: 0,
raspDurationExt: 0,
raspEvalCount: 0
}
}
}
function getStore (req) {
let store = metricsStoreMap.get(req)
if (!store) {
store = {}
store = newStore()
metricsStoreMap.set(req, store)

@@ -56,5 +69,3 @@ }

function getOrCreateMetricTags (req, versionsTags) {
const store = getStore(req)
function getOrCreateMetricTags (store, versionsTags) {
let metricTags = store[DD_TELEMETRY_WAF_RESULT_TAGS]

@@ -74,5 +85,34 @@ if (!metricTags) {

function updateRaspRequestsMetricTags (metrics, req, raspRuleType) {
if (!req) return
const store = getStore(req)
// collected regardless of whether telemetry is enabled
addRaspRequestMetrics(store, metrics)
if (!enabled) return
const tags = { rule_type: raspRuleType, waf_version: metrics.wafVersion }
appsecMetrics.count('appsec.rasp.rule.eval', tags).inc(1)
if (metrics.wafTimeout) {
appsecMetrics.count('appsec.rasp.timeout', tags).inc(1)
}
if (metrics.ruleTriggered) {
appsecMetrics.count('appsec.rasp.rule.match', tags).inc(1)
}
}
function updateWafRequestsMetricTags (metrics, req) {
if (!req || !enabled) return
if (!req) return
const store = getStore(req)
// collected regardless of whether telemetry is enabled
addRequestMetrics(store, metrics)
if (!enabled) return
const versionsTags = getVersionsTags(metrics.wafVersion, metrics.rulesVersion)

@@ -82,3 +122,3 @@

const metricTags = getOrCreateMetricTags(req, versionsTags)
const metricTags = getOrCreateMetricTags(store, versionsTags)

@@ -129,2 +169,20 @@ const { blockTriggered, ruleTriggered, wafTimeout } = metrics

function addRequestMetrics (store, { duration, durationExt }) {
store[DD_TELEMETRY_REQUEST_METRICS].duration += duration || 0
store[DD_TELEMETRY_REQUEST_METRICS].durationExt += durationExt || 0
}
function addRaspRequestMetrics (store, { duration, durationExt }) {
store[DD_TELEMETRY_REQUEST_METRICS].raspDuration += duration || 0
store[DD_TELEMETRY_REQUEST_METRICS].raspDurationExt += durationExt || 0
store[DD_TELEMETRY_REQUEST_METRICS].raspEvalCount++
}
function getRequestMetrics (req) {
if (req) {
const store = getStore(req)
return store?.[DD_TELEMETRY_REQUEST_METRICS]
}
}
module.exports = {

@@ -135,5 +193,8 @@ enable,

updateWafRequestsMetricTags,
updateRaspRequestsMetricTags,
incrementWafInitMetric,
incrementWafUpdatesMetric,
incrementWafRequestsMetric
incrementWafRequestsMetric,
getRequestMetrics
}

@@ -49,3 +49,3 @@ 'use strict'

function run (data, req) {
function run (data, req, raspRuleType) {
if (!req) {

@@ -63,3 +63,3 @@ const store = storage.getStore()

return wafContext.run(data)
return wafContext.run(data, raspRuleType)
}

@@ -66,0 +66,0 @@

@@ -6,2 +6,3 @@ 'use strict'

const addresses = require('../addresses')
const { getBlockingAction } = require('../blocking')

@@ -22,3 +23,3 @@ // TODO: remove once ephemeral addresses are implemented

run ({ persistent, ephemeral }) {
run ({ persistent, ephemeral }, raspRuleType) {
const payload = {}

@@ -29,3 +30,3 @@ let payloadHasData = false

if (persistent && typeof persistent === 'object') {
if (persistent !== null && typeof persistent === 'object') {
// TODO: possible optimization: only send params that haven't already been sent with same value to this wafContext

@@ -45,3 +46,3 @@ for (const key of Object.keys(persistent)) {

if (Object.keys(inputs).length) {
payload['persistent'] = inputs
payload.persistent = inputs
payloadHasData = true

@@ -51,3 +52,3 @@ }

if (ephemeral && Object.keys(ephemeral).length) {
payload['ephemeral'] = ephemeral
payload.ephemeral = ephemeral
payloadHasData = true

@@ -68,4 +69,5 @@ }

const ruleTriggered = !!result.events?.length
const blockTriggered = result.actions?.includes('block')
const blockTriggered = !!getBlockingAction(result.actions)
Reporter.reportMetrics({

@@ -79,3 +81,3 @@ duration: result.totalRuntime / 1e3,

wafTimeout: result.timeout
})
}, raspRuleType)

@@ -82,0 +84,0 @@ if (ruleTriggered) {

@@ -8,6 +8,19 @@ 'use strict'

const AGENT_EVP_PROXY_PATH = '/evp_proxy/v2'
const AGENT_EVP_PROXY_PATH_PREFIX = '/evp_proxy/v'
const AGENT_EVP_PROXY_PATH_REGEX = /\/evp_proxy\/v(\d+)\/?/
function getIsEvpCompatible (err, agentInfo) {
return !err && agentInfo.endpoints.some(url => url.includes(AGENT_EVP_PROXY_PATH))
function getLatestEvpProxyVersion (err, agentInfo) {
if (err) {
return 0
}
return agentInfo.endpoints.reduce((acc, endpoint) => {
if (endpoint.includes(AGENT_EVP_PROXY_PATH_PREFIX)) {
const version = Number(endpoint.replace(AGENT_EVP_PROXY_PATH_REGEX, '$1'))
if (isNaN(version)) {
return acc
}
return version > acc ? version : acc
}
return acc
}, 0)
}
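
// Illustrative sketch, not part of the diff: input/output of
// getLatestEvpProxyVersion for a typical agent /info response.
const agentInfo = { endpoints: ['/evp_proxy/v2/', '/evp_proxy/v4/', '/debugger/v1/input'] }
getLatestEvpProxyVersion(null, agentInfo) // => 4 (gzip-compatible, see below)
getLatestEvpProxyVersion(new Error('agent unreachable'), agentInfo) // => 0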

@@ -29,13 +42,23 @@

this._isInitialized = true
const isEvpCompatible = getIsEvpCompatible(err, agentInfo)
let latestEvpProxyVersion = getLatestEvpProxyVersion(err, agentInfo)
const isEvpCompatible = latestEvpProxyVersion >= 2
const isGzipCompatible = latestEvpProxyVersion >= 4
// v3 does not work well with citestcycle, so we downgrade to v2
if (latestEvpProxyVersion === 3) {
latestEvpProxyVersion = 2
}
const evpProxyPrefix = `${AGENT_EVP_PROXY_PATH_PREFIX}${latestEvpProxyVersion}`
if (isEvpCompatible) {
this._isUsingEvpProxy = true
this.evpProxyPrefix = evpProxyPrefix
this._writer = new AgentlessWriter({
url: this._url,
tags,
evpProxyPrefix: AGENT_EVP_PROXY_PATH
evpProxyPrefix
})
this._coverageWriter = new CoverageWriter({
url: this._url,
evpProxyPrefix: AGENT_EVP_PROXY_PATH
evpProxyPrefix
})

@@ -56,2 +79,3 @@ } else {

this.exportUncodedCoverages()
this._isGzipCompatible = isGzipCompatible
})

@@ -58,0 +82,0 @@ }

@@ -15,4 +15,3 @@ 'use strict'

TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED,
getErrorTypeFromStatusCode
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED
} = require('../../../ci-visibility/telemetry')

@@ -60,6 +59,5 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(
TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
{ endpoint: 'code_coverage', errorType }
{ endpoint: 'code_coverage', statusCode }
)

@@ -66,0 +64,0 @@ incrementCountMetric(

@@ -24,2 +24,4 @@ 'use strict'

this._apiUrl = url || new URL(`https://api.${site}`)
// Agentless is always gzip compatible
this._isGzipCompatible = true
}

@@ -26,0 +28,0 @@

@@ -15,4 +15,3 @@ 'use strict'

TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED,
getErrorTypeFromStatusCode
TELEMETRY_ENDPOINT_PAYLOAD_DROPPED
} = require('../../../ci-visibility/telemetry')

@@ -61,6 +60,5 @@

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(
TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS,
{ endpoint: 'test_cycle', errorType }
{ endpoint: 'test_cycle', statusCode }
)

@@ -67,0 +65,0 @@ incrementCountMetric(

@@ -6,4 +6,5 @@ 'use strict'

const { sendGitMetadata: sendGitMetadataRequest } = require('./git/git_metadata')
const { getItrConfiguration: getItrConfigurationRequest } = require('../intelligent-test-runner/get-itr-configuration')
const { getLibraryConfiguration: getLibraryConfigurationRequest } = require('../requests/get-library-configuration')
const { getSkippableSuites: getSkippableSuitesRequest } = require('../intelligent-test-runner/get-skippable-suites')
const { getKnownTests: getKnownTestsRequest } = require('../early-flake-detection/get-known-tests')
const log = require('../../log')

@@ -80,7 +81,14 @@ const AgentInfoExporter = require('../../exporters/common/agent-info-exporter')

this._canUseCiVisProtocol &&
this._itrConfig &&
this._itrConfig.isSuitesSkippingEnabled)
this._libraryConfig?.isSuitesSkippingEnabled)
}
shouldRequestItrConfiguration () {
shouldRequestKnownTests () {
return !!(
this._config.isEarlyFlakeDetectionEnabled &&
this._canUseCiVisProtocol &&
this._libraryConfig?.isEarlyFlakeDetectionEnabled
)
}
shouldRequestLibraryConfiguration () {
return this._config.isIntelligentTestRunnerEnabled

@@ -97,2 +105,15 @@ }

getRequestConfiguration (testConfiguration) {
return {
url: this._getApiUrl(),
env: this._config.env,
service: this._config.service,
isEvpProxy: !!this._isUsingEvpProxy,
isGzipCompatible: this._isGzipCompatible,
evpProxyPrefix: this.evpProxyPrefix,
custom: getTestConfigurationTags(this._config.tags),
...testConfiguration
}
}
// We can't call the skippable endpoint until git upload has finished,

@@ -108,23 +129,21 @@ // hence the this._gitUploadPromise.then

}
const configuration = {
url: this._getApiUrl(),
site: this._config.site,
env: this._config.env,
service: this._config.service,
isEvpProxy: !!this._isUsingEvpProxy,
custom: getTestConfigurationTags(this._config.tags),
...testConfiguration
}
getSkippableSuitesRequest(configuration, callback)
getSkippableSuitesRequest(this.getRequestConfiguration(testConfiguration), callback)
})
}
getKnownTests (testConfiguration, callback) {
if (!this.shouldRequestKnownTests()) {
return callback(null)
}
getKnownTestsRequest(this.getRequestConfiguration(testConfiguration), callback)
}
/**
* We can't request ITR configuration until we know whether we can use the
* We can't request library configuration until we know whether we can use the
* CI Visibility Protocol, hence the this._canUseCiVisProtocol promise.
*/
getItrConfiguration (testConfiguration, callback) {
getLibraryConfiguration (testConfiguration, callback) {
const { repositoryUrl } = testConfiguration
this.sendGitMetadata(repositoryUrl)
if (!this.shouldRequestItrConfiguration()) {
if (!this.shouldRequestLibraryConfiguration()) {
return callback(null, {})

@@ -136,20 +155,15 @@ }

}
const configuration = {
url: this._getApiUrl(),
env: this._config.env,
service: this._config.service,
isEvpProxy: !!this._isUsingEvpProxy,
custom: getTestConfigurationTags(this._config.tags),
...testConfiguration
}
getItrConfigurationRequest(configuration, (err, itrConfig) => {
const configuration = this.getRequestConfiguration(testConfiguration)
getLibraryConfigurationRequest(configuration, (err, libraryConfig) => {
/**
* **Important**: this._itrConfig remains empty in testing frameworks
* where the tests run in a subprocess, because `getItrConfiguration` is called only once.
* **Important**: this._libraryConfig remains empty in testing frameworks
* where the tests run in a subprocess, like Jest,
* because `getLibraryConfiguration` is called only once in the main process.
*/
this._itrConfig = itrConfig
this._libraryConfig = this.filterConfiguration(libraryConfig)
if (err) {
callback(err, {})
} else if (itrConfig?.requireGit) {
} else if (libraryConfig?.requireGit) {
// If the backend requires git, we'll wait for the upload to finish and request settings again

@@ -160,9 +174,9 @@ this._gitUploadPromise.then(gitUploadError => {

}
getItrConfigurationRequest(configuration, (err, finalItrConfig) => {
this._itrConfig = finalItrConfig
callback(err, finalItrConfig)
getLibraryConfigurationRequest(configuration, (err, finalLibraryConfig) => {
this._libraryConfig = this.filterConfiguration(finalLibraryConfig)
callback(err, this._libraryConfig)
})
})
} else {
callback(null, itrConfig)
callback(null, this._libraryConfig)
}

@@ -173,2 +187,29 @@ })

// Takes into account potential kill switches
filterConfiguration (remoteConfiguration) {
if (!remoteConfiguration) {
return {}
}
const {
isCodeCoverageEnabled,
isSuitesSkippingEnabled,
isItrEnabled,
requireGit,
isEarlyFlakeDetectionEnabled,
earlyFlakeDetectionNumRetries,
earlyFlakeDetectionFaultyThreshold,
isFlakyTestRetriesEnabled
} = remoteConfiguration
return {
isCodeCoverageEnabled,
isSuitesSkippingEnabled,
isItrEnabled,
requireGit,
isEarlyFlakeDetectionEnabled: isEarlyFlakeDetectionEnabled && this._config.isEarlyFlakeDetectionEnabled,
earlyFlakeDetectionNumRetries,
earlyFlakeDetectionFaultyThreshold,
isFlakyTestRetriesEnabled
}
}
sendGitMetadata (repositoryUrl) {

@@ -182,10 +223,15 @@ if (!this._config.isGitUploadEnabled) {

}
sendGitMetadataRequest(this._getApiUrl(), !!this._isUsingEvpProxy, repositoryUrl, (err) => {
if (err) {
log.error(`Error uploading git metadata: ${err.message}`)
} else {
log.debug('Successfully uploaded git metadata')
sendGitMetadataRequest(
this._getApiUrl(),
{ isEvpProxy: !!this._isUsingEvpProxy, evpProxyPrefix: this.evpProxyPrefix },
repositoryUrl,
(err) => {
if (err) {
log.error(`Error uploading git metadata: ${err.message}`)
} else {
log.debug('Successfully uploaded git metadata')
}
this._resolveGit(err)
}
this._resolveGit(err)
})
)
})

@@ -192,0 +238,0 @@ }

@@ -1,2 +0,1 @@

const fs = require('fs')

@@ -15,3 +14,4 @@ const path = require('path')

isShallowRepository,
unshallowRepository
unshallowRepository,
isGitAvailable
} = require('../../../plugins/util/git')

@@ -29,4 +29,3 @@

TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS,
TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES
} = require('../../../ci-visibility/telemetry')

@@ -65,3 +64,3 @@

*/
function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, callback) {
function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy, evpProxyPrefix }, callback) {
const commonOptions = getCommonRequestOptions(url)

@@ -79,3 +78,3 @@

if (isEvpProxy) {
options.path = '/evp_proxy/v2/api/v2/git/repository/search_commits'
options.path = `${evpProxyPrefix}/api/v2/git/repository/search_commits`
options.headers['X-Datadog-EVP-Subdomain'] = 'api'

@@ -100,4 +99,3 @@ delete options.headers['dd-api-key']

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { statusCode })
const error = new Error(`Error fetching commits to exclude: ${err.message}`)

@@ -123,2 +121,6 @@ return callback(error)

if (commitsToUpload === null) {
return callback(new Error('git rev-list failed'))
}
callback(null, commitsToUpload)

@@ -131,3 +133,3 @@ })

*/
function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, headCommit }, callback) {
function uploadPackFile ({ url, isEvpProxy, evpProxyPrefix, packFileToUpload, repositoryUrl, headCommit }, callback) {
const form = new FormData()

@@ -172,3 +174,3 @@

if (isEvpProxy) {
options.path = '/evp_proxy/v2/api/v2/git/repository/packfile'
options.path = `${evpProxyPrefix}/api/v2/git/repository/packfile`
options.headers['X-Datadog-EVP-Subdomain'] = 'api'

@@ -186,4 +188,3 @@ delete options.headers['dd-api-key']

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, { statusCode })
const error = new Error(`Could not upload packfiles: status code ${statusCode}: ${err.message}`)

@@ -199,2 +200,3 @@ return callback(error, uploadSize)

isEvpProxy,
evpProxyPrefix,
commitsToUpload,

@@ -229,2 +231,3 @@ repositoryUrl,

isEvpProxy,
evpProxyPrefix,
repositoryUrl,

@@ -242,2 +245,3 @@ headCommit

isEvpProxy,
evpProxyPrefix,
repositoryUrl,

@@ -253,3 +257,6 @@ headCommit

*/
function sendGitMetadata (url, isEvpProxy, configRepositoryUrl, callback) {
function sendGitMetadata (url, { isEvpProxy, evpProxyPrefix }, configRepositoryUrl, callback) {
if (!isGitAvailable()) {
return callback(new Error('Git is not available'))
}
let repositoryUrl = configRepositoryUrl

@@ -266,5 +273,4 @@ if (!repositoryUrl) {

const latestCommits = getLatestCommits()
let latestCommits = getLatestCommits()
log.debug(`There were ${latestCommits.length} commits in the last month.`)
const [headCommit] = latestCommits

@@ -283,3 +289,11 @@ const getOnFinishGetCommitsToUpload = (hasCheckedShallow) => (err, commitsToUpload) => {

if (hasCheckedShallow || !isShallowRepository()) {
return generateAndUploadPackFiles({ url, isEvpProxy, commitsToUpload, repositoryUrl, headCommit }, callback)
const [headCommit] = latestCommits
return generateAndUploadPackFiles({
url,
isEvpProxy,
evpProxyPrefix,
commitsToUpload,
repositoryUrl,
headCommit
}, callback)
}

@@ -289,6 +303,21 @@ // Otherwise we unshallow and get commits to upload again

unshallowRepository()
getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(true))
// The latest commits change after unshallowing
latestCommits = getLatestCommits()
getCommitsToUpload({
url,
repositoryUrl,
latestCommits,
isEvpProxy,
evpProxyPrefix
}, getOnFinishGetCommitsToUpload(true))
}
getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(false))
getCommitsToUpload({
url,
repositoryUrl,
latestCommits,
isEvpProxy,
evpProxyPrefix
}, getOnFinishGetCommitsToUpload(false))
}
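// A condensed sketch of the control flow above:
//   1. getCommitsToUpload runs with getOnFinishGetCommitsToUpload(false)
//   2. if the repository is not shallow (or was already unshallowed),
//      generateAndUploadPackFiles runs with the current headCommit
//   3. otherwise unshallowRepository() runs, latestCommits is refreshed
//      (the commit list changes after unshallowing), and step 1 repeats
//      with hasCheckedShallow = true, so step 2 then always uploads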

@@ -295,0 +324,0 @@

@@ -11,4 +11,3 @@ const request = require('../../exporters/common/request')

TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS,
TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES
} = require('../../ci-visibility/telemetry')

@@ -19,2 +18,4 @@

isEvpProxy,
evpProxyPrefix,
isGzipCompatible,
env,

@@ -42,4 +43,8 @@ service,

if (isGzipCompatible) {
options.headers['accept-encoding'] = 'gzip'
}
if (isEvpProxy) {
options.path = '/evp_proxy/v2/api/v2/ci/tests/skippable'
options.path = `${evpProxyPrefix}/api/v2/ci/tests/skippable`
options.headers['X-Datadog-EVP-Subdomain'] = 'api'

@@ -83,4 +88,3 @@ } else {

if (err) {
const errorType = getErrorTypeFromStatusCode(statusCode)
incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, { errorType })
incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, { statusCode })
done(err)

@@ -103,3 +107,4 @@ } else {

testLevel === 'test'
? TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS : TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES,
? TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS
: TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES,
{},

@@ -106,0 +111,0 @@ skippableSuites.length

@@ -13,3 +13,6 @@ const telemetryMetrics = require('../telemetry/metrics')

hasCodeOwners: 'has_code_owners',
isUnsupportedCIProvider: 'is_unsupported_ci'
isUnsupportedCIProvider: 'is_unsupported_ci',
isNew: 'is_new',
isRum: 'is_rum',
browserDriver: 'browser_driver'
}

@@ -21,2 +24,10 @@

return Object.keys(tagsDictionary).reduce((acc, tagKey) => {
if (tagKey === 'statusCode') {
const statusCode = tagsDictionary[tagKey]
if (isStatusCode400(statusCode)) {
acc.push(`status_code:${statusCode}`)
}
acc.push(`error_type:${getErrorTypeFromStatusCode(statusCode)}`)
return acc
}
const formattedTagKey = formattedTags[tagKey] || tagKey

@@ -41,2 +52,3 @@ if (tagsDictionary[tagKey] === true) {

// CI Visibility telemetry events
const TELEMETRY_TEST_SESSION = 'test_session'
const TELEMETRY_EVENT_CREATED = 'event_created'

@@ -80,3 +92,13 @@ const TELEMETRY_EVENT_FINISHED = 'event_finished'

const TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES = 'itr_skippable_tests.response_bytes'
// early flake detection
const TELEMETRY_KNOWN_TESTS = 'early_flake_detection.request'
const TELEMETRY_KNOWN_TESTS_MS = 'early_flake_detection.request_ms'
const TELEMETRY_KNOWN_TESTS_ERRORS = 'early_flake_detection.request_errors'
const TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS = 'early_flake_detection.response_tests'
const TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES = 'early_flake_detection.response_bytes'
function isStatusCode400 (statusCode) {
return statusCode >= 400 && statusCode < 500
}
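// A simplified, hypothetical rendering of the statusCode tag expansion
// shown earlier (the real reduce also remaps tag keys via formattedTags and
// handles boolean tags; the error_type string comes from
// getErrorTypeFromStatusCode, whose body is not shown in full here):
//
//   { statusCode: 404 }  // => ['status_code:404', 'error_type:<4xx error type>']
//   { statusCode: 500 }  // => ['error_type:<5xx error type>'] (no status_code tag: not a 4xx)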
function getErrorTypeFromStatusCode (statusCode) {

@@ -95,2 +117,3 @@ if (statusCode >= 400 && statusCode < 500) {

distributionMetric,
TELEMETRY_TEST_SESSION,
TELEMETRY_EVENT_CREATED,

@@ -134,3 +157,7 @@ TELEMETRY_EVENT_FINISHED,

TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES,
getErrorTypeFromStatusCode
TELEMETRY_KNOWN_TESTS,
TELEMETRY_KNOWN_TESTS_MS,
TELEMETRY_KNOWN_TESTS_ERRORS,
TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS,
TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES
}

@@ -13,2 +13,3 @@ const CiPlugin = require('../../plugins/ci_plugin')

}
constructor (...args) {

@@ -15,0 +16,0 @@ super(...args)

@@ -5,3 +5,3 @@ 'use strict'

const os = require('os')
const uuid = require('crypto-randomuuid')
const uuid = require('crypto-randomuuid') // we need to keep the old uuid dep because of cypress
const URL = require('url').URL

@@ -12,2 +12,5 @@ const log = require('./log')

const tagger = require('./tagger')
const get = require('../../datadog-core/src/utils/src/get')
const has = require('../../datadog-core/src/utils/src/has')
const set = require('../../datadog-core/src/utils/src/set')
const { isTrue, isFalse } = require('./util')

@@ -17,4 +20,115 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags')

const { updateConfig } = require('./telemetry')
const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless')
const telemetryMetrics = require('./telemetry/metrics')
const { getIsGCPFunction, getIsAzureFunction } = require('./serverless')
const { ORIGIN_KEY } = require('./constants')
const tracerMetrics = telemetryMetrics.manager.namespace('tracers')
const telemetryCounters = {
'otel.env.hiding': {},
'otel.env.invalid': {}
}
function getCounter (event, ddVar, otelVar) {
const counters = telemetryCounters[event]
const tags = []
const ddVarPrefix = 'config_datadog:'
const otelVarPrefix = 'config_opentelemetry:'
if (ddVar) {
ddVar = ddVarPrefix + ddVar.toLowerCase()
tags.push(ddVar)
}
if (otelVar) {
otelVar = otelVarPrefix + otelVar.toLowerCase()
tags.push(otelVar)
}
if (!(otelVar in counters)) counters[otelVar] = {}
const counter = tracerMetrics.count(event, tags)
counters[otelVar][ddVar] = counter
return counter
}
const otelDdEnvMapping = {
OTEL_LOG_LEVEL: 'DD_TRACE_LOG_LEVEL',
OTEL_PROPAGATORS: 'DD_TRACE_PROPAGATION_STYLE',
OTEL_SERVICE_NAME: 'DD_SERVICE',
OTEL_TRACES_SAMPLER: 'DD_TRACE_SAMPLE_RATE',
OTEL_TRACES_SAMPLER_ARG: 'DD_TRACE_SAMPLE_RATE',
OTEL_TRACES_EXPORTER: 'DD_TRACE_ENABLED',
OTEL_METRICS_EXPORTER: 'DD_RUNTIME_METRICS_ENABLED',
OTEL_RESOURCE_ATTRIBUTES: 'DD_TAGS',
OTEL_SDK_DISABLED: 'DD_TRACE_OTEL_ENABLED',
OTEL_LOGS_EXPORTER: undefined
}
const VALID_PROPAGATION_STYLES = new Set(['datadog', 'tracecontext', 'b3', 'b3 single header', 'none'])
const VALID_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error'])
function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) {
const OTEL_TRACES_SAMPLER_MAPPING = {
always_on: '1.0',
always_off: '0.0',
traceidratio: otelTracesSamplerArg,
parentbased_always_on: '1.0',
parentbased_always_off: '0.0',
parentbased_traceidratio: otelTracesSamplerArg
}
return OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler]
}
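// Hypothetical examples of the OTEL sampler mapping above:
//   getFromOtelSamplerMap('always_on')              // => '1.0'
//   getFromOtelSamplerMap('parentbased_always_off') // => '0.0'
//   getFromOtelSamplerMap('traceidratio', '0.25')   // => '0.25'
//   getFromOtelSamplerMap('not_a_sampler')          // => undefined (invalid)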
function validateOtelPropagators (propagators) {
if (!process.env.PROPAGATION_STYLE_EXTRACT &&
!process.env.PROPAGATION_STYLE_INJECT &&
!process.env.DD_TRACE_PROPAGATION_STYLE &&
process.env.OTEL_PROPAGATORS) {
for (const style in propagators) {
if (!VALID_PROPAGATION_STYLES.has(style)) {
log.warn('unexpected value for OTEL_PROPAGATORS environment variable')
getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc()
}
}
}
}
function validateEnvVarType (envVar) {
const value = process.env[envVar]
switch (envVar) {
case 'OTEL_LOG_LEVEL':
return VALID_LOG_LEVELS.has(value)
case 'OTEL_PROPAGATORS':
case 'OTEL_RESOURCE_ATTRIBUTES':
case 'OTEL_SERVICE_NAME':
return typeof value === 'string'
case 'OTEL_TRACES_SAMPLER':
return getFromOtelSamplerMap(value, process.env.OTEL_TRACES_SAMPLER_ARG) !== undefined
case 'OTEL_TRACES_SAMPLER_ARG':
return !isNaN(parseFloat(value))
case 'OTEL_SDK_DISABLED':
return value.toLowerCase() === 'true' || value.toLowerCase() === 'false'
case 'OTEL_TRACES_EXPORTER':
case 'OTEL_METRICS_EXPORTER':
case 'OTEL_LOGS_EXPORTER':
return value.toLowerCase() === 'none'
default:
return false
}
}
function checkIfBothOtelAndDdEnvVarSet () {
for (const [otelEnvVar, ddEnvVar] of Object.entries(otelDdEnvMapping)) {
if (ddEnvVar && process.env[ddEnvVar] && process.env[otelEnvVar]) {
log.warn(`both ${ddEnvVar} and ${otelEnvVar} environment variables are set`)
getCounter('otel.env.hiding', ddEnvVar, otelEnvVar).inc()
}
if (process.env[otelEnvVar] && !validateEnvVarType(otelEnvVar)) {
log.warn(`unexpected value for ${otelEnvVar} environment variable`)
getCounter('otel.env.invalid', ddEnvVar, otelEnvVar).inc()
}
}
}
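// A hypothetical scenario for the check above: with both DD_SERVICE and
// OTEL_SERVICE_NAME set, the loop logs
//   "both DD_SERVICE and OTEL_SERVICE_NAME environment variables are set"
// and increments otel.env.hiding with the tags
//   ['config_datadog:dd_service', 'config_opentelemetry:otel_service_name'].
// OTEL_SERVICE_NAME itself passes validateEnvVarType (any string is valid),
// so no otel.env.invalid count is recorded in this case.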
const fromEntries = Object.fromEntries || (entries =>

@@ -25,2 +139,7 @@ entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {}))

const qsRegex = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:"|%22)(?:%2[^2]|%[^2]|[^"%])+(?:"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}'
// eslint-disable-next-line max-len
const defaultWafObfuscatorKeyRegex = '(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt'
// eslint-disable-next-line max-len
const defaultWafObfuscatorValueRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}'
const runtimeId = uuid()

@@ -71,5 +190,5 @@ function maybeFile (filepath) {

function propagationStyle (key, option, defaultValue) {
function propagationStyle (key, option) {
// Extract by key if in object-form value
if (typeof option === 'object' && !Array.isArray(option)) {
if (option !== null && typeof option === 'object' && !Array.isArray(option)) {
option = option[key]

@@ -82,3 +201,3 @@ }

// If it's not an array but not undefined there's something wrong with the input
if (typeof option !== 'undefined') {
if (option !== undefined) {
log.warn('Unexpected input for config.tracePropagationStyle')

@@ -89,4 +208,5 @@ }

const envKey = `DD_TRACE_PROPAGATION_STYLE_${key.toUpperCase()}`
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE)
if (typeof envVar !== 'undefined') {
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE, process.env.OTEL_PROPAGATORS)
if (envVar !== undefined) {
return envVar.split(',')

@@ -96,111 +216,33 @@ .filter(v => v !== '')

}
}
return defaultValue
function reformatSpanSamplingRules (rules) {
if (!rules) return rules
return rules.map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate',
max_per_second: 'maxPerSecond'
})
})
}
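// A hypothetical example of the remapping above, assuming remapify renames
// the listed keys:
//   reformatSpanSamplingRules([{ service: 'web', sample_rate: 0.5, max_per_second: 10 }])
//   // => [{ service: 'web', sampleRate: 0.5, maxPerSecond: 10 }]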
class Config {
constructor (options) {
options = options || {}
constructor (options = {}) {
options = {
...options,
appsec: options.appsec != null ? options.appsec : options.experimental?.appsec,
iast: options.iast != null ? options.iast : options.experimental?.iast
}
// Configure the logger first so it can be used to warn about other configs
this.debug = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
false
))
this.logger = options.logger
this.logLevel = coalesce(
options.logLevel,
process.env.DD_TRACE_LOG_LEVEL,
'debug'
)
const logConfig = log.getConfig()
this.debug = logConfig.enabled
this.logger = coalesce(options.logger, logConfig.logger)
this.logLevel = coalesce(options.logLevel, logConfig.logLevel)
log.use(this.logger)
log.toggle(this.debug, this.logLevel, this)
log.toggle(this.debug, this.logLevel)
const DD_TRACING_ENABLED = coalesce(
process.env.DD_TRACING_ENABLED,
true
)
const DD_PROFILING_ENABLED = coalesce(
options.profiling, // TODO: remove when enabled by default
process.env.DD_EXPERIMENTAL_PROFILING_ENABLED,
process.env.DD_PROFILING_ENABLED,
false
)
const DD_PROFILING_EXPORTERS = coalesce(
process.env.DD_PROFILING_EXPORTERS,
'agent'
)
const DD_PROFILING_SOURCE_MAP = process.env.DD_PROFILING_SOURCE_MAP
const DD_RUNTIME_METRICS_ENABLED = coalesce(
options.runtimeMetrics, // TODO: remove when enabled by default
process.env.DD_RUNTIME_METRICS_ENABLED,
false
)
const DD_DBM_PROPAGATION_MODE = coalesce(
options.dbmPropagationMode,
process.env.DD_DBM_PROPAGATION_MODE,
'disabled'
)
const DD_DATA_STREAMS_ENABLED = coalesce(
options.dsmEnabled,
process.env.DD_DATA_STREAMS_ENABLED,
false
)
const DD_AGENT_HOST = coalesce(
options.hostname,
process.env.DD_AGENT_HOST,
process.env.DD_TRACE_AGENT_HOSTNAME,
'127.0.0.1'
)
const DD_TRACE_AGENT_PORT = coalesce(
options.port,
process.env.DD_TRACE_AGENT_PORT,
'8126'
)
const DD_TRACE_AGENT_URL = coalesce(
options.url,
process.env.DD_TRACE_AGENT_URL,
process.env.DD_TRACE_URL,
null
)
const DD_IS_CIVISIBILITY = coalesce(
options.isCiVisibility,
false
)
const DD_CIVISIBILITY_AGENTLESS_URL = process.env.DD_CIVISIBILITY_AGENTLESS_URL
checkIfBothOtelAndDdEnvVarSet()
const DD_CIVISIBILITY_ITR_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_ITR_ENABLED,
true
)
const DD_CIVISIBILITY_MANUAL_API_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED,
false
)
const DD_TRACE_MEMCACHED_COMMAND_ENABLED = coalesce(
process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED,
false
)
const DD_SERVICE_MAPPING = coalesce(
options.serviceMapping,
process.env.DD_SERVICE_MAPPING ? fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
) : {}
)
const DD_TRACE_STARTUP_LOGS = coalesce(
options.startupLogs,
process.env.DD_TRACE_STARTUP_LOGS,
false
)
const DD_OPENAI_LOGS_ENABLED = coalesce(
options.openAiLogsEnabled,
process.env.DD_OPENAI_LOGS_ENABLED,
false
)
const DD_API_KEY = coalesce(

@@ -211,63 +253,2 @@ process.env.DATADOG_API_KEY,

const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
const isGCPFunction = getIsGCPFunction()
const isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
const inServerlessEnvironment = inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan
const DD_INSTRUMENTATION_TELEMETRY_ENABLED = coalesce(
process.env.DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility
process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs
!inServerlessEnvironment
)
const DD_TELEMETRY_HEARTBEAT_INTERVAL = process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL
? Math.floor(parseFloat(process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL) * 1000)
: 60000
const DD_OPENAI_SPAN_CHAR_LIMIT = process.env.DD_OPENAI_SPAN_CHAR_LIMIT
? parseInt(process.env.DD_OPENAI_SPAN_CHAR_LIMIT)
: 128
const DD_TELEMETRY_DEBUG = coalesce(
process.env.DD_TELEMETRY_DEBUG,
false
)
const DD_TELEMETRY_METRICS_ENABLED = coalesce(
process.env.DD_TELEMETRY_METRICS_ENABLED,
true
)
const DD_TRACE_AGENT_PROTOCOL_VERSION = coalesce(
options.protocolVersion,
process.env.DD_TRACE_AGENT_PROTOCOL_VERSION,
'0.4'
)
const DD_TRACE_PARTIAL_FLUSH_MIN_SPANS = coalesce(
parseInt(options.flushMinSpans),
parseInt(process.env.DD_TRACE_PARTIAL_FLUSH_MIN_SPANS),
1000
)
const DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP = coalesce(
process.env.DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
qsRegex
)
const DD_TRACE_CLIENT_IP_ENABLED = coalesce(
options.clientIpEnabled,
process.env.DD_TRACE_CLIENT_IP_ENABLED && isTrue(process.env.DD_TRACE_CLIENT_IP_ENABLED),
false
)
const DD_TRACE_CLIENT_IP_HEADER = coalesce(
options.clientIpHeader,
process.env.DD_TRACE_CLIENT_IP_HEADER,
null
)
// TODO: Remove the experimental env vars as a major?
const DD_TRACE_B3_ENABLED = coalesce(
options.experimental && options.experimental.b3,
process.env.DD_TRACE_EXPERIMENTAL_B3_ENABLED,
false
)
const defaultPropagationStyle = ['datadog', 'tracecontext']
if (isTrue(DD_TRACE_B3_ENABLED)) {
defaultPropagationStyle.push('b3')
defaultPropagationStyle.push('b3 single header')
}
if (process.env.DD_TRACE_PROPAGATION_STYLE && (

@@ -283,253 +264,18 @@ process.env.DD_TRACE_PROPAGATION_STYLE_INJECT ||

}
const DD_TRACE_PROPAGATION_STYLE_INJECT = propagationStyle(
const PROPAGATION_STYLE_INJECT = propagationStyle(
'inject',
options.tracePropagationStyle,
defaultPropagationStyle
this._getDefaultPropagationStyle(options)
)
const DD_TRACE_PROPAGATION_STYLE_EXTRACT = propagationStyle(
'extract',
options.tracePropagationStyle,
defaultPropagationStyle
)
const DD_TRACE_PROPAGATION_EXTRACT_FIRST = coalesce(
process.env.DD_TRACE_PROPAGATION_EXTRACT_FIRST,
false
)
const DD_TRACE_RUNTIME_ID_ENABLED = coalesce(
options.experimental && options.experimental.runtimeId,
process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED,
false
)
const DD_TRACE_EXPORTER = coalesce(
options.experimental && options.experimental.exporter,
process.env.DD_TRACE_EXPERIMENTAL_EXPORTER
)
const DD_TRACE_GET_RUM_DATA_ENABLED = coalesce(
options.experimental && options.experimental.enableGetRumData,
process.env.DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED,
false
)
const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion(
coalesce(
options.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
)
)
const DD_TRACE_PEER_SERVICE_MAPPING = coalesce(
options.peerServiceMapping,
process.env.DD_TRACE_PEER_SERVICE_MAPPING ? fromEntries(
process.env.DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
) : {}
)
const peerServiceSet = (
options.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
options.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
)
validateOtelPropagators(PROPAGATION_STYLE_INJECT)
const DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = (
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0'
// In v0, peer service is computed only if it is explicitly set to true
? peerServiceSet && isTrue(peerServiceValue)
// In >v0, peer service is false only if it is explicitly set to false
: (peerServiceSet ? !isFalse(peerServiceValue) : true)
)
const DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED = coalesce(
options.spanRemoveIntegrationFromService,
isTrue(process.env.DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
)
const DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH = coalesce(
process.env.DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH,
'512'
)
const DD_TRACE_STATS_COMPUTATION_ENABLED = coalesce(
options.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
isGCPFunction || isAzureFunctionConsumptionPlan
)
// the tracer generates 128 bit IDs by default as of v5
const DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = coalesce(
options.traceId128BitGenerationEnabled,
process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
true
)
const DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = coalesce(
options.traceId128BitLoggingEnabled,
process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED,
false
)
let appsec = options.appsec != null ? options.appsec : options.experimental && options.experimental.appsec
if (typeof appsec === 'boolean') {
appsec = {
enabled: appsec
if (typeof options.appsec === 'boolean') {
options.appsec = {
enabled: options.appsec
}
} else if (appsec == null) {
appsec = {}
} else if (options.appsec == null) {
options.appsec = {}
}
const DD_APPSEC_ENABLED = coalesce(
appsec.enabled,
process.env.DD_APPSEC_ENABLED && isTrue(process.env.DD_APPSEC_ENABLED)
)
const DD_APPSEC_RULES = coalesce(
appsec.rules,
process.env.DD_APPSEC_RULES
)
const DD_APPSEC_TRACE_RATE_LIMIT = coalesce(
parseInt(appsec.rateLimit),
parseInt(process.env.DD_APPSEC_TRACE_RATE_LIMIT),
100
)
const DD_APPSEC_WAF_TIMEOUT = coalesce(
parseInt(appsec.wafTimeout),
parseInt(process.env.DD_APPSEC_WAF_TIMEOUT),
5e3 // µs
)
const DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP = coalesce(
appsec.obfuscatorKeyRegex,
process.env.DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
`(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?(?:id|key|se\
cret)|sign(?:ed|ature)|bearer|authorization`
)
const DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = coalesce(
appsec.obfuscatorValueRegex,
process.env.DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
`(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|to\
ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\
\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?\
|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}`
)
const DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = coalesce(
maybeFile(appsec.blockedTemplateHtml),
maybeFile(process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML)
)
const DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = coalesce(
maybeFile(appsec.blockedTemplateJson),
maybeFile(process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON)
)
const DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = coalesce(
maybeFile(appsec.blockedTemplateGraphql),
maybeFile(process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON)
)
const DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = coalesce(
appsec.eventTracking && appsec.eventTracking.mode,
process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING,
'safe'
).toLowerCase()
const DD_EXPERIMENTAL_API_SECURITY_ENABLED = coalesce(
appsec?.apiSecurity?.enabled,
isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED),
false
)
const DD_API_SECURITY_REQUEST_SAMPLE_RATE = coalesce(
appsec?.apiSecurity?.requestSampling,
parseFloat(process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE),
0.1
)
const remoteConfigOptions = options.remoteConfig || {}
const DD_REMOTE_CONFIGURATION_ENABLED = coalesce(
process.env.DD_REMOTE_CONFIGURATION_ENABLED && isTrue(process.env.DD_REMOTE_CONFIGURATION_ENABLED),
!inServerlessEnvironment
)
const DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = coalesce(
parseFloat(remoteConfigOptions.pollInterval),
parseFloat(process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS),
5 // seconds
)
const iastOptions = options?.experimental?.iast
const DD_IAST_ENABLED = coalesce(
iastOptions &&
(iastOptions === true || iastOptions.enabled === true),
process.env.DD_IAST_ENABLED,
false
)
const DD_TELEMETRY_LOG_COLLECTION_ENABLED = coalesce(
process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED,
DD_IAST_ENABLED
)
const DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED = coalesce(
process.env.DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED,
true
)
const defaultIastRequestSampling = 30
const iastRequestSampling = coalesce(
parseInt(iastOptions?.requestSampling),
parseInt(process.env.DD_IAST_REQUEST_SAMPLING),
defaultIastRequestSampling
)
const DD_IAST_REQUEST_SAMPLING = iastRequestSampling < 0 ||
iastRequestSampling > 100 ? defaultIastRequestSampling : iastRequestSampling
const DD_IAST_MAX_CONCURRENT_REQUESTS = coalesce(
parseInt(iastOptions?.maxConcurrentRequests),
parseInt(process.env.DD_IAST_MAX_CONCURRENT_REQUESTS),
2
)
const DD_IAST_MAX_CONTEXT_OPERATIONS = coalesce(
parseInt(iastOptions?.maxContextOperations),
parseInt(process.env.DD_IAST_MAX_CONTEXT_OPERATIONS),
2
)
const DD_IAST_DEDUPLICATION_ENABLED = coalesce(
iastOptions?.deduplicationEnabled,
process.env.DD_IAST_DEDUPLICATION_ENABLED && isTrue(process.env.DD_IAST_DEDUPLICATION_ENABLED),
true
)
const DD_IAST_REDACTION_ENABLED = coalesce(
iastOptions?.redactionEnabled,
!isFalse(process.env.DD_IAST_REDACTION_ENABLED),
true
)
const DD_IAST_REDACTION_NAME_PATTERN = coalesce(
iastOptions?.redactionNamePattern,
process.env.DD_IAST_REDACTION_NAME_PATTERN,
null
)
const DD_IAST_REDACTION_VALUE_PATTERN = coalesce(
iastOptions?.redactionValuePattern,
process.env.DD_IAST_REDACTION_VALUE_PATTERN,
null
)
const DD_IAST_TELEMETRY_VERBOSITY = coalesce(
iastOptions?.telemetryVerbosity,
process.env.DD_IAST_TELEMETRY_VERBOSITY,
'INFORMATION'
)
const DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED,
true
)
const DD_TRACE_GIT_METADATA_ENABLED = coalesce(
process.env.DD_TRACE_GIT_METADATA_ENABLED,
true
)
// 0: disabled, 1: logging, 2: garbage collection + logging
const DD_TRACE_SPAN_LEAK_DEBUG = coalesce(
process.env.DD_TRACE_SPAN_LEAK_DEBUG,
0
)
const DD_INSTRUMENTATION_INSTALL_ID = coalesce(

@@ -548,151 +294,6 @@ process.env.DD_INSTRUMENTATION_INSTALL_ID,

const ingestion = options.ingestion || {}
const dogstatsd = coalesce(options.dogstatsd, {})
const sampler = {
rateLimit: coalesce(options.rateLimit, process.env.DD_TRACE_RATE_LIMIT, ingestion.rateLimit),
rules: coalesce(
options.samplingRules,
safeJsonParse(process.env.DD_TRACE_SAMPLING_RULES),
[]
).map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate'
})
}),
spanSamplingRules: coalesce(
options.spanSamplingRules,
safeJsonParse(maybeFile(process.env.DD_SPAN_SAMPLING_RULES_FILE)),
safeJsonParse(process.env.DD_SPAN_SAMPLING_RULES),
[]
).map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate',
max_per_second: 'maxPerSecond'
})
})
}
const defaultFlushInterval = inAWSLambda ? 0 : 2000
this.tracing = !isFalse(DD_TRACING_ENABLED)
this.dbmPropagationMode = DD_DBM_PROPAGATION_MODE
this.dsmEnabled = isTrue(DD_DATA_STREAMS_ENABLED)
this.openAiLogsEnabled = DD_OPENAI_LOGS_ENABLED
// TODO: refactor
this.apiKey = DD_API_KEY
this.url = DD_CIVISIBILITY_AGENTLESS_URL ? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(DD_TRACE_AGENT_URL, options)
this.site = coalesce(options.site, process.env.DD_SITE, 'datadoghq.com')
this.hostname = DD_AGENT_HOST || (this.url && this.url.hostname)
this.port = String(DD_TRACE_AGENT_PORT || (this.url && this.url.port))
this.flushInterval = coalesce(parseInt(options.flushInterval, 10), defaultFlushInterval)
this.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS
this.queryStringObfuscation = DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP
this.clientIpEnabled = DD_TRACE_CLIENT_IP_ENABLED
this.clientIpHeader = DD_TRACE_CLIENT_IP_HEADER
this.plugins = !!coalesce(options.plugins, true)
this.serviceMapping = DD_SERVICE_MAPPING
this.dogstatsd = {
hostname: coalesce(dogstatsd.hostname, process.env.DD_DOGSTATSD_HOSTNAME, this.hostname),
port: String(coalesce(dogstatsd.port, process.env.DD_DOGSTATSD_PORT, 8125))
}
this.runtimeMetrics = isTrue(DD_RUNTIME_METRICS_ENABLED)
this.tracePropagationStyle = {
inject: DD_TRACE_PROPAGATION_STYLE_INJECT,
extract: DD_TRACE_PROPAGATION_STYLE_EXTRACT
}
this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST)
this.experimental = {
runtimeId: isTrue(DD_TRACE_RUNTIME_ID_ENABLED),
exporter: DD_TRACE_EXPORTER,
enableGetRumData: isTrue(DD_TRACE_GET_RUM_DATA_ENABLED)
}
this.sampler = sampler
this.reportHostname = isTrue(coalesce(options.reportHostname, process.env.DD_TRACE_REPORT_HOSTNAME, false))
this.scope = process.env.DD_TRACE_SCOPE
this.profiling = {
enabled: isTrue(DD_PROFILING_ENABLED),
sourceMap: !isFalse(DD_PROFILING_SOURCE_MAP),
exporters: DD_PROFILING_EXPORTERS
}
this.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
this.spanComputePeerService = DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
this.spanRemoveIntegrationFromService = DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED
this.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING
this.lookup = options.lookup
this.startupLogs = isTrue(DD_TRACE_STARTUP_LOGS)
// Disabled for CI Visibility's agentless
this.telemetry = {
enabled: isTrue(DD_INSTRUMENTATION_TELEMETRY_ENABLED),
heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL,
debug: isTrue(DD_TELEMETRY_DEBUG),
logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED),
metrics: isTrue(DD_TELEMETRY_METRICS_ENABLED),
dependencyCollection: DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED
}
this.protocolVersion = DD_TRACE_AGENT_PROTOCOL_VERSION
this.tagsHeaderMaxLength = parseInt(DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH)
this.appsec = {
enabled: DD_APPSEC_ENABLED,
rules: DD_APPSEC_RULES,
customRulesProvided: !!DD_APPSEC_RULES,
rateLimit: DD_APPSEC_TRACE_RATE_LIMIT,
wafTimeout: DD_APPSEC_WAF_TIMEOUT,
obfuscatorKeyRegex: DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
obfuscatorValueRegex: DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
blockedTemplateHtml: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
blockedTemplateJson: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
blockedTemplateGraphql: DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,
eventTracking: {
enabled: ['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING),
mode: DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING
},
apiSecurity: {
enabled: DD_EXPERIMENTAL_API_SECURITY_ENABLED,
// Coerce value between 0 and 1
requestSampling: Math.min(1, Math.max(0, DD_API_SECURITY_REQUEST_SAMPLE_RATE))
}
}
this.remoteConfig = {
enabled: DD_REMOTE_CONFIGURATION_ENABLED,
pollInterval: DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS
}
this.iast = {
enabled: isTrue(DD_IAST_ENABLED),
requestSampling: DD_IAST_REQUEST_SAMPLING,
maxConcurrentRequests: DD_IAST_MAX_CONCURRENT_REQUESTS,
maxContextOperations: DD_IAST_MAX_CONTEXT_OPERATIONS,
deduplicationEnabled: DD_IAST_DEDUPLICATION_ENABLED,
redactionEnabled: DD_IAST_REDACTION_ENABLED,
redactionNamePattern: DD_IAST_REDACTION_NAME_PATTERN,
redactionValuePattern: DD_IAST_REDACTION_VALUE_PATTERN,
telemetryVerbosity: DD_IAST_TELEMETRY_VERBOSITY
}
this.isCiVisibility = isTrue(DD_IS_CIVISIBILITY)
this.isIntelligentTestRunnerEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_ITR_ENABLED)
this.isGitUploadEnabled = this.isCiVisibility &&
(this.isIntelligentTestRunnerEnabled && !isFalse(DD_CIVISIBILITY_GIT_UPLOAD_ENABLED))
this.gitMetadataEnabled = isTrue(DD_TRACE_GIT_METADATA_ENABLED)
this.isManualApiEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_MANUAL_API_ENABLED)
this.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT
// Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED)
this.stats = {
enabled: isTrue(DD_TRACE_STATS_COMPUTATION_ENABLED)
}
this.traceId128BitGenerationEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
this.traceId128BitLoggingEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
this.isGCPFunction = isGCPFunction
this.isAzureFunctionConsumptionPlan = isAzureFunctionConsumptionPlan
this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG)
// sent in telemetry event app-started
this.installSignature = {

@@ -707,2 +308,3 @@ id: DD_INSTRUMENTATION_INSTALL_ID,

this._applyOptions(options)
this._applyCalculated()
this._applyRemote({})

@@ -715,5 +317,11 @@ this._merge()

version: this.version,
'runtime-id': uuid()
'runtime-id': runtimeId
})
if (this.isCiVisibility) {
tagger.add(this.tags, {
[ORIGIN_KEY]: 'ciapp-test'
})
}
if (this.gitMetadataEnabled) {

@@ -761,5 +369,30 @@ this.repositoryUrl = removeUserSensitiveInfo(

// TODO: test
this._applyCalculated()
this._merge()
}
_getDefaultPropagationStyle (options) {
// TODO: Remove the experimental env vars as a major?
const DD_TRACE_B3_ENABLED = coalesce(
options.experimental && options.experimental.b3,
process.env.DD_TRACE_EXPERIMENTAL_B3_ENABLED,
false
)
const defaultPropagationStyle = ['datadog', 'tracecontext']
if (isTrue(DD_TRACE_B3_ENABLED)) {
defaultPropagationStyle.push('b3')
defaultPropagationStyle.push('b3 single header')
}
return defaultPropagationStyle
}
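// Hypothetical results for the helper above, assuming
// DD_TRACE_EXPERIMENTAL_B3_ENABLED is unset:
//   this._getDefaultPropagationStyle({})
//   // => ['datadog', 'tracecontext']
//   this._getDefaultPropagationStyle({ experimental: { b3: true } })
//   // => ['datadog', 'tracecontext', 'b3', 'b3 single header']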
_isInServerlessEnvironment () {
const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
const isGCPFunction = getIsGCPFunction()
const isAzureFunction = getIsAzureFunction()
return inAWSLambda || isGCPFunction || isAzureFunction
}
// for _merge to work, every config value must have a default value
_applyDefaults () {

@@ -780,11 +413,105 @@ const {

const defaults = this._defaults = {}
const defaults = setHiddenProperty(this, '_defaults', {})
this._setValue(defaults, 'appsec.apiSecurity.enabled', true)
this._setValue(defaults, 'appsec.apiSecurity.requestSampling', 0.1)
this._setValue(defaults, 'appsec.blockedTemplateGraphql', undefined)
this._setValue(defaults, 'appsec.blockedTemplateHtml', undefined)
this._setValue(defaults, 'appsec.blockedTemplateJson', undefined)
this._setValue(defaults, 'appsec.enabled', undefined)
this._setValue(defaults, 'appsec.eventTracking.enabled', true)
this._setValue(defaults, 'appsec.eventTracking.mode', 'safe')
this._setValue(defaults, 'appsec.obfuscatorKeyRegex', defaultWafObfuscatorKeyRegex)
this._setValue(defaults, 'appsec.obfuscatorValueRegex', defaultWafObfuscatorValueRegex)
this._setValue(defaults, 'appsec.rasp.enabled', true)
this._setValue(defaults, 'appsec.rateLimit', 100)
this._setValue(defaults, 'appsec.rules', undefined)
this._setValue(defaults, 'appsec.sca.enabled', null)
this._setValue(defaults, 'appsec.standalone.enabled', undefined)
this._setValue(defaults, 'appsec.stackTrace.enabled', true)
this._setValue(defaults, 'appsec.stackTrace.maxDepth', 32)
this._setValue(defaults, 'appsec.stackTrace.maxStackTraces', 2)
this._setValue(defaults, 'appsec.wafTimeout', 5e3) // µs
this._setValue(defaults, 'clientIpEnabled', false)
this._setValue(defaults, 'clientIpHeader', null)
this._setValue(defaults, 'dbmPropagationMode', 'disabled')
this._setValue(defaults, 'dogstatsd.hostname', '127.0.0.1')
this._setValue(defaults, 'dogstatsd.port', '8125')
this._setValue(defaults, 'dsmEnabled', false)
this._setValue(defaults, 'env', undefined)
this._setValue(defaults, 'experimental.enableGetRumData', false)
this._setValue(defaults, 'experimental.exporter', undefined)
this._setValue(defaults, 'experimental.runtimeId', false)
this._setValue(defaults, 'flushInterval', 2000)
this._setValue(defaults, 'flushMinSpans', 1000)
this._setValue(defaults, 'gitMetadataEnabled', true)
this._setValue(defaults, 'headerTags', [])
this._setValue(defaults, 'hostname', '127.0.0.1')
this._setValue(defaults, 'iast.deduplicationEnabled', true)
this._setValue(defaults, 'iast.enabled', false)
this._setValue(defaults, 'iast.maxConcurrentRequests', 2)
this._setValue(defaults, 'iast.maxContextOperations', 2)
this._setValue(defaults, 'iast.redactionEnabled', true)
this._setValue(defaults, 'iast.redactionNamePattern', null)
this._setValue(defaults, 'iast.redactionValuePattern', null)
this._setValue(defaults, 'iast.requestSampling', 30)
this._setValue(defaults, 'iast.telemetryVerbosity', 'INFORMATION')
this._setValue(defaults, 'injectionEnabled', [])
this._setValue(defaults, 'isAzureFunction', false)
this._setValue(defaults, 'isCiVisibility', false)
this._setValue(defaults, 'isEarlyFlakeDetectionEnabled', false)
this._setValue(defaults, 'isGCPFunction', false)
this._setValue(defaults, 'isGitUploadEnabled', false)
this._setValue(defaults, 'isIntelligentTestRunnerEnabled', false)
this._setValue(defaults, 'isManualApiEnabled', false)
this._setValue(defaults, 'logInjection', false)
this._setValue(defaults, 'lookup', undefined)
this._setValue(defaults, 'memcachedCommandEnabled', false)
this._setValue(defaults, 'openAiLogsEnabled', false)
this._setValue(defaults, 'openaiSpanCharLimit', 128)
this._setValue(defaults, 'peerServiceMapping', {})
this._setValue(defaults, 'plugins', true)
this._setValue(defaults, 'port', '8126')
this._setValue(defaults, 'profiling.enabled', undefined)
this._setValue(defaults, 'profiling.exporters', 'agent')
this._setValue(defaults, 'profiling.sourceMap', true)
this._setValue(defaults, 'profiling.longLivedThreshold', undefined)
this._setValue(defaults, 'protocolVersion', '0.4')
this._setValue(defaults, 'queryStringObfuscation', qsRegex)
this._setValue(defaults, 'remoteConfig.enabled', true)
this._setValue(defaults, 'remoteConfig.pollInterval', 5) // seconds
this._setValue(defaults, 'reportHostname', false)
this._setValue(defaults, 'runtimeMetrics', false)
this._setValue(defaults, 'sampleRate', undefined)
this._setValue(defaults, 'sampler.rateLimit', undefined)
this._setValue(defaults, 'sampler.rules', [])
this._setValue(defaults, 'sampler.spanSamplingRules', [])
this._setValue(defaults, 'scope', undefined)
this._setValue(defaults, 'service', service)
this._setValue(defaults, 'env', undefined)
this._setValue(defaults, 'serviceMapping', {})
this._setValue(defaults, 'site', 'datadoghq.com')
this._setValue(defaults, 'spanAttributeSchema', 'v0')
this._setValue(defaults, 'spanComputePeerService', false)
this._setValue(defaults, 'spanLeakDebug', 0)
this._setValue(defaults, 'spanRemoveIntegrationFromService', false)
this._setValue(defaults, 'startupLogs', false)
this._setValue(defaults, 'stats.enabled', false)
this._setValue(defaults, 'tags', {})
this._setValue(defaults, 'tagsHeaderMaxLength', 512)
this._setValue(defaults, 'telemetry.debug', false)
this._setValue(defaults, 'telemetry.dependencyCollection', true)
this._setValue(defaults, 'telemetry.enabled', true)
this._setValue(defaults, 'telemetry.heartbeatInterval', 60000)
this._setValue(defaults, 'telemetry.logCollection', false)
this._setValue(defaults, 'telemetry.metrics', true)
this._setValue(defaults, 'traceId128BitGenerationEnabled', true)
this._setValue(defaults, 'traceId128BitLoggingEnabled', false)
this._setValue(defaults, 'tracePropagationExtractFirst', false)
this._setValue(defaults, 'tracePropagationStyle.inject', ['datadog', 'tracecontext'])
this._setValue(defaults, 'tracePropagationStyle.extract', ['datadog', 'tracecontext'])
this._setValue(defaults, 'tracePropagationStyle.otelPropagators', false)
this._setValue(defaults, 'tracing', true)
this._setValue(defaults, 'url', undefined)
this._setValue(defaults, 'version', pkg.version)
this._setUnit(defaults, 'sampleRate', undefined)
this._setBoolean(defaults, 'logInjection', false)
this._setArray(defaults, 'headerTags', [])
this._setValue(defaults, 'tags', {})
this._setValue(defaults, 'instrumentation_config_id', undefined)
}

@@ -794,17 +521,112 @@

const {
AWS_LAMBDA_FUNCTION_NAME,
DD_AGENT_HOST,
DD_API_SECURITY_ENABLED,
DD_API_SECURITY_REQUEST_SAMPLE_RATE,
DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING,
DD_APPSEC_ENABLED,
DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
DD_APPSEC_MAX_STACK_TRACES,
DD_APPSEC_MAX_STACK_TRACE_DEPTH,
DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
DD_APPSEC_RULES,
DD_APPSEC_SCA_ENABLED,
DD_APPSEC_STACK_TRACE_ENABLED,
DD_APPSEC_RASP_ENABLED,
DD_APPSEC_TRACE_RATE_LIMIT,
DD_APPSEC_WAF_TIMEOUT,
DD_DATA_STREAMS_ENABLED,
DD_DBM_PROPAGATION_MODE,
DD_DOGSTATSD_HOSTNAME,
DD_DOGSTATSD_PORT,
DD_ENV,
DD_EXPERIMENTAL_API_SECURITY_ENABLED,
DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED,
DD_EXPERIMENTAL_PROFILING_ENABLED,
JEST_WORKER_ID,
DD_IAST_DEDUPLICATION_ENABLED,
DD_IAST_ENABLED,
DD_IAST_MAX_CONCURRENT_REQUESTS,
DD_IAST_MAX_CONTEXT_OPERATIONS,
DD_IAST_REDACTION_ENABLED,
DD_IAST_REDACTION_NAME_PATTERN,
DD_IAST_REDACTION_VALUE_PATTERN,
DD_IAST_REQUEST_SAMPLING,
DD_IAST_TELEMETRY_VERBOSITY,
DD_INJECTION_ENABLED,
DD_INSTRUMENTATION_TELEMETRY_ENABLED,
DD_INSTRUMENTATION_CONFIG_ID,
DD_LOGS_INJECTION,
DD_OPENAI_LOGS_ENABLED,
DD_OPENAI_SPAN_CHAR_LIMIT,
DD_PROFILING_ENABLED,
DD_PROFILING_EXPORTERS,
DD_PROFILING_SOURCE_MAP,
DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD,
DD_REMOTE_CONFIGURATION_ENABLED,
DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS,
DD_RUNTIME_METRICS_ENABLED,
DD_SERVICE,
DD_SERVICE_MAPPING,
DD_SERVICE_NAME,
DD_SITE,
DD_SPAN_SAMPLING_RULES,
DD_SPAN_SAMPLING_RULES_FILE,
DD_TAGS,
DD_TELEMETRY_DEBUG,
DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED,
DD_TELEMETRY_HEARTBEAT_INTERVAL,
DD_TELEMETRY_LOG_COLLECTION_ENABLED,
DD_TELEMETRY_METRICS_ENABLED,
DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED,
DD_TRACE_AGENT_HOSTNAME,
DD_TRACE_AGENT_PORT,
DD_TRACE_AGENT_PROTOCOL_VERSION,
DD_TRACE_CLIENT_IP_ENABLED,
DD_TRACE_CLIENT_IP_HEADER,
DD_TRACE_EXPERIMENTAL_EXPORTER,
DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED,
DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED,
DD_TRACE_GIT_METADATA_ENABLED,
DD_TRACE_GLOBAL_TAGS,
DD_TRACE_HEADER_TAGS,
DD_TRACE_MEMCACHED_COMMAND_ENABLED,
DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
DD_TRACE_PARTIAL_FLUSH_MIN_SPANS,
DD_TRACE_PEER_SERVICE_MAPPING,
DD_TRACE_PROPAGATION_EXTRACT_FIRST,
DD_TRACE_PROPAGATION_STYLE,
DD_TRACE_PROPAGATION_STYLE_INJECT,
DD_TRACE_PROPAGATION_STYLE_EXTRACT,
DD_TRACE_RATE_LIMIT,
DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED,
DD_TRACE_REPORT_HOSTNAME,
DD_TRACE_SAMPLE_RATE,
DD_TRACE_SAMPLING_RULES,
DD_TRACE_SCOPE,
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA,
DD_TRACE_SPAN_LEAK_DEBUG,
DD_TRACE_STARTUP_LOGS,
DD_TRACE_TAGS,
DD_VERSION
DD_TRACE_TELEMETRY_ENABLED,
DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH,
DD_TRACING_ENABLED,
DD_VERSION,
OTEL_METRICS_EXPORTER,
OTEL_PROPAGATORS,
OTEL_RESOURCE_ATTRIBUTES,
OTEL_SERVICE_NAME,
OTEL_TRACES_SAMPLER,
OTEL_TRACES_SAMPLER_ARG
} = process.env
const tags = {}
const env = this._env = {}
const env = setHiddenProperty(this, '_env', {})
setHiddenProperty(this, '_envUnprocessed', {})
tagger.add(tags, OTEL_RESOURCE_ATTRIBUTES, true)
tagger.add(tags, DD_TAGS)

@@ -814,30 +636,410 @@ tagger.add(tags, DD_TRACE_TAGS)

this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service)
this._setBoolean(env, 'appsec.apiSecurity.enabled', coalesce(
DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED),
DD_EXPERIMENTAL_API_SECURITY_ENABLED && isTrue(DD_EXPERIMENTAL_API_SECURITY_ENABLED)
))
this._setUnit(env, 'appsec.apiSecurity.requestSampling', DD_API_SECURITY_REQUEST_SAMPLE_RATE)
this._setValue(env, 'appsec.blockedTemplateGraphql', maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON))
this._setValue(env, 'appsec.blockedTemplateHtml', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML))
this._envUnprocessed['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML
this._setValue(env, 'appsec.blockedTemplateJson', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON))
this._envUnprocessed['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON
this._setBoolean(env, 'appsec.enabled', DD_APPSEC_ENABLED)
if (DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING) {
this._setValue(env, 'appsec.eventTracking.enabled',
['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING.toLowerCase()))
this._setValue(env, 'appsec.eventTracking.mode', DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING.toLowerCase())
}
this._setString(env, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
this._setString(env, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP)
this._setBoolean(env, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED)
this._setValue(env, 'appsec.rateLimit', maybeInt(DD_APPSEC_TRACE_RATE_LIMIT))
this._envUnprocessed['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT
this._setString(env, 'appsec.rules', DD_APPSEC_RULES)
// DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend
this._setBoolean(env, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED)
this._setBoolean(env, 'appsec.standalone.enabled', DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED)
this._setBoolean(env, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED)
this._setValue(env, 'appsec.stackTrace.maxDepth', maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH))
this._envUnprocessed['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH
this._setValue(env, 'appsec.stackTrace.maxStackTraces', maybeInt(DD_APPSEC_MAX_STACK_TRACES))
this._envUnprocessed['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES
this._setValue(env, 'appsec.wafTimeout', maybeInt(DD_APPSEC_WAF_TIMEOUT))
this._envUnprocessed['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT
this._setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED)
this._setString(env, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER)
this._setString(env, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE)
this._setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOSTNAME)
this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT)
this._setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED)
this._setString(env, 'env', DD_ENV || tags.env)
this._setString(env, 'version', DD_VERSION || tags.version)
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE)
this._setBoolean(env, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED)
this._setString(env, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER)
this._setBoolean(env, 'experimental.runtimeId', DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED)
if (AWS_LAMBDA_FUNCTION_NAME) this._setValue(env, 'flushInterval', 0)
this._setValue(env, 'flushMinSpans', maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS))
this._envUnprocessed.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS
this._setBoolean(env, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED)
this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS)
this._setString(env, 'hostname', coalesce(DD_AGENT_HOST, DD_TRACE_AGENT_HOSTNAME))
this._setBoolean(env, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED)
this._setBoolean(env, 'iast.enabled', DD_IAST_ENABLED)
this._setValue(env, 'iast.maxConcurrentRequests', maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS))
this._envUnprocessed['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS
this._setValue(env, 'iast.maxContextOperations', maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS))
this._envUnprocessed['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS
this._setBoolean(env, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED))
this._setString(env, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN)
this._setString(env, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN)
const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING)
if (iastRequestSampling > -1 && iastRequestSampling < 101) {
this._setValue(env, 'iast.requestSampling', iastRequestSampling)
}
this._envUnprocessed['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING
this._setString(env, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY)
this._setArray(env, 'injectionEnabled', DD_INJECTION_ENABLED)
this._setBoolean(env, 'isAzureFunction', getIsAzureFunction())
this._setBoolean(env, 'isGCPFunction', getIsGCPFunction())
this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION)
this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS)
// Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
this._setBoolean(env, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED)
this._setBoolean(env, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED)
this._setValue(env, 'openaiSpanCharLimit', maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT))
this._envUnprocessed.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT
if (DD_TRACE_PEER_SERVICE_MAPPING) {
this._setValue(env, 'peerServiceMapping', fromEntries(
DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
))
this._envUnprocessed.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING
}
this._setString(env, 'port', DD_TRACE_AGENT_PORT)
const profilingEnabledEnv = coalesce(DD_EXPERIMENTAL_PROFILING_ENABLED, DD_PROFILING_ENABLED)
const profilingEnabled = isTrue(profilingEnabledEnv)
? 'true'
: isFalse(profilingEnabledEnv)
? 'false'
: profilingEnabledEnv === 'auto' ? 'auto' : undefined
this._setString(env, 'profiling.enabled', profilingEnabled)
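// A hypothetical mapping for the tri-state normalization above, assuming
// isTrue/isFalse recognize the usual truthy/falsy strings:
//   DD_PROFILING_ENABLED='true'  -> 'true'
//   DD_PROFILING_ENABLED='false' -> 'false'
//   DD_PROFILING_ENABLED='auto'  -> 'auto'
//   unset or unrecognized        -> undefined (defaults apply later)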
this._setString(env, 'profiling.exporters', DD_PROFILING_EXPORTERS)
this._setBoolean(env, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP))
if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) {
// This is only used in testing to not have to wait 30s
this._setValue(env, 'profiling.longLivedThreshold', Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD))
}
this._setString(env, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION)
this._setString(env, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP)
this._setBoolean(env, 'remoteConfig.enabled', coalesce(
DD_REMOTE_CONFIGURATION_ENABLED,
!this._isInServerlessEnvironment()
))
this._setValue(env, 'remoteConfig.pollInterval', maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS))
this._envUnprocessed['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS
this._setBoolean(env, 'reportHostname', DD_TRACE_REPORT_HOSTNAME)
// only used to explicitly set runtimeMetrics to false
const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none'
? false
: undefined
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED ||
otelSetRuntimeMetrics)
this._setArray(env, 'sampler.spanSamplingRules', reformatSpanSamplingRules(coalesce(
safeJsonParse(maybeFile(DD_SPAN_SAMPLING_RULES_FILE)),
safeJsonParse(DD_SPAN_SAMPLING_RULES)
)))
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE ||
getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG))
this._setValue(env, 'sampler.rateLimit', DD_TRACE_RATE_LIMIT)
this._setSamplingRule(env, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES))
this._envUnprocessed['sampler.rules'] = DD_TRACE_SAMPLING_RULES
this._setString(env, 'scope', DD_TRACE_SCOPE)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service || OTEL_SERVICE_NAME)
if (DD_SERVICE_MAPPING) {
this._setValue(env, 'serviceMapping', fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
))
}
this._setString(env, 'site', DD_SITE)
if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) {
this._setString(env, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA))
this._envUnprocessed.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
}
// 0: disabled, 1: logging, 2: garbage collection + logging
this._setValue(env, 'spanLeakDebug', maybeInt(DD_TRACE_SPAN_LEAK_DEBUG))
this._setBoolean(env, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
this._setBoolean(env, 'startupLogs', DD_TRACE_STARTUP_LOGS)
this._setTags(env, 'tags', tags)
this._setValue(env, 'tagsHeaderMaxLength', DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH)
this._setBoolean(env, 'telemetry.enabled', coalesce(
DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility
DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs
!(this._isInServerlessEnvironment() || JEST_WORKER_ID)
))
this._setString(env, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID)
this._setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG)
this._setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
this._setValue(env, 'telemetry.heartbeatInterval', maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)))
this._envUnprocessed['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000
this._setBoolean(env, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED)
this._setBoolean(env, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
this._setBoolean(env, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
this._setBoolean(env, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
this._setBoolean(env, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST)
this._setBoolean(env, 'tracePropagationStyle.otelPropagators',
DD_TRACE_PROPAGATION_STYLE ||
DD_TRACE_PROPAGATION_STYLE_INJECT ||
DD_TRACE_PROPAGATION_STYLE_EXTRACT
? false
: !!OTEL_PROPAGATORS)
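// A hypothetical truth table for the flag set above:
//   any DD_TRACE_PROPAGATION_STYLE* variable set -> false (Datadog settings win)
//   only OTEL_PROPAGATORS set                    -> true
//   neither set                                  -> false (!!undefined)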
this._setBoolean(env, 'tracing', DD_TRACING_ENABLED)
this._setString(env, 'version', DD_VERSION || tags.version)
}
_applyOptions (options) {
const opts = this._options = this._options || {}
const opts = setHiddenProperty(this, '_options', this._options || {})
const tags = {}
setHiddenProperty(this, '_optsUnprocessed', {})
options = Object.assign({ ingestion: {} }, options, opts)
options = setHiddenProperty(this, '_optionsArg', Object.assign({ ingestion: {} }, options, opts))
tagger.add(tags, options.tags)
this._setString(opts, 'service', options.service || tags.service)
this._setBoolean(opts, 'appsec.apiSecurity.enabled', options.appsec.apiSecurity?.enabled)
this._setUnit(opts, 'appsec.apiSecurity.requestSampling', options.appsec.apiSecurity?.requestSampling)
this._setValue(opts, 'appsec.blockedTemplateGraphql', maybeFile(options.appsec.blockedTemplateGraphql))
this._setValue(opts, 'appsec.blockedTemplateHtml', maybeFile(options.appsec.blockedTemplateHtml))
this._optsUnprocessed['appsec.blockedTemplateHtml'] = options.appsec.blockedTemplateHtml
this._setValue(opts, 'appsec.blockedTemplateJson', maybeFile(options.appsec.blockedTemplateJson))
this._optsUnprocessed['appsec.blockedTemplateJson'] = options.appsec.blockedTemplateJson
this._setBoolean(opts, 'appsec.enabled', options.appsec.enabled)
let eventTracking = options.appsec.eventTracking?.mode
if (eventTracking) {
eventTracking = eventTracking.toLowerCase()
this._setValue(opts, 'appsec.eventTracking.enabled', ['extended', 'safe'].includes(eventTracking))
this._setValue(opts, 'appsec.eventTracking.mode', eventTracking)
}
this._setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec.obfuscatorKeyRegex)
this._setString(opts, 'appsec.obfuscatorValueRegex', options.appsec.obfuscatorValueRegex)
this._setBoolean(opts, 'appsec.rasp.enabled', options.appsec.rasp?.enabled)
this._setValue(opts, 'appsec.rateLimit', maybeInt(options.appsec.rateLimit))
this._optsUnprocessed['appsec.rateLimit'] = options.appsec.rateLimit
this._setString(opts, 'appsec.rules', options.appsec.rules)
this._setBoolean(opts, 'appsec.standalone.enabled', options.experimental?.appsec?.standalone?.enabled)
this._setBoolean(opts, 'appsec.stackTrace.enabled', options.appsec.stackTrace?.enabled)
this._setValue(opts, 'appsec.stackTrace.maxDepth', maybeInt(options.appsec.stackTrace?.maxDepth))
this._optsUnprocessed['appsec.stackTrace.maxDepth'] = options.appsec.stackTrace?.maxDepth
this._setValue(opts, 'appsec.stackTrace.maxStackTraces', maybeInt(options.appsec.stackTrace?.maxStackTraces))
this._optsUnprocessed['appsec.stackTrace.maxStackTraces'] = options.appsec.stackTrace?.maxStackTraces
this._setValue(opts, 'appsec.wafTimeout', maybeInt(options.appsec.wafTimeout))
this._optsUnprocessed['appsec.wafTimeout'] = options.appsec.wafTimeout
this._setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled)
this._setString(opts, 'clientIpHeader', options.clientIpHeader)
this._setString(opts, 'dbmPropagationMode', options.dbmPropagationMode)
if (options.dogstatsd) {
this._setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname)
this._setString(opts, 'dogstatsd.port', options.dogstatsd.port)
}
this._setBoolean(opts, 'dsmEnabled', options.dsmEnabled)
this._setString(opts, 'env', options.env || tags.env)
this._setString(opts, 'version', options.version || tags.version)
this._setBoolean(opts, 'experimental.enableGetRumData',
options.experimental && options.experimental.enableGetRumData)
this._setString(opts, 'experimental.exporter', options.experimental && options.experimental.exporter)
this._setBoolean(opts, 'experimental.runtimeId', options.experimental && options.experimental.runtimeId)
this._setValue(opts, 'flushInterval', maybeInt(options.flushInterval))
this._optsUnprocessed.flushInterval = options.flushInterval
this._setValue(opts, 'flushMinSpans', maybeInt(options.flushMinSpans))
this._optsUnprocessed.flushMinSpans = options.flushMinSpans
this._setArray(opts, 'headerTags', options.headerTags)
this._setString(opts, 'hostname', options.hostname)
this._setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled)
this._setBoolean(opts, 'iast.enabled',
options.iast && (options.iast === true || options.iast.enabled === true))
this._setValue(opts, 'iast.maxConcurrentRequests',
maybeInt(options.iast?.maxConcurrentRequests))
this._optsUnprocessed['iast.maxConcurrentRequests'] = options.iast?.maxConcurrentRequests
this._setValue(opts, 'iast.maxContextOperations', maybeInt(options.iast?.maxContextOperations))
this._optsUnprocessed['iast.maxContextOperations'] = options.iast?.maxContextOperations
this._setBoolean(opts, 'iast.redactionEnabled', options.iast?.redactionEnabled)
this._setString(opts, 'iast.redactionNamePattern', options.iast?.redactionNamePattern)
this._setString(opts, 'iast.redactionValuePattern', options.iast?.redactionValuePattern)
const iastRequestSampling = maybeInt(options.iast?.requestSampling)
if (iastRequestSampling > -1 && iastRequestSampling < 101) {
this._setValue(opts, 'iast.requestSampling', iastRequestSampling)
this._optsUnprocessed['iast.requestSampling'] = options.iast?.requestSampling
}
this._setString(opts, 'iast.telemetryVerbosity', options.iast && options.iast.telemetryVerbosity)
this._setBoolean(opts, 'isCiVisibility', options.isCiVisibility)
this._setBoolean(opts, 'logInjection', options.logInjection)
this._setString(opts, 'lookup', options.lookup)
this._setBoolean(opts, 'openAiLogsEnabled', options.openAiLogsEnabled)
this._setValue(opts, 'peerServiceMapping', options.peerServiceMapping)
this._setBoolean(opts, 'plugins', options.plugins)
this._setString(opts, 'port', options.port)
const strProfiling = String(options.profiling)
if (['true', 'false', 'auto'].includes(strProfiling)) {
this._setString(opts, 'profiling.enabled', strProfiling)
}
this._setString(opts, 'protocolVersion', options.protocolVersion)
if (options.remoteConfig) {
this._setValue(opts, 'remoteConfig.pollInterval', maybeFloat(options.remoteConfig.pollInterval))
this._optsUnprocessed['remoteConfig.pollInterval'] = options.remoteConfig.pollInterval
}
this._setBoolean(opts, 'reportHostname', options.reportHostname)
this._setBoolean(opts, 'runtimeMetrics', options.runtimeMetrics)
this._setArray(opts, 'sampler.spanSamplingRules', reformatSpanSamplingRules(options.spanSamplingRules))
this._setUnit(opts, 'sampleRate', coalesce(options.sampleRate, options.ingestion.sampleRate))
this._setBoolean(opts, 'logInjection', options.logInjection)
this._setArray(opts, 'headerTags', options.headerTags)
const ingestion = options.ingestion || {}
this._setValue(opts, 'sampler.rateLimit', coalesce(options.rateLimit, ingestion.rateLimit))
this._setSamplingRule(opts, 'sampler.rules', options.samplingRules)
this._setString(opts, 'service', options.service || tags.service)
this._setValue(opts, 'serviceMapping', options.serviceMapping)
this._setString(opts, 'site', options.site)
if (options.spanAttributeSchema) {
this._setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema))
this._optsUnprocessed.spanAttributeSchema = options.spanAttributeSchema
}
this._setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService)
this._setBoolean(opts, 'startupLogs', options.startupLogs)
this._setTags(opts, 'tags', tags)
this._setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled)
this._setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled)
this._setString(opts, 'version', options.version || tags.version)
}
_isCiVisibility () {
return coalesce(
this._optionsArg.isCiVisibility,
this._defaults.isCiVisibility
)
}
_isCiVisibilityItrEnabled () {
return coalesce(
process.env.DD_CIVISIBILITY_ITR_ENABLED,
true
)
}
_getHostname () {
const DD_CIVISIBILITY_AGENTLESS_URL = process.env.DD_CIVISIBILITY_AGENTLESS_URL
const url = DD_CIVISIBILITY_AGENTLESS_URL
? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(this._getTraceAgentUrl(), this._optionsArg)
const DD_AGENT_HOST = coalesce(
this._optionsArg.hostname,
process.env.DD_AGENT_HOST,
process.env.DD_TRACE_AGENT_HOSTNAME,
'127.0.0.1'
)
return DD_AGENT_HOST || (url && url.hostname)
}
_getSpanComputePeerService () {
const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion(
coalesce(
this._optionsArg.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
)
)
const peerServiceSet = (
this._optionsArg.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
this._optionsArg.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
)
const spanComputePeerService = (
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0'
// In v0, peer service is computed only if it is explicitly set to true
? peerServiceSet && isTrue(peerServiceValue)
// In >v0, peer service is false only if it is explicitly set to false
: (peerServiceSet ? !isFalse(peerServiceValue) : true)
)
return spanComputePeerService
}
_isCiVisibilityGitUploadEnabled () {
return coalesce(
process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED,
true
)
}
_isCiVisibilityManualApiEnabled () {
return isTrue(coalesce(
process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED,
false
))
}
_isTraceStatsComputationEnabled () {
return coalesce(
this._optionsArg.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
getIsGCPFunction() || getIsAzureFunction()
)
}
_getTraceAgentUrl () {
return coalesce(
this._optionsArg.url,
process.env.DD_TRACE_AGENT_URL,
process.env.DD_TRACE_URL,
null
)
}
// handles values calculated from a mixture of options and env vars
_applyCalculated () {
const calc = setHiddenProperty(this, '_calculated', {})
const {
DD_CIVISIBILITY_AGENTLESS_URL,
DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED
} = process.env
if (DD_CIVISIBILITY_AGENTLESS_URL) {
this._setValue(calc, 'url', new URL(DD_CIVISIBILITY_AGENTLESS_URL))
} else {
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this._optionsArg))
}
if (this._isCiVisibility()) {
this._setBoolean(calc, 'isEarlyFlakeDetectionEnabled',
coalesce(DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, true))
this._setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(this._isCiVisibilityItrEnabled()))
this._setBoolean(calc, 'isManualApiEnabled', this._isCiVisibilityManualApiEnabled())
}
this._setString(calc, 'dogstatsd.hostname', this._getHostname())
this._setBoolean(calc, 'isGitUploadEnabled',
calc.isIntelligentTestRunnerEnabled && !isFalse(this._isCiVisibilityGitUploadEnabled()))
this._setBoolean(calc, 'spanComputePeerService', this._getSpanComputePeerService())
this._setBoolean(calc, 'stats.enabled', this._isTraceStatsComputationEnabled())
const defaultPropagationStyle = this._getDefaultPropagationStyle(this._optionsArg)
this._setValue(calc, 'tracePropagationStyle.inject', propagationStyle(
'inject',
this._optionsArg.tracePropagationStyle
))
this._setValue(calc, 'tracePropagationStyle.extract', propagationStyle(
'extract',
this._optionsArg.tracePropagationStyle
))
if (defaultPropagationStyle.length > 2) {
calc['tracePropagationStyle.inject'] = calc['tracePropagationStyle.inject'] || defaultPropagationStyle
calc['tracePropagationStyle.extract'] = calc['tracePropagationStyle.extract'] || defaultPropagationStyle
}
const iastEnabled = coalesce(this._options['iast.enabled'], this._env['iast.enabled'])
const profilingEnabled = coalesce(this._options['profiling.enabled'], this._env['profiling.enabled'])
const injectionIncludesProfiler = (this._env.injectionEnabled || []).includes('profiler')
if (iastEnabled || ['auto', 'true'].includes(profilingEnabled) || injectionIncludesProfiler) {
this._setBoolean(calc, 'telemetry.logCollection', true)
}
}
_applyRemote (options) {
const opts = this._remote = this._remote || {}
const opts = setHiddenProperty(this, '_remote', this._remote || {})
setHiddenProperty(this, '_remoteUnprocessed', {})
const tags = {}

@@ -851,2 +1053,3 @@ const headerTags = options.tracing_header_tags

tagger.add(tags, options.tracing_tags)
if (Object.keys(tags).length) tags['runtime-id'] = runtimeId

@@ -857,4 +1060,20 @@ this._setUnit(opts, 'sampleRate', options.tracing_sampling_rate)

this._setTags(opts, 'tags', tags)
this._setBoolean(opts, 'tracing', options.tracing_enabled)
this._remoteUnprocessed['sampler.rules'] = options.tracing_sampling_rules
this._setSamplingRule(opts, 'sampler.rules', this._reformatTags(options.tracing_sampling_rules))
}
_reformatTags (samplingRules) {
for (const rule of (samplingRules || [])) {
const reformattedTags = {}
if (rule.tags) {
for (const tag of (rule.tags || {})) {
reformattedTags[tag.key] = tag.value_glob
}
rule.tags = reformattedTags
}
}
return samplingRules
}
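// For illustration: remote config delivers each rule's tags as an array of
// { key, value_glob } pairs, e.g. [{ key: 'env', value_glob: 'prod*' }], and
// the loop above reshapes them into a plain object: { env: 'prod*' }.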
_setBoolean (obj, name, value) {

@@ -884,3 +1103,3 @@ if (value === undefined || value === null) {

_setArray (obj, name, value) {
if (value === null || value === undefined) {
if (value == null) {
return this._setValue(obj, name, null)

@@ -890,3 +1109,3 @@ }

if (typeof value === 'string') {
value = value && value.split(',')
value = value.split(',')
}

@@ -899,4 +1118,23 @@

_setSamplingRule (obj, name, value) {
if (value == null) {
return this._setValue(obj, name, null)
}
if (typeof value === 'string') {
value = value.split(',')
}
if (Array.isArray(value)) {
value = value.map(rule => {
return remapify(rule, {
sample_rate: 'sampleRate'
})
})
this._setValue(obj, name, value)
}
}
_setString (obj, name, value) {
obj[name] = value || undefined // unset for empty strings
obj[name] = value ? String(value) : undefined // unset for empty strings
}

@@ -919,5 +1157,9 @@

// TODO: Move change tracking to telemetry.
// for telemetry reporting, `name`s in `containers` need to be keys from:
// eslint-disable-next-line max-len
// https://github.com/DataDog/dd-go/blob/prod/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json
_merge () {
const containers = [this._remote, this._options, this._env, this._defaults]
const origins = ['remote_config', 'code', 'env_var', 'default']
const containers = [this._remote, this._options, this._env, this._calculated, this._defaults]
const origins = ['remote_config', 'code', 'env_var', 'calculated', 'default']
const unprocessedValues = [this._remoteUnprocessed, this._optsUnprocessed, this._envUnprocessed, {}, {}]
const changes = []

@@ -928,10 +1170,14 @@

const container = containers[i]
const origin = origins[i]
const value = container[name]
if ((container[name] !== null && container[name] !== undefined) || container === this._defaults) {
if (this[name] === container[name] && this.hasOwnProperty(name)) break
if ((value !== null && value !== undefined) || container === this._defaults) {
if (get(this, name) === value && has(this, name)) break
const value = this[name] = container[name]
set(this, name, value)
changes.push({ name, value, origin })
changes.push({
name,
value: unprocessedValues[i][name] || value,
origin: origins[i]
})

@@ -944,3 +1190,2 @@ break

this.sampler.sampleRate = this.sampleRate
updateConfig(changes, this)
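// Precedence is positional in `containers`: remote config overrides options
// set in code, which override env vars, which override calculated values,
// which override defaults. For example, with DD_SERVICE=env-svc in the
// environment and tracer.init({ service: 'code-svc' }), the merged service
// is 'code-svc' with origin 'code'.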

@@ -950,2 +1195,12 @@ }

function maybeInt (number) {
const parsed = parseInt(number)
return isNaN(parsed) ? undefined : parsed
}
function maybeFloat (number) {
const parsed = parseFloat(number)
return isNaN(parsed) ? undefined : parsed
}
function getAgentUrl (url, options) {

@@ -968,2 +1223,11 @@ if (url) return new URL(url)

function setHiddenProperty (obj, name, value) {
Object.defineProperty(obj, name, {
value,
enumerable: false,
writable: true
})
return obj[name]
}
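// A small sketch of what the helper above buys: the slot stays readable and
// writable but invisible to enumeration, so internal state never leaks into
// serialized config (object shape illustrative):
//   const holder = {}
//   setHiddenProperty(holder, '_options', { service: 'web' })
//   holder._options.service // => 'web' (still readable and writable)
//   Object.keys(holder) // => [] (non-enumerable; JSON.stringify gives '{}')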
module.exports = Config

@@ -18,2 +18,4 @@ 'use strict'

SAMPLING_MECHANISM_SPAN: 8,
SAMPLING_MECHANISM_REMOTE_USER: 11,
SAMPLING_MECHANISM_REMOTE_DYNAMIC: 12,
SPAN_SAMPLING_MECHANISM: '_dd.span_sampling.mechanism',

@@ -34,3 +36,5 @@ SPAN_SAMPLING_RULE_RATE: '_dd.span_sampling.rule_rate',

SCI_REPOSITORY_URL: '_dd.git.repository_url',
SCI_COMMIT_SHA: '_dd.git.commit.sha'
SCI_COMMIT_SHA: '_dd.git.commit.sha',
APM_TRACING_ENABLED_KEY: '_dd.apm.enabled',
APPSEC_PROPAGATION_KEY: '_dd.p.appsec'
}

@@ -9,3 +9,3 @@ const { storage } = require('../../datadog-core')

function setDataStreamsContext (dataStreamsContext) {
storage.enterWith({ ...(storage.getStore()), dataStreamsContext })
if (dataStreamsContext) storage.enterWith({ ...(storage.getStore()), dataStreamsContext })
}

@@ -12,0 +12,0 @@

@@ -11,2 +11,5 @@ // encoding used here is sha256

const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'
const CONTEXT_PROPAGATION_KEY_BASE64 = 'dd-pathway-ctx-base64'
function shaHash (checkpointString) {

@@ -18,8 +21,10 @@ const hash = crypto.createHash('md5').update(checkpointString).digest('hex').slice(0, 16)

function computeHash (service, env, edgeTags, parentHash) {
const key = `${service}${env}` + edgeTags.join('') + parentHash.toString()
const hashableEdgeTags = edgeTags.filter(item => item !== 'manual_checkpoint:true')
const key = `${service}${env}` + hashableEdgeTags.join('') + parentHash.toString()
if (cache.get(key)) {
return cache.get(key)
}
const currentHash = shaHash(`${service}${env}` + edgeTags.join(''))
const buf = Buffer.concat([ currentHash, parentHash ], 16)
const currentHash = shaHash(`${service}${env}` + hashableEdgeTags.join(''))
const buf = Buffer.concat([currentHash, parentHash], 16)
const val = shaHash(buf.toString())

@@ -38,2 +43,7 @@ cache.set(key, val)

function encodePathwayContextBase64 (dataStreamsContext) {
const encodedPathway = encodePathwayContext(dataStreamsContext)
return encodedPathway.toString('base64')
}
function decodePathwayContext (pathwayContext) {

@@ -57,6 +67,55 @@ if (pathwayContext == null || pathwayContext.length < 8) {

function decodePathwayContextBase64 (pathwayContext) {
if (pathwayContext == null || pathwayContext.length < 8) {
return
}
if (Buffer.isBuffer(pathwayContext)) {
pathwayContext = pathwayContext.toString()
}
const encodedPathway = Buffer.from(pathwayContext, 'base64')
return decodePathwayContext(encodedPathway)
}
class DsmPathwayCodec {
// Encoding/decoding lives in a class so that future changes to the encoding
// only need to touch one place, rather than a set of standalone functions.
static encode (dataStreamsContext, carrier) {
if (!dataStreamsContext || !dataStreamsContext.hash) {
return
}
carrier[CONTEXT_PROPAGATION_KEY_BASE64] = encodePathwayContextBase64(dataStreamsContext)
}
static decode (carrier) {
if (carrier == null) return
let ctx
if (CONTEXT_PROPAGATION_KEY_BASE64 in carrier) {
// decode the v2 base64 encoding
ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY_BASE64])
} else if (CONTEXT_PROPAGATION_KEY in carrier) {
try {
// decode v1 encoding
ctx = decodePathwayContext(carrier[CONTEXT_PROPAGATION_KEY])
} catch {
// pass
}
// cover the case where a base64 context was received under the legacy key
if (!ctx) ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY])
}
return ctx
}
static contextExists (carrier) {
return CONTEXT_PROPAGATION_KEY_BASE64 in carrier || CONTEXT_PROPAGATION_KEY in carrier
}
}
module.exports = {
computePathwayHash: computeHash,
encodePathwayContext,
decodePathwayContext
decodePathwayContext,
encodePathwayContextBase64,
decodePathwayContextBase64,
DsmPathwayCodec
}
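A hedged round-trip sketch of the codec above; the context fields are illustrative, with hash standing in for the 8-byte pathway hash buffer.

const carrier = {}
DsmPathwayCodec.encode({ hash, pathwayStartNs, edgeStartNs }, carrier)
// carrier: { 'dd-pathway-ctx-base64': '<base64 pathway context>' }
const ctx = DsmPathwayCodec.decode(carrier)
// decode() prefers the v2 base64 key, falls back to the legacy
// 'dd-pathway-ctx' key, and retries base64 decoding when a base64 payload
// arrived under the legacy key.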

@@ -7,3 +7,3 @@ const os = require('os')

const { LogCollapsingLowestDenseDDSketch } = require('@datadog/sketches-js')
const { encodePathwayContext } = require('./pathway')
const { DsmPathwayCodec } = require('./pathway')
const { DataStreamsWriter } = require('./writer')

@@ -17,3 +17,2 @@ const { computePathwayHash } = require('./pathway')

const HIGH_ACCURACY_DISTRIBUTION = 0.0075
const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'

@@ -130,2 +129,17 @@ class StatsPoint {

}
if (Array.isArray(obj) && obj.length > 0) {
if (typeof obj[0] === 'number') return Buffer.from(obj).length
let payloadSize = 0
obj.forEach(item => {
payloadSize += getSizeOrZero(item)
})
return payloadSize
}
if (obj !== null && typeof obj === 'object') {
try {
return getHeadersSize(obj)
} catch {
// pass
}
}
return 0

@@ -144,2 +158,7 @@ }

function getAmqpMessageSize (message) {
const { headers, content } = message
return getSizeOrZero(content) + getHeadersSize(headers)
}
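// For example, an AMQP message's recorded payload size is the content length
// plus the estimated header size:
//   getAmqpMessageSize({ headers: { 'x-id': 'abc' }, content: Buffer.from('hello') })
//   // => 5 (content bytes) + getHeadersSize({ 'x-id': 'abc' })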
class TimeBuckets extends Map {

@@ -164,3 +183,4 @@ forTime (time) {

version,
service
service,
flushInterval
} = {}) {

@@ -181,7 +201,9 @@ this.writer = new DataStreamsWriter({

this.sequence = 0
this.flushInterval = flushInterval
if (this.enabled) {
this.timer = setInterval(this.onInterval.bind(this), 10000)
this.timer = setInterval(this.onInterval.bind(this), flushInterval)
this.timer.unref()
}
process.once('beforeExit', () => this.onInterval())
}

@@ -198,3 +220,4 @@

Version: this.version,
Lang: 'javascript'
Lang: 'javascript',
Tags: Object.entries(this.tags).map(([key, value]) => `${key}:${value}`)
}

@@ -211,3 +234,4 @@ this.writer.flush(payload)

const bucketTime = Math.round(timestamp - (timestamp % this.bucketSizeNs))
return this.buckets.forTime(bucketTime)
const bucket = this.buckets.forTime(bucketTime)
return bucket
}

@@ -261,22 +285,24 @@

const dataStreamsContext = {
hash: hash,
edgeStartNs: edgeStartNs,
pathwayStartNs: pathwayStartNs,
hash,
edgeStartNs,
pathwayStartNs,
previousDirection: direction,
closestOppositeDirectionHash: closestOppositeDirectionHash,
closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart
closestOppositeDirectionHash,
closestOppositeDirectionEdgeStart
}
if (direction === 'direction:out') {
// Add the header for this now, as the callee doesn't have access to context when producing
payloadSize += getSizeOrZero(encodePathwayContext(dataStreamsContext))
payloadSize += CONTEXT_PROPAGATION_KEY.length
// subtract 1 to discount the extra '{' byte JSON.stringify adds for the standalone object
const ddInfoContinued = {}
DsmPathwayCodec.encode(dataStreamsContext, ddInfoContinued)
payloadSize += getSizeOrZero(JSON.stringify(ddInfoContinued)) - 1
}
const checkpoint = {
currentTimestamp: nowNs,
parentHash: parentHash,
hash: hash,
edgeTags: edgeTags,
edgeLatencyNs: edgeLatencyNs,
pathwayLatencyNs: pathwayLatencyNs,
payloadSize: payloadSize
parentHash,
hash,
edgeTags,
edgeLatencyNs,
pathwayLatencyNs,
payloadSize
}

@@ -307,3 +333,3 @@ this.recordCheckpoint(checkpoint, span)

for (const [ timeNs, bucket ] of this.buckets.entries()) {
for (const [timeNs, bucket] of this.buckets.entries()) {
const points = []

@@ -335,8 +361,12 @@

}
setUrl (url) {
this.writer.setUrl(url)
}
}
module.exports = {
DataStreamsProcessor: DataStreamsProcessor,
StatsPoint: StatsPoint,
StatsBucket: StatsBucket,
DataStreamsProcessor,
StatsPoint,
StatsBucket,
Backlog,

@@ -347,4 +377,4 @@ TimeBuckets,

getSizeOrZero,
ENTRY_PARENT_HASH,
CONTEXT_PROPAGATION_KEY
getAmqpMessageSize,
ENTRY_PARENT_HASH
}

@@ -18,9 +18,6 @@ const pkg = require('../../../../package.json')

'Content-Encoding': 'gzip'
}
},
url
}
options.protocol = url.protocol
options.hostname = url.hostname
options.port = url.port
log.debug(() => `Request to the intake: ${JSON.stringify(options)}`)

@@ -63,2 +60,11 @@

}
setUrl (url) {
try {
url = new URL(url)
this._url = url
} catch (e) {
log.warn(e.stack)
}
}
}

@@ -65,0 +71,0 @@

@@ -15,2 +15,3 @@ 'use strict'

const TYPE_DISTRIBUTION = 'd'
const TYPE_HISTOGRAM = 'h'

@@ -50,2 +51,6 @@ class DogStatsDClient {

histogram (stat, value, tags) {
this._add(stat, value, TYPE_HISTOGRAM, tags)
}
flush () {

@@ -72,3 +77,3 @@ const queue = this._enqueue()

log.error('HTTP error from agent: ' + err.stack)
if (err.status) {
if (err.status === 404) {
// Inside this if-block, we have connectivity to the agent, but

@@ -78,7 +83,5 @@ // we're not getting a 200 from the proxy endpoint. If it's a 404,

// options. Either way, we can give UDP a try.
if (err.status === 404) {
this._httpOptions = null
}
this._sendUdp(queue)
this._httpOptions = null
}
this._sendUdp(queue)
}

@@ -189,12 +192,2 @@ })

class NoopDogStatsDClient {
gauge () { }
increment () { }
distribution () { }
flush () { }
}
// This is a simplified user-facing proxy to the underlying DogStatsDClient instance

@@ -239,2 +232,10 @@ class CustomMetrics {

histogram (stat, value, tags) {
return this.dogstatsd.histogram(
stat,
value,
CustomMetrics.tagTranslator(tags)
)
}
flush () {

@@ -263,4 +264,3 @@ return this.dogstatsd.flush()

DogStatsDClient,
NoopDogStatsDClient,
CustomMetrics
}

@@ -86,9 +86,13 @@ 'use strict'

if (span.type) {
if (span.type && span.meta_struct) {
bytes.buffer[bytes.length++] = 0x8d
} else if (span.type || span.meta_struct) {
bytes.buffer[bytes.length++] = 0x8c
} else {
bytes.buffer[bytes.length++] = 0x8b
}
if (span.type) {
this._encodeString(bytes, 'type')
this._encodeString(bytes, span.type)
} else {
bytes.buffer[bytes.length++] = 0x8b
}

@@ -118,2 +122,6 @@

this._encodeMap(bytes, span.metrics)
if (span.meta_struct) {
this._encodeString(bytes, 'meta_struct')
this._encodeMetaStruct(bytes, span.meta_struct)
}
}

@@ -268,2 +276,80 @@ }

_encodeMetaStruct (bytes, value) {
const keys = Array.isArray(value) ? [] : Object.keys(value)
const validKeys = keys.filter(key => {
const v = value[key]
return typeof v === 'string' ||
typeof v === 'number' ||
(v !== null && typeof v === 'object')
})
this._encodeMapPrefix(bytes, validKeys.length)
for (const key of validKeys) {
const v = value[key]
this._encodeString(bytes, key)
this._encodeObjectAsByteArray(bytes, v)
}
}
_encodeObjectAsByteArray (bytes, value) {
const prefixLength = 5
const offset = bytes.length
bytes.reserve(prefixLength)
bytes.length += prefixLength
this._encodeObject(bytes, value)
// the length prefix is written after encoding the object, once its real length is known
const length = bytes.length - offset - prefixLength
bytes.buffer[offset] = 0xc6
bytes.buffer[offset + 1] = length >> 24
bytes.buffer[offset + 2] = length >> 16
bytes.buffer[offset + 3] = length >> 8
bytes.buffer[offset + 4] = length
}
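// The five reserved bytes above form a MessagePack bin 32 header: 0xc6
// followed by a 4-byte big-endian length. For example, a 300-byte encoded
// object gets the prefix [0xc6, 0x00, 0x00, 0x01, 0x2c] (0x12c === 300).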
_encodeObject (bytes, value, circularReferencesDetector = new Set()) {
circularReferencesDetector.add(value)
if (Array.isArray(value)) {
this._encodeObjectAsArray(bytes, value, circularReferencesDetector)
} else if (value !== null && typeof value === 'object') {
this._encodeObjectAsMap(bytes, value, circularReferencesDetector)
} else if (typeof value === 'string' || typeof value === 'number') {
this._encodeValue(bytes, value)
}
}
_encodeObjectAsMap (bytes, value, circularReferencesDetector) {
const keys = Object.keys(value)
const validKeys = keys.filter(key => {
const v = value[key]
return typeof v === 'string' ||
typeof v === 'number' ||
(v !== null && typeof v === 'object' && !circularReferencesDetector.has(v))
})
this._encodeMapPrefix(bytes, validKeys.length)
for (const key of validKeys) {
const v = value[key]
this._encodeString(bytes, key)
this._encodeObject(bytes, v, circularReferencesDetector)
}
}
_encodeObjectAsArray (bytes, value, circularReferencesDetector) {
const validValue = value.filter(item =>
typeof item === 'string' ||
typeof item === 'number' ||
(item !== null && typeof item === 'object' && !circularReferencesDetector.has(item)))
this._encodeArrayPrefix(bytes, validValue)
for (const item of validValue) {
this._encodeObject(bytes, item, circularReferencesDetector)
}
}
_cacheString (value) {

@@ -270,0 +356,0 @@ if (!(value in this._stringMap)) {

@@ -19,2 +19,3 @@ 'use strict'

const TEST_SESSION_KEYS_LENGTH = 10
const TEST_AND_SPAN_KEYS_LENGTH = 11

@@ -149,5 +150,3 @@ const INTAKE_SOFT_LIMIT = 2 * 1024 * 1024 // 2MB

_encodeEventContent (bytes, content) {
const keysLength = Object.keys(content).length
let totalKeysLength = keysLength
let totalKeysLength = TEST_AND_SPAN_KEYS_LENGTH
if (content.meta.test_session_id) {

@@ -166,2 +165,5 @@ totalKeysLength = totalKeysLength + 1

}
if (content.type) {
totalKeysLength = totalKeysLength + 1
}
this._encodeMapPrefix(bytes, totalKeysLength)

@@ -332,4 +334,4 @@ if (content.type) {

'*': {
'language': 'javascript',
'library_version': ddTraceVersion
language: 'javascript',
library_version: ddTraceVersion
}

@@ -336,0 +338,0 @@ },

@@ -111,3 +111,3 @@ 'use strict'

{
filename: `coverage1.msgpack`,
filename: 'coverage1.msgpack',
contentType: 'application/msgpack'

@@ -114,0 +114,0 @@ }

@@ -21,3 +21,5 @@ 'use strict'

case exporters.JEST_WORKER:
return require('./ci-visibility/exporters/jest-worker')
case exporters.CUCUMBER_WORKER:
case exporters.MOCHA_WORKER:
return require('./ci-visibility/exporters/test-worker')
default:

@@ -24,0 +26,0 @@ return inAWSLambda && !usingLambdaExtension ? require('./exporters/log') : require('./exporters/agent')

@@ -10,3 +10,3 @@ 'use strict'

this._config = config
const { url, hostname, port, lookup, protocolVersion, stats = {} } = config
const { url, hostname, port, lookup, protocolVersion, stats = {}, appsec } = config
this._url = url || new URL(format({

@@ -19,3 +19,3 @@ protocol: 'http:',

const headers = {}
if (stats.enabled) {
if (stats.enabled || appsec?.standalone?.enabled) {
headers['Datadog-Client-Computed-Stats'] = 'yes'

@@ -22,0 +22,0 @@ }

@@ -9,3 +9,6 @@ 'use strict'

const https = require('https')
// eslint-disable-next-line n/no-deprecated-api
const { parse: urlParse } = require('url')
const zlib = require('zlib')
const docker = require('./docker')

@@ -97,12 +100,27 @@ const { httpAgent, httpsAgent } = require('./agents')

const onResponse = res => {
let responseData = ''
const chunks = []
res.setTimeout(timeout)
res.on('data', chunk => { responseData += chunk })
res.on('data', chunk => {
chunks.push(chunk)
})
res.on('end', () => {
activeRequests--
const buffer = Buffer.concat(chunks)
if (res.statusCode >= 200 && res.statusCode <= 299) {
callback(null, responseData, res.statusCode)
const isGzip = res.headers['content-encoding'] === 'gzip'
if (isGzip) {
zlib.gunzip(buffer, (err, result) => {
if (err) {
log.error(`Could not gunzip response: ${err.message}`)
callback(null, '', res.statusCode)
} else {
callback(null, result.toString(), res.statusCode)
}
})
} else {
callback(null, buffer.toString(), res.statusCode)
}
} else {

@@ -119,2 +137,3 @@ let errorMessage = ''

}
const responseData = buffer.toString()
if (responseData) {

@@ -121,0 +140,0 @@ errorMessage += ` Response from the endpoint: "${responseData}"`

@@ -1,2 +0,1 @@

const { SpanStatsEncoder } = require('../../encode/span-stats')

@@ -3,0 +2,0 @@

@@ -54,7 +54,7 @@ const tracerLogger = require('../../log')// path to require tracer logger

...log,
'timestamp': Date.now(),
'hostname': log.hostname || this.hostname,
'ddsource': log.ddsource || this.ddsource,
'service': log.service || this.service,
'ddtags': logTags || undefined
timestamp: Date.now(),
hostname: log.hostname || this.hostname,
ddsource: log.ddsource || this.ddsource,
service: log.service || this.service,
ddtags: logTags || undefined
}

@@ -61,0 +61,0 @@

@@ -36,2 +36,4 @@ 'use strict'

extractSpanLinks(formatted, span)
extractSpanEvents(formatted, span)
extractRootTags(formatted, span)

@@ -55,5 +57,7 @@ extractChunkTags(formatted, span)

meta: {},
meta_struct: span.meta_struct,
metrics: {},
start: Math.round(span._startTime * 1e6),
duration: Math.round(span._duration * 1e6)
duration: Math.round(span._duration * 1e6),
links: []
}

@@ -69,2 +73,40 @@ }

function extractSpanLinks (trace, span) {
const links = []
if (span._links) {
for (const link of span._links) {
const { context, attributes } = link
const formattedLink = {}
formattedLink.trace_id = context.toTraceId(true)
formattedLink.span_id = context.toSpanId(true)
if (attributes && Object.keys(attributes).length > 0) {
formattedLink.attributes = attributes
}
if (context?._sampling?.priority >= 0) formattedLink.flags = context._sampling.priority > 0 ? 1 : 0
if (context?._tracestate) formattedLink.tracestate = context._tracestate.toString()
links.push(formattedLink)
}
}
if (links.length > 0) { trace.meta['_dd.span_links'] = JSON.stringify(links) }
}
function extractSpanEvents (trace, span) {
const events = []
if (span._events) {
for (const event of span._events) {
const formattedEvent = {
name: event.name,
time_unix_nano: Math.round(event.startTime * 1e6),
attributes: event.attributes && Object.keys(event.attributes).length > 0 ? event.attributes : undefined
}
events.push(formattedEvent)
}
}
if (events.length > 0) { trace.meta.events = JSON.stringify(events) }
}
function extractTags (trace, span) {

@@ -90,3 +132,2 @@ const context = span.context()

switch (tag) {
case 'operation.name':
case 'service.name':

@@ -118,3 +159,6 @@ case 'span.type':

if (context._name !== 'fs.operation') {
trace.error = 1
// HACK: to ensure otel.recordException does not influence trace.error
if (tags.setTraceError) {
trace.error = 1
}
} else {

@@ -127,3 +171,2 @@ break

}
setSingleSpanIngestionTags(trace, context._spanSampling)

@@ -130,0 +173,0 @@

@@ -8,4 +8,8 @@ 'use strict'

module.exports = isFalse(process.env.DD_TRACE_ENABLED) || inJestWorker
const ddTraceDisabled = process.env.DD_TRACE_ENABLED
? isFalse(process.env.DD_TRACE_ENABLED)
: String(process.env.OTEL_TRACES_EXPORTER).toLowerCase() === 'none'
module.exports = ddTraceDisabled || inJestWorker
? require('./noop/proxy')
: require('./proxy')
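A quick truth sketch of the gate above: DD_TRACE_ENABLED, when set, decides on its own, and OTEL_TRACES_EXPORTER is only consulted when the Datadog variable is absent.

// DD_TRACE_ENABLED=false -> noop proxy
// DD_TRACE_ENABLED=true, OTEL_TRACES_EXPORTER=none -> real proxy (DD var wins)
// DD_TRACE_ENABLED unset, OTEL_TRACES_EXPORTER=none -> noop proxy
// running in a Jest worker -> noop proxy regardless of either variable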

@@ -90,3 +90,3 @@ /**

if (originalLambdaHandler !== undefined) {
if (originalLambdaHandler !== undefined && lambdaTaskRoot !== undefined) {
const [moduleRoot, moduleAndHandler] = _extractModuleRootAndHandler(originalLambdaHandler)

@@ -93,0 +93,0 @@ const [_module] = _extractModuleNameAndHandlerPath(moduleAndHandler)

@@ -6,42 +6,67 @@ 'use strict'

const Level = {
Debug: 'debug',
Info: 'info',
Warn: 'warn',
Error: 'error'
trace: 20,
debug: 20,
info: 30,
warn: 40,
error: 50,
critical: 50,
off: 100
}
const defaultLevel = Level.Debug
const debugChannel = channel('datadog:log:debug')
const infoChannel = channel('datadog:log:info')
const warnChannel = channel('datadog:log:warn')
const errorChannel = channel('datadog:log:error')
// based on: https://github.com/trentm/node-bunyan#levels
const logChannels = {
[Level.Debug]: createLogChannel(Level.Debug, 20),
[Level.Info]: createLogChannel(Level.Info, 30),
[Level.Warn]: createLogChannel(Level.Warn, 40),
[Level.Error]: createLogChannel(Level.Error, 50)
}
const defaultLevel = Level.debug
function createLogChannel (name, logLevel) {
const logChannel = channel(`datadog:log:${name}`)
logChannel.logLevel = logLevel
return logChannel
function getChannelLogLevel (level) {
return level && typeof level === 'string'
? Level[level.toLowerCase().trim()] || defaultLevel
: defaultLevel
}
function getChannelLogLevel (level) {
let logChannel
if (level && typeof level === 'string') {
logChannel = logChannels[level.toLowerCase().trim()] || logChannels[defaultLevel]
} else {
logChannel = logChannels[defaultLevel]
class LogChannel {
constructor (level) {
this._level = getChannelLogLevel(level)
}
return logChannel.logLevel
subscribe (logger) {
if (Level.debug >= this._level) {
debugChannel.subscribe(logger.debug)
}
if (Level.info >= this._level) {
infoChannel.subscribe(logger.info)
}
if (Level.warn >= this._level) {
warnChannel.subscribe(logger.warn)
}
if (Level.error >= this._level) {
errorChannel.subscribe(logger.error)
}
}
unsubscribe (logger) {
if (debugChannel.hasSubscribers) {
debugChannel.unsubscribe(logger.debug)
}
if (infoChannel.hasSubscribers) {
infoChannel.unsubscribe(logger.info)
}
if (warnChannel.hasSubscribers) {
warnChannel.unsubscribe(logger.warn)
}
if (errorChannel.hasSubscribers) {
errorChannel.unsubscribe(logger.error)
}
}
}
module.exports = {
Level,
getChannelLogLevel,
LogChannel,
debugChannel: logChannels[Level.Debug],
infoChannel: logChannels[Level.Info],
warnChannel: logChannels[Level.Warn],
errorChannel: logChannels[Level.Error]
debugChannel,
infoChannel,
warnChannel,
errorChannel
}
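A minimal usage sketch of LogChannel (require path assumed for illustration): the numeric levels decide which diagnostics channels a logger is attached to.

const { LogChannel } = require('./packages/dd-trace/src/log/channels')
const logChannel = new LogChannel('warn')
logChannel.subscribe({
  debug: () => {}, // never attached: Level.debug (20) < 40
  info: () => {}, // never attached: Level.info (30) < 40
  warn: msg => console.warn(msg), // attached: Level.warn (40) >= 40
  error: err => console.error(err) // attached: Level.error (50) >= 40
})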
'use strict'
const coalesce = require('koalas')
const { isTrue } = require('../util')
const { debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')

@@ -23,4 +25,18 @@ const logWriter = require('./writer')

const config = {
enabled: false,
logger: undefined,
logLevel: 'debug'
}
const log = {
/**
* @returns Read-only version of logging config. To modify config, call `log.use` and `log.toggle`
*/
getConfig () {
return { ...config }
},
use (logger) {
config.logger = logger
logWriter.use(logger)

@@ -31,2 +47,4 @@ return this

toggle (enabled, logLevel) {
config.enabled = enabled
config.logLevel = logLevel
logWriter.toggle(enabled, logLevel)

@@ -81,2 +99,16 @@ return this

const enabled = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
process.env.OTEL_LOG_LEVEL === 'debug',
config.enabled
))
const logLevel = coalesce(
process.env.DD_TRACE_LOG_LEVEL,
process.env.OTEL_LOG_LEVEL,
config.logLevel
)
log.toggle(enabled, logLevel)
module.exports = log
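Under the startup wiring above, the env vars win over the in-memory config defaults; a few illustrative combinations:

// DD_TRACE_DEBUG=true -> enabled, logLevel 'debug'
// OTEL_LOG_LEVEL=debug -> enabled, logLevel 'debug'
// DD_TRACE_LOG_LEVEL=error, no debug flag -> stays disabled; once toggled on,
// only error-level logs flow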
'use strict'
const { storage } = require('../../../datadog-core')
const { getChannelLogLevel, debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')
const { LogChannel } = require('./channels')
const defaultLogger = {

@@ -15,3 +14,3 @@ debug: msg => console.debug(msg), /* eslint-disable-line no-console */

let logger = defaultLogger
let logLevel = getChannelLogLevel()
let logChannel = new LogChannel()

@@ -27,32 +26,11 @@ function withNoop (fn) {

function unsubscribeAll () {
if (debugChannel.hasSubscribers) {
debugChannel.unsubscribe(onDebug)
}
if (infoChannel.hasSubscribers) {
infoChannel.unsubscribe(onInfo)
}
if (warnChannel.hasSubscribers) {
warnChannel.unsubscribe(onWarn)
}
if (errorChannel.hasSubscribers) {
errorChannel.unsubscribe(onError)
}
logChannel.unsubscribe({ debug, info, warn, error })
}
function toggleSubscription (enable) {
function toggleSubscription (enable, level) {
unsubscribeAll()
if (enable) {
if (debugChannel.logLevel >= logLevel) {
debugChannel.subscribe(onDebug)
}
if (infoChannel.logLevel >= logLevel) {
infoChannel.subscribe(onInfo)
}
if (warnChannel.logLevel >= logLevel) {
warnChannel.subscribe(onWarn)
}
if (errorChannel.logLevel >= logLevel) {
errorChannel.subscribe(onError)
}
logChannel = new LogChannel(level)
logChannel.subscribe({ debug, info, warn, error })
}

@@ -62,7 +40,4 @@ }

function toggle (enable, level) {
if (level !== undefined) {
logLevel = getChannelLogLevel(level)
}
enabled = enable
toggleSubscription(enabled)
toggleSubscription(enabled, level)
}

@@ -79,22 +54,5 @@

enabled = false
logLevel = getChannelLogLevel()
toggleSubscription(false)
}
function onError (err) {
if (enabled) error(err)
}
function onWarn (message) {
if (enabled) warn(message)
}
function onInfo (message) {
if (enabled) info(message)
}
function onDebug (message) {
if (enabled) debug(message)
}
function error (err) {

@@ -101,0 +59,0 @@ if (typeof err !== 'object' || !err) {

@@ -5,5 +5,7 @@ 'use strict'

const NoopAppsecSdk = require('../appsec/sdk/noop')
const NoopDogStatsDClient = require('./dogstatsd')
const noop = new NoopTracer()
const noopAppsec = new NoopAppsecSdk()
const noopDogStatsDClient = new NoopDogStatsDClient()

@@ -14,2 +16,3 @@ class Tracer {

this.appsec = noopAppsec
this.dogstatsd = noopDogStatsDClient
}

@@ -16,0 +19,0 @@

@@ -21,2 +21,3 @@ 'use strict'

addTags (keyValueMap) { return this }
addLink (link) { return this }
log () { return this }

@@ -23,0 +24,0 @@ logEvent () {}

@@ -5,2 +5,3 @@ 'use strict'

const { trace, ROOT_CONTEXT } = require('@opentelemetry/api')
const DataDogSpanContext = require('../opentracing/span_context')

@@ -10,31 +11,2 @@ const SpanContext = require('./span_context')

// Horrible hack to acquire the otherwise inaccessible SPAN_KEY so we can redirect it...
// This is used for getting the current span context in OpenTelemetry, but the SPAN_KEY value is
// not exposed as it's meant to be read-only from outside the module. We want to hijack this logic
// so we can instead get the span context from the datadog context manager instead.
let SPAN_KEY
trace.getSpan({
getValue (key) {
SPAN_KEY = key
}
})
// Whenever a value is acquired from the context map we should mostly delegate to the real getter,
// but when accessing the current span we should hijack that access to instead provide a fake span
// which we can use to get an OTel span context wrapping the datadog active scope span context.
function wrappedGetValue (target) {
return (key) => {
if (key === SPAN_KEY) {
return {
spanContext () {
const activeSpan = tracer.scope().active()
const context = activeSpan && activeSpan.context()
return new SpanContext(context)
}
}
}
return target.getValue(key)
}
}
class ContextManager {

@@ -46,9 +18,18 @@ constructor () {

active () {
const active = this._store.getStore() || ROOT_CONTEXT
const activeSpan = tracer.scope().active()
const store = this._store.getStore()
const context = (activeSpan && activeSpan.context()) || store || ROOT_CONTEXT
return new Proxy(active, {
get (target, key) {
return key === 'getValue' ? wrappedGetValue(target) : target[key]
}
})
if (!(context instanceof DataDogSpanContext)) {
return context
}
if (!context._otelSpanContext) {
const newSpanContext = new SpanContext(context)
context._otelSpanContext = newSpanContext
}
if (store && trace.getSpanContext(store) === context._otelSpanContext) {
return store
}
return trace.setSpanContext(store || ROOT_CONTEXT, context._otelSpanContext)
}

@@ -59,6 +40,10 @@

const ddScope = tracer.scope()
return ddScope.activate(span._ddSpan, () => {
const run = () => {
const cb = thisArg == null ? fn : fn.bind(thisArg)
return this._store.run(context, cb, ...args)
})
}
if (span && span._ddSpan) {
return ddScope.activate(span._ddSpan, run)
}
return run()
}

@@ -73,7 +58,5 @@

// Not part of the spec but the Node.js API expects these
enable () {}
disable () {}
}
module.exports = ContextManager

@@ -27,7 +27,7 @@ 'use strict'

get traceId () {
return this._ddContext._traceId.toString(16)
return this._ddContext.toTraceId(true)
}
get spanId () {
return this._ddContext._spanId.toString(16)
return this._ddContext.toSpanId(true)
}

@@ -34,0 +34,0 @@

@@ -23,2 +23,16 @@ 'use strict'

function isTimeInput (startTime) {
if (typeof startTime === 'number') {
return true
}
if (startTime instanceof Date) {
return true
}
if (Array.isArray(startTime) && startTime.length === 2 &&
typeof startTime[0] === 'number' && typeof startTime[1] === 'number') {
return true
}
return false
}
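// For reference, the three OTel TimeInput shapes the check above accepts:
//   isTimeInput(1700000000000) // true: epoch milliseconds
//   isTimeInput(new Date()) // true: Date instance
//   isTimeInput([1700000000, 500000000]) // true: [seconds, nanoseconds] tuple
//   isTimeInput('now') // false: strings are rejected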
const spanKindNames = {

@@ -136,3 +150,4 @@ [api.SpanKind.INTERNAL]: kinds.INTERNAL,

[RESOURCE_NAME]: spanName
}
},
links
}, _tracer._debug)

@@ -153,3 +168,2 @@

this.kind = kind
this.links = links
this._spanProcessor.onStart(this, context)

@@ -167,5 +181,7 @@ }

}
get instrumentationLibrary () {
return this._parentTracer.instrumentationLibrary
}
get _spanProcessor () {

@@ -184,2 +200,6 @@ return this._parentTracer.getActiveSpanProcessor()

setAttribute (key, value) {
if (key === 'http.response.status_code') {
this._ddSpan.setTag('http.status_code', value.toString())
}
this._ddSpan.setTag(key, value)

@@ -190,2 +210,6 @@ return this

setAttributes (attributes) {
if ('http.response.status_code' in attributes) {
attributes['http.status_code'] = attributes['http.response.status_code'].toString()
}
this._ddSpan.addTags(attributes)

@@ -195,4 +219,6 @@ return this

addEvent (name, attributesOrStartTime, startTime) {
api.diag.warn('Events not supported')
addLink (context, attributes) {
// extract dd context
const ddSpanContext = context._ddContext
this._ddSpan.addLink(ddSpanContext, attributes)
return this

@@ -237,8 +263,25 @@ }

recordException (exception) {
addEvent (name, attributesOrStartTime, startTime) {
startTime = attributesOrStartTime && isTimeInput(attributesOrStartTime) ? attributesOrStartTime : startTime
const hrStartTime = timeInputToHrTime(startTime || (performance.now() + timeOrigin))
startTime = hrTimeToMilliseconds(hrStartTime)
this._ddSpan.addEvent(name, attributesOrStartTime, startTime)
return this
}
recordException (exception, timeInput) {
// HACK: identifier is added so that trace.error remains unchanged after a call to otel.recordException
this._ddSpan.addTags({
[ERROR_TYPE]: exception.name,
[ERROR_MESSAGE]: exception.message,
[ERROR_STACK]: exception.stack
[ERROR_STACK]: exception.stack,
doNotSetTraceError: true
})
const attributes = {}
if (exception.message) attributes['exception.message'] = exception.message
if (exception.type) attributes['exception.type'] = exception.type
if (exception.escaped) attributes['exception.escaped'] = exception.escaped
if (exception.stack) attributes['exception.stacktrace'] = exception.stack
this.addEvent(exception.name, attributes, timeInput)
}

@@ -251,3 +294,3 @@

get ended () {
return typeof this.duration !== 'undefined'
return this.duration !== undefined
}

@@ -254,0 +297,0 @@ }

'use strict'
const { trace, context } = require('@opentelemetry/api')
const { trace, context, propagation } = require('@opentelemetry/api')
const { W3CTraceContextPropagator } = require('@opentelemetry/core')

@@ -55,2 +56,9 @@ const tracer = require('../../')

}
// The default propagator is the W3C Trace Context propagator; users should be
// able to pass in others as needed
if (config.propagator) {
propagation.setGlobalPropagator(config.propagator)
} else {
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
}
}

@@ -57,0 +65,0 @@

@@ -10,2 +10,3 @@ 'use strict'

const SpanContext = require('./span_context')
const TextMapPropagator = require('../opentracing/propagation/text_map')

@@ -19,2 +20,3 @@ class Tracer {

this.instrumentationLibrary = library
this._spanLimits = {}
}

@@ -26,2 +28,20 @@

_createSpanContextFromParent (parentSpanContext) {
return new SpanContext({
traceId: parentSpanContext._traceId,
spanId: id(),
parentId: parentSpanContext._spanId,
sampling: parentSpanContext._sampling,
baggageItems: Object.assign({}, parentSpanContext._baggageItems),
trace: parentSpanContext._trace,
tracestate: parentSpanContext._tracestate
})
}
// Extracted method to create span context for a new span
_createSpanContextForNewSpan (context) {
const { traceId, spanId, traceFlags, traceState } = context
return TextMapPropagator._convertOtelContextToDatadog(traceId, spanId, traceFlags, traceState)
}
startSpan (name, options = {}, context = api.context.active()) {

@@ -34,17 +54,7 @@ // remove span from context in case a root span is requested via options

const parentSpanContext = parentSpan && parentSpan.spanContext()
let spanContext
// TODO: Need a way to get 128-bit trace IDs for the validity check API to work...
// if (parent && api.trace.isSpanContextValid(parent)) {
if (parentSpanContext && parentSpanContext.traceId) {
const parent = parentSpanContext._ddContext
spanContext = new SpanContext({
traceId: parent._traceId,
spanId: id(),
parentId: parent._spanId,
sampling: parent._sampling,
baggageItems: Object.assign({}, parent._baggageItems),
trace: parent._trace,
tracestate: parent._tracestate
})
if (parentSpanContext && api.trace.isSpanContextValid(parentSpanContext)) {
spanContext = parentSpanContext._ddContext
? this._createSpanContextFromParent(parentSpanContext._ddContext)
: this._createSpanContextForNewSpan(parentSpanContext)
} else {

@@ -125,4 +135,9 @@ spanContext = new SpanContext()

}
// not used in our codebase but needed for compatibility. See issue #1244
getSpanLimits () {
return this._spanLimits
}
}
module.exports = Tracer

@@ -18,3 +18,3 @@ 'use strict'

if (this._config.traceId128BitLoggingEnabled && spanContext._trace.tags['_dd.p.tid']) {
carrier.dd.trace_id = spanContext._trace.tags['_dd.p.tid'] + spanContext._traceId.toString(16)
carrier.dd.trace_id = spanContext.toTraceId(true)
} else {

@@ -21,0 +21,0 @@ carrier.dd.trace_id = spanContext.toTraceId()

'use strict'
const pick = require('lodash.pick')
const pick = require('../../../../datadog-core/src/utils/src/pick')
const id = require('../../id')
const DatadogSpanContext = require('../span_context')
const OtelSpanContext = require('../../opentelemetry/span_context')
const log = require('../../log')
const TraceState = require('./tracestate')
const tags = require('../../../../../ext/tags')
const { channel } = require('dc-polyfill')
const { AUTO_KEEP, AUTO_REJECT, USER_KEEP } = require('../../../../../ext/priority')
const injectCh = channel('dd-trace:span:inject')
const extractCh = channel('dd-trace:span:extract')
const traceKey = 'x-datadog-trace-id'

@@ -42,2 +48,3 @@ const spanKey = 'x-datadog-parent-id'

const invalidSegment = /^0+$/
const zeroTraceId = '0000000000000000'

@@ -56,2 +63,6 @@ class TextMapPropagator {

if (injectCh.hasSubscribers) {
injectCh.publish({ spanContext, carrier })
}
log.debug(() => `Inject into carrier: ${JSON.stringify(pick(carrier, logKeys))}.`)

@@ -65,2 +76,6 @@ }

if (extractCh.hasSubscribers) {
extractCh.publish({ spanContext, carrier })
}
log.debug(() => `Extract from carrier: ${JSON.stringify(pick(carrier, logKeys))}.`)

@@ -177,5 +192,13 @@

ts.forVendor('dd', state => {
if (!spanContext._isRemote) {
// SpanContext was created by a ddtrace span.
// Last datadog span id should be set to the current span.
state.set('p', spanContext._spanId)
} else if (spanContext._trace.tags[tags.DD_PARENT_ID]) {
// Propagate the last Datadog span id set on the remote span.
state.set('p', spanContext._trace.tags[tags.DD_PARENT_ID])
}
state.set('s', priority)
if (mechanism) {
state.set('t.dm', mechanism)
state.set('t.dm', `-${mechanism}`)
}

@@ -213,5 +236,52 @@

_hasTraceIdConflict (w3cSpanContext, firstSpanContext) {
return w3cSpanContext !== null &&
firstSpanContext.toTraceId(true) === w3cSpanContext.toTraceId(true) &&
firstSpanContext.toSpanId() !== w3cSpanContext.toSpanId()
}
_hasParentIdInTags (spanContext) {
return tags.DD_PARENT_ID in spanContext._trace.tags &&
spanContext._trace.tags[tags.DD_PARENT_ID] !== zeroTraceId
}
_updateParentIdFromDdHeaders (carrier, firstSpanContext) {
const ddCtx = this._extractDatadogContext(carrier)
if (ddCtx !== null) {
firstSpanContext._trace.tags[tags.DD_PARENT_ID] = ddCtx._spanId.toString().padStart(16, '0')
}
}
_resolveTraceContextConflicts (w3cSpanContext, firstSpanContext, carrier) {
if (!this._hasTraceIdConflict(w3cSpanContext, firstSpanContext)) {
return firstSpanContext
}
if (this._hasParentIdInTags(w3cSpanContext)) {
// tracecontext headers contain a p value; ensure this value is sent to the backend
firstSpanContext._trace.tags[tags.DD_PARENT_ID] = w3cSpanContext._trace.tags[tags.DD_PARENT_ID]
} else {
// if p value is not present in tracestate, use the parent id from the datadog headers
this._updateParentIdFromDdHeaders(carrier, firstSpanContext)
}
// the span_id in tracecontext takes precedence over the first extracted propagation style
firstSpanContext._spanId = w3cSpanContext._spanId
return firstSpanContext
}
_extractSpanContext (carrier) {
let spanContext = null
for (const extractor of this._config.tracePropagationStyle.extract) {
let spanContext = null
// ensure tracecontext headers take precedence over other extracted headers
if (spanContext !== null) {
if (this._config.tracePropagationExtractFirst) {
return spanContext
}
if (extractor !== 'tracecontext') {
continue
}
spanContext = this._resolveTraceContextConflicts(
this._extractTraceparentContext(carrier), spanContext, carrier)
break
}
switch (extractor) {

@@ -224,17 +294,19 @@ case 'datadog':

break
case 'b3': // TODO: should match "b3 single header" in next major
case 'b3' && this._config.tracePropagationStyle.otelPropagators:
// TODO: should match "b3 single header" in next major
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
case 'b3':
case 'b3multi':
spanContext = this._extractB3MultiContext(carrier)
break
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
default:
log.warn(`Unknown propagation style: ${extractor}`)
}
if (spanContext !== null) {
return spanContext
}
}
return this._extractSqsdContext(carrier)
return spanContext || this._extractSqsdContext(carrier)
}

@@ -288,3 +360,4 @@

spanId: null,
sampling: { priority }
sampling: { priority },
isRemote: true
})

@@ -322,3 +395,3 @@ }

if (matches.length) {
const [ version, traceId, spanId, flags, tail ] = matches.slice(1)
const [version, traceId, spanId, flags, tail] = matches.slice(1)
const traceparent = { version }

@@ -338,2 +411,3 @@ const tracestate = TraceState.fromString(carrier.tracestate)

spanId: id(spanId, 16),
isRemote: true,
sampling: { priority: parseInt(flags, 10) & 1 ? 1 : 0 },

@@ -349,2 +423,6 @@ traceparent,

switch (key) {
case 'p': {
spanContext._trace.tags[tags.DD_PARENT_ID] = value
break
}
case 's': {

@@ -380,2 +458,6 @@ const priority = parseInt(value, 10)

if (!spanContext._trace.tags[tags.DD_PARENT_ID]) {
spanContext._trace.tags[tags.DD_PARENT_ID] = zeroTraceId
}
this._extractBaggageItems(carrier, spanContext)

@@ -393,3 +475,4 @@ return spanContext

traceId: id(carrier[traceKey], radix),
spanId: id(carrier[spanKey], radix)
spanId: id(carrier[spanKey], radix),
isRemote: true
})

@@ -521,3 +604,3 @@ }

if (tid === '0000000000000000') return
if (tid === zeroTraceId) return

@@ -552,4 +635,63 @@ spanContext._trace.tags['_dd.p.tid'] = tid

}
static _convertOtelContextToDatadog (traceId, spanId, traceFlag, ts, meta = {}) {
const origin = null
let samplingPriority = traceFlag
ts = ts?.traceparent || null
if (ts) {
// Use TraceState.fromString to parse the tracestate header
const traceState = TraceState.fromString(ts)
let ddTraceStateData = null
// Extract Datadog specific trace state data
traceState.forVendor('dd', (state) => {
ddTraceStateData = state
return state // keep the vendor state unchanged
})
if (ddTraceStateData) {
// ddTraceStateData is a Map-like structure holding the Datadog ('dd')
// tracestate entries; extract the values needed below
const samplingPriorityTs = ddTraceStateData.get('s')
const origin = ddTraceStateData.get('o')
// Convert Map to object for meta
const otherPropagatedTags = Object.fromEntries(ddTraceStateData.entries())
// Update meta and samplingPriority based on extracted values
Object.assign(meta, otherPropagatedTags)
samplingPriority = TextMapPropagator._getSamplingPriority(traceFlag, parseInt(samplingPriorityTs, 10), origin)
} else {
log.debug(`no dd list member in tracestate from incoming request: ${ts}`)
}
}
const spanContext = new OtelSpanContext({
traceId: id(traceId, 16), spanId: id(), tags: meta, parentId: id(spanId, 16)
})
spanContext._sampling = { priority: samplingPriority }
spanContext._trace = { origin }
return spanContext
}
static _getSamplingPriority (traceparentSampled, tracestateSamplingPriority, origin = null) {
const fromRumWithoutPriority = !tracestateSamplingPriority && origin === 'rum'
let samplingPriority
if (!fromRumWithoutPriority && traceparentSampled === 0 &&
(!tracestateSamplingPriority || tracestateSamplingPriority >= 0)) {
samplingPriority = 0
} else if (!fromRumWithoutPriority && traceparentSampled === 1 &&
(!tracestateSamplingPriority || tracestateSamplingPriority < 0)) {
samplingPriority = 1
} else {
samplingPriority = tracestateSamplingPriority
}
return samplingPriority
}
}
module.exports = TextMapPropagator
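A worked summary of _getSamplingPriority above, assuming no RUM origin:

// traceparent flag 0, no conflicting tracestate 's' (unset or >= 0) -> 0 (drop)
// traceparent flag 1, no conflicting tracestate 's' (unset or < 0) -> 1 (keep)
// on conflict the tracestate 's' wins, e.g. flag 1 with s=2 -> 2
// (a tracestate 's' of 0 is falsy and therefore treated as unset)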

@@ -14,2 +14,3 @@ 'use strict'

this._spanId = props.spanId
this._isRemote = props.isRemote ?? true
this._parentId = props.parentId || null

@@ -30,9 +31,18 @@ this._name = props.name

}
this._otelSpanContext = undefined
}
toTraceId () {
toTraceId (get128bitId = false) {
if (get128bitId) {
return this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128]
? this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0')
: this._traceId.toString(16).padStart(32, '0')
}
return this._traceId.toString(10)
}
toSpanId () {
toSpanId (get128bitId = false) {
if (get128bitId) {
return this._spanId.toString(16).padStart(16, '0')
}
return this._spanId.toString(10)

@@ -43,6 +53,4 @@ }

const flags = this._sampling.priority >= AUTO_KEEP ? '01' : '00'
const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128]
? this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0')
: this._traceId.toString(16).padStart(32, '0')
const spanId = this._spanId.toString(16).padStart(16, '0')
const traceId = this.toTraceId(true)
const spanId = this.toSpanId(true)
const version = (this._traceparent && this._traceparent.version) || '00'

@@ -49,0 +57,0 @@ return `${version}-${traceId}-${spanId}-${flags}`
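A worked example of the 128-bit formatting above, using illustrative ids: the high 64 bits live in the _dd.p.tid trace tag and concatenate with the zero-padded low 64 bits.

// _dd.p.tid tag: '4bf92f3577b34da6'
// 64-bit trace id (hex): 'a3ce929d0e0e4736'
// toTraceId(true) => '4bf92f3577b34da6a3ce929d0e0e4736'
// toSpanId(true) => '00f067aa0ba902b7' (16 hex chars, zero-padded)
// traceparent => '00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01'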

@@ -29,8 +29,10 @@ 'use strict'

const OTEL_ENABLED = !!process.env.DD_TRACE_OTEL_ENABLED
const ALLOWED = ['string', 'number', 'boolean']
const integrationCounters = {
span_created: {},
span_finished: {}
spans_created: {},
spans_finished: {}
}
const startCh = channel('dd-trace:span:start')
const finishCh = channel('dd-trace:span:finish')

@@ -69,2 +71,4 @@

this._events = []
// For internal use only. You probably want `context()._name`.

@@ -76,3 +80,3 @@ // This name property is not updated when the span name changes.

getIntegrationCounter('span_created', this._integrationName).inc()
getIntegrationCounter('spans_created', this._integrationName).inc()

@@ -88,2 +92,5 @@ this._spanContext = this._createContext(parent, fields)

this._links = []
fields.links && fields.links.forEach(link => this.addLink(link.context, link.attributes))
if (DD_TRACE_EXPERIMENTAL_SPAN_COUNTS && finishedRegistry) {

@@ -99,2 +106,6 @@ runtimeMetrics.increment('runtime.node.spans.unfinished')

spanleak.addSpan(this, operationName)
if (startCh.hasSubscribers) {
startCh.publish({ span: this, fields })
}
}

@@ -158,2 +169,22 @@

addLink (context, attributes) {
this._links.push({
context: context._ddContext ? context._ddContext : context,
attributes: this._sanitizeAttributes(attributes)
})
}
addEvent (name, attributesOrStartTime, startTime) {
const event = { name }
if (attributesOrStartTime) {
if (typeof attributesOrStartTime === 'object') {
event.attributes = this._sanitizeEventAttributes(attributesOrStartTime)
} else {
startTime = attributesOrStartTime
}
}
event.startTime = startTime || this._getTime()
this._events.push(event)
}
finish (finishTime) {

@@ -170,3 +201,3 @@ if (this._duration !== undefined) {

getIntegrationCounter('span_finished', this._integrationName).inc()
getIntegrationCounter('spans_finished', this._integrationName).inc()

@@ -195,2 +226,52 @@ if (DD_TRACE_EXPERIMENTAL_SPAN_COUNTS && finishedRegistry) {

_sanitizeAttributes (attributes = {}) {
const sanitizedAttributes = {}
const addArrayOrScalarAttributes = (key, maybeArray) => {
if (Array.isArray(maybeArray)) {
for (const subkey in maybeArray) {
addArrayOrScalarAttributes(`${key}.${subkey}`, maybeArray[subkey])
}
} else {
const maybeScalar = maybeArray
if (ALLOWED.includes(typeof maybeScalar)) {
// Convert the value to a string if it's not one already
sanitizedAttributes[key] = typeof maybeScalar === 'string' ? maybeScalar : String(maybeScalar)
} else {
log.warn('Dropping span link attribute. It is not of an allowed type')
}
}
}
Object.entries(attributes).forEach(entry => {
const [key, value] = entry
addArrayOrScalarAttributes(key, value)
})
return sanitizedAttributes
}
_sanitizeEventAttributes (attributes = {}) {
const sanitizedAttributes = {}
for (const key in attributes) {
const value = attributes[key]
if (Array.isArray(value)) {
const newArray = []
for (const subkey in value) {
if (ALLOWED.includes(typeof value[subkey])) {
newArray.push(value[subkey])
} else {
log.warn('Dropping span event attribute. It is not of an allowed type')
}
}
sanitizedAttributes[key] = newArray
} else if (ALLOWED.includes(typeof value)) {
sanitizedAttributes[key] = value
} else {
log.warn('Dropping span event attribute. It is not of an allowed type')
}
}
return sanitizedAttributes
}
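// For example, link attributes flatten arrays into indexed keys and coerce
// scalars to strings, while event attributes keep arrays but drop entries of
// disallowed types:
//   _sanitizeAttributes({ ok: true, ids: [1, 'a'] })
//     -> { ok: 'true', 'ids.0': '1', 'ids.1': 'a' }
//   _sanitizeEventAttributes({ ok: true, ids: [1, { x: 1 }] })
//     -> { ok: true, ids: [1] } (the object entry is dropped with a warning)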
_createContext (parent, fields) {

@@ -239,2 +320,4 @@ let spanContext

}
// SpanContext was NOT propagated from a remote parent
spanContext._isRemote = false

@@ -241,0 +324,0 @@ return spanContext

@@ -8,2 +8,3 @@ 'use strict'

const TextMapPropagator = require('./propagation/text_map')
const DSMTextMapPropagator = require('./propagation/text_map_dsm')
const HttpPropagator = require('./propagation/http')

@@ -23,3 +24,3 @@ const BinaryPropagator = require('./propagation/binary')

class DatadogTracer {
constructor (config) {
constructor (config, prioritySampler) {
const Exporter = getExporter(config.experimental.exporter)

@@ -33,3 +34,3 @@

this._debug = config.debug
this._prioritySampler = new PrioritySampler(config.env, config.sampler)
this._prioritySampler = prioritySampler ?? new PrioritySampler(config.env, config.sampler)
this._exporter = new Exporter(config, this._prioritySampler)

@@ -44,3 +45,4 @@ this._processor = new SpanProcessor(this._exporter, this._prioritySampler, config)

[formats.BINARY]: new BinaryPropagator(config),
[formats.LOG]: new LogPropagator(config)
[formats.LOG]: new LogPropagator(config),
[formats.TEXT_MAP_DSM]: new DSMTextMapPropagator(config)
}

@@ -68,3 +70,4 @@ if (config.reportHostname) {

traceId128BitGenerationEnabled: this._traceId128BitGenerationEnabled,
integrationName: options.integrationName
integrationName: options.integrationName,
links: options.links
}, this._debug)

@@ -78,10 +81,12 @@

inject (spanContext, format, carrier) {
if (spanContext instanceof Span) {
spanContext = spanContext.context()
inject (context, format, carrier) {
if (context instanceof Span) {
context = context.context()
}
try {
this._prioritySampler.sample(spanContext)
this._propagators[format].inject(spanContext, carrier)
if (format !== 'text_map_dsm') {
this._prioritySampler.sample(context)
}
this._propagators[format].inject(context, carrier)
} catch (e) {

@@ -88,0 +93,0 @@ log.error(e)
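// Usage sketch (hedged; `tracer` is an assumed configured instance): with the
// change above, only non-DSM formats run priority sampling before injection.
//   tracer.inject(span, 'text_map', headers)               // samples, then injects
//   tracer.inject(span.context(), 'text_map_dsm', headers) // injects without sampling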

@@ -7,3 +7,2 @@ 'use strict'

const log = require('./log')
const Nomenclature = require('./service-naming')

@@ -106,3 +105,3 @@ const loadChannel = channel('dd-trace:instrumentation:load')

this._tracerConfig = config
Nomenclature.configure(config)
this._tracer._nomenclature.configure(config)

@@ -109,0 +108,0 @@ for (const name in pluginClasses) {

@@ -19,3 +19,4 @@ const {

TEST_SKIPPED_BY_ITR,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE
} = require('./util/test')

@@ -31,3 +32,3 @@ const Plugin = require('./plugin')

} = require('../ci-visibility/telemetry')
const { CI_PROVIDER_NAME, GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH } = require('./util/tags')
const { CI_PROVIDER_NAME, GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH, CI_WORKSPACE_PATH } = require('./util/tags')
const { OS_VERSION, OS_PLATFORM, OS_ARCHITECTURE, RUNTIME_NAME, RUNTIME_VERSION } = require('./util/env')

@@ -41,13 +42,13 @@

this.addSub(`ci:${this.constructor.id}:itr-configuration`, ({ onDone }) => {
if (!this.tracer._exporter || !this.tracer._exporter.getItrConfiguration) {
this.addSub(`ci:${this.constructor.id}:library-configuration`, ({ onDone }) => {
if (!this.tracer._exporter || !this.tracer._exporter.getLibraryConfiguration) {
return onDone({ err: new Error('CI Visibility was not initialized correctly') })
}
this.tracer._exporter.getItrConfiguration(this.testConfiguration, (err, itrConfig) => {
this.tracer._exporter.getLibraryConfiguration(this.testConfiguration, (err, libraryConfig) => {
if (err) {
log.error(`Intelligent Test Runner configuration could not be fetched. ${err.message}`)
log.error(`Library configuration could not be fetched. ${err.message}`)
} else {
this.itrConfig = itrConfig
this.libraryConfig = libraryConfig
}
onDone({ err, itrConfig })
onDone({ err, libraryConfig })
})

@@ -57,3 +58,3 @@ })

this.addSub(`ci:${this.constructor.id}:test-suite:skippable`, ({ onDone }) => {
if (!this.tracer._exporter || !this.tracer._exporter.getSkippableSuites) {
if (!this.tracer._exporter?.getSkippableSuites) {
return onDone({ err: new Error('CI Visibility was not initialized correctly') })

@@ -98,2 +99,9 @@ }

})
// only for vitest
// These are added for the worker threads to use
if (this.constructor.id === 'vitest') {
process.env.DD_CIVISIBILITY_TEST_SESSION_ID = this.testSessionSpan.context().toTraceId()
process.env.DD_CIVISIBILITY_TEST_MODULE_ID = this.testModuleSpan.context().toSpanId()
}
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'module')

@@ -123,2 +131,15 @@ })

})
this.addSub(`ci:${this.constructor.id}:known-tests`, ({ onDone }) => {
if (!this.tracer._exporter?.getKnownTests) {
return onDone({ err: new Error('CI Visibility was not initialized correctly') })
}
this.tracer._exporter.getKnownTests(this.testConfiguration, (err, knownTests) => {
if (err) {
log.error(`Known tests could not be fetched. ${err.message}`)
this.libraryConfig.isEarlyFlakeDetectionEnabled = false
}
onDone({ err, knownTests })
})
})
}

@@ -133,3 +154,3 @@

testFramework,
isUnsupportedCIProvider: this.isUnsupportedCIProvider,
isUnsupportedCIProvider: !this.ciProviderName,
...tags

@@ -150,3 +171,2 @@ })

this.testEnvironmentMetadata = getTestEnvironmentMetadata(this.constructor.id, this.config)
this.codeOwnersEntries = getCodeOwnersFileEntries()

@@ -162,7 +182,12 @@ const {

[GIT_BRANCH]: branch,
[CI_PROVIDER_NAME]: ciProviderName
[CI_PROVIDER_NAME]: ciProviderName,
[CI_WORKSPACE_PATH]: repositoryRoot
} = this.testEnvironmentMetadata
this.isUnsupportedCIProvider = !ciProviderName
this.repositoryRoot = repositoryRoot || process.cwd()
this.codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot)
this.ciProviderName = ciProviderName
this.testConfiguration = {

@@ -195,3 +220,9 @@ repositoryUrl,

const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
const { [TEST_SOURCE_FILE]: testSourceFile } = extraTags
// We'll try with the test source file if available (it could be different from the test suite)
let codeOwners = getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
if (!codeOwners) {
codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
}
if (codeOwners) {

@@ -198,0 +229,0 @@ testTags[TEST_CODE_OWNERS] = codeOwners
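// Lookup order sketch with hypothetical paths: the source file is tried first
// because it can differ from the suite path (e.g. re-exported suites):
//   getCodeOwnersForFilename('src/add.spec.js', entries)  // TEST_SOURCE_FILE first
//   getCodeOwnersForFilename('specs/add.js', entries)     // fallback: test suite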

@@ -15,7 +15,7 @@ 'use strict'

configure (config) {
super.configure(config)
for (const name in this.constructor.plugins) {
const pluginConfig = config[name] === false ? false : {
...config,
...config[name]
}
const pluginConfig = config[name] === false
? false
: { ...config, ...config[name] }

@@ -22,0 +22,0 @@ this[name].configure(pluginConfig)

'use strict'
const StoragePlugin = require('./storage')
const { PEER_SERVICE_KEY } = require('../constants')
const { PEER_SERVICE_KEY, PEER_SERVICE_SOURCE_KEY } = require('../constants')

@@ -23,2 +23,3 @@ class DatabasePlugin extends StoragePlugin {

}
encodingServiceTags (serviceTag, encodeATag, spanConfig) {

@@ -31,3 +32,3 @@ if (serviceTag !== spanConfig) {

createDBMPropagationCommentService (serviceName) {
createDBMPropagationCommentService (serviceName, span) {
this.encodingServiceTags('dddbs', 'encodedDddbs', serviceName)

@@ -37,7 +38,22 @@ this.encodingServiceTags('dde', 'encodedDde', this.tracer._env)

this.encodingServiceTags('ddpv', 'encodedDdpv', this.tracer._version)
if (span.context()._tags['out.host']) {
this.encodingServiceTags('ddh', 'encodedDdh', span._spanContext._tags['out.host'])
}
if (span.context()._tags['db.name']) {
this.encodingServiceTags('dddb', 'encodedDddb', span._spanContext._tags['db.name'])
}
const { encodedDddbs, encodedDde, encodedDdps, encodedDdpv } = this.serviceTags
const { encodedDddb, encodedDddbs, encodedDde, encodedDdh, encodedDdps, encodedDdpv } = this.serviceTags
return `dddbs='${encodedDddbs}',dde='${encodedDde}',` +
let dbmComment = `dddb='${encodedDddb}',dddbs='${encodedDddbs}',dde='${encodedDde}',ddh='${encodedDdh}',` +
`ddps='${encodedDdps}',ddpv='${encodedDdpv}'`
const peerData = this.getPeerService(span.context()._tags)
if (peerData !== undefined && peerData[PEER_SERVICE_SOURCE_KEY] === PEER_SERVICE_KEY) {
this.encodingServiceTags('ddprs', 'encodedDdprs', peerData[PEER_SERVICE_KEY])
const { encodedDdprs } = this.serviceTags
dbmComment += `,ddprs='${encodedDdprs}'`
}
return dbmComment
}
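// Example of the comment this now builds (all values hypothetical); `ddprs`
// is appended only when peer.service came from the peer-service source key:
//   dddb='orders',dddbs='orders-db',dde='prod',ddh='db.internal',ddps='checkout',ddpv='1.2.3',ddprs='orders-db'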

@@ -61,3 +77,3 @@

const servicePropagation = this.createDBMPropagationCommentService(dbmService)
const servicePropagation = this.createDBMPropagationCommentService(dbmService, span)

@@ -64,0 +80,0 @@ if (isPreparedStatement || mode === 'service') {

'use strict'
module.exports = {
get '@apollo/gateway' () { return require('../../../datadog-plugin-apollo/src') },
get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },

@@ -20,23 +21,25 @@ get '@cucumber/cucumber' () { return require('../../../datadog-plugin-cucumber/src') },

get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
get 'aerospike' () { return require('../../../datadog-plugin-aerospike/src') },
get 'amqp10' () { return require('../../../datadog-plugin-amqp10/src') },
get 'amqplib' () { return require('../../../datadog-plugin-amqplib/src') },
get '@vitest/runner' () { return require('../../../datadog-plugin-vitest/src') },
get aerospike () { return require('../../../datadog-plugin-aerospike/src') },
get amqp10 () { return require('../../../datadog-plugin-amqp10/src') },
get amqplib () { return require('../../../datadog-plugin-amqplib/src') },
get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') },
get 'bunyan' () { return require('../../../datadog-plugin-bunyan/src') },
get bunyan () { return require('../../../datadog-plugin-bunyan/src') },
get 'cassandra-driver' () { return require('../../../datadog-plugin-cassandra-driver/src') },
get 'connect' () { return require('../../../datadog-plugin-connect/src') },
get 'couchbase' () { return require('../../../datadog-plugin-couchbase/src') },
get 'cypress' () { return require('../../../datadog-plugin-cypress/src') },
get 'dns' () { return require('../../../datadog-plugin-dns/src') },
get 'elasticsearch' () { return require('../../../datadog-plugin-elasticsearch/src') },
get 'express' () { return require('../../../datadog-plugin-express/src') },
get 'fastify' () { return require('../../../datadog-plugin-fastify/src') },
get child_process () { return require('../../../datadog-plugin-child_process/src') },
get connect () { return require('../../../datadog-plugin-connect/src') },
get couchbase () { return require('../../../datadog-plugin-couchbase/src') },
get cypress () { return require('../../../datadog-plugin-cypress/src') },
get dns () { return require('../../../datadog-plugin-dns/src') },
get elasticsearch () { return require('../../../datadog-plugin-elasticsearch/src') },
get express () { return require('../../../datadog-plugin-express/src') },
get fastify () { return require('../../../datadog-plugin-fastify/src') },
get 'find-my-way' () { return require('../../../datadog-plugin-find-my-way/src') },
get 'graphql' () { return require('../../../datadog-plugin-graphql/src') },
get 'grpc' () { return require('../../../datadog-plugin-grpc/src') },
get 'hapi' () { return require('../../../datadog-plugin-hapi/src') },
get 'http' () { return require('../../../datadog-plugin-http/src') },
get 'http2' () { return require('../../../datadog-plugin-http2/src') },
get 'https' () { return require('../../../datadog-plugin-http/src') },
get 'ioredis' () { return require('../../../datadog-plugin-ioredis/src') },
get graphql () { return require('../../../datadog-plugin-graphql/src') },
get grpc () { return require('../../../datadog-plugin-grpc/src') },
get hapi () { return require('../../../datadog-plugin-hapi/src') },
get http () { return require('../../../datadog-plugin-http/src') },
get http2 () { return require('../../../datadog-plugin-http2/src') },
get https () { return require('../../../datadog-plugin-http/src') },
get ioredis () { return require('../../../datadog-plugin-ioredis/src') },
get 'jest-circus' () { return require('../../../datadog-plugin-jest/src') },

@@ -46,19 +49,21 @@ get 'jest-config' () { return require('../../../datadog-plugin-jest/src') },

get 'jest-environment-jsdom' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-jasmine2' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-runtime' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-worker' () { return require('../../../datadog-plugin-jest/src') },
get 'koa' () { return require('../../../datadog-plugin-koa/src') },
get koa () { return require('../../../datadog-plugin-koa/src') },
get 'koa-router' () { return require('../../../datadog-plugin-koa/src') },
get 'kafkajs' () { return require('../../../datadog-plugin-kafkajs/src') },
get 'mariadb' () { return require('../../../datadog-plugin-mariadb/src') },
get 'memcached' () { return require('../../../datadog-plugin-memcached/src') },
get kafkajs () { return require('../../../datadog-plugin-kafkajs/src') },
get mariadb () { return require('../../../datadog-plugin-mariadb/src') },
get memcached () { return require('../../../datadog-plugin-memcached/src') },
get 'microgateway-core' () { return require('../../../datadog-plugin-microgateway-core/src') },
get 'mocha' () { return require('../../../datadog-plugin-mocha/src') },
get mocha () { return require('../../../datadog-plugin-mocha/src') },
get 'mocha-each' () { return require('../../../datadog-plugin-mocha/src') },
get 'moleculer' () { return require('../../../datadog-plugin-moleculer/src') },
get 'mongodb' () { return require('../../../datadog-plugin-mongodb-core/src') },
get vitest () { return require('../../../datadog-plugin-vitest/src') },
get workerpool () { return require('../../../datadog-plugin-mocha/src') },
get moleculer () { return require('../../../datadog-plugin-moleculer/src') },
get mongodb () { return require('../../../datadog-plugin-mongodb-core/src') },
get 'mongodb-core' () { return require('../../../datadog-plugin-mongodb-core/src') },
get 'mysql' () { return require('../../../datadog-plugin-mysql/src') },
get 'mysql2' () { return require('../../../datadog-plugin-mysql2/src') },
get 'net' () { return require('../../../datadog-plugin-net/src') },
get 'next' () { return require('../../../datadog-plugin-next/src') },
get mysql () { return require('../../../datadog-plugin-mysql/src') },
get mysql2 () { return require('../../../datadog-plugin-mysql2/src') },
get net () { return require('../../../datadog-plugin-net/src') },
get next () { return require('../../../datadog-plugin-next/src') },
get 'node:dns' () { return require('../../../datadog-plugin-dns/src') },

@@ -69,16 +74,19 @@ get 'node:http' () { return require('../../../datadog-plugin-http/src') },

get 'node:net' () { return require('../../../datadog-plugin-net/src') },
get 'oracledb' () { return require('../../../datadog-plugin-oracledb/src') },
get 'openai' () { return require('../../../datadog-plugin-openai/src') },
get 'paperplane' () { return require('../../../datadog-plugin-paperplane/src') },
get 'pg' () { return require('../../../datadog-plugin-pg/src') },
get 'pino' () { return require('../../../datadog-plugin-pino/src') },
get nyc () { return require('../../../datadog-plugin-nyc/src') },
get oracledb () { return require('../../../datadog-plugin-oracledb/src') },
get openai () { return require('../../../datadog-plugin-openai/src') },
get paperplane () { return require('../../../datadog-plugin-paperplane/src') },
get pg () { return require('../../../datadog-plugin-pg/src') },
get pino () { return require('../../../datadog-plugin-pino/src') },
get 'pino-pretty' () { return require('../../../datadog-plugin-pino/src') },
get 'playwright' () { return require('../../../datadog-plugin-playwright/src') },
get 'redis' () { return require('../../../datadog-plugin-redis/src') },
get 'restify' () { return require('../../../datadog-plugin-restify/src') },
get 'rhea' () { return require('../../../datadog-plugin-rhea/src') },
get 'router' () { return require('../../../datadog-plugin-router/src') },
get 'sharedb' () { return require('../../../datadog-plugin-sharedb/src') },
get 'tedious' () { return require('../../../datadog-plugin-tedious/src') },
get 'winston' () { return require('../../../datadog-plugin-winston/src') }
get playwright () { return require('../../../datadog-plugin-playwright/src') },
get redis () { return require('../../../datadog-plugin-redis/src') },
get restify () { return require('../../../datadog-plugin-restify/src') },
get rhea () { return require('../../../datadog-plugin-rhea/src') },
get router () { return require('../../../datadog-plugin-router/src') },
get 'selenium-webdriver' () { return require('../../../datadog-plugin-selenium/src') },
get sharedb () { return require('../../../datadog-plugin-sharedb/src') },
get tedious () { return require('../../../datadog-plugin-tedious/src') },
get undici () { return require('../../../datadog-plugin-undici/src') },
get winston () { return require('../../../datadog-plugin-winston/src') }
}

@@ -6,2 +6,3 @@ 'use strict'

const dc = require('dc-polyfill')
const logger = require('../log')
const { storage } = require('../../../datadog-core')

@@ -76,3 +77,13 @@

addSub (channelName, handler) {
this._subscriptions.push(new Subscription(channelName, handler))
const plugin = this
const wrappedHandler = function () {
try {
return handler.apply(this, arguments)
} catch (e) {
logger.error('Error in plugin handler:', e)
logger.info('Disabling plugin:', plugin.id)
plugin.configure(false)
}
}
this._subscriptions.push(new Subscription(channelName, wrappedHandler))
}
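// Behavior sketch (channel name illustrative): a throwing handler is caught
// and logged, and the plugin disables itself instead of crashing the publisher.
//   plugin.addSub('apm:example:start', () => { throw new Error('boom') })
//   // publish -> logs "Error in plugin handler:" then "Disabling plugin:" <id>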

@@ -89,3 +100,3 @@

if (!store.span._spanContext._tags['error']) {
if (!store.span._spanContext._tags.error) {
store.span.setTag('error', error || 1)

@@ -92,0 +103,0 @@ }

@@ -7,3 +7,2 @@ 'use strict'

const { COMPONENT } = require('../constants')
const Nomenclature = require('../service-naming')

@@ -33,3 +32,3 @@ class TracingPlugin extends Plugin {

return Nomenclature.serviceName(type, kind, id, opts)
return this._tracer._nomenclature.serviceName(type, kind, id, opts)
}

@@ -44,3 +43,3 @@

return Nomenclature.opName(type, kind, id, opts)
return this._tracer._nomenclature.opName(type, kind, id, opts)
}

@@ -64,4 +63,8 @@

error (error) {
this.addError(error)
error (ctxOrError) {
if (ctxOrError?.currentStore) {
ctxOrError.currentStore?.span.setTag('error', ctxOrError?.error)
return
}
this.addError(ctxOrError)
}

@@ -98,3 +101,3 @@

addError (error, span = this.activeSpan) {
if (!span._spanContext._tags['error']) {
if (!span._spanContext._tags.error) {
// Errors may be wrapped in a context.

@@ -101,0 +104,0 @@ error = (error && error.error) || error

@@ -29,4 +29,5 @@ const cp = require('child_process')

const { filterSensitiveInfoFromRepository } = require('./url')
const { storage } = require('../../../../datadog-core')
const GIT_REV_LIST_MAX_BUFFER = 8 * 1024 * 1024 // 8MB
const GIT_REV_LIST_MAX_BUFFER = 12 * 1024 * 1024 // 12MB

@@ -40,2 +41,5 @@ function sanitizedExec (

) {
const store = storage.getStore()
storage.enterWith({ noop: true })
let startTime

@@ -54,8 +58,14 @@ if (operationMetric) {

return result
} catch (e) {
} catch (err) {
if (errorMetric) {
incrementCountMetric(errorMetric.name, { ...errorMetric.tags, exitCode: e.status })
incrementCountMetric(errorMetric.name, {
...errorMetric.tags,
errorType: err.code,
exitCode: err.status || err.errno
})
}
log.error(e)
log.error(err)
return ''
} finally {
storage.enterWith(store)
}

@@ -73,2 +83,14 @@ }

function isGitAvailable () {
const isWindows = os.platform() === 'win32'
const command = isWindows ? 'where' : 'which'
try {
cp.execFileSync(command, ['git'], { stdio: 'pipe' })
return true
} catch (e) {
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'check_git', exitCode: 'missing' })
return false
}
}
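// A generalized, self-contained sketch of the same PATH lookup (helper name
// illustrative, not part of dd-trace); `os` and `cp` are already required in
// this file, which is why the sketch stays commented:
//   function isBinaryAvailable (bin) {
//     const lookup = os.platform() === 'win32' ? 'where' : 'which'
//     try {
//       cp.execFileSync(lookup, [bin], { stdio: 'pipe' }) // quiet: no inherited stdio
//       return true
//     } catch (e) {
//       return false
//     }
//   }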
function isShallowRepository () {

@@ -130,3 +152,6 @@ return sanitizedExec(

log.error(err)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'unshallow', exitCode: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'unshallow', errorType: err.code, exitCode: err.status || err.errno }
)
const upstreamRemote = sanitizedExec('git', ['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{upstream}'])

@@ -141,3 +166,6 @@ try {

log.error(err)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'unshallow', exitCode: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'unshallow', errorType: err.code, exitCode: err.status || err.errno }
)
// We use sanitizedExec here because if this last option fails, we'll give up.

@@ -178,3 +206,6 @@ sanitizedExec(

log.error(`Get latest commits failed: ${err.message}`)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'get_local_commits', errorType: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'get_local_commits', errorType: err.status }
)
return []

@@ -185,3 +216,3 @@ }

function getCommitsRevList (commitsToExclude, commitsToInclude) {
let result = []
let result = null

@@ -210,3 +241,6 @@ const commitsToExcludeString = commitsToExclude.map(commit => `^${commit}`)

log.error(`Get commits to upload failed: ${err.message}`)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'get_objects', errorType: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'get_objects', errorType: err.code, exitCode: err.status || err.errno } // err.status might be null
)
}

@@ -251,3 +285,6 @@ distributionMetric(TELEMETRY_GIT_COMMAND_MS, { command: 'get_objects' }, Date.now() - startTime)

log.error(err)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'pack_objects', errorType: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'pack_objects', exitCode: err.status || err.errno, errorType: err.code }
)
/**

@@ -269,3 +306,6 @@ * The generation of pack files in the temporary folder (from `os.tmpdir()`)

log.error(err)
incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'pack_objects', errorType: err.status })
incrementCountMetric(
TELEMETRY_GIT_COMMAND_ERRORS,
{ command: 'pack_objects', exitCode: err.status || err.errno, errorType: err.code }
)
}

@@ -328,3 +368,4 @@ }

isShallowRepository,
unshallowRepository
unshallowRepository,
isGitAvailable
}
'use strict'
const BlockList = require('./ip_blocklist')
const { BlockList } = require('net')
const net = require('net')

@@ -5,0 +5,0 @@

@@ -51,2 +51,9 @@ const path = require('path')

const TEST_SKIPPED_BY_ITR = 'test.skipped_by_itr'
// Browser used in browser test. Namespaced by test.configuration because it affects the fingerprint
const TEST_CONFIGURATION_BROWSER_NAME = 'test.configuration.browser_name'
// Early flake detection
const TEST_IS_NEW = 'test.is_new'
const TEST_IS_RETRY = 'test.is_retry'
const TEST_EARLY_FLAKE_ENABLED = 'test.early_flake.enabled'
const TEST_EARLY_FLAKE_ABORT_REASON = 'test.early_flake.abort_reason'

@@ -56,3 +63,7 @@ const CI_APP_ORIGIN = 'ciapp-test'

const JEST_TEST_RUNNER = 'test.jest.test_runner'
const JEST_DISPLAY_NAME = 'test.jest.display_name'
const CUCUMBER_IS_PARALLEL = 'test.cucumber.is_parallel'
const MOCHA_IS_PARALLEL = 'test.mocha.is_parallel'
const TEST_ITR_TESTS_SKIPPED = '_dd.ci.itr.tests_skipped'

@@ -69,2 +80,8 @@ const TEST_ITR_SKIPPING_ENABLED = 'test.itr.tests_skipping.enabled'

// selenium tags
const TEST_BROWSER_DRIVER = 'test.browser.driver'
const TEST_BROWSER_DRIVER_VERSION = 'test.browser.driver_version'
const TEST_BROWSER_NAME = 'test.browser.name'
const TEST_BROWSER_VERSION = 'test.browser.version'
// jest worker variables

@@ -74,2 +91,15 @@ const JEST_WORKER_TRACE_PAYLOAD_CODE = 60

// cucumber worker variables
const CUCUMBER_WORKER_TRACE_PAYLOAD_CODE = 70
// mocha worker variables
const MOCHA_WORKER_TRACE_PAYLOAD_CODE = 80
// Early flake detection util strings
const EFD_STRING = "Retried by Datadog's Early Flake Detection"
const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')
// Flaky test retries
const NUM_FAILED_TEST_RETRIES = 5
module.exports = {

@@ -80,2 +110,5 @@ TEST_CODE_OWNERS,

JEST_TEST_RUNNER,
JEST_DISPLAY_NAME,
CUCUMBER_IS_PARALLEL,
MOCHA_IS_PARALLEL,
TEST_TYPE,

@@ -93,4 +126,11 @@ TEST_NAME,

JEST_WORKER_COVERAGE_PAYLOAD_CODE,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE,
MOCHA_WORKER_TRACE_PAYLOAD_CODE,
TEST_SOURCE_START,
TEST_SKIPPED_BY_ITR,
TEST_CONFIGURATION_BROWSER_NAME,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_ENABLED,
TEST_EARLY_FLAKE_ABORT_REASON,
getTestEnvironmentMetadata,

@@ -130,3 +170,13 @@ getTestParametersString,

removeInvalidMetadata,
parseAnnotations
parseAnnotations,
EFD_STRING,
EFD_TEST_NAME_REGEX,
removeEfdStringFromTestName,
addEfdStringToTestName,
getIsFaultyEarlyFlakeDetection,
TEST_BROWSER_DRIVER,
TEST_BROWSER_DRIVER_VERSION,
TEST_BROWSER_NAME,
TEST_BROWSER_VERSION,
NUM_FAILED_TEST_RETRIES
}

@@ -263,3 +313,2 @@

[TEST_SUITE]: suite,
[TEST_SOURCE_FILE]: suite,
[RESOURCE_NAME]: `${suite}.${name}`,

@@ -280,3 +329,4 @@ [TEST_FRAMEWORK_VERSION]: version,

const testSuitePath = testSuiteAbsolutePath === sourceRoot
? testSuiteAbsolutePath : path.relative(sourceRoot, testSuiteAbsolutePath)
? testSuiteAbsolutePath
: path.relative(sourceRoot, testSuiteAbsolutePath)

@@ -293,12 +343,32 @@ return testSuitePath.replace(path.sep, '/')

function getCodeOwnersFileEntries (rootDir = process.cwd()) {
let codeOwnersContent
POSSIBLE_CODEOWNERS_LOCATIONS.forEach(location => {
function readCodeOwners (rootDir) {
for (const location of POSSIBLE_CODEOWNERS_LOCATIONS) {
try {
codeOwnersContent = fs.readFileSync(`${rootDir}/${location}`).toString()
return fs.readFileSync(path.join(rootDir, location)).toString()
} catch (e) {
// retry with next path
}
})
}
return ''
}
function getCodeOwnersFileEntries (rootDir) {
let codeOwnersContent
let usedRootDir = rootDir
let isTriedCwd = false
const processCwd = process.cwd()
if (!usedRootDir || usedRootDir === processCwd) {
usedRootDir = processCwd
isTriedCwd = true
}
codeOwnersContent = readCodeOwners(usedRootDir)
// If we haven't found CODEOWNERS in the provided root dir, we try with process.cwd()
if (!codeOwnersContent && !isTriedCwd) {
codeOwnersContent = readCodeOwners(processCwd)
}
if (!codeOwnersContent) {

@@ -536,1 +606,27 @@ return null

}
function addEfdStringToTestName (testName, numAttempt) {
return `${EFD_STRING} (#${numAttempt}): ${testName}`
}
function removeEfdStringFromTestName (testName) {
return testName.replace(EFD_TEST_NAME_REGEX, '')
}
function getIsFaultyEarlyFlakeDetection (projectSuites, testsBySuiteName, faultyThresholdPercentage) {
let newSuites = 0
for (const suite of projectSuites) {
if (!testsBySuiteName[suite]) {
newSuites++
}
}
const newSuitesPercentage = (newSuites / projectSuites.length) * 100
// The faulty threshold represents a percentage, but we also want to consider
// smaller projects, where big variations in the % are more likely.
// This is why we also check the absolute number of new suites.
return (
newSuites > faultyThresholdPercentage &&
newSuitesPercentage > faultyThresholdPercentage
)
}
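// Worked example (hypothetical numbers) for getIsFaultyEarlyFlakeDetection:
//   100 suites, 40 unknown, threshold 30 -> 40 > 30 && 40% > 30% -> faulty (EFD disabled).
//   5 suites, 3 unknown, threshold 30 -> 60% > 30% but 3 > 30 is false -> not faulty.
// The absolute-count check keeps small projects from tripping the threshold.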
'use strict'
const uniq = require('lodash.uniq')
const uniq = require('../../../../datadog-core/src/utils/src/uniq')
const analyticsSampler = require('../../analytics_sampler')

@@ -66,3 +66,3 @@ const FORMAT_HTTP_HEADERS = 'http_headers'

span.context()._name = `${name}.request`
span.context()._tags['component'] = name
span.context()._tags.component = name

@@ -267,3 +267,3 @@ web.setConfig(req, config)

const error = context.error
const hasExistingError = span.context()._tags['error'] || span.context()._tags[ERROR_MESSAGE]
const hasExistingError = span.context()._tags.error || span.context()._tags[ERROR_MESSAGE]

@@ -410,3 +410,3 @@ if (!hasExistingError && !context.config.validateStatus(statusCode)) {

function isOriginAllowed (req, headers) {
const origin = req.headers['origin']
const origin = req.headers.origin
const allowOrigin = headers['access-control-allow-origin']

@@ -504,3 +504,3 @@

const protocol = getProtocol(req)
return `${protocol}://${req.headers['host']}${req.originalUrl || req.url}`
return `${protocol}://${req.headers.host}${req.originalUrl || req.url}`
}

@@ -507,0 +507,0 @@ }

'use strict'
const RateLimiter = require('./rate_limiter')
const Sampler = require('./sampler')
const { setSamplingRules } = require('./startup-log')
const SamplingRule = require('./sampling_rule')
const { hasOwn } = require('./util')

@@ -12,2 +14,4 @@ const {

SAMPLING_MECHANISM_MANUAL,
SAMPLING_MECHANISM_REMOTE_USER,
SAMPLING_MECHANISM_REMOTE_DYNAMIC,
SAMPLING_RULE_DECISION,

@@ -44,5 +48,6 @@ SAMPLING_LIMIT_DECISION,

configure (env, { sampleRate, rateLimit = 100, rules = [] } = {}) {
configure (env, { sampleRate, provenance = undefined, rateLimit = 100, rules = [] } = {}) {
this._env = env
this._rules = this._normalizeRules(rules, sampleRate, rateLimit)
this._rules = this._normalizeRules(rules, sampleRate, rateLimit, provenance)
this._limiter = new RateLimiter(rateLimit)

@@ -67,3 +72,3 @@ setSamplingRules(this._rules)

const tag = this._getPriorityFromTags(context._tags)
const tag = this._getPriorityFromTags(context._tags, context)

@@ -141,11 +146,16 @@ if (this.validate(tag)) {

context._sampling.mechanism = SAMPLING_MECHANISM_RULE
if (rule.provenance === 'customer') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_USER
if (rule.provenance === 'dynamic') context._sampling.mechanism = SAMPLING_MECHANISM_REMOTE_DYNAMIC
const sampled = rule.sample()
const priority = sampled ? USER_KEEP : USER_REJECT
return rule.sample() && this._isSampledByRateLimit(context)
? USER_KEEP
: USER_REJECT
}
if (sampled) {
context._trace[SAMPLING_LIMIT_DECISION] = rule.effectiveRate
}
_isSampledByRateLimit (context) {
const allowed = this._limiter.isAllowed()
return priority
context._trace[SAMPLING_LIMIT_DECISION] = this._limiter.effectiveRate()
return allowed
}

@@ -183,7 +193,7 @@

_normalizeRules (rules, sampleRate, rateLimit) {
_normalizeRules (rules, sampleRate, rateLimit, provenance) {
rules = [].concat(rules || [])
return rules
.concat({ sampleRate, maxPerSecond: rateLimit })
.concat({ sampleRate, maxPerSecond: rateLimit, provenance })
.map(rule => ({ ...rule, sampleRate: parseFloat(rule.sampleRate) }))

@@ -201,6 +211,2 @@ .filter(rule => !isNaN(rule.sampleRate))

function hasOwn (object, prop) {
return Object.prototype.hasOwnProperty.call(object, prop)
}
module.exports = PrioritySampler
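// Sketch of the provenance-to-mechanism mapping applied above (string values
// stand in for the numeric SAMPLING_MECHANISM_* constants):
//   function mechanismFor (rule) {
//     if (rule.provenance === 'customer') return 'remote_user'
//     if (rule.provenance === 'dynamic') return 'remote_dynamic'
//     return 'rule'
//   }
// Note the limiter has the final say: a matching rule only yields USER_KEEP
// when rule.sample() succeeds and the rate limiter also allows the trace.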

@@ -12,3 +12,3 @@ 'use strict'

const { service, version, env, url, hostname, port, tags, repositoryUrl, commitSHA } = config
const { enabled, sourceMap, exporters } = config.profiling
const { sourceMap, exporters } = config.profiling
const logger = {

@@ -22,3 +22,2 @@ debug: (message) => log.debug(message),

return profiler.start({
enabled,
service,

@@ -25,0 +24,0 @@ version,

@@ -21,33 +21,32 @@ 'use strict'

const {
DD_PROFILING_ENABLED,
DD_PROFILING_PROFILERS,
DD_AGENT_HOST,
DD_ENV,
DD_TAGS,
DD_SERVICE,
DD_VERSION,
DD_TRACE_AGENT_URL,
DD_AGENT_HOST,
DD_TRACE_AGENT_PORT,
DD_PROFILING_CODEHOTSPOTS_ENABLED,
DD_PROFILING_CPU_ENABLED,
DD_PROFILING_DEBUG_SOURCE_MAPS,
DD_PROFILING_UPLOAD_TIMEOUT,
DD_PROFILING_ENDPOINT_COLLECTION_ENABLED,
DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED,
DD_PROFILING_EXPERIMENTAL_CPU_ENABLED,
DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED,
DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES,
DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE,
DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT,
DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED,
DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED,
DD_PROFILING_HEAP_ENABLED,
DD_PROFILING_PPROF_PREFIX,
DD_PROFILING_PROFILERS,
DD_PROFILING_SOURCE_MAP,
DD_PROFILING_TIMELINE_ENABLED,
DD_PROFILING_UPLOAD_PERIOD,
DD_PROFILING_PPROF_PREFIX,
DD_PROFILING_HEAP_ENABLED,
DD_PROFILING_UPLOAD_TIMEOUT,
DD_PROFILING_V8_PROFILER_BUG_WORKAROUND,
DD_PROFILING_WALLTIME_ENABLED,
DD_PROFILING_EXPERIMENTAL_CPU_ENABLED,
DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED,
DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE,
DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT,
DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES,
DD_PROFILING_TIMELINE_ENABLED,
DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED,
DD_PROFILING_CODEHOTSPOTS_ENABLED,
DD_PROFILING_ENDPOINT_COLLECTION_ENABLED,
DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED,
DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED
DD_SERVICE,
DD_TAGS,
DD_TRACE_AGENT_PORT,
DD_TRACE_AGENT_URL,
DD_VERSION
} = process.env
const enabled = isTrue(coalesce(options.enabled, DD_PROFILING_ENABLED, true))
const env = coalesce(options.env, DD_ENV)

@@ -67,3 +66,2 @@ const service = options.service || DD_SERVICE || 'node'

this.enabled = enabled
this.service = service

@@ -170,3 +168,3 @@ this.env = env

DD_PROFILING_TIMELINE_ENABLED,
DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, false))
DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, samplingContextsAvailable))
logExperimentalVarDeprecation('TIMELINE_ENABLED')

@@ -182,3 +180,5 @@ checkOptionWithSamplingContextAllowed(this.timelineEnabled, 'Timeline view')

this.cpuProfilingEnabled = isTrue(coalesce(options.cpuProfilingEnabled,
DD_PROFILING_EXPERIMENTAL_CPU_ENABLED, false))
DD_PROFILING_CPU_ENABLED,
DD_PROFILING_EXPERIMENTAL_CPU_ENABLED, samplingContextsAvailable))
logExperimentalVarDeprecation('CPU_ENABLED')
checkOptionWithSamplingContextAllowed(this.cpuProfilingEnabled, 'CPU profiling')

@@ -244,3 +244,3 @@

return [ ...new Set(strategies) ]
return [...new Set(strategies)]
}

@@ -296,4 +296,5 @@

// Events profiler is a profiler for timeline events
if (options.timelineEnabled) {
// Events profiler is a profiler that produces timeline events. It is only
// added if timeline is enabled and there's a wall profiler.
if (options.timelineEnabled && profilers.some(p => p instanceof WallProfiler)) {
profilers.push(new EventsProfiler(options))

@@ -300,0 +301,0 @@ }

@@ -12,2 +12,5 @@ 'use strict'

const version = require('../../../../../package.json').version
const os = require('os')
const { urlToHttpOptions } = require('url')
const perf = require('perf_hooks').performance

@@ -54,3 +57,3 @@ const containerId = docker.id()

class AgentExporter {
constructor ({ url, logger, uploadTimeout } = {}) {
constructor ({ url, logger, uploadTimeout, env, host, service, version } = {}) {
this._url = url

@@ -63,34 +66,74 @@ this._logger = logger

this._backoffTries = backoffTries
this._env = env
this._host = host
this._service = service
this._appVersion = version
}
export ({ profiles, start, end, tags }) {
const types = Object.keys(profiles)
const fields = []
const fields = [
['recording-start', start.toISOString()],
['recording-end', end.toISOString()],
['language', 'javascript'],
['runtime', 'nodejs'],
['runtime_version', process.version],
['profiler_version', version],
['format', 'pprof'],
function typeToFile (type) {
return `${type}.pprof`
}
['tags[]', 'language:javascript'],
['tags[]', 'runtime:nodejs'],
['tags[]', `runtime_version:${process.version}`],
['tags[]', `process_id:${process.pid}`],
['tags[]', `profiler_version:${version}`],
['tags[]', 'format:pprof'],
...Object.entries(tags).map(([key, value]) => ['tags[]', `${key}:${value}`])
]
const event = JSON.stringify({
attachments: Object.keys(profiles).map(typeToFile),
start: start.toISOString(),
end: end.toISOString(),
family: 'node',
version: '4',
tags_profiler: [
'language:javascript',
'runtime:nodejs',
`runtime_arch:${process.arch}`,
`runtime_os:${process.platform}`,
`runtime_version:${process.version}`,
`process_id:${process.pid}`,
`profiler_version:${version}`,
'format:pprof',
...Object.entries(tags).map(([key, value]) => `${key}:${value}`)
].join(','),
info: {
application: {
env: this._env,
service: this._service,
start_time: new Date(perf.nodeTiming.nodeStart + perf.timeOrigin).toISOString(),
version: this._appVersion
},
platform: {
hostname: this._host,
kernel_name: os.type(),
kernel_release: os.release(),
kernel_version: os.version()
},
profiler: {
version
},
runtime: {
// Using `nodejs` for consistency with the existing `runtime` tag.
// Note that the event `family` property uses `node`, as that's what's
// prescribed by the Intake API, but that's an internal enum and is
// not customer visible.
engine: 'nodejs',
// strip off leading 'v'. This makes the format consistent with other
// runtimes (e.g. Ruby) but not with the existing `runtime_version` tag.
// We'll keep it like this as we want cross-engine consistency. We
// also aren't changing the format of the existing tag as we don't want
// to break it.
version: process.version.substring(1)
}
}
})
fields.push(['event', event, {
filename: 'event.json',
contentType: 'application/json'
}])
this._logger.debug(() => {
const body = fields.map(([key, value]) => ` ${key}: ${value}`).join('\n')
return `Building agent export report: ${'\n' + body}`
return `Building agent export report:\n${event}`
})
for (let index = 0; index < types.length; index++) {
const type = types[index]
const buffer = profiles[type]
for (const [type, buffer] of Object.entries(profiles)) {
this._logger.debug(() => {

@@ -101,7 +144,6 @@ const bytes = buffer.toString('hex').match(/../g).join(' ')

fields.push([`types[${index}]`, type])
fields.push([`data[${index}]`, buffer, {
filename: `${type}.pb.gz`,
contentType: 'application/octet-stream',
knownLength: buffer.length
const filename = typeToFile(type)
fields.push([filename, buffer, {
filename,
contentType: 'application/octet-stream'
}])

@@ -128,3 +170,7 @@ }

path: '/profiling/v1/input',
headers: form.getHeaders(),
headers: {
'DD-EVP-ORIGIN': 'dd-trace-js',
'DD-EVP-ORIGIN-VERSION': version,
...form.getHeaders()
},
timeout: this._backoffTime * Math.pow(2, attempt)

@@ -140,5 +186,6 @@ }

} else {
options.protocol = this._url.protocol
options.hostname = this._url.hostname
options.port = this._url.port
const httpOptions = urlToHttpOptions(this._url)
options.protocol = httpOptions.protocol
options.hostname = httpOptions.hostname
options.port = httpOptions.port
}

@@ -145,0 +192,0 @@

@@ -15,3 +15,3 @@ 'use strict'

constructor (options = {}) {
this._level = mapping[options.level] || mapping['error']
this._level = mapping[options.level] || mapping.error
}

@@ -18,0 +18,0 @@

@@ -7,3 +7,6 @@ 'use strict'

const { threadNamePrefix } = require('./profilers/shared')
const dc = require('dc-polyfill')
const profileSubmittedChannel = dc.channel('datadog:profiling:profile-submitted')
function maybeSourceMap (sourceMap, SourceMapper, debug) {

@@ -16,2 +19,8 @@ if (!sourceMap) return

function logError (logger, err) {
if (logger) {
logger.error(err)
}
}
class Profiler extends EventEmitter {

@@ -30,5 +39,3 @@ constructor () {

return this._start(options).catch((err) => {
if (options.logger) {
options.logger.error(err)
}
logError(options.logger, err)
return false

@@ -38,2 +45,6 @@ })

_logError (err) {
logError(this._logger, err)
}
async _start (options) {

@@ -43,3 +54,2 @@ if (this._enabled) return true

const config = this._config = new Config(options)
if (!config.enabled) return false

@@ -58,3 +68,3 @@ this._logger = config.logger

mapper = await maybeSourceMap(config.sourceMap, SourceMapper, config.debugSourceMaps)
if (config.SourceMap && config.debugSourceMaps) {
if (config.sourceMap && config.debugSourceMaps) {
this._logger.debug(() => {

@@ -67,3 +77,3 @@ return mapper.infoMap.size === 0

} catch (err) {
this._logger.error(err)
this._logError(err)
}

@@ -85,3 +95,3 @@

} catch (e) {
this._logger.error(e)
this._logError(e)
this._stop()

@@ -172,5 +182,6 @@ return false

await this._submit(encodedProfiles, startDate, endDate, snapshotKind)
profileSubmittedChannel.publish()
this._logger.debug('Submitted profiles')
} catch (err) {
this._logger.error(err)
this._logError(err)
this._stop()

@@ -190,3 +201,3 @@ }

const task = exporter.export({ profiles, start, end, tags })
.catch(err => this._logger.error(err))
.catch(err => this._logError(err))

@@ -193,0 +204,0 @@ tasks.push(task)

@@ -18,2 +18,4 @@ const { performance, constants, PerformanceObserver } = require('perf_hooks')

const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS))
function labelFromStr (stringTable, key, valStr) {

@@ -150,2 +152,72 @@ return new Label({ key, str: stringTable.dedup(valStr) })

// Translates performance entries into pprof samples.
class EventSerializer {
constructor () {
this.stringTable = new StringTable()
this.samples = []
this.locations = []
this.functions = []
this.decorators = {}
// A synthetic single-frame location to serve as the location for timeline
// samples. We need these because the profiling backend (mimicking the
// official pprof tool's behavior) ignores locationless samples.
const fn = new Function({ id: this.functions.length + 1, name: this.stringTable.dedup('') })
this.functions.push(fn)
const line = new Line({ functionId: fn.id })
const location = new Location({ id: this.locations.length + 1, line: [line] })
this.locations.push(location)
this.locationId = [location.id]
this.timestampLabelKey = this.stringTable.dedup(END_TIMESTAMP_LABEL)
}
addEvent (item) {
const { entryType, startTime, duration } = item
let decorator = this.decorators[entryType]
if (!decorator) {
const DecoratorCtor = decoratorTypes[entryType]
if (DecoratorCtor) {
decorator = new DecoratorCtor(this.stringTable)
decorator.eventTypeLabel = labelFromStrStr(this.stringTable, 'event', entryType)
this.decorators[entryType] = decorator
} else {
// Shouldn't happen, but it's safer not to rely on the observer only
// delivering the requested event types.
return
}
}
const endTime = startTime + duration
const sampleInput = {
value: [Math.round(duration * MS_TO_NS)],
locationId: this.locationId,
label: [
decorator.eventTypeLabel,
new Label({ key: this.timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) })
]
}
decorator.decorateSample(sampleInput, item)
this.samples.push(new Sample(sampleInput))
}
createProfile (startDate, endDate) {
const timeValueType = new ValueType({
type: this.stringTable.dedup(pprofValueType),
unit: this.stringTable.dedup(pprofValueUnit)
})
return new Profile({
sampleType: [timeValueType],
timeNanos: endDate.getTime() * MS_TO_NS,
periodType: timeValueType,
period: 1,
durationNanos: (endDate.getTime() - startDate.getTime()) * MS_TO_NS,
sample: this.samples,
location: this.locations,
function: this.functions,
stringTable: this.stringTable
})
}
}
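// Usage sketch (entry values hypothetical): entries are serialized as they
// arrive, and each collection window drains into one pprof profile:
//   const serializer = new EventSerializer()
//   serializer.addEvent({ entryType: 'gc', startTime: 12.5, duration: 3.1 })
//   const profile = serializer.createProfile(startDate, endDate)
// A fresh EventSerializer then replaces it, so late entries roll into the
// next window instead of being rescanned as in the old profile() path below.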
/**

@@ -160,3 +232,3 @@ * This class generates pprof files with timeline events sourced from Node.js

this._observer = undefined
this.entries = []
this.eventSerializer = new EventSerializer()
}

@@ -169,3 +241,5 @@

function add (items) {
this.entries.push(...items.getEntries())
for (const item of items.getEntries()) {
this.eventSerializer.addEvent(item)
}
}

@@ -184,85 +258,8 @@ this._observer = new PerformanceObserver(add.bind(this))

profile (restart, startDate, endDate) {
if (this.entries.length === 0) {
// No events in the period; don't produce a profile
return null
}
const stringTable = new StringTable()
const locations = []
const functions = []
// A synthetic single-frame location to serve as the location for timeline
// samples. We need these because the profiling backend (mimicking the
// official pprof tool's behavior) ignores locationless samples.
const locationId = (() => {
const fn = new Function({ id: functions.length + 1, name: stringTable.dedup('') })
functions.push(fn)
const line = new Line({ functionId: fn.id })
const location = new Location({ id: locations.length + 1, line: [line] })
locations.push(location)
return [location.id]
})()
const decorators = {}
for (const [eventType, DecoratorCtor] of Object.entries(decoratorTypes)) {
const decorator = new DecoratorCtor(stringTable)
decorator.eventTypeLabel = labelFromStrStr(stringTable, 'event', eventType)
decorators[eventType] = decorator
}
const timestampLabelKey = stringTable.dedup(END_TIMESTAMP_LABEL)
const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS))
const lateEntries = []
const perfEndDate = endDate.getTime() - performance.timeOrigin
const samples = this.entries.map((item) => {
const decorator = decorators[item.entryType]
if (!decorator) {
// Shouldn't happen, but it's safer not to rely on the observer only
// delivering the requested event types.
return null
}
const { startTime, duration } = item
if (startTime >= perfEndDate) {
// An event past the current recording end date; save it for the next
// profile. Not supposed to happen as long as there's no async activity
// between capture of the endDate value in profiler.js _collect() and
// here, but better be safe than sorry.
lateEntries.push(item)
return null
}
const endTime = startTime + duration
const sampleInput = {
value: [Math.round(duration * MS_TO_NS)],
locationId,
label: [
decorator.eventTypeLabel,
new Label({ key: timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) })
]
}
decorator.decorateSample(sampleInput, item)
return new Sample(sampleInput)
}).filter(v => v)
this.entries = lateEntries
const timeValueType = new ValueType({
type: stringTable.dedup(pprofValueType),
unit: stringTable.dedup(pprofValueUnit)
})
if (!restart) {
this.stop()
}
return new Profile({
sampleType: [timeValueType],
timeNanos: endDate.getTime() * MS_TO_NS,
periodType: timeValueType,
period: 1,
durationNanos: (endDate.getTime() - startDate.getTime()) * MS_TO_NS,
sample: samples,
location: locations,
function: functions,
stringTable: stringTable
})
const profile = this.eventSerializer.createProfile(startDate, endDate)
this.eventSerializer = new EventSerializer()
return profile
}

@@ -269,0 +266,0 @@

@@ -9,2 +9,3 @@ 'use strict'

const telemetry = require('./telemetry')
const nomenclature = require('./service-naming')
const PluginManager = require('./plugin_manager')

@@ -14,4 +15,22 @@ const remoteConfig = require('./appsec/remote_config')

const dogstatsd = require('./dogstatsd')
const NoopDogStatsDClient = require('./noop/dogstatsd')
const spanleak = require('./spanleak')
const { SSIHeuristics } = require('./profiling/ssi-heuristics')
const appsecStandalone = require('./appsec/standalone')
class LazyModule {
constructor (provider) {
this.provider = provider
}
enable (...args) {
this.module = this.provider()
this.module.enable(...args)
}
disable () {
this.module?.disable()
}
}
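// Minimal usage sketch: the require() is deferred until the feature is
// actually enabled, so none of the module's code loads otherwise.
//   const appsec = new LazyModule(() => require('./appsec')) // nothing loaded yet
//   appsec.enable(config)  // requires and enables on first use
//   appsec.disable()       // safe no-op if enable() was never called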
class Tracer extends NoopProxy {

@@ -22,4 +41,13 @@ constructor () {

this._initialized = false
this._nomenclature = nomenclature
this._pluginManager = new PluginManager(this)
this.dogstatsd = new dogstatsd.NoopDogStatsDClient()
this.dogstatsd = new NoopDogStatsDClient()
this._tracingInitialized = false
this._flare = new LazyModule(() => require('./flare'))
// These requires must work with an ESM bundler.
this._modules = {
appsec: new LazyModule(() => require('./appsec')),
iast: new LazyModule(() => require('./appsec/iast'))
}
}

@@ -34,2 +62,3 @@

const config = new Config(options) // TODO: support dynamic code config
telemetry.start(config, this._pluginManager)

@@ -59,3 +88,3 @@ if (config.dogstatsd) {

if (config.remoteConfig.enabled && !config.isCiVisibility) {
const rc = remoteConfig.enable(config)
const rc = remoteConfig.enable(config, this._modules.appsec)

@@ -68,26 +97,56 @@ rc.on('APM_TRACING', (action, conf) => {

}
this._enableOrDisableTracing(config)
})
if (config.tracing) {
this._tracer.configure(config)
this._pluginManager.configure(config)
rc.on('AGENT_CONFIG', (action, conf) => {
if (!conf?.name?.startsWith('flare-log-level.')) return
if (action === 'unapply') {
this._flare.disable()
} else if (conf.config?.log_level) {
this._flare.enable(config)
this._flare.module.prepare(conf.config.log_level)
}
})
rc.on('AGENT_TASK', (action, conf) => {
if (action === 'unapply' || !conf) return
if (conf.task_type !== 'tracer_flare' || !conf.args) return
this._flare.enable(config)
this._flare.module.send(conf.args)
})
}
if (config.isGCPFunction || config.isAzureFunctionConsumptionPlan) {
if (config.isGCPFunction || config.isAzureFunction) {
require('./serverless').maybeStartServerlessMiniAgent(config)
}
if (config.profiling.enabled) {
// do not stop tracer initialization if the profiler fails to be imported
try {
const profiler = require('./profiler')
this._profilerStarted = profiler.start(config)
} catch (e) {
log.error(e)
if (config.profiling.enabled !== 'false') {
const ssiHeuristics = new SSIHeuristics(config)
ssiHeuristics.start()
let mockProfiler = null
if (config.profiling.enabled === 'true') {
this._profilerStarted = this._startProfiler(config)
} else if (ssiHeuristics.emitsTelemetry) {
// Start a mock profiler that emits mock profile-submitted events for the telemetry.
// It will be stopped if the real profiler is started by the heuristics.
mockProfiler = require('./profiling/ssi-telemetry-mock-profiler')
mockProfiler.start(config)
}
if (ssiHeuristics.heuristicsActive) {
ssiHeuristics.onTriggered(() => {
if (mockProfiler) {
mockProfiler.stop()
}
this._startProfiler(config)
ssiHeuristics.onTriggered() // deregister this callback
})
}
if (!this._profilerStarted) {
this._profilerStarted = Promise.resolve(false)
}
}
if (!this._profilerStarted) {
this._profilerStarted = Promise.resolve(false)
}

@@ -98,21 +157,5 @@ if (config.runtimeMetrics) {

this._enableOrDisableTracing(config)
if (config.tracing) {
// TODO: This should probably not require tracing to be enabled.
telemetry.start(config, this._pluginManager)
// dirty require for now so zero appsec code is executed unless explicitly enabled
if (config.appsec.enabled) {
require('./appsec').enable(config)
}
this._tracer = new DatadogTracer(config)
this.appsec = new AppsecSdk(this._tracer, config)
if (config.iast.enabled) {
require('./appsec/iast').enable(config, this._tracer)
}
this._pluginManager.configure(config)
setStartupLogPluginManager(this._pluginManager)
if (config.isManualApiEnabled) {

@@ -131,2 +174,38 @@ const TestApiManualPlugin = require('./ci-visibility/test-api-manual/test-api-manual-plugin')

_startProfiler (config) {
// do not stop tracer initialization if the profiler fails to be imported
try {
return require('./profiler').start(config)
} catch (e) {
log.error(e)
}
}
_enableOrDisableTracing (config) {
if (config.tracing !== false) {
if (config.appsec.enabled) {
this._modules.appsec.enable(config)
}
if (!this._tracingInitialized) {
const prioritySampler = appsecStandalone.configure(config)
this._tracer = new DatadogTracer(config, prioritySampler)
this.dataStreamsCheckpointer = this._tracer.dataStreamsCheckpointer
this.appsec = new AppsecSdk(this._tracer, config)
this._tracingInitialized = true
}
if (config.iast.enabled) {
this._modules.iast.enable(config, this._tracer)
}
} else if (this._tracingInitialized) {
this._modules.appsec.disable()
this._modules.iast.disable()
}
if (this._tracingInitialized) {
this._tracer.configure(config)
this._pluginManager.configure(config)
setStartupLogPluginManager(this._pluginManager)
}
}
profilerStarted () {

@@ -133,0 +212,0 @@ if (!this._profilerStarted) {

@@ -6,5 +6,5 @@ 'use strict'

class RateLimiter {
constructor (rateLimit) {
constructor (rateLimit, interval = 'second') {
this._rateLimit = parseInt(rateLimit)
this._limiter = new limiter.RateLimiter(this._rateLimit, 'second')
this._limiter = new limiter.RateLimiter(this._rateLimit, interval)
this._tokensRequested = 0

@@ -11,0 +11,0 @@ this._prevIntervalTokens = 0

@@ -53,4 +53,16 @@ 'use strict'

const _origRequire = Module.prototype.require
patchedRequire = Module.prototype.require = function (request) {
const filename = Module._resolveFilename(request, this)
/*
If resolving the filename for a `require(...)` fails, defer to the wrapped
require implementation rather than failing right away. This allows a
possibly monkey patched `require` to work.
*/
let filename
try {
filename = Module._resolveFilename(request, this)
} catch (resolveErr) {
return _origRequire.apply(this, arguments)
}
const core = filename.indexOf(path.sep) === -1

@@ -57,0 +69,0 @@ let name, basedir, hooks

@@ -241,8 +241,11 @@ 'use strict'

*/
const captureELU = ('eventLoopUtilization' in performance) ? () => {
// if elu is undefined (first run) the measurement is from start of process
elu = performance.eventLoopUtilization(elu)
let captureELU = () => {}
if ('eventLoopUtilization' in performance) {
captureELU = () => {
// if elu is undefined (first run) the measurement is from start of process
elu = performance.eventLoopUtilization(elu)
client.gauge('runtime.node.event_loop.utilization', elu.utilization)
} : () => {}
client.gauge('runtime.node.event_loop.utilization', elu.utilization)
}
}
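// Note on the pattern above: passing the previous sample back into
// performance.eventLoopUtilization(elu) yields the utilization since the
// last capture (a delta), not since process start; only the first call,
// with `elu` undefined, measures from process start.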

@@ -249,0 +252,0 @@ function captureCommonMetrics () {

@@ -67,3 +67,3 @@ 'use strict'

class SamplingRule {
constructor ({ name, service, resource, tags, sampleRate = 1.0, maxPerSecond } = {}) {
constructor ({ name, service, resource, tags, sampleRate = 1.0, provenance = undefined, maxPerSecond } = {}) {
this.matchers = []

@@ -86,2 +86,3 @@

this._limiter = undefined
this.provenance = provenance

@@ -88,0 +89,0 @@ if (Number.isFinite(maxPerSecond)) {

@@ -7,3 +7,3 @@ 'use strict'

if (process.platform !== 'win32' && process.platform !== 'linux') {
log.error(`Serverless Mini Agent is only supported on Windows and Linux.`)
log.error('Serverless Mini Agent is only supported on Windows and Linux.')
return

@@ -38,3 +38,4 @@ }

const rustBinaryPathOsFolder = process.platform === 'win32'
? 'datadog-serverless-agent-windows-amd64' : 'datadog-serverless-agent-linux-amd64'
? 'datadog-serverless-agent-windows-amd64'
: 'datadog-serverless-agent-linux-amd64'

@@ -57,9 +58,7 @@ const rustBinaryExtension = process.platform === 'win32' ? '.exe' : ''

function getIsAzureFunctionConsumptionPlan () {
function getIsAzureFunction () {
const isAzureFunction =
process.env.FUNCTIONS_EXTENSION_VERSION !== undefined && process.env.FUNCTIONS_WORKER_RUNTIME !== undefined
const azureWebsiteSKU = process.env.WEBSITE_SKU
const isConsumptionPlan = azureWebsiteSKU === undefined || azureWebsiteSKU === 'Dynamic'
return isAzureFunction && isConsumptionPlan
return isAzureFunction
}

@@ -70,4 +69,4 @@

getIsGCPFunction,
getIsAzureFunctionConsumptionPlan,
getIsAzureFunction,
getRustBinaryPath
}

@@ -33,5 +33,33 @@ const { identityService, httpPluginClientService, awsServiceV0 } = require('../util')

serviceName: awsServiceV0
},
undici: {
opName: () => 'undici.request',
serviceName: httpPluginClientService
}
},
server: {
'apollo.gateway.request': {
opName: () => 'apollo.gateway.request',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.plan': {
opName: () => 'apollo.gateway.plan',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.validate': {
opName: () => 'apollo.gateway.validate',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.execute': {
opName: () => 'apollo.gateway.execute',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.fetch': {
opName: () => 'apollo.gateway.fetch',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.postprocessing': {
opName: () => 'apollo.gateway.postprocessing',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
grpc: {

@@ -38,0 +66,0 @@ opName: () => DD_MAJOR <= 2 ? 'grpc.request' : 'grpc.server',

@@ -1,2 +0,1 @@

function configWithFallback ({ tracerService, pluginConfig }) {

@@ -3,0 +2,0 @@ return pluginConfig.service || tracerService

@@ -32,5 +32,33 @@ const { identityService, httpPluginClientService } = require('../util')

serviceName: identityService
},
undici: {
opName: () => 'undici.request',
serviceName: httpPluginClientService
}
},
server: {
'apollo.gateway.request': {
opName: () => 'apollo.gateway.request',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.plan': {
opName: () => 'apollo.gateway.plan',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.validate': {
opName: () => 'apollo.gateway.validate',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.execute': {
opName: () => 'apollo.gateway.execute',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.fetch': {
opName: () => 'apollo.gateway.fetch',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.postprocessing': {
opName: () => 'apollo.gateway.postprocessing',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
grpc: {

@@ -37,0 +65,0 @@ opName: () => 'grpc.server.request',

@@ -30,6 +30,10 @@ 'use strict'

const trace = spanContext._trace
const { flushMinSpans } = this._config
const { flushMinSpans, tracing } = this._config
const { started, finished } = trace
if (trace.record === false) return
if (tracing === false) {
this._erase(trace, active)
return
}
if (started.length === finished.length || finished.length >= flushMinSpans) {

@@ -58,7 +62,7 @@ this._prioritySampler.sample(spanContext)

   if (this._killAll) {
-    started.map(startedSpan => {
+    for (const startedSpan of started) {
       if (!startedSpan._finished) {
         startedSpan.finish()
       }
-    })
+    }
   }
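
Swapping `started.map()` for `for...of` avoids allocating a throwaway array of `undefined` results when the callback is run purely for its side effect. A small illustration:

```javascript
const spans = [{ _finished: false, finish () { this._finished = true } }]

// .map() builds and discards a result array:
spans.map(span => { if (!span._finished) span.finish() }) // => [undefined]

// for...of performs the same side effect with no extra allocation:
for (const span of spans) {
  if (!span._finished) span.finish()
}
```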

@@ -65,0 +69,0 @@ }

@@ -129,3 +129,4 @@ const os = require('os')

     env,
-    tags
+    tags,
+    appsec
   } = {}) {

@@ -142,3 +143,3 @@ this.exporter = new SpanStatsExporter({

     this.hostname = os.hostname()
-    this.enabled = enabled
+    this.enabled = enabled && !appsec?.standalone?.enabled
     this.env = env

@@ -148,3 +149,3 @@ this.tags = tags || {}

-    if (enabled) {
+    if (this.enabled) {
       this.timer = setInterval(this.onInterval.bind(this), interval * 1e3)

@@ -187,3 +188,3 @@ this.timer.unref()

-    for (const [ timeNs, bucket ] of this.buckets.entries()) {
+    for (const [timeNs, bucket] of this.buckets.entries()) {
       const bucketAggStats = []

@@ -190,0 +191,0 @@

@@ -40,2 +40,19 @@ 'use strict'

+  const out = tracerInfo({ agentError })
+  if (agentError) {
+    out.agent_error = agentError.message
+  }
+  info('DATADOG TRACER CONFIGURATION - ' + out)
+  if (agentError) {
+    warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message)
+    errors.agentError = {
+      code: agentError.code ? agentError.code : '',
+      message: `Agent Error:${agentError.message}`
+    }
+  }
+}
+function tracerInfo () {
   const url = config.url || `http://${config.hostname || 'localhost'}:${config.port}`

@@ -63,5 +80,2 @@

   out.agent_url = url
-  if (agentError) {
-    out.agent_error = agentError.message
-  }
   out.debug = !!config.debug

@@ -92,14 +106,3 @@ out.sample_rate = config.sampler.sampleRate

-  info('DATADOG TRACER CONFIGURATION - ' + out)
-  if (agentError) {
-    warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message)
-    errors.agentError = {
-      code: agentError.code ? agentError.code : '',
-      message: `Agent Error:${agentError.message}`
-    }
-  }
-  config = undefined
-  pluginManager = undefined
-  samplingRules = undefined
+  return out
 }

@@ -124,3 +127,4 @@

   setSamplingRules,
+  tracerInfo,
   errors
 }
 'use strict'

+const constants = require('./constants')
 const log = require('./log')
+const ERROR_MESSAGE = constants.ERROR_MESSAGE
+const ERROR_STACK = constants.ERROR_STACK
+const ERROR_TYPE = constants.ERROR_TYPE
-function add (carrier, keyValuePairs) {
+const otelTagMap = {
+  'deployment.environment': 'env',
+  'service.name': 'service',
+  'service.version': 'version'
+}
+function add (carrier, keyValuePairs, parseOtelTags = false) {
   if (!carrier || !keyValuePairs) return

@@ -11,3 +21,2 @@

}
try {

@@ -17,11 +26,21 @@ if (typeof keyValuePairs === 'string') {

   for (const segment of segments) {
-    const separatorIndex = segment.indexOf(':')
+    const separatorIndex = parseOtelTags ? segment.indexOf('=') : segment.indexOf(':')
     if (separatorIndex === -1) continue
-    const key = segment.slice(0, separatorIndex)
+    let key = segment.slice(0, separatorIndex)
     const value = segment.slice(separatorIndex + 1)
+    if (parseOtelTags && key in otelTagMap) {
+      key = otelTagMap[key]
+    }
     carrier[key.trim()] = value.trim()
   }
 } else {
+  // HACK: to ensure otel.recordException does not influence trace.error
+  if (ERROR_MESSAGE in keyValuePairs || ERROR_STACK in keyValuePairs || ERROR_TYPE in keyValuePairs) {
+    if (!('doNotSetTraceError' in keyValuePairs)) {
+      carrier.setTraceError = true
+    }
+  }
   Object.assign(carrier, keyValuePairs)

@@ -28,0 +47,0 @@ }
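
Assuming `segments` comes from splitting the input string on commas (that split happens above this hunk), the new `parseOtelTags` path accepts OpenTelemetry-style `key=value` pairs and remaps the well-known resource-attribute keys. A sketch of the resulting behavior:

```javascript
const carrier = {}
// OTel-style input: '=' separators, resource-attribute keys get remapped
add(carrier, 'deployment.environment=prod,service.name=web', true)
// carrier is now { env: 'prod', service: 'web' }

const ddCarrier = {}
// the default Datadog-style input still uses ':' separators
add(ddCarrier, 'team:apm,region:us1')
// ddCarrier is now { team: 'apm', region: 'us1' }
```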

@@ -15,3 +15,3 @@ 'use strict'

-const FILE_URI_START = `file://`
+const FILE_URI_START = 'file://'
 const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart')
const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart')

@@ -25,8 +25,7 @@

 function createBatchPayload (payload) {
-  const batchPayload = []
-  payload.map(item => {
-    batchPayload.push({
+  const batchPayload = payload.map(item => {
+    return {
       request_type: item.reqType,
       payload: item.payload
-    })
+    }
   })

@@ -33,0 +32,0 @@
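
The rewrite does not change the output shape; it just builds the array from `payload.map()`'s return value instead of pushing into a side array. For a two-item batch:

```javascript
createBatchPayload([
  { reqType: 'app-heartbeat', payload: {} },
  { reqType: 'app-integrations-change', payload: { integrations: [] } }
])
// => [
//   { request_type: 'app-heartbeat', payload: {} },
//   { request_type: 'app-integrations-change', payload: { integrations: [] } }
// ]
```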

@@ -9,5 +9,8 @@ 'use strict'

 const { manager: metricsManager } = require('./metrics')
+const telemetryLogger = require('./logs')
+const logger = require('../log')
 const telemetryStartChannel = dc.channel('datadog:telemetry:start')
 const telemetryStopChannel = dc.channel('datadog:telemetry:stop')
+const telemetryAppClosingChannel = dc.channel('datadog:telemetry:app-closing')

@@ -23,2 +26,3 @@ let config

 let integrations
+let configWithOrigin = []
 let retryData = null

@@ -38,3 +42,3 @@ const extendedHeartbeatPayload = {}

   const reqType = retryObj.payload[0].request_type
-  retryData = { payload: payload, reqType: reqType }
+  retryData = { payload, reqType }

@@ -47,13 +51,13 @@ // Since this payload failed twice it now gets save in to the extended heartbeat

   if (failedReqType === 'app-integrations-change') {
-    if (extendedHeartbeatPayload['integrations']) {
-      extendedHeartbeatPayload['integrations'].push(failedPayload)
+    if (extendedHeartbeatPayload.integrations) {
+      extendedHeartbeatPayload.integrations.push(failedPayload)
     } else {
-      extendedHeartbeatPayload['integrations'] = [failedPayload]
+      extendedHeartbeatPayload.integrations = [failedPayload]
     }
   }
   if (failedReqType === 'app-dependencies-loaded') {
-    if (extendedHeartbeatPayload['dependencies']) {
-      extendedHeartbeatPayload['dependencies'].push(failedPayload)
+    if (extendedHeartbeatPayload.dependencies) {
+      extendedHeartbeatPayload.dependencies.push(failedPayload)
     } else {
-      extendedHeartbeatPayload['dependencies'] = [failedPayload]
+      extendedHeartbeatPayload.dependencies = [failedPayload]
     }

@@ -102,19 +106,2 @@ }

-function flatten (input, result = [], prefix = [], traversedObjects = null) {
-  traversedObjects = traversedObjects || new WeakSet()
-  if (traversedObjects.has(input)) {
-    return
-  }
-  traversedObjects.add(input)
-  for (const [key, value] of Object.entries(input)) {
-    if (typeof value === 'object' && value !== null) {
-      flatten(value, result, [...prefix, key], traversedObjects)
-    } else {
-      // TODO: add correct origin value
-      result.push({ name: [...prefix, key].join('.'), value, origin: 'unknown' })
-    }
-  }
-  return result
-}
 function getInstallSignature (config) {

@@ -134,3 +121,3 @@ const { installSignature: sig } = config

     products: getProducts(config),
-    configuration: flatten(config)
+    configuration: configWithOrigin
   }

@@ -150,8 +137,11 @@ const installSignature = getInstallSignature(config)

 function appClosing () {
+  if (!config?.telemetry?.enabled) {
+    return
+  }
+  // Give chance to listeners to update metrics before shutting down.
+  telemetryAppClosingChannel.publish()
   const { reqType, payload } = createPayload('app-closing')
   sendData(config, application, host, reqType, payload)
-  // we flush before shutting down. Only in CI Visibility
-  if (config.isCiVisibility) {
-    metricsManager.send(config, application, host)
-  }
+  // We flush before shutting down.
+  metricsManager.send(config, application, host)
 }

@@ -209,8 +199,7 @@

 function createBatchPayload (payload) {
-  const batchPayload = []
-  payload.map(item => {
-    batchPayload.push({
+  const batchPayload = payload.map(item => {
+    return {
       request_type: item.reqType,
       payload: item.payload
-    })
+    }
   })

@@ -225,6 +214,6 @@

   const batchPayload = createBatchPayload([payload, retryData])
-  return { 'reqType': 'message-batch', 'payload': batchPayload }
+  return { reqType: 'message-batch', payload: batchPayload }
 }
-  return { 'reqType': currReqType, 'payload': currPayload }
+  return { reqType: currReqType, payload: currPayload }
 }

@@ -235,2 +224,3 @@

   metricsManager.send(config, application, host)
+  telemetryLogger.send(config, application, host)

@@ -259,2 +249,6 @@ const { reqType, payload } = createPayload('app-heartbeat')

   if (!aConfig.telemetry.enabled) {
+    if (aConfig.sca?.enabled) {
+      logger.warn('DD_APPSEC_SCA_ENABLED requires enabling telemetry to work.')
+    }
     return

@@ -270,2 +264,3 @@ }

   dependencies.start(config, application, host, getRetryData, updateRetryData)
+  telemetryLogger.start(config)

@@ -314,2 +309,10 @@ sendData(config, application, host, 'app-started', appStarted(config))

+function formatMapForTelemetry (map) {
+  // format from an object to a string map in order for
+  // telemetry intake to accept the configuration
+  return map
+    ? Object.entries(map).map(([key, value]) => `${key}:${value}`).join(',')
+    : ''
+}
 function updateConfig (changes, config) {
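
`formatMapForTelemetry` flattens an object into the comma-separated `key:value` string the telemetry intake expects, and maps a missing value to an empty string:

```javascript
formatMapForTelemetry({ env: 'prod', team: 'apm' }) // => 'env:prod,team:apm'
formatMapForTelemetry(undefined)                    // => ''
```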

@@ -319,29 +322,31 @@ if (!config.telemetry.enabled) return

-  // Hack to make system tests happy until we ship telemetry v2
-  if (process.env.DD_INTERNAL_TELEMETRY_V2_ENABLED !== '1') return
   const application = createAppObject(config)
   const host = createHostObject()
-  const names = {
+  const nameMapping = {
     sampleRate: 'DD_TRACE_SAMPLE_RATE',
     logInjection: 'DD_LOG_INJECTION',
     headerTags: 'DD_TRACE_HEADER_TAGS',
-    tags: 'DD_TAGS'
+    tags: 'DD_TAGS',
+    'sampler.rules': 'DD_TRACE_SAMPLING_RULES'
   }
+  const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping', 'serviceMapping'])
   const configuration = []
+  const names = [] // list of config names whose values have been changed
   for (const change of changes) {
-    if (!names.hasOwnProperty(change.name)) continue
+    const name = nameMapping[change.name] || change.name
-    const name = names[change.name]
+    names.push(name)
     const { origin, value } = change
-    const entry = { name, origin, value }
+    const entry = { name, value, origin }
-    if (Array.isArray(value)) {
-      entry.value = value.join(',')
-    } else if (name === 'DD_TAGS') {
-      entry.value = Object.entries(value).map(([key, value]) => `${key}:${value}`)
+    if (namesNeedFormatting.has(entry.name)) entry.value = formatMapForTelemetry(entry.value)
+    if (entry.name === 'url' && entry.value) entry.value = entry.value.toString()
+    if (entry.name === 'DD_TRACE_SAMPLING_RULES') {
+      entry.value = JSON.stringify(entry.value)
     }
+    if (Array.isArray(entry.value)) entry.value = value.join(',')

@@ -351,5 +356,15 @@ configuration.push(entry)

-  const { reqType, payload } = createPayload('app-client-configuration-change', { configuration })
+  function isNotModified (entry) {
+    return !names.includes(entry.name)
+  }
-  sendData(config, application, host, reqType, payload, updateRetryData)
+  if (!configWithOrigin.length) {
+    configWithOrigin = configuration
+  } else {
+    // update configWithOrigin to contain up-to-date full list of config values for app-extended-heartbeat
+    configWithOrigin = configWithOrigin.filter(isNotModified)
+    configWithOrigin = configWithOrigin.concat(configuration)
+    const { reqType, payload } = createPayload('app-client-configuration-change', { configuration })
+    sendData(config, application, host, reqType, payload, updateRetryData)
+  }
 }

@@ -356,0 +371,0 @@
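
The filter-then-concat step keeps `configWithOrigin` as a deduplicated, current snapshot for `app-extended-heartbeat`: entries whose names appear in the latest change set are dropped before the fresh entries are appended. Sketched with hypothetical values:

```javascript
let configWithOrigin = [
  { name: 'DD_TRACE_SAMPLE_RATE', value: '0.5', origin: 'env_var' },
  { name: 'DD_LOG_INJECTION', value: 'true', origin: 'env_var' }
]
// a later change to DD_TRACE_SAMPLE_RATE replaces the stale entry:
const configuration = [{ name: 'DD_TRACE_SAMPLE_RATE', value: '1', origin: 'remote_config' }]
const names = configuration.map(entry => entry.name)
configWithOrigin = configWithOrigin
  .filter(entry => !names.includes(entry.name))
  .concat(configuration)
// => [ DD_LOG_INJECTION (env_var), DD_TRACE_SAMPLE_RATE (remote_config) ]
```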

@@ -8,2 +8,3 @@ 'use strict'

 const telemetryLog = dc.channel('datadog:telemetry:log')
+const errorLog = dc.channel('datadog:log:error')

@@ -37,2 +38,12 @@ let enabled = false

+function onErrorLog (msg) {
+  if (msg instanceof Error) {
+    onLog({
+      level: 'ERROR',
+      message: msg.message,
+      stack: msg.stack
+    })
+  }
+}
 function start (config) {

@@ -44,2 +55,4 @@ if (!config.telemetry.logCollection || enabled) return

   telemetryLog.subscribe(onLog)
+  errorLog.subscribe(onErrorLog)
 }
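
With the subscription in place, any `Error` published on the `datadog:log:error` diagnostics channel is forwarded into telemetry log collection. A minimal sketch of the producer side, using Node's built-in `diagnostics_channel` for illustration:

```javascript
const dc = require('node:diagnostics_channel')

dc.channel('datadog:log:error').publish(new Error('request to the agent failed'))
// onErrorLog's instanceof guard ignores anything that is not an Error
```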

@@ -53,2 +66,4 @@

   }
+  errorLog.unsubscribe(onErrorLog)
 }

@@ -59,5 +74,5 @@

-  const logs = { 'logs': logCollector.drain() }
+  const logs = logCollector.drain()
   if (logs) {
-    sendData(config, application, host, 'logs', logs)
+    sendData(config, application, host, 'logs', { logs })
   }

@@ -64,0 +79,0 @@ }
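
This is a bug fix as much as a cleanup: the old code wrapped `drain()` in an object before the truthiness check, so `if (logs)` always passed and an empty payload could be sent. The new code tests the drained value itself, and `drain()` returns early when the collector is empty (see the `if (logs.size === 0) return` guard below), so `sendData` only fires when there is something to flush:

```javascript
// old: const logs = { logs: undefined } -> truthy object, empty payload sent
// new: const logs = undefined           -> falsy, send skipped entirely
```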

'use strict'
const log = require('../../log')
const { calculateDDBasePath } = require('../../util')

@@ -32,2 +33,33 @@ const logs = new Map()

+const ddBasePath = calculateDDBasePath(__dirname)
+const EOL = '\n'
+const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm
+function sanitize (logEntry) {
+  const stack = logEntry.stack_trace
+  if (!stack) return logEntry
+  let stackLines = stack.split(EOL)
+  const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX))
+  const isDDCode = firstIndex > -1 && stackLines[firstIndex].includes(ddBasePath)
+  stackLines = stackLines
+    .filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath))
+    .map(line => line.replace(ddBasePath, ''))
+  logEntry.stack_trace = stackLines.join(EOL)
+  if (logEntry.stack_trace === '') {
+    // If entire stack was removed, we'd just have a message saying "omitted"
+    // in which case we'd rather not log it at all.
+    return null
+  }
+  if (!isDDCode) {
+    logEntry.message = 'omitted'
+  }
+  return logEntry
+}
 const logCollector = {
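
The net effect of `sanitize`: stack frames outside the tracer's own tree are stripped, and if the error did not originate in dd-trace code the message is redacted to `'omitted'` so user data never leaves the process. A rough illustration, assuming `ddBasePath` resolves to `/app/node_modules/dd-trace/`:

```javascript
sanitize({
  message: 'boom',
  stack_trace: [
    'Error: boom',
    '    at send (/app/node_modules/dd-trace/packages/dd-trace/src/telemetry/send-data.js:1:1)',
    '    at userHandler (/app/server.js:10:3)'
  ].join('\n')
})
// keeps the header line and the dd-trace frame (with the base path stripped)
// and drops the app frame; had no dd-trace frame come first, the message
// would become 'omitted', and a fully emptied stack makes sanitize return null
```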

@@ -41,5 +73,9 @@ add (logEntry) {

       overflowedCount++
-      return
+      return false
     }
+    logEntry = sanitize(logEntry)
+    if (!logEntry) {
+      return false
+    }
     const hash = createHash(logEntry)
const hash = createHash(logEntry)

@@ -56,2 +92,7 @@ if (!logs.has(hash)) {

+  // Used for testing
+  hasEntry (logEntry) {
+    return logs.has(createHash(logEntry))
+  },
   drain () {

@@ -58,0 +99,0 @@ if (logs.size === 0) return

@@ -78,4 +78,4 @@ 'use strict'

-  dec (value = -1) {
-    return this.track(value)
+  dec (value = 1) {
+    return this.track(-value)
   }
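
This fixes a sign bug: the old signature negated nothing beyond its default, so `dec(5)` passed `+5` to `track()` and incremented the counter. Compare:

```javascript
// old: dec() => track(-1) ✓    dec(5) => track(5)  ✗ increments by 5
// new: dec() => track(-1) ✓    dec(5) => track(-5) ✓ decrements by 5
```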

@@ -82,0 +82,0 @@

@@ -1,2 +0,1 @@

const request = require('../exporters/common/request')

@@ -30,5 +29,2 @@ const log = require('../log')

   }
-  if (site === 'datadoghq.eu') {
-    return 'https://instrumentation-telemetry-intake.eu1.datadoghq.com'
-  }
   return `https://instrumentation-telemetry-intake.${site}`

@@ -35,0 +31,0 @@ }
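
Dropping the special case means the EU site now follows the generic template like every other site; previously it was pinned to the `eu1.datadoghq.com` intake host. Under the new code:

```javascript
// site = 'datadoghq.eu'      => 'https://instrumentation-telemetry-intake.datadoghq.eu'
// site = 'us3.datadoghq.com' => 'https://instrumentation-telemetry-intake.us3.datadoghq.com'
```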

@@ -11,5 +11,8 @@ 'use strict'

 const { DataStreamsProcessor } = require('./datastreams/processor')
-const { decodePathwayContext } = require('./datastreams/pathway')
+const { DsmPathwayCodec } = require('./datastreams/pathway')
 const { DD_MAJOR } = require('../../../version')
 const DataStreamsContext = require('./data_streams_context')
+const { DataStreamsCheckpointer } = require('./data_streams')
+const { flushStartupLogs } = require('../../datadog-instrumentations/src/check_require_cache')
+const log = require('./log/writer')

@@ -22,7 +25,9 @@ const SPAN_TYPE = tags.SPAN_TYPE

 class DatadogTracer extends Tracer {
-  constructor (config) {
-    super(config)
+  constructor (config, prioritySampler) {
+    super(config, prioritySampler)
     this._dataStreamsProcessor = new DataStreamsProcessor(config)
+    this.dataStreamsCheckpointer = new DataStreamsCheckpointer(this)
     this._scope = new Scope()
     setStartupLogConfig(config)
+    flushStartupLogs(log)
   }

@@ -44,4 +49,4 @@

-  decodeDataStreamsContext (data) {
-    const ctx = decodePathwayContext(data)
+  decodeDataStreamsContext (carrier) {
+    const ctx = DsmPathwayCodec.decode(carrier)
     // we erase the previous context everytime we decode a new one

@@ -141,2 +146,3 @@ DataStreamsContext.setDataStreamsContext(ctx)

   this._exporter.setUrl(url)
+  this._dataStreamsProcessor.setUrl(url)
 }

@@ -143,0 +149,0 @@

@@ -35,9 +35,2 @@ 'use strict'

     switch (c) {
-      default: // ordinary character
-        if (sx < subject.length && subject[sx] === c) {
-          px++
-          sx++
-          continue
-        }
-        break
       case '?':

@@ -55,2 +48,9 @@ if (sx < subject.length) {

         continue
+      default: // ordinary character
+        if (sx < subject.length && subject[sx] === c) {
+          px++
+          sx++
+          continue
+        }
+        break
     }
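
Moving the `default` arm below the named cases is purely cosmetic: in a JavaScript `switch`, `default` runs only when no `case` label matches, wherever it is written. For example:

```javascript
switch ('a') {
  default: console.log('no match'); break
  case 'a': console.log('matched'); break
}
// logs 'matched'; the leading default is skipped
```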

@@ -74,2 +74,6 @@ }

+function hasOwn (object, prop) {
+  return Object.prototype.hasOwnProperty.call(object, prop)
+}
 module.exports = {

@@ -80,3 +84,4 @@ isTrue,

   globMatch,
-  calculateDDBasePath
+  calculateDDBasePath,
+  hasOwn
 }
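
`hasOwn` is the standard safe wrapper around `Object.prototype.hasOwnProperty`, which keeps working for objects that shadow the method or have no prototype at all:

```javascript
hasOwn({ a: 1 }, 'a')                    // true
hasOwn({ hasOwnProperty: null }, 'a')    // false, and does not throw
hasOwn(Object.create(null), 'toString')  // false; no inherited methods
```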

@@ -5,3 +5,2 @@ # `dd-trace`: Node.js APM Tracer Library

 [![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node16)
-[![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=blue&label=dd-trace%40v3&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node14)
 [![codecov](https://codecov.io/gh/DataDog/dd-trace-js/branch/master/graph/badge.svg)](https://codecov.io/gh/DataDog/dd-trace-js)

@@ -32,8 +31,8 @@

 | [`v2`](https://github.com/DataDog/dd-trace-js/tree/v2.x) | ![npm v2](https://img.shields.io/npm/v/dd-trace/latest-node12?color=white&label=%20&style=flat-square) | `>= v12` | **End of Life** | 2022-01-28 | 2023-08-15 |
-| [`v3`](https://github.com/DataDog/dd-trace-js/tree/v3.x) | ![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=white&label=%20&style=flat-square) | `>= v14` | **Maintenance** | 2022-08-15 | 2024-05-15 |
+| [`v3`](https://github.com/DataDog/dd-trace-js/tree/v3.x) | ![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=white&label=%20&style=flat-square) | `>= v14` | **End of Life** | 2022-08-15 | 2024-05-15 |
 | [`v4`](https://github.com/DataDog/dd-trace-js/tree/v4.x) | ![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=white&label=%20&style=flat-square) | `>= v16` | **Maintenance** | 2023-05-12 | 2025-01-11 |
 | [`v5`](https://github.com/DataDog/dd-trace-js/tree/v5.x) | ![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=white&label=%20&style=flat-square) | `>= v18` | **Current** | 2024-01-11 | Unknown |

-We currently maintain three release lines, namely `v5`, `v4` and `v3`.
-Features and bug fixes that are merged are released to the `v5` line and, if appropriate, also the `v4` & `v3` line.
+We currently maintain two release lines, namely `v5` and `v4`.
+Features and bug fixes that are merged are released to the `v5` line and, if appropriate, also `v4`.

@@ -47,3 +46,3 @@ For any new projects it is recommended to use the `v5` release line:

-However, existing projects that already use the `v4` & `v3` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines.
+However, existing projects that already use the `v4` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines.
 This is done by specifying the version when installing the package.

@@ -68,123 +67,23 @@

-## Development
+## Development and Contribution

-Before contributing to this open source project, read our [CONTRIBUTING.md](https://github.com/DataDog/dd-trace-js/blob/master/CONTRIBUTING.md).
+Please read the [CONTRIBUTING.md](https://github.com/DataDog/dd-trace-js/blob/master/CONTRIBUTING.md) document before contributing to this open source project.

-## Requirements
+## EcmaScript Modules (ESM) Support

-Since this project supports multiple Node versions, using a version
-manager such as [nvm](https://github.com/creationix/nvm) is recommended.
+ESM support requires an additional command-line argument. Use the following to enable experimental ESM support with your application:

-We use [yarn](https://yarnpkg.com/) for its workspace functionality, so make sure to install that as well.
+Node.js < v20.6

-To install dependencies once you have Node and yarn installed, run:
-```sh
-$ yarn
-```

-## Testing

-Before running _plugin_ tests, the data stores need to be running.
-The easiest way to start all of them is to use the provided
-docker-compose configuration:

-```sh
-$ docker-compose up -d -V --remove-orphans --force-recreate
-$ yarn services
-```

-> **Note**
-> The `couchbase`, `grpc` and `oracledb` instrumentations rely on native modules
-> that do not compile on ARM64 devices (for example M1/M2 Mac) - their tests
-> cannot be run locally on these devices.

-### Unit Tests

-There are several types of unit tests, for various types of components. The
-following commands may be useful:

-```sh
-# Tracer core tests (i.e. testing `packages/dd-trace`)
-$ yarn test:trace:core
-# "Core" library tests (i.e. testing `packages/datadog-core`)
-$ yarn test:core
-# Instrumentations tests (i.e. testing `packages/datadog-instrumentations`)
-$ yarn test:instrumentations
-```

-Several other components have test commands as well. See `package.json` for
-details.

-To test _plugins_ (i.e. components in `packages/datadog-plugin-XXXX`
-directories), set the `PLUGINS` environment variable to the plugin you're
-interested in, and use `yarn test:plugins`. If you need to test multiple
-plugins you may separate them with a pipe (`|`) delimiter. Here's an
-example testing the `express` and `bluebird` plugins:

-```sh
-PLUGINS="express|bluebird" yarn test:plugins
-```

-### Memory Leaks

-To run the memory leak tests, use:

-```sh
-$ yarn leak:core
-# or
-$ yarn leak:plugins
-```

-### Linting

-We use [ESLint](https://eslint.org) to make sure that new code
-conforms to our coding standards.

-To run the linter, use:

-```sh
-$ yarn lint
-```

-### Experimental ESM Support

-> **Warning**
->
-> ESM support has been temporarily disabled starting from Node 20 as significant
-> changes are in progress.

-ESM support is currently in the experimental stages, while CJS has been supported
-since inception. This means that code loaded using `require()` should work fine
-but code loaded using `import` might not always work.

-Use the following command to enable experimental ESM support with your application:

 ```sh
 node --loader dd-trace/loader-hook.mjs entrypoint.js
 ```

+Node.js >= v20.6

-### Benchmarks

-Our microbenchmarks live in `benchmark/sirun`. Each directory in there
-corresponds to a specific benchmark test and its variants, which are used to
-track regressions and improvements over time.

-In addition to those, when two or more approaches must be compared, please write
-a benchmark in the `benchmark/index.js` module so that we can keep track of the
-most efficient algorithm. To run your benchmark, use:

 ```sh
-$ yarn bench
+node --import dd-trace/register.js entrypoint.js
 ```
## Serverless / Lambda

@@ -201,38 +100,14 @@

-Generally, `dd-trace` works by intercepting `require()` calls that a Node.js application makes when loading modules. This includes modules that are built-in to Node.js, like the `fs` module for accessing the filesystem, as well as modules installed from the npm registry, like the `pg` database module.
+If you would like to trace your bundled application then please read this page on [bundling and dd-trace](https://docs.datadoghq.com/tracing/trace_collection/automatic_instrumentation/dd_libraries/nodejs/#bundling). It includes information on how to use our ESBuild plugin and includes caveats for other bundlers.
-Also generally, bundlers work by crawling all of the `require()` calls that an application makes to files on disk, replacing the `require()` calls with custom code, and then concatenating all of the resulting JavaScript into one "bundled" file. When a built-in module is loaded, like `require('fs')`, that call can then remain the same in the resulting bundle.
-Fundamentally APM tools like `dd-trace` stop working at this point. Perhaps they continue to intercept the calls for built-in modules but don't intercept calls to third party libraries. This means that by default when you bundle a `dd-trace` app with a bundler it is likely to capture information about disk access (via `fs`) and outbound HTTP requests (via `http`), but will otherwise omit calls to third party libraries (like extracting incoming request route information for the `express` framework or showing which query is run for the `mysql` database client).
+## Security Vulnerabilities
-To get around this, one can treat all third party modules, or at least third party modules that the APM needs to instrument, as being "external" to the bundler. With this setting the instrumented modules remain on disk and continue to be loaded via `require()` while the non-instrumented modules are bundled. Sadly this results in a build with many extraneous files and starts to defeat the purpose of bundling.
+Please refer to the [SECURITY.md](https://github.com/DataDog/dd-trace-js/blob/master/SECURITY.md) document if you have found a security issue.
-For these reasons it's necessary to have custom-built bundler plugins. Such plugins are able to instruct the bundler on how to behave, injecting intermediary code and otherwise intercepting the "translated" `require()` calls. The result is that many more packages are then included in the bundled JavaScript file. Some applications can have 100% of modules bundled, however native modules still need to remain external to the bundle.
-### ESBuild Support
+## Datadog With OpenTelemetry
-This library provides experimental ESBuild support in the form of an ESBuild plugin. Require the `dd-trace/esbuild` module when building your bundle to enable the plugin.
+Please refer to the [Node.js Custom Instrumentation using OpenTelemetry API](https://docs.datadoghq.com/tracing/trace_collection/custom_instrumentation/nodejs/otel/) document. It includes information on how to use the OpenTelemetry API with dd-trace-js.
-Here's an example of how one might use `dd-trace` with ESBuild:
-```javascript
-const ddPlugin = require('dd-trace/esbuild')
-const esbuild = require('esbuild')
-esbuild.build({
-  entryPoints: ['app.js'],
-  bundle: true,
-  outfile: 'out.js',
-  plugins: [ddPlugin],
-  platform: 'node', // allows built-in modules to be required
-  target: ['node18']
-}).catch((err) => {
-  console.error(err)
-  process.exit(1)
-})
-```
-## Security Vulnerabilities
-If you have found a security issue, please contact the security team directly at [security@datadoghq.com](mailto:security@datadoghq.com).
+Note that our internal implementation of the OpenTelemetry API is currently set within the version range `>=1.0.0 <1.9.0`. This range will be updated at a regular cadence; therefore, we recommend updating your tracer to the latest release to ensure up-to-date support.
