dd-trace - npm Package Compare versions

Comparing version 6.0.0-pre-ba6ad8b to 6.0.0-pre-c26eac4

ci/cypress/after-run.js


ci/init.js

@@ -23,5 +23,5 @@ /* eslint-disable no-console */

 } else {
-console.error(`DD_CIVISIBILITY_AGENTLESS_ENABLED is set, \
-but neither DD_API_KEY nor DATADOG_API_KEY are set in your environment, \
-so dd-trace will not be initialized.`)
+console.error('DD_CIVISIBILITY_AGENTLESS_ENABLED is set, but neither ' +
+'DD_API_KEY nor DATADOG_API_KEY are set in your environment, so ' +
+'dd-trace will not be initialized.')
 shouldInit = false

@@ -28,0 +28,0 @@ }

 {
 "name": "dd-trace",
-"version": "6.0.0-pre-ba6ad8b",
+"version": "6.0.0-pre-c26eac4",
 "description": "Datadog APM tracing client for JavaScript",

@@ -72,7 +72,7 @@ "main": "index.js",

"dependencies": {
"@datadog/native-appsec": "7.0.0",
"@datadog/native-iast-rewriter": "2.2.3",
"@datadog/native-iast-taint-tracking": "1.6.4",
"@datadog/native-appsec": "7.1.0",
"@datadog/native-iast-rewriter": "2.3.0",
"@datadog/native-iast-taint-tracking": "1.7.0",
"@datadog/native-metrics": "^2.0.0",
"@datadog/pprof": "5.0.0",
"@datadog/pprof": "5.2.0",
"@datadog/sketches-js": "^2.1.0",

@@ -99,3 +99,3 @@ "@opentelemetry/api": "^1.0.0",

"path-to-regexp": "^0.1.2",
"pprof-format": "^2.0.7",
"pprof-format": "^2.1.0",
"protobufjs": "^7.2.5",

@@ -121,7 +121,6 @@ "retry": "^0.13.1",

"eslint": "^8.23.0",
"eslint-config-standard": "^11.0.0-beta.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.8.0",
"eslint-plugin-mocha": "^10.1.0",
"eslint-plugin-n": "^15.7.0",
"eslint-plugin-node": "^5.2.1",
"eslint-plugin-promise": "^3.6.0",

@@ -128,0 +127,0 @@ "eslint-plugin-standard": "^3.0.1",

@@ -92,3 +92,3 @@ 'use strict'

 try {
-pathToPackageJson = require.resolve(`${extracted.pkg}/package.json`, { paths: [ args.resolveDir ] })
+pathToPackageJson = require.resolve(`${extracted.pkg}/package.json`, { paths: [args.resolveDir] })
 } catch (err) {

@@ -177,3 +177,3 @@ if (err.code === 'MODULE_NOT_FOUND') {

-return require.resolve(path, { paths: [ directory ] })
+return require.resolve(path, { paths: [directory] })
 }

@@ -180,0 +180,0 @@

@@ -8,3 +8,3 @@ 'use strict'

 } = require('./helpers/instrument')
-const kebabCase = require('../../utils/src/kebabcase')
+const kebabCase = require('../../datadog-core/src/utils/src/kebabcase')
 const shimmer = require('../../datadog-shimmer')

@@ -11,0 +11,0 @@

@@ -40,3 +40,3 @@ 'use strict'

 resolve({
-headers: headers,
+headers,
 status: abortData.statusCode,

@@ -43,0 +43,0 @@ body: {

@@ -114,2 +114,3 @@ 'use strict'

function wrapCb (cb, serviceName, request, ar) {
// eslint-disable-next-line n/handle-callback-err
return function wrappedCb (err, response) {

@@ -167,3 +168,5 @@ const obj = { request, response }

 'sqs'
-].includes(name) ? name : 'default'
+].includes(name)
+? name
+: 'default'
 }

@@ -170,0 +173,0 @@

@@ -13,3 +13,3 @@ 'use strict'

 const errorCh = channel('apm:cassandra-driver:query:error')
-const connectCh = channel(`apm:cassandra-driver:query:connect`)
+const connectCh = channel('apm:cassandra-driver:query:connect')

@@ -16,0 +16,0 @@ addHook({ name: 'cassandra-driver', versions: ['>=3.0.0'] }, cassandra => {

@@ -20,2 +20,3 @@ 'use strict'

const libraryConfigurationCh = channel('ci:cucumber:library-configuration')
const knownTestsCh = channel('ci:cucumber:known-tests')
const skippableSuitesCh = channel('ci:cucumber:test-suite:skippable')

@@ -45,2 +46,5 @@ const sessionStartCh = channel('ci:cucumber:session:start')

const lastStatusByPickleId = new Map()
const numRetriesByPickleId = new Map()
let pickleByFile = {}

@@ -52,2 +56,5 @@ const pickleResultByFile = {}

let isUnskippable = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let knownTests = []

@@ -90,2 +97,17 @@ function getSuiteStatusFromTestStatuses (testStatuses) {

function isNewTest (testSuite, testName) {
const testsForSuite = knownTests.cucumber?.[testSuite] || []
return !testsForSuite.includes(testName)
}
function getTestStatusFromRetries (testStatuses) {
if (testStatuses.every(status => status === 'fail')) {
return 'fail'
}
if (testStatuses.some(status => status === 'pass')) {
return 'pass'
}
return 'pass'
}
function wrapRun (pl, isLatestVersion) {

@@ -105,15 +127,4 @@ if (patched.has(pl)) return

-if (!pickleResultByFile[testFileAbsolutePath]) { // first test in suite
-isUnskippable = isMarkedAsUnskippable(this.pickle)
-const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd())
-isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath)
-testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId })
-}
-const testSourceLine = this.gherkinDocument &&
-this.gherkinDocument.feature &&
-this.gherkinDocument.feature.location &&
-this.gherkinDocument.feature.location.line
+const testSourceLine = this.gherkinDocument?.feature?.location?.line
 testStartCh.publish({

@@ -129,28 +140,19 @@ testName: this.pickle.name,

 const { status, skipReason, errorMessage } = isLatestVersion
-? getStatusFromResultLatest(result) : getStatusFromResult(result)
+? getStatusFromResultLatest(result)
+: getStatusFromResult(result)
-if (!pickleResultByFile[testFileAbsolutePath]) {
-pickleResultByFile[testFileAbsolutePath] = [status]
+if (lastStatusByPickleId.has(this.pickle.id)) {
+lastStatusByPickleId.get(this.pickle.id).push(status)
 } else {
-pickleResultByFile[testFileAbsolutePath].push(status)
+lastStatusByPickleId.set(this.pickle.id, [status])
 }
-testFinishCh.publish({ status, skipReason, errorMessage })
-// last test in suite
-if (pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) {
-const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testFileAbsolutePath])
-if (global.__coverage__) {
-const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)
-testSuiteCodeCoverageCh.publish({
-coverageFiles,
-suiteFile: testFileAbsolutePath
-})
-// We need to reset coverage to get a code coverage per suite
-// Before that, we preserve the original coverage
-mergeCoverage(global.__coverage__, originalCoverageMap)
-resetCoverage(global.__coverage__)
-}
-testSuiteFinishCh.publish(testSuiteStatus)
-}
+let isNew = false
+let isEfdRetry = false
+if (isEarlyFlakeDetectionEnabled && status !== 'skip') {
+const numRetries = numRetriesByPickleId.get(this.pickle.id)
+isNew = numRetries !== undefined
+isEfdRetry = numRetries > 0
+}
+testFinishCh.publish({ status, skipReason, errorMessage, isNew, isEfdRetry })
 })
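The mergeCoverage/resetCoverage pair in the removed block (the same logic reappears in getWrappedRunTest below) keeps one session-wide coverage map while zeroing the live istanbul counters between suites. A minimal sketch of what those helpers plausibly do; the names come from this diff, but the bodies below are an illustration, not the actual dd-trace implementation:

const { createCoverageMap } = require('istanbul-lib-coverage')

const originalCoverageMap = createCoverageMap()

function mergeCoverage (coverage, coverageMap) {
  // Fold the per-suite counters into the session-wide coverage map.
  for (const filename of Object.keys(coverage)) {
    coverageMap.addFileCoverage(coverage[filename])
  }
}

function resetCoverage (coverage) {
  // Zero statement, function and branch hit counts in place so the
  // next suite starts from a clean slate.
  for (const filename of Object.keys(coverage)) {
    const fileCoverage = coverage[filename]
    for (const key of Object.keys(fileCoverage.s)) fileCoverage.s[key] = 0
    for (const key of Object.keys(fileCoverage.f)) fileCoverage.f[key] = 0
    for (const key of Object.keys(fileCoverage.b)) fileCoverage.b[key] = fileCoverage.b[key].map(() => 0)
  }
}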

@@ -185,3 +187,4 @@ return promise

 const { status, skipReason, errorMessage } = isLatestVersion
-? getStatusFromResultLatest(result) : getStatusFromResult(result)
+? getStatusFromResultLatest(result)
+: getStatusFromResult(result)

@@ -268,8 +271,7 @@ testFinishCh.publish({ isStep: true, status, skipReason, errorMessage })

-addHook({
-name: '@cucumber/cucumber',
-versions: ['>=7.0.0'],
-file: 'lib/runtime/index.js'
-}, (runtimePackage, frameworkVersion) => {
-shimmer.wrap(runtimePackage.default.prototype, 'start', start => async function () {
+function getWrappedStart (start, frameworkVersion) {
+return async function () {
 if (!libraryConfigurationCh.hasSubscribers) {
 return start.apply(this, arguments)
 }
 const asyncResource = new AsyncResource('bound-anonymous-fn')

@@ -286,4 +288,22 @@ let onDone

-await configPromise
+const configurationResponse = await configPromise
+isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
+earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
+if (isEarlyFlakeDetectionEnabled) {
+const knownTestsPromise = new Promise(resolve => {
+onDone = resolve
+})
+asyncResource.runInAsyncScope(() => {
+knownTestsCh.publish({ onDone })
+})
+const knownTestsResponse = await knownTestsPromise
+if (!knownTestsResponse.err) {
+knownTests = knownTestsResponse.knownTests
+} else {
+isEarlyFlakeDetectionEnabled = false
+}
+}
 const skippableSuitesPromise = new Promise(resolve => {

@@ -354,9 +374,108 @@ onDone = resolve

 hasUnskippableSuites: isUnskippable,
-hasForcedToRunSuites: isForcedToRun
+hasForcedToRunSuites: isForcedToRun,
+isEarlyFlakeDetectionEnabled
 })
 })
 return success
-})
+}
+}
function getWrappedRunTest (runTestFunction) {
return async function (pickleId) {
const test = this.eventDataCollector.getPickle(pickleId)
const testFileAbsolutePath = test.uri
const testSuitePath = getTestSuitePath(testFileAbsolutePath, process.cwd())
if (!pickleResultByFile[testFileAbsolutePath]) { // first test in suite
isUnskippable = isMarkedAsUnskippable(test)
isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath)
testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId })
}
let isNew = false
if (isEarlyFlakeDetectionEnabled) {
isNew = isNewTest(testSuitePath, test.name)
if (isNew) {
numRetriesByPickleId.set(pickleId, 0)
}
}
const runTestCaseResult = await runTestFunction.apply(this, arguments)
const testStatuses = lastStatusByPickleId.get(pickleId)
const lastTestStatus = testStatuses[testStatuses.length - 1]
// If it's a new test and it hasn't been skipped, we run it again
if (isEarlyFlakeDetectionEnabled && lastTestStatus !== 'skip' && isNew) {
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
numRetriesByPickleId.set(pickleId, retryIndex + 1)
await runTestFunction.apply(this, arguments)
}
}
let testStatus = lastTestStatus
if (isEarlyFlakeDetectionEnabled) {
/**
* If Early Flake Detection (EFD) is enabled the logic is as follows:
* - If all attempts for a test are failing, the test has failed and we will let the test process fail.
* - If just a single attempt passes, we will prevent the test process from failing.
* The rationale behind this is the following: you may still be able to block your CI pipeline by gating
* on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
*/
testStatus = getTestStatusFromRetries(testStatuses)
if (testStatus === 'pass') {
this.success = true
}
}
if (!pickleResultByFile[testFileAbsolutePath]) {
pickleResultByFile[testFileAbsolutePath] = [testStatus]
} else {
pickleResultByFile[testFileAbsolutePath].push(testStatus)
}
// last test in suite
if (pickleResultByFile[testFileAbsolutePath].length === pickleByFile[testFileAbsolutePath].length) {
const testSuiteStatus = getSuiteStatusFromTestStatuses(pickleResultByFile[testFileAbsolutePath])
if (global.__coverage__) {
const coverageFiles = getCoveredFilenamesFromCoverage(global.__coverage__)
testSuiteCodeCoverageCh.publish({
coverageFiles,
suiteFile: testFileAbsolutePath
})
// We need to reset coverage to get code coverage per suite
// Before that, we preserve the original coverage
mergeCoverage(global.__coverage__, originalCoverageMap)
resetCoverage(global.__coverage__)
}
testSuiteFinishCh.publish(testSuiteStatus)
}
return runTestCaseResult
}
}
// From 7.3.0 onwards, runPickle becomes runTestCase
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/index.js'
}, (runtimePackage, frameworkVersion) => {
shimmer.wrap(runtimePackage.default.prototype, 'runTestCase', runTestCase => getWrappedRunTest(runTestCase))
shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion))
return runtimePackage
})
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.0.0 <7.3.0'],
file: 'lib/runtime/index.js'
}, (runtimePackage, frameworkVersion) => {
shimmer.wrap(runtimePackage.default.prototype, 'runPickle', runPickle => getWrappedRunTest(runPickle))
shimmer.wrap(runtimePackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion))
return runtimePackage
})
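Concrete inputs make the EFD retry semantics above easier to follow: getTestStatusFromRetries (defined earlier in this file) only reports 'fail' when every attempt failed, so a single passing retry marks the test as flaky but keeps the process green:

getTestStatusFromRetries(['fail', 'fail', 'fail']) // => 'fail': all attempts failed
getTestStatusFromRetries(['fail', 'pass', 'fail']) // => 'pass': one attempt passed
getTestStatusFromRetries(['skip'])                 // => 'pass' (the fallback branch)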

@@ -1,2 +0,1 @@

'use strict'

@@ -3,0 +2,0 @@

@@ -14,3 +14,3 @@ 'use strict'

-const receiveStartCh = channel(`apm:google-cloud-pubsub:receive:start`)
+const receiveStartCh = channel('apm:google-cloud-pubsub:receive:start')
 const receiveFinishCh = channel('apm:google-cloud-pubsub:receive:finish')

@@ -17,0 +17,0 @@ const receiveErrorCh = channel('apm:google-cloud-pubsub:receive:error')

@@ -95,3 +95,5 @@ 'use strict'

 break
-case 'finish':
+// Streams are always cancelled before `finish` since 1.10.0 so we have
+// to use `prefinish` instead to avoid cancellation false positives.
+case 'prefinish':
 if (call.status) {

@@ -98,0 +100,0 @@ updateChannel.publish(call.status)
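A standalone sketch of the stream event ordering the new comment relies on (a plain Node writable, not grpc-js itself; the cancellation behaviour noted in the comments is an assumption of this sketch):

const { Writable } = require('stream')

const call = new Writable({ write (chunk, enc, cb) { cb() } })
// 'prefinish' fires once end() has flushed, one tick before 'finish',
// so it still fires even when the call is destroyed (cancelled) right after.
call.on('prefinish', () => console.log('status is readable here'))
// 'finish' can be skipped entirely if the stream is destroyed first.
call.on('finish', () => console.log('may never fire for cancelled calls'))
call.end('payload')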

@@ -5,2 +5,3 @@ 'use strict'

'@apollo/server': () => require('../apollo-server'),
'@apollo/gateway': () => require('../apollo'),
'apollo-server-core': () => require('../apollo-server-core'),

@@ -26,32 +27,32 @@ '@aws-sdk/smithy-client': () => require('../aws-sdk'),

 '@smithy/smithy-client': () => require('../aws-sdk'),
-'aerospike': () => require('../aerospike'),
-'amqp10': () => require('../amqp10'),
-'amqplib': () => require('../amqplib'),
+aerospike: () => require('../aerospike'),
+amqp10: () => require('../amqp10'),
+amqplib: () => require('../amqplib'),
 'aws-sdk': () => require('../aws-sdk'),
-'bluebird': () => require('../bluebird'),
+bluebird: () => require('../bluebird'),
 'body-parser': () => require('../body-parser'),
-'bunyan': () => require('../bunyan'),
+bunyan: () => require('../bunyan'),
 'cassandra-driver': () => require('../cassandra-driver'),
-'child_process': () => require('../child_process'),
-'connect': () => require('../connect'),
-'cookie': () => require('../cookie'),
+child_process: () => require('../child_process'),
+connect: () => require('../connect'),
+cookie: () => require('../cookie'),
 'cookie-parser': () => require('../cookie-parser'),
-'couchbase': () => require('../couchbase'),
-'crypto': () => require('../crypto'),
-'cypress': () => require('../cypress'),
-'dns': () => require('../dns'),
-'elasticsearch': () => require('../elasticsearch'),
-'express': () => require('../express'),
+couchbase: () => require('../couchbase'),
+crypto: () => require('../crypto'),
+cypress: () => require('../cypress'),
+dns: () => require('../dns'),
+elasticsearch: () => require('../elasticsearch'),
+express: () => require('../express'),
 'express-mongo-sanitize': () => require('../express-mongo-sanitize'),
-'fastify': () => require('../fastify'),
+fastify: () => require('../fastify'),
 'find-my-way': () => require('../find-my-way'),
-'fs': () => require('../fs'),
+fs: () => require('../fs'),
 'generic-pool': () => require('../generic-pool'),
-'graphql': () => require('../graphql'),
-'grpc': () => require('../grpc'),
-'hapi': () => require('../hapi'),
-'http': () => require('../http'),
-'http2': () => require('../http2'),
-'https': () => require('../http'),
-'ioredis': () => require('../ioredis'),
+graphql: () => require('../graphql'),
+grpc: () => require('../grpc'),
+hapi: () => require('../hapi'),
+http: () => require('../http'),
+http2: () => require('../http2'),
+https: () => require('../http'),
+ioredis: () => require('../ioredis'),
 'jest-circus': () => require('../jest'),

@@ -63,22 +64,22 @@ 'jest-config': () => require('../jest'),

 'jest-worker': () => require('../jest'),
-'knex': () => require('../knex'),
-'koa': () => require('../koa'),
+knex: () => require('../knex'),
+koa: () => require('../koa'),
 'koa-router': () => require('../koa'),
-'kafkajs': () => require('../kafkajs'),
-'ldapjs': () => require('../ldapjs'),
+kafkajs: () => require('../kafkajs'),
+ldapjs: () => require('../ldapjs'),
 'limitd-client': () => require('../limitd-client'),
-'mariadb': () => require('../mariadb'),
-'memcached': () => require('../memcached'),
+mariadb: () => require('../mariadb'),
+memcached: () => require('../memcached'),
 'microgateway-core': () => require('../microgateway-core'),
-'mocha': () => require('../mocha'),
+mocha: () => require('../mocha'),
 'mocha-each': () => require('../mocha'),
-'moleculer': () => require('../moleculer'),
-'mongodb': () => require('../mongodb'),
+moleculer: () => require('../moleculer'),
+mongodb: () => require('../mongodb'),
 'mongodb-core': () => require('../mongodb-core'),
-'mongoose': () => require('../mongoose'),
-'mquery': () => require('../mquery'),
-'mysql': () => require('../mysql'),
-'mysql2': () => require('../mysql2'),
-'net': () => require('../net'),
-'next': () => require('../next'),
+mongoose: () => require('../mongoose'),
+mquery: () => require('../mquery'),
+mysql: () => require('../mysql'),
+mysql2: () => require('../mysql2'),
+net: () => require('../net'),
+next: () => require('../next'),
 'node:child_process': () => require('../child_process'),

@@ -91,24 +92,24 @@ 'node:crypto': () => require('../crypto'),

 'node:net': () => require('../net'),
-'oracledb': () => require('../oracledb'),
-'openai': () => require('../openai'),
-'paperplane': () => require('../paperplane'),
+oracledb: () => require('../oracledb'),
+openai: () => require('../openai'),
+paperplane: () => require('../paperplane'),
 'passport-http': () => require('../passport-http'),
 'passport-local': () => require('../passport-local'),
-'pg': () => require('../pg'),
-'pino': () => require('../pino'),
+pg: () => require('../pg'),
+pino: () => require('../pino'),
 'pino-pretty': () => require('../pino'),
-'playwright': () => require('../playwright'),
+playwright: () => require('../playwright'),
 'promise-js': () => require('../promise-js'),
-'promise': () => require('../promise'),
-'q': () => require('../q'),
-'qs': () => require('../qs'),
-'redis': () => require('../redis'),
-'restify': () => require('../restify'),
-'rhea': () => require('../rhea'),
-'router': () => require('../router'),
-'sharedb': () => require('../sharedb'),
-'sequelize': () => require('../sequelize'),
-'tedious': () => require('../tedious'),
-'when': () => require('../when'),
-'winston': () => require('../winston')
+promise: () => require('../promise'),
+q: () => require('../q'),
+qs: () => require('../qs'),
+redis: () => require('../redis'),
+restify: () => require('../restify'),
+rhea: () => require('../rhea'),
+router: () => require('../router'),
+sharedb: () => require('../sharedb'),
+sequelize: () => require('../sequelize'),
+tedious: () => require('../tedious'),
+when: () => require('../when'),
+winston: () => require('../winston')
 }
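The three hunks above are mechanical quote-props cleanups (keys quoted only when the syntax requires it), but the structure they touch is worth noting: every entry is a thunk, so an instrumentation module is only loaded the first time the matching package is required. A reduced sketch of the pattern, with illustrative paths:

const hooks = {
  express: () => require('../express'),
  'body-parser': () => require('../body-parser') // quoted because the key needs it
}

function loadHookFor (packageName) {
  const hook = hooks[packageName]
  if (hook) hook() // the require() cost is paid on first use only
}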

@@ -59,3 +59,3 @@ 'use strict'

 Object.defineProperties(bound, {
-'length': {
+length: {
 configurable: true,

@@ -66,3 +66,3 @@ enumerable: false,

 },
-'asyncResource': {
+asyncResource: {
 configurable: true,

@@ -69,0 +69,0 @@ enumerable: true,

@@ -158,2 +158,3 @@ 'use strict'

} catch (e) {
// eslint-disable-next-line n/no-deprecated-api
return url.parse(inputURL)

@@ -160,0 +161,0 @@ }

@@ -12,3 +12,5 @@ 'use strict'

 getTestSuitePath,
-getTestParametersString
+getTestParametersString,
+addEfdStringToTestName,
+removeEfdStringFromTestName
 } = require('../../dd-trace/src/plugins/util/test')

@@ -46,2 +48,5 @@ const {

// Message sent by jest's main process to workers to run a test suite (=test file)
// https://github.com/jestjs/jest/blob/1d682f21c7a35da4d3ab3a1436a357b980ebd0fa/packages/jest-worker/src/types.ts#L37
const CHILD_MESSAGE_CALL = 1
// Maximum time we'll wait for the tracer to flush

@@ -51,3 +56,3 @@ const FLUSH_TIMEOUT = 10000

 let skippableSuites = []
-let knownTests = []
+let knownTests = {}
 let isCodeCoverageEnabled = false

@@ -62,14 +67,12 @@ let isSuitesSkippingEnabled = false

 let earlyFlakeDetectionNumRetries = 0
+let hasFilteredSkippableSuites = false
-const EFD_STRING = "Retried by Datadog's Early Flake Detection"
-const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')
 const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
 const specStatusToTestStatus = {
-'pending': 'skip',
-'disabled': 'skip',
-'todo': 'skip',
-'passed': 'pass',
-'failed': 'fail'
+pending: 'skip',
+disabled: 'skip',
+todo: 'skip',
+passed: 'pass',
+failed: 'fail'
 }

@@ -108,10 +111,2 @@

-function getEfdTestName (testName, numAttempt) {
-return `${EFD_STRING} (#${numAttempt}): ${testName}`
-}
-function removeEfdTestName (testName) {
-return testName.replace(EFD_TEST_NAME_REGEX, '')
-}
 function getWrappedEnvironment (BaseEnvironment, jestVersion) {
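The two helpers removed above now live in dd-trace/src/plugins/util/test as addEfdStringToTestName and removeEfdStringFromTestName (see the import hunk earlier in this file). Their round trip, reconstructed as a sketch from the EFD_STRING and EFD_TEST_NAME_REGEX constants that were defined here:

const EFD_STRING = "Retried by Datadog's Early Flake Detection"
const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')

const retried = `${EFD_STRING} (#2): adds two numbers` // the name a retry is registered under
retried.replace(EFD_TEST_NAME_REGEX, '')               // => 'adds two numbers'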

@@ -124,14 +119,23 @@ return class DatadogEnvironment extends BaseEnvironment {

this.testSuite = getTestSuitePath(context.testPath, rootDir)
this.testFileAbsolutePath = context.testPath
this.nameToParams = {}
this.global._ddtrace = global._ddtrace
this.displayName = config.projectConfig?.displayName?.name
this.testEnvironmentOptions = getTestEnvironmentOptions(config)
const repositoryRoot = this.testEnvironmentOptions._ddRepositoryRoot
if (repositoryRoot) {
this.testSourceFile = getTestSuitePath(context.testPath, repositoryRoot)
}
this.isEarlyFlakeDetectionEnabled = this.testEnvironmentOptions._ddIsEarlyFlakeDetectionEnabled
if (this.isEarlyFlakeDetectionEnabled) {
const hasKnownTests = !!knownTests.jest
earlyFlakeDetectionNumRetries = this.testEnvironmentOptions._ddEarlyFlakeDetectionNumRetries
 try {
-this.knownTestsForThisSuite = this.getKnownTestsForSuite(this.testEnvironmentOptions._ddKnownTests)
+this.knownTestsForThisSuite = hasKnownTests
+? (knownTests.jest[this.testSuite] || [])
+: this.getKnownTestsForSuite(this.testEnvironmentOptions._ddKnownTests)
 } catch (e) {

@@ -147,10 +151,12 @@ // If there has been an error parsing the tests, we'll disable Early Flake Detection

 getKnownTestsForSuite (knownTests) {
+if (this.knownTestsForThisSuite) {
+return this.knownTestsForThisSuite
+}
 let knownTestsForSuite = knownTests
-// If jest runs in band, the known tests are not serialized, so they're an array.
-if (!Array.isArray(knownTests)) {
+// If jest is using workers, known tests are serialized to json.
+// If jest runs in band, they are not.
+if (typeof knownTestsForSuite === 'string') {
 knownTestsForSuite = JSON.parse(knownTestsForSuite)
 }
 return knownTestsForSuite
 .filter(test => test.includes(this.testSuite))
 .map(test => test.replace(`jest.${this.testSuite}.`, '').trim())
 }

@@ -162,3 +168,4 @@

 const describeSuffix = getJestTestName(state.currentDescribeBlock)
-return removeEfdTestName(`${describeSuffix} ${event.testName}`).trim()
+const fullTestName = describeSuffix ? `${describeSuffix} ${event.testName}` : event.testName
+return removeEfdStringFromTestName(fullTestName)
 }

@@ -197,3 +204,3 @@

 if (this.isEarlyFlakeDetectionEnabled) {
-const originalTestName = removeEfdTestName(testName)
+const originalTestName = removeEfdStringFromTestName(testName)
 isNewTest = retriedTestsToNumAttempts.has(originalTestName)

@@ -205,9 +212,9 @@ if (isNewTest) {

 }
 asyncResource.runInAsyncScope(() => {
 testStartCh.publish({
-name: removeEfdTestName(testName),
+name: removeEfdStringFromTestName(testName),
 suite: this.testSuite,
 testFileAbsolutePath: this.testFileAbsolutePath,
+testSourceFile: this.testSourceFile,
 runner: 'jest-circus',
 displayName: this.displayName,
 testParameters,

@@ -231,3 +238,3 @@ frameworkVersion: jestVersion,

 if (this.global.test) {
-this.global.test(getEfdTestName(event.testName, retryIndex), event.fn, event.timeout)
+this.global.test(addEfdStringToTestName(event.testName, retryIndex), event.fn, event.timeout)
 } else {

@@ -263,4 +270,5 @@ log.error('Early flake detection could not retry test because global.test is undefined')

suite: this.testSuite,
testFileAbsolutePath: this.testFileAbsolutePath,
testSourceFile: this.testSourceFile,
runner: 'jest-circus',
displayName: this.displayName,
frameworkVersion: jestVersion,

@@ -285,2 +293,19 @@ testStartLine: getTestLineStart(event.test.asyncError, this.testSuite)

function applySuiteSkipping (originalTests, rootDir, frameworkVersion) {
const jestSuitesToRun = getJestSuitesToRun(skippableSuites, originalTests, rootDir || process.cwd())
hasFilteredSkippableSuites = true
log.debug(
() => `${jestSuitesToRun.suitesToRun.length} out of ${originalTests.length} suites are going to run.`
)
hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== originalTests.length
numSkippedSuites = jestSuitesToRun.skippedSuites.length
itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
skippableSuites = []
return jestSuitesToRun.suitesToRun
}
addHook({

@@ -296,3 +321,48 @@ name: 'jest-environment-node',

function getWrappedScheduleTests (scheduleTests, frameworkVersion) {
return async function (tests) {
if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
return scheduleTests.apply(this, arguments)
}
const [test] = tests
const rootDir = test?.context?.config?.rootDir
arguments[0] = applySuiteSkipping(tests, rootDir, frameworkVersion)
return scheduleTests.apply(this, arguments)
}
}
addHook({
name: '@jest/core',
file: 'build/TestScheduler.js',
versions: ['>=27.0.0']
}, (testSchedulerPackage, frameworkVersion) => {
const oldCreateTestScheduler = testSchedulerPackage.createTestScheduler
const newCreateTestScheduler = async function () {
if (!isSuitesSkippingEnabled || hasFilteredSkippableSuites) {
return oldCreateTestScheduler.apply(this, arguments)
}
// If suite skipping is enabled and has not filtered skippable suites yet, we'll attempt to do it
const scheduler = await oldCreateTestScheduler.apply(this, arguments)
shimmer.wrap(scheduler, 'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion))
return scheduler
}
testSchedulerPackage.createTestScheduler = newCreateTestScheduler
return testSchedulerPackage
})
addHook({
name: '@jest/core',
file: 'build/TestScheduler.js',
versions: ['>=24.8.0 <27.0.0']
}, (testSchedulerPackage, frameworkVersion) => {
shimmer.wrap(
testSchedulerPackage.default.prototype,
'scheduleTests', scheduleTests => getWrappedScheduleTests(scheduleTests, frameworkVersion)
)
return testSchedulerPackage
})
addHook({
name: '@jest/test-sequencer',

@@ -304,25 +374,9 @@ versions: ['>=24.8.0']

-if (!shardedTests.length) {
+if (!shardedTests.length || !isSuitesSkippingEnabled || !skippableSuites.length) {
 return shardedTests
 }
 // TODO: could we get the rootDir from each test?
 const [test] = shardedTests
 const rootDir = test?.context?.config?.rootDir
-const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd())
-log.debug(
-() => `${jestSuitesToRun.suitesToRun.length} out of ${shardedTests.length} suites are going to run.`
-)
-hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
-hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
-isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== shardedTests.length
-numSkippedSuites = jestSuitesToRun.skippedSuites.length
-itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
-skippableSuites = []
-return jestSuitesToRun.suitesToRun
+return applySuiteSkipping(shardedTests, rootDir, frameworkVersion)
 })

@@ -371,2 +425,5 @@ return sequencerPackage

knownTests = receivedKnownTests
} else {
// We disable EFD if there has been an error in the known tests request
isEarlyFlakeDetectionEnabled = false
}

@@ -531,2 +588,3 @@ } catch (err) {

testEnvironmentOptions: environment.testEnvironmentOptions,
displayName: environment.displayName,
frameworkVersion: jestVersion

@@ -587,3 +645,2 @@ })

 config.testEnvironmentOptions._ddTestCodeCoverageEnabled = isCodeCoverageEnabled
-config.testEnvironmentOptions._ddKnownTests = knownTests
 })

@@ -656,2 +713,3 @@

_ddEarlyFlakeDetectionNumRetries,
_ddRepositoryRoot,
...restOfTestEnvironmentOptions

@@ -681,3 +739,3 @@ } = testEnvironmentOptions

 shimmer.wrap(SearchSource.prototype, 'getTestPaths', getTestPaths => async function () {
-if (!skippableSuites.length) {
+if (!isSuitesSkippingEnabled || !skippableSuites.length) {
 return getTestPaths.apply(this, arguments)

@@ -688,3 +746,3 @@ }

-if (shard && shard.shardIndex) {
+if (shard?.shardCount > 1) {
 // If the user is using jest sharding, we want to apply the filtering of tests in the shard process.

@@ -703,17 +761,4 @@ // The reason for this is the following:

-const jestSuitesToRun = getJestSuitesToRun(skippableSuites, tests, rootDir)
-log.debug(() => `${jestSuitesToRun.suitesToRun.length} out of ${tests.length} suites are going to run.`)
-hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites
-hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites
-isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== tests.length
-numSkippedSuites = jestSuitesToRun.skippedSuites.length
-itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion })
-skippableSuites = []
-return { ...testPaths, tests: jestSuitesToRun.suitesToRun }
+const suitesToRun = applySuiteSkipping(tests, rootDir, frameworkVersion)
+return { ...testPaths, tests: suitesToRun }
 })

@@ -780,2 +825,34 @@

const ChildProcessWorker = childProcessWorker.default
shimmer.wrap(ChildProcessWorker.prototype, 'send', send => function (request) {
if (!isEarlyFlakeDetectionEnabled) {
return send.apply(this, arguments)
}
const [type] = request
// eslint-disable-next-line
// https://github.com/jestjs/jest/blob/1d682f21c7a35da4d3ab3a1436a357b980ebd0fa/packages/jest-worker/src/workers/ChildProcessWorker.ts#L424
if (type === CHILD_MESSAGE_CALL) {
// This is the message that the main process sends to the worker to run a test suite (=test file).
// In here we modify the config.testEnvironmentOptions to include the known tests for the suite.
// This way the suite only knows about the tests that are part of it.
const args = request[request.length - 1]
if (args.length > 1) {
return send.apply(this, arguments)
}
if (!args[0]?.config) {
return send.apply(this, arguments)
}
const [{ globalConfig, config, path: testSuiteAbsolutePath }] = args
const testSuite = getTestSuitePath(testSuiteAbsolutePath, globalConfig.rootDir || process.cwd())
const suiteKnownTests = knownTests.jest?.[testSuite] || []
args[0].config = {
...config,
testEnvironmentOptions: {
...config.testEnvironmentOptions,
_ddKnownTests: suiteKnownTests
}
}
}
return send.apply(this, arguments)
})
shimmer.wrap(ChildProcessWorker.prototype, '_onMessage', _onMessage => function () {

@@ -782,0 +859,0 @@ const [code, data] = arguments[0]
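The send() wrapper above relies on jest-worker's wire format. Going by the CHILD_MESSAGE_CALL comment and link in this diff, a worker request is a tuple of [type, isBlocking, methodName, args], so request[request.length - 1] is the argument list and args[0].config is the per-suite config the wrapper swaps out. An illustrative, assumed shape:

const CHILD_MESSAGE_CALL = 1
const exampleRequest = [
  CHILD_MESSAGE_CALL, // type: invoke a method in the worker
  false,              // isBlocking
  'worker',           // method name
  [{ globalConfig: {}, config: { testEnvironmentOptions: {} }, path: '/repo/foo.test.js' }]
]
const args = exampleRequest[exampleRequest.length - 1] // what the wrapper inspects and mutates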

@@ -41,3 +41,4 @@ 'use strict'

 this._brokers = (options.brokers && typeof options.brokers !== 'function')
-? options.brokers.join(',') : undefined
+? options.brokers.join(',')
+: undefined
 }

@@ -44,0 +45,0 @@ }

@@ -79,4 +79,5 @@ 'use strict'

 const callback = arguments[callbackIndex]
+// eslint-disable-next-line n/handle-callback-err
 arguments[callbackIndex] = shimmer.wrap(callback, function (err, corkedEmitter) {
-if (typeof corkedEmitter === 'object' && typeof corkedEmitter['on'] === 'function') {
+if (typeof corkedEmitter === 'object' && typeof corkedEmitter.on === 'function') {
 wrapEmitter(corkedEmitter)

@@ -83,0 +84,0 @@ }

@@ -14,3 +14,5 @@ const { createCoverageMap } = require('istanbul-lib-coverage')

 fromCoverageMapToCoverage,
-getCallSites
+getCallSites,
+addEfdStringToTestName,
+removeEfdStringFromTestName
 } = require('../../dd-trace/src/plugins/util/test')

@@ -25,2 +27,3 @@

const libraryConfigurationCh = channel('ci:mocha:library-configuration')
const knownTestsCh = channel('ci:mocha:known-tests')
const skippableSuitesCh = channel('ci:mocha:test-suite:skippable')

@@ -45,2 +48,3 @@

const testToStartLine = new WeakMap()
const newTests = {}

@@ -60,2 +64,6 @@ // `isWorker` is true if it's a Mocha worker

let itrCorrelationId = ''
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let isSuitesSkippingEnabled = false
let knownTests = []

@@ -100,2 +108,25 @@ function getSuitesByTestFile (root) {

function getTestFullName (test) {
return `mocha.${getTestSuitePath(test.file, process.cwd())}.${removeEfdStringFromTestName(test.fullTitle())}`
}
function isNewTest (test) {
const testSuite = getTestSuitePath(test.file, process.cwd())
const testName = removeEfdStringFromTestName(test.fullTitle())
const testsForSuite = knownTests.mocha?.[testSuite] || []
return !testsForSuite.includes(testName)
}
function retryTest (test) {
const originalTestName = test.title
const suite = test.parent
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
const clonedTest = test.clone()
clonedTest.title = addEfdStringToTestName(originalTestName, retryIndex + 1)
suite.addTest(clonedTest)
clonedTest._ddIsNew = true
clonedTest._ddIsEfdRetry = true
}
}
function getTestAsyncResource (test) {

@@ -131,2 +162,15 @@ if (!test.fn) {

shimmer.wrap(Runner.prototype, 'runTests', runTests => function (suite, fn) {
if (isEarlyFlakeDetectionEnabled) {
// by the time we reach `this.on('test')`, it is too late. We need to add retries here
suite.tests.forEach(test => {
if (!test.isPending() && isNewTest(test)) {
test._ddIsNew = true
retryTest(test)
}
})
}
return runTests.apply(this, arguments)
})
shimmer.wrap(Runner.prototype, 'run', run => function () {

@@ -153,2 +197,20 @@ if (!testStartCh.hasSubscribers || isWorker) {

if (isEarlyFlakeDetectionEnabled) {
/**
* If Early Flake Detection (EFD) is enabled the logic is as follows:
* - If all attempts for a test are failing, the test has failed and we will let the test process fail.
* - If just a single attempt passes, we will prevent the test process from failing.
* The rationale behind this is the following: you may still be able to block your CI pipeline by gating
* on flakiness (the test will be considered flaky), but you may choose to unblock the pipeline too.
*/
for (const tests of Object.values(newTests)) {
const failingNewTests = tests.filter(test => test.isFailed())
const areAllNewTestsFailing = failingNewTests.length === tests.length
if (failingNewTests.length && !areAllNewTestsFailing) {
this.stats.failures -= failingNewTests.length
this.failures -= failingNewTests.length
}
}
}
if (status === 'fail') {

@@ -178,3 +240,4 @@ error = new Error(`Failed tests: ${this.failures}.`)

 hasUnskippableSuites: !!unskippableSuites.length,
-error
+error,
+isEarlyFlakeDetectionEnabled
 })

@@ -264,4 +327,31 @@ }))

 testToAr.set(test.fn, asyncResource)
+const {
+file: testSuiteAbsolutePath,
+title,
+_ddIsNew: isNew,
+_ddIsEfdRetry: isEfdRetry
+} = test
+const testInfo = {
+testName: test.fullTitle(),
+testSuiteAbsolutePath,
+title,
+isNew,
+isEfdRetry,
+testStartLine
+}
+// We want to store the result of the new tests
+if (isNew) {
+const testFullName = getTestFullName(test)
+if (newTests[testFullName]) {
+newTests[testFullName].push(test)
+} else {
+newTests[testFullName] = [test]
+}
+}
 asyncResource.runInAsyncScope(() => {
-testStartCh.publish({ test, testStartLine })
+testStartCh.publish(testInfo)
 })

@@ -335,6 +425,19 @@ })

 this.on('pending', (test) => {
 const testStartLine = testToStartLine.get(test)
+const {
+file: testSuiteAbsolutePath,
+title
+} = test
+const testInfo = {
+testName: test.fullTitle(),
+testSuiteAbsolutePath,
+title,
+testStartLine
+}
 const asyncResource = getTestAsyncResource(test)
 if (asyncResource) {
 asyncResource.runInAsyncScope(() => {
-skipCh.publish(test)
+skipCh.publish(testInfo)
 })

@@ -351,3 +454,3 @@ } else {

 skippedTestAsyncResource.runInAsyncScope(() => {
-skipCh.publish(test)
+skipCh.publish(testInfo)
 })

@@ -372,4 +475,4 @@ }

 return {
-it: function (name) {
-parameterizedTestCh.publish({ name, params })
+it: function (title) {
+parameterizedTestCh.publish({ title, params })
 it.apply(this, arguments)

@@ -395,3 +498,3 @@ },

 if (this.options.parallel) {
-log.warn(`Unable to initialize CI Visibility because Mocha is running in parallel mode.`)
+log.warn('Unable to initialize CI Visibility because Mocha is running in parallel mode.')
 return run.apply(this, arguments)

@@ -441,13 +544,39 @@ }

-const onReceivedConfiguration = ({ err }) => {
+const onReceivedKnownTests = ({ err, knownTests: receivedKnownTests }) => {
 if (err) {
-return global.run()
+knownTests = []
+isEarlyFlakeDetectionEnabled = false
+} else {
+knownTests = receivedKnownTests
 }
-if (!skippableSuitesCh.hasSubscribers) {
+if (isSuitesSkippingEnabled) {
+skippableSuitesCh.publish({
+onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
+})
+} else {
 global.run()
 }
 }
+const onReceivedConfiguration = ({ err, libraryConfig }) => {
+if (err || !skippableSuitesCh.hasSubscribers || !knownTestsCh.hasSubscribers) {
 return global.run()
 }
-skippableSuitesCh.publish({
-onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
-})
+isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
+isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
+earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
+if (isEarlyFlakeDetectionEnabled) {
+knownTestsCh.publish({
+onDone: mochaRunAsyncResource.bind(onReceivedKnownTests)
+})
+} else if (isSuitesSkippingEnabled) {
+skippableSuitesCh.publish({
+onDone: mochaRunAsyncResource.bind(onReceivedSkippableSuites)
+})
+} else {
+global.run()
+}
+}

@@ -454,0 +583,0 @@

@@ -10,5 +10,5 @@ 'use strict'

-const startCh = channel(`apm:mongodb:query:start`)
-const finishCh = channel(`apm:mongodb:query:finish`)
-const errorCh = channel(`apm:mongodb:query:error`)
+const startCh = channel('apm:mongodb:query:start')
+const finishCh = channel('apm:mongodb:query:finish')
+const errorCh = channel('apm:mongodb:query:error')

@@ -36,3 +36,3 @@ addHook({ name: 'mongodb-core', versions: ['2 - 3.1.9'] }, Server => {

-addHook({ name: 'mongodb', versions: ['>=4.6.0'], file: 'lib/cmap/connection.js' }, Connection => {
+addHook({ name: 'mongodb', versions: ['>=4.6.0 <6.4.0'], file: 'lib/cmap/connection.js' }, Connection => {
 const proto = Connection.Connection.prototype

@@ -43,2 +43,8 @@ shimmer.wrap(proto, 'command', command => wrapConnectionCommand(command, 'command'))

addHook({ name: 'mongodb', versions: ['>=6.4.0'], file: 'lib/cmap/connection.js' }, Connection => {
const proto = Connection.Connection.prototype
shimmer.wrap(proto, 'command', command => wrapConnectionCommand(command, 'command', undefined, instrumentPromise))
return Connection
})
addHook({ name: 'mongodb', versions: ['>=3.3 <4'], file: 'lib/core/wireprotocol/index.js' }, wp => wrapWp(wp))

@@ -95,3 +101,3 @@

-function wrapConnectionCommand (command, operation, name) {
+function wrapConnectionCommand (command, operation, name, instrumentFn = instrument) {
 const wrapped = function (ns, ops) {

@@ -108,3 +114,3 @@ if (!startCh.hasSubscribers) {

 ns = `${ns.db}.${ns.collection}`
-return instrument(operation, command, this, arguments, topology, ns, ops, { name })
+return instrumentFn(operation, command, this, arguments, topology, ns, ops, { name })
 }

@@ -187,1 +193,24 @@ return shimmer.wrap(command, wrapped)

}
function instrumentPromise (operation, command, ctx, args, server, ns, ops, options = {}) {
const name = options.name || (ops && Object.keys(ops)[0])
const serverInfo = server && server.s && server.s.options
const asyncResource = new AsyncResource('bound-anonymous-fn')
return asyncResource.runInAsyncScope(() => {
startCh.publish({ ns, ops, options: serverInfo, name })
const promise = command.apply(ctx, args)
return promise.then(function (res) {
finishCh.publish()
return res
}, function (err) {
errorCh.publish(err)
finishCh.publish()
return Promise.reject(err)
})
})
}
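instrumentPromise above is the promise-flavoured counterpart of instrument() for mongodb >=6.4.0, where command() returns a promise instead of taking a callback. The same shape in miniature, against Node's diagnostics_channel directly (channel names are illustrative):

const { channel } = require('node:diagnostics_channel')

const startCh = channel('apm:example:query:start')
const finishCh = channel('apm:example:query:finish')
const errorCh = channel('apm:example:query:error')

function wrapPromiseMethod (fn) {
  return function (...args) {
    startCh.publish({ args })
    return fn.apply(this, args).then(
      res => {
        finishCh.publish()
        return res
      },
      err => {
        errorCh.publish(err)
        finishCh.publish()
        return Promise.reject(err) // keep the rejection observable to callers
      }
    )
  }
}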

@@ -18,3 +18,3 @@ 'use strict'

-const connectionCh = channel(`apm:net:tcp:connection`)
+const connectionCh = channel('apm:net:tcp:connection')

@@ -21,0 +21,0 @@ const names = ['net', 'node:net']

@@ -24,3 +24,3 @@ 'use strict'

-addHook({ name: 'oracledb', versions: ['5'] }, oracledb => {
+addHook({ name: 'oracledb', versions: ['>=5'] }, oracledb => {
 shimmer.wrap(oracledb.Connection.prototype, 'execute', execute => {

@@ -27,0 +27,0 @@ return function wrappedExecute (dbQuery, ...args) {

@@ -13,2 +13,3 @@ 'use strict'

// eslint-disable-next-line n/handle-callback-err
return shimmer.wrap(verified, function (err, user, info) {

@@ -15,0 +16,0 @@ const credentials = { type, username }

@@ -0,4 +1,7 @@

+const semver = require('semver')
 const { addHook, channel, AsyncResource } = require('./helpers/instrument')
 const shimmer = require('../../datadog-shimmer')
-const { parseAnnotations } = require('../../dd-trace/src/plugins/util/test')
+const { parseAnnotations, getTestSuitePath } = require('../../dd-trace/src/plugins/util/test')
+const log = require('../../dd-trace/src/log')

@@ -11,2 +14,5 @@ const testStartCh = channel('ci:playwright:test:start')

const libraryConfigurationCh = channel('ci:playwright:library-configuration')
const knownTestsCh = channel('ci:playwright:known-tests')
const testSuiteStartCh = channel('ci:playwright:test-suite:start')

@@ -18,3 +24,6 @@ const testSuiteFinishCh = channel('ci:playwright:test-suite:finish')

const testSuiteToTestStatuses = new Map()
const testSuiteToErrors = new Map()
let applyRepeatEachIndex = null
let startedSuites = []

@@ -30,3 +39,41 @@

let remainingTestsByFile = {}
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let knownTests = {}
let rootDir = ''
const MINIMUM_SUPPORTED_VERSION_EFD = '1.38.0'
function isNewTest (test) {
const testSuite = getTestSuitePath(test._requireFile, rootDir)
const testsForSuite = knownTests?.playwright?.[testSuite] || []
return !testsForSuite.includes(test.title)
}
function getSuiteType (test, type) {
let suite = test.parent
while (suite && suite._type !== type) {
suite = suite.parent
}
return suite
}
// Copy of Suite#_deepClone but with a function to filter tests
function deepCloneSuite (suite, filterTest) {
const copy = suite._clone()
for (const entry of suite._entries) {
if (entry.constructor.name === 'Suite') {
copy._addSuite(deepCloneSuite(entry, filterTest))
} else {
if (filterTest(entry)) {
const copiedTest = entry._clone()
copiedTest._ddIsNew = true
copiedTest._ddIsEfdRetry = true
copy._addTest(copiedTest)
}
}
}
return copy
}
function getTestsBySuiteFromTestGroups (testGroups) {

@@ -87,3 +134,8 @@ return testGroups.reduce((acc, { requireFile, tests }) => {

 const config = getPlaywrightConfig(runner)
-return config.projects?.map(({ project }) => project)
+return config.projects?.map((project) => {
+if (project.project) {
+return project.project
+}
+return project
+})
 }

@@ -100,16 +152,61 @@

-function getBrowserNameFromProjects (projects, projectId) {
-if (!projects) {
+function getBrowserNameFromProjects (projects, test) {
+if (!projects || !test) {
 return null
 }
-return projects.find(project =>
-project.__projectId === projectId || project._id === projectId
-)?.name
+const { _projectIndex, _projectId: testProjectId } = test
+if (_projectIndex !== undefined) {
+return projects[_projectIndex]?.name
+}
+return projects.find(({ __projectId, _id, name }) => {
+if (__projectId !== undefined) {
+return __projectId === testProjectId
+}
+if (_id !== undefined) {
+return _id === testProjectId
+}
+return name === testProjectId
+})?.name
 }
+function formatTestHookError (error, hookType, isTimeout) {
+let hookError = error
+if (error) {
+hookError.message = `Error in ${hookType} hook: ${error.message}`
+}
+if (!hookError && isTimeout) {
+hookError = new Error(`${hookType} hook timed out`)
+}
+return hookError
+}
+function addErrorToTestSuite (testSuiteAbsolutePath, error) {
+if (testSuiteToErrors.has(testSuiteAbsolutePath)) {
+testSuiteToErrors.get(testSuiteAbsolutePath).push(error)
+} else {
+testSuiteToErrors.set(testSuiteAbsolutePath, [error])
+}
+}
+function getTestSuiteError (testSuiteAbsolutePath) {
+const errors = testSuiteToErrors.get(testSuiteAbsolutePath)
+if (!errors) {
+return null
+}
+if (errors.length === 1) {
+return errors[0]
+}
+return new Error(`${errors.length} errors in this test suite:\n${errors.map(e => e.message).join('\n------\n')}`)
+}
 function testBeginHandler (test, browserName) {
 const {
 _requireFile: testSuiteAbsolutePath,
-title: testName, _type,
-location: { line: testSourceLine }
+title: testName,
+_type,
+location: {
+line: testSourceLine
+}
 } = test

@@ -139,3 +236,3 @@

-function testEndHandler (test, annotations, testStatus, error) {
+function testEndHandler (test, annotations, testStatus, error, isTimeout) {
 let annotationTags

@@ -148,2 +245,7 @@ if (annotations.length) {

if (_type === 'beforeAll' || _type === 'afterAll') {
const hookError = formatTestHookError(error, _type, isTimeout)
if (hookError) {
addErrorToTestSuite(testSuiteAbsolutePath, hookError)
}
return

@@ -155,14 +257,26 @@ }

 testAsyncResource.runInAsyncScope(() => {
-testFinishCh.publish({ testStatus, steps: testResult.steps, error, extraTags: annotationTags })
+testFinishCh.publish({
+testStatus,
+steps: testResult.steps,
+error,
+extraTags: annotationTags,
+isNew: test._ddIsNew,
+isEfdRetry: test._ddIsEfdRetry
+})
 })
-if (!testSuiteToTestStatuses.has(testSuiteAbsolutePath)) {
+if (testSuiteToTestStatuses.has(testSuiteAbsolutePath)) {
+testSuiteToTestStatuses.get(testSuiteAbsolutePath).push(testStatus)
+} else {
 testSuiteToTestStatuses.set(testSuiteAbsolutePath, [testStatus])
-} else {
-testSuiteToTestStatuses.get(testSuiteAbsolutePath).push(testStatus)
 }
+if (error) {
+addErrorToTestSuite(testSuiteAbsolutePath, error)
+}
 remainingTestsByFile[testSuiteAbsolutePath] = remainingTestsByFile[testSuiteAbsolutePath]
 .filter(currentTest => currentTest !== test)
 // Last test, we finish the suite
 if (!remainingTestsByFile[testSuiteAbsolutePath].length) {

@@ -178,5 +292,6 @@ const testStatuses = testSuiteToTestStatuses.get(testSuiteAbsolutePath)

+const suiteError = getTestSuiteError(testSuiteAbsolutePath)
 const testSuiteAsyncResource = testSuiteToAr.get(testSuiteAbsolutePath)
 testSuiteAsyncResource.runInAsyncScope(() => {
-testSuiteFinishCh.publish(testSuiteStatus)
+testSuiteFinishCh.publish({ status: testSuiteStatus, error: suiteError })
 })

@@ -209,3 +324,3 @@ }

 const projects = getProjectsFromDispatcher(dispatcher)
-const browser = getBrowserNameFromProjects(projects, test._projectId)
+const browser = getBrowserNameFromProjects(projects, test)
 testBeginHandler(test, browser)

@@ -218,3 +333,4 @@ } else if (method === 'testEnd') {

-testEndHandler(test, params.annotations, STATUS_TO_TEST_STATUS[testResult.status], testResult.error)
+const isTimeout = testResult.status === 'timedOut'
+testEndHandler(test, params.annotations, STATUS_TO_TEST_STATUS[testResult.status], testResult.error, isTimeout)
 }

@@ -246,3 +362,3 @@ })

 const projects = getProjectsFromDispatcher(dispatcher)
-const browser = getBrowserNameFromProjects(projects, test._projectId)
+const browser = getBrowserNameFromProjects(projects, test)
 testBeginHandler(test, browser)

@@ -253,3 +369,4 @@ })

-testEndHandler(test, annotations, STATUS_TO_TEST_STATUS[status], errors && errors[0])
+const isTimeout = status === 'timedOut'
+testEndHandler(test, annotations, STATUS_TO_TEST_STATUS[status], errors && errors[0], isTimeout)
 })

@@ -264,5 +381,8 @@

 shimmer.wrap(runnerExport.Runner.prototype, 'runAllTests', runAllTests => async function () {
 let onDone
+const testSessionAsyncResource = new AsyncResource('bound-anonymous-fn')
-const rootDir = getRootDir(this)
+rootDir = getRootDir(this)
 const processArgv = process.argv.slice(2).join(' ')

@@ -273,2 +393,41 @@ const command = `playwright ${processArgv}`

})
const configurationPromise = new Promise((resolve) => {
onDone = resolve
})
testSessionAsyncResource.runInAsyncScope(() => {
libraryConfigurationCh.publish({ onDone })
})
try {
const { err, libraryConfig } = await configurationPromise
if (!err) {
isEarlyFlakeDetectionEnabled = libraryConfig.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = libraryConfig.earlyFlakeDetectionNumRetries
}
} catch (e) {
log.error(e)
}
if (isEarlyFlakeDetectionEnabled && semver.gte(playwrightVersion, MINIMUM_SUPPORTED_VERSION_EFD)) {
const knownTestsPromise = new Promise((resolve) => {
onDone = resolve
})
testSessionAsyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
try {
const { err, knownTests: receivedKnownTests } = await knownTestsPromise
if (!err) {
knownTests = receivedKnownTests
} else {
isEarlyFlakeDetectionEnabled = false
}
} catch (err) {
log.error(err)
}
}
const projects = getProjectsFromRunner(this)

@@ -283,3 +442,3 @@

 tests.forEach(test => {
-const browser = getBrowserNameFromProjects(projects, test._projectId)
+const browser = getBrowserNameFromProjects(projects, test)
 testBeginHandler(test, browser)

@@ -292,3 +451,2 @@ testEndHandler(test, [], 'skip')

-let onDone
 const flushWait = new Promise(resolve => {

@@ -298,3 +456,7 @@ onDone = resolve

 testSessionAsyncResource.runInAsyncScope(() => {
-testSessionFinishCh.publish({ status: STATUS_TO_TEST_STATUS[sessionStatus], onDone })
+testSessionFinishCh.publish({
+status: STATUS_TO_TEST_STATUS[sessionStatus],
+isEarlyFlakeDetectionEnabled,
+onDone
+})
 })

@@ -348,2 +510,3 @@ await flushWait

}, runnerHook)
addHook({

@@ -354,1 +517,51 @@ name: 'playwright',

}, (dispatcher) => dispatcherHookNew(dispatcher, dispatcherRunWrapperNew))
// Hook used for early flake detection. EFD only works from >=1.38.0
addHook({
name: 'playwright',
file: 'lib/common/suiteUtils.js',
versions: [`>=${MINIMUM_SUPPORTED_VERSION_EFD}`]
}, suiteUtilsPackage => {
// We grab `applyRepeatEachIndex` to use it later
// `applyRepeatEachIndex` needs to be applied to a cloned suite
applyRepeatEachIndex = suiteUtilsPackage.applyRepeatEachIndex
return suiteUtilsPackage
})
// Hook used for early flake detection. EFD only works from >=1.38.0
addHook({
name: 'playwright',
file: 'lib/runner/loadUtils.js',
versions: [`>=${MINIMUM_SUPPORTED_VERSION_EFD}`]
}, (loadUtilsPackage) => {
const oldCreateRootSuite = loadUtilsPackage.createRootSuite
async function newCreateRootSuite () {
const rootSuite = await oldCreateRootSuite.apply(this, arguments)
if (!isEarlyFlakeDetectionEnabled) {
return rootSuite
}
const newTests = rootSuite
.allTests()
.filter(isNewTest)
newTests.forEach(newTest => {
newTest._ddIsNew = true
if (newTest.expectedStatus !== 'skipped') {
const fileSuite = getSuiteType(newTest, 'file')
const projectSuite = getSuiteType(newTest, 'project')
for (let repeatEachIndex = 0; repeatEachIndex < earlyFlakeDetectionNumRetries; repeatEachIndex++) {
const copyFileSuite = deepCloneSuite(fileSuite, isNewTest)
applyRepeatEachIndex(projectSuite._fullProject, copyFileSuite, repeatEachIndex + 1)
projectSuite._addSuite(copyFileSuite)
}
}
})
return rootSuite
}
loadUtilsPackage.createRootSuite = newCreateRootSuite
return loadUtilsPackage
})

@@ -48,3 +48,5 @@ 'use strict'

 const targetAddress = this.options && this.options.target &&
-this.options.target.address ? this.options.target.address : undefined
+this.options.target.address
+? this.options.target.address
+: undefined

@@ -191,3 +193,4 @@ const asyncResource = new AsyncResource('bound-anonymous-fn')

 const state = remoteState && remoteState.constructor
-? entry.remote_state.constructor.composite_type : undefined
+? entry.remote_state.constructor.composite_type
+: undefined
 asyncResource.runInAsyncScope(() => {
asyncResource.runInAsyncScope(() => {

@@ -194,0 +197,0 @@ exports.beforeFinish(entry, state)

@@ -10,3 +10,3 @@ 'use strict'

-addHook({ name: 'tedious', versions: [ '>=1.0.0' ] }, tedious => {
+addHook({ name: 'tedious', versions: ['>=1.0.0'] }, tedious => {
 const startCh = channel('apm:tedious:request:start')

@@ -13,0 +13,0 @@ const finishCh = channel('apm:tedious:request:finish')

@@ -5,3 +5,4 @@ 'use strict'

 const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer')
-const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
+const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
+const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
 const { getResourceName } = require('./util')

@@ -32,6 +33,9 @@

-if (this.config.dsmEnabled && message) {
+if (
+this.config.dsmEnabled && message?.properties?.headers &&
+DsmPathwayCodec.contextExists(message.properties.headers)
+) {
 const payloadSize = getAmqpMessageSize({ headers: message.properties.headers, content: message.content })
-const queue = fields.queue ? fields.queue : fields.routingKey
-this.tracer.decodeDataStreamsContext(message.properties.headers[CONTEXT_PROPAGATION_KEY])
+const queue = fields.queue ?? fields.routingKey
+this.tracer.decodeDataStreamsContext(message.properties.headers)
 this.tracer

@@ -38,0 +42,0 @@ .setCheckpoint(['direction:in', `topic:${queue}`, 'type:rabbitmq'], span, payloadSize)

@@ -6,4 +6,4 @@ 'use strict'

 const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
-const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway')
-const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
+const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
+const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
 const { getResourceName } = require('./util')

@@ -44,4 +44,3 @@

 , span, payloadSize)
-const pathwayCtx = encodePathwayContext(dataStreamsContext)
-fields.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx
+DsmPathwayCodec.encode(dataStreamsContext, fields.headers)
 }

@@ -48,0 +47,0 @@ }
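Producer and consumer sides of the amqplib change, side by side: DsmPathwayCodec now encodes into (and checks) the headers map itself rather than storing a binary blob under a single CONTEXT_PROPAGATION_KEY entry. A sketch, with dataStreamsContext and tracer standing in for the surrounding plugin state:

const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')

// producer, after setting the outbound checkpoint:
const headers = {}
DsmPathwayCodec.encode(dataStreamsContext, headers) // writes into headers in place

// consumer, before setting the inbound checkpoint:
if (DsmPathwayCodec.contextExists(headers)) {
  tracer.decodeDataStreamsContext(headers)
}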

@@ -40,6 +40,6 @@ 'use strict'

 'aws.region': awsRegion,
-'region': awsRegion,
-'aws_service': awsService,
+region: awsRegion,
+aws_service: awsService,
 'aws.service': awsService,
-'component': 'aws-sdk'
+component: 'aws-sdk'
 }

@@ -130,4 +130,5 @@ if (this.requestTags) this.requestTags.set(request, tags)

-if (err.requestId) {
-span.addTags({ 'aws.response.request_id': err.requestId })
+const requestId = err.RequestId || err.requestId
+if (requestId) {
+span.addTags({ 'aws.response.request_id': requestId })
 }

@@ -134,0 +135,0 @@ }

@@ -16,3 +16,3 @@ 'use strict'

 'aws.cloudwatch.logs.log_group_name': params.logGroupName,
-'loggroupname': params.logGroupName
+loggroupname: params.logGroupName
 })
})

@@ -19,0 +19,0 @@ }

@@ -17,3 +17,3 @@ 'use strict'

 'aws.dynamodb.table_name': params.TableName,
-'tablename': params.TableName
+tablename: params.TableName
 })
})

@@ -34,3 +34,3 @@ }

 'aws.dynamodb.table_name': tableName,
-'tablename': tableName
+tablename: tableName
 })
})

@@ -37,0 +37,0 @@ }

@@ -14,3 +14,3 @@ 'use strict'

 'aws.eventbridge.source': `${params.source}`,
-'rulename': `${rulename}`
+rulename: `${rulename}`
 }
}

@@ -17,0 +17,0 @@ }

 'use strict'
 const {
-CONTEXT_PROPAGATION_KEY,
 getSizeOrZero
 } = require('../../../dd-trace/src/datastreams/processor')
-const { encodePathwayContext } = require('../../../dd-trace/src/datastreams/pathway')
+const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')
 const log = require('../../../dd-trace/src/log')

@@ -56,3 +55,3 @@ const BaseAwsSdkPlugin = require('../base')

 this.responseExtractDSMContext(
-request.operation, response, span ?? null, streamName
+request.operation, response, span || null, streamName
 )

@@ -74,3 +73,3 @@ }

 'aws.kinesis.stream_name': params.StreamName,
-'streamname': params.StreamName
+streamname: params.StreamName
 }
}

@@ -118,9 +117,6 @@ }

 if (
-parsedAttributes &&
-parsedAttributes._datadog &&
-parsedAttributes._datadog[CONTEXT_PROPAGATION_KEY] &&
-streamName
+parsedAttributes?._datadog && streamName && DsmPathwayCodec.contextExists(parsedAttributes._datadog)
 ) {
 const payloadSize = getSizeOrZero(record.Data)
-this.tracer.decodeDataStreamsContext(Buffer.from(parsedAttributes._datadog[CONTEXT_PROPAGATION_KEY]))
+this.tracer.decodeDataStreamsContext(parsedAttributes._datadog)
 this.tracer

@@ -149,60 +145,67 @@ .setCheckpoint(['direction:in', `topic:${streamName}`, 'type:kinesis'], span, payloadSize)

 requestInject (span, request) {
-const operation = request.operation
-if (operation === 'putRecord' || operation === 'putRecords') {
-if (!request.params) {
-return
-}
-const traceData = {}
-// inject data with DD context
-this.tracer.inject(span, 'text_map', traceData)
-let injectPath
-if (request.params.Records && request.params.Records.length > 0) {
-injectPath = request.params.Records[0]
-} else if (request.params.Data) {
-injectPath = request.params
-} else {
-log.error('No valid payload passed, unable to pass trace context')
-return
-}
+const { operation, params } = request
+if (!params) return
+let stream
+switch (operation) {
+case 'putRecord':
+stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
+this.injectToMessage(span, params, stream, true)
+break
+case 'putRecords':
+stream = params.StreamArn ? params.StreamArn : (params.StreamName ? params.StreamName : '')
+for (let i = 0; i < params.Records.length; i++) {
+this.injectToMessage(span, params.Records[i], stream, i === 0)
+}
+}
+}
+injectToMessage (span, params, stream, injectTraceContext) {
+if (!params) {
+return
+}
+let parsedData
+if (injectTraceContext || this.config.dsmEnabled) {
+parsedData = this._tryParse(params.Data)
+if (!parsedData) {
+log.error('Unable to parse payload, unable to pass trace context or set DSM checkpoint (if enabled)')
+return
+}
+}
-const parsedData = this._tryParse(injectPath.Data)
-if (parsedData) {
-parsedData._datadog = traceData
+const ddInfo = {}
+// for now, we only want to inject to the first message, this may change for batches in the future
+if (injectTraceContext) { this.tracer.inject(span, 'text_map', ddInfo) }
 // set DSM hash if enabled
 if (this.config.dsmEnabled) {
-// get payload size of request data
-const payloadSize = Buffer.from(JSON.stringify(parsedData)).byteLength
-let stream
-// users can optionally use either stream name or stream arn
-if (request.params && request.params.StreamArn) {
-stream = request.params.StreamArn
-} else if (request.params && request.params.StreamName) {
-stream = request.params.StreamName
-}
-const dataStreamsContext = this.tracer
-.setCheckpoint(['direction:out', `topic:${stream}`, 'type:kinesis'], span, payloadSize)
-if (dataStreamsContext) {
-const pathwayCtx = encodePathwayContext(dataStreamsContext)
-parsedData._datadog[CONTEXT_PROPAGATION_KEY] = pathwayCtx.toJSON()
-}
-}
-const finalData = Buffer.from(JSON.stringify(parsedData))
-const byteSize = finalData.length
-// Kinesis max payload size is 1MB
-// So we must ensure adding DD context won't go over that (512b is an estimate)
-if (byteSize >= 1048576) {
-log.info('Payload size too large to pass context')
-return
-}
-injectPath.Data = finalData
-} else {
-log.error('Unable to parse payload, unable to pass trace context')
-}
+parsedData._datadog = ddInfo
+const dataStreamsContext = this.setDSMCheckpoint(span, parsedData, stream)
+DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
+}
+if (Object.keys(ddInfo).length !== 0) {
+parsedData._datadog = ddInfo
+const finalData = Buffer.from(JSON.stringify(parsedData))
+const byteSize = finalData.length
+// Kinesis max payload size is 1MB
+// So we must ensure adding DD context won't go over that (512b is an estimate)
+if (byteSize >= 1048576) {
+log.info('Payload size too large to pass context')
+return
+}
+params.Data = finalData
+}
+}
+setDSMCheckpoint (span, parsedData, stream) {
+// get payload size of request data
+const payloadSize = Buffer.from(JSON.stringify(parsedData)).byteLength
+const dataStreamsContext = this.tracer
+.setCheckpoint(['direction:out', `topic:${stream}`, 'type:kinesis'], span, payloadSize)
+return dataStreamsContext
+}
 }
module.exports = Kinesis
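
As an aside, a minimal standalone sketch of the size guard applied above (the helper name and shapes are illustrative, not part of the plugin):

// Sketch: re-serialize the record and skip injection if adding _datadog
// would exceed Kinesis' 1 MB record limit (assumes record.Data holds JSON).
function tryAttachContext (record, ddInfo) {
  const parsed = JSON.parse(record.Data.toString())
  parsed._datadog = ddInfo
  const finalData = Buffer.from(JSON.stringify(parsed))
  if (finalData.length >= 1048576) return false // 1 MB Kinesis cap
  record.Data = finalData
  return true
}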

@@ -16,3 +16,3 @@ 'use strict'

'resource.name': `${operation} ${params.FunctionName}`,
'functionname': params.FunctionName,
functionname: params.FunctionName,
'aws.lambda': params.FunctionName

@@ -19,0 +19,0 @@ })

@@ -16,3 +16,3 @@ 'use strict'

'aws.redshift.cluster_identifier': params.ClusterIdentifier,
'clusteridentifier': params.ClusterIdentifier
clusteridentifier: params.ClusterIdentifier
})

@@ -19,0 +19,0 @@ }

@@ -17,3 +17,3 @@ 'use strict'

'aws.s3.bucket_name': params.Bucket,
'bucketname': params.Bucket
bucketname: params.Bucket
})

@@ -20,0 +20,0 @@ }

'use strict'
const { CONTEXT_PROPAGATION_KEY, getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { encodePathwayContext } = require('../../../dd-trace/src/datastreams/pathway')
const { getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')
const log = require('../../../dd-trace/src/log')

@@ -26,3 +26,3 @@ const BaseAwsSdkPlugin = require('../base')

'aws.sns.topic_arn': TopicArn,
'topicname': topicName
topicname: topicName
}

@@ -59,7 +59,7 @@

case 'publish':
this._injectMessageAttributes(span, params)
this.injectToMessage(span, params, params.TopicArn, true)
break
case 'publishBatch':
if (params.PublishBatchRequestEntries && params.PublishBatchRequestEntries.length > 0) {
this._injectMessageAttributes(span, params.PublishBatchRequestEntries[0])
for (let i = 0; i < params.PublishBatchRequestEntries.length; i++) {
this.injectToMessage(span, params.PublishBatchRequestEntries[i], params.TopicArn, i === 0)
}

@@ -70,3 +70,3 @@ break

_injectMessageAttributes (span, params) {
injectToMessage (span, params, topicArn, injectTraceContext) {
if (!params.MessageAttributes) {

@@ -79,26 +79,46 @@ params.MessageAttributes = {}

}
const ddInfo = {}
this.tracer.inject(span, 'text_map', ddInfo)
// add ddInfo before checking DSM so we can include DD attributes in payload size
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: ddInfo
// for now, we only want to inject to the first message, this may change for batches in the future
if (injectTraceContext) {
this.tracer.inject(span, 'text_map', ddInfo)
// add ddInfo before checking DSM so we can include DD attributes in payload size
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: ddInfo
}
}
if (this.config.dsmEnabled) {
const payloadSize = getHeadersSize({
Message: params.Message,
MessageAttributes: params.MessageAttributes
})
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${params.TopicArn}`, 'type:sns'], span, payloadSize)
if (dataStreamsContext) {
const pathwayCtx = encodePathwayContext(dataStreamsContext)
ddInfo[CONTEXT_PROPAGATION_KEY] = pathwayCtx.toJSON()
if (!params.MessageAttributes._datadog) {
params.MessageAttributes._datadog = {
DataType: 'Binary',
BinaryValue: ddInfo
}
}
const dataStreamsContext = this.setDSMCheckpoint(span, params, topicArn)
DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
}
// BINARY types are automatically base64 encoded
params.MessageAttributes._datadog.BinaryValue = Buffer.from(JSON.stringify(ddInfo))
if (Object.keys(ddInfo).length !== 0) {
// BINARY types are automatically base64 encoded
params.MessageAttributes._datadog.BinaryValue = Buffer.from(JSON.stringify(ddInfo))
} else if (params.MessageAttributes._datadog) {
// let's avoid adding any additional information to payload if we failed to inject
delete params.MessageAttributes._datadog
}
}
setDSMCheckpoint (span, params, topicArn) {
// only set a checkpoint if publishing to a topic
if (topicArn) {
const payloadSize = getHeadersSize(params)
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${topicArn}`, 'type:sns'], span, payloadSize)
return dataStreamsContext
}
}
}
module.exports = Sns
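
For context, a hedged sketch of the attribute shape the SNS plugin writes (field names mirror the code above; the values are illustrative):

// Sketch: SNS attributes of DataType 'Binary' are base64-encoded by the
// AWS SDK, so the context object is serialized to a raw Buffer first.
const ddInfo = { 'x-datadog-trace-id': '123' } // illustrative content
const params = { Message: 'hello', MessageAttributes: {} } // illustrative
params.MessageAttributes._datadog = {
  DataType: 'Binary',
  BinaryValue: Buffer.from(JSON.stringify(ddInfo))
}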

@@ -6,4 +6,4 @@ 'use strict'

const { storage } = require('../../../datadog-core')
const { CONTEXT_PROPAGATION_KEY, getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { encodePathwayContext } = require('../../../dd-trace/src/datastreams/pathway')
const { getHeadersSize } = require('../../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../../dd-trace/src/datastreams/pathway')

@@ -44,3 +44,3 @@ class Sqs extends BaseAwsSdkPlugin {

this.responseExtractDSMContext(
request.operation, request.params, response, span ?? null, parsedMessageAttributes ?? null
request.operation, request.params, response, span || null, parsedMessageAttributes || null
)

@@ -106,3 +106,3 @@ })

'aws.sqs.queue_name': params.QueueName || params.QueueUrl,
'queuename': queueName
queuename: queueName
})

@@ -152,3 +152,3 @@

datadogContext: this.tracer.extract('text_map', parsedAttributes),
parsedAttributes: parsedAttributes
parsedAttributes
}

@@ -199,3 +199,3 @@ }

}
if (parsedAttributes && parsedAttributes[CONTEXT_PROPAGATION_KEY]) {
if (parsedAttributes && DsmPathwayCodec.contextExists(parsedAttributes)) {
const payloadSize = getHeadersSize({

@@ -206,3 +206,3 @@ Body: message.Body,

const queue = params.QueueUrl.split('/').pop()
this.tracer.decodeDataStreamsContext(Buffer.from(parsedAttributes[CONTEXT_PROPAGATION_KEY]))
this.tracer.decodeDataStreamsContext(parsedAttributes)
this.tracer

@@ -215,37 +215,71 @@ .setCheckpoint(['direction:in', `topic:${queue}`, 'type:sqs'], span, payloadSize)

requestInject (span, request) {
const operation = request.operation
if (operation === 'sendMessage') {
if (!request.params) {
request.params = {}
}
if (!request.params.MessageAttributes) {
request.params.MessageAttributes = {}
} else if (Object.keys(request.params.MessageAttributes).length >= 10) { // SQS quota
// TODO: add test when the test suite is fixed
return
}
const ddInfo = {}
const { operation, params } = request
if (!params) return
switch (operation) {
case 'sendMessage':
this.injectToMessage(span, params, params.QueueUrl, true)
break
case 'sendMessageBatch':
for (let i = 0; i < params.Entries.length; i++) {
this.injectToMessage(span, params.Entries[i], params.QueueUrl, i === 0)
}
break
}
}
injectToMessage (span, params, queueUrl, injectTraceContext) {
if (!params) {
params = {}
}
if (!params.MessageAttributes) {
params.MessageAttributes = {}
} else if (Object.keys(params.MessageAttributes).length >= 10) { // SQS quota
// TODO: add test when the test suite is fixed
return
}
const ddInfo = {}
// for now, we only want to inject to the first message, this may change for batches in the future
if (injectTraceContext) {
this.tracer.inject(span, 'text_map', ddInfo)
request.params.MessageAttributes._datadog = {
params.MessageAttributes._datadog = {
DataType: 'String',
StringValue: JSON.stringify(ddInfo)
}
if (this.config.dsmEnabled) {
const payloadSize = getHeadersSize({
Body: request.params.MessageBody,
MessageAttributes: request.params.MessageAttributes
})
const queue = request.params.QueueUrl.split('/').pop()
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${queue}`, 'type:sqs'], span, payloadSize)
if (dataStreamsContext) {
const pathwayCtx = encodePathwayContext(dataStreamsContext)
ddInfo[CONTEXT_PROPAGATION_KEY] = pathwayCtx.toJSON()
}
if (this.config.dsmEnabled) {
if (!params.MessageAttributes._datadog) {
params.MessageAttributes._datadog = {
DataType: 'String',
StringValue: JSON.stringify(ddInfo)
}
}
request.params.MessageAttributes._datadog.StringValue = JSON.stringify(ddInfo)
const dataStreamsContext = this.setDSMCheckpoint(span, params, queueUrl)
if (dataStreamsContext) {
DsmPathwayCodec.encode(dataStreamsContext, ddInfo)
params.MessageAttributes._datadog.StringValue = JSON.stringify(ddInfo)
}
}
if (params.MessageAttributes._datadog && Object.keys(ddInfo).length === 0) {
// let's avoid adding any additional information to payload if we failed to inject
delete params.MessageAttributes._datadog
}
}
setDSMCheckpoint (span, params, queueUrl) {
const payloadSize = getHeadersSize({
Body: params.MessageBody,
MessageAttributes: params.MessageAttributes
})
const queue = queueUrl.split('/').pop()
const dataStreamsContext = this.tracer
.setCheckpoint(['direction:out', `topic:${queue}`, 'type:sqs'], span, payloadSize)
return dataStreamsContext
}
}
module.exports = Sqs
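
A minimal sketch of the quota check above (the helper name is illustrative): SQS caps a message at 10 attributes, so injection must leave room for `_datadog`.

// Sketch: skip injection when the 10-attribute SQS quota is already used.
function canInjectAttributes (params) {
  params.MessageAttributes = params.MessageAttributes || {}
  return Object.keys(params.MessageAttributes).length < 10
}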

@@ -48,3 +48,3 @@ 'use strict'

const meta = {
'component': 'subprocess',
component: 'subprocess',
[property]: (shell === true) ? cmdFields.join(' ') : JSON.stringify(cmdFields)

@@ -51,0 +51,0 @@ }

@@ -19,3 +19,3 @@ 'use strict'

'db.type': 'couchbase',
'component': 'couchbase',
component: 'couchbase',
'resource.name': `couchbase.${operation}`,

@@ -65,2 +65,3 @@ 'span.kind': this.constructor.kind,

}
_addCommandSubs (name) {

@@ -67,0 +68,0 @@ this.addSubs(name, ({ bucket, collection, seedNodes }) => {

@@ -18,3 +18,6 @@ 'use strict'

ITR_CORRELATION_ID,
TEST_SOURCE_FILE
TEST_SOURCE_FILE,
TEST_EARLY_FLAKE_IS_ENABLED,
TEST_IS_NEW,
TEST_IS_RETRY
} = require('../../dd-trace/src/plugins/util/test')

@@ -50,3 +53,4 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

hasUnskippableSuites,
hasForcedToRunSuites
hasForcedToRunSuites,
isEarlyFlakeDetectionEnabled
}) => {

@@ -68,2 +72,5 @@ const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}

)
if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_IS_ENABLED, 'true')
}

@@ -147,4 +154,9 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

const testSourceFile = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot)
const testSpan = this.startTestSpan(testName, testSuite, testSourceFile, testSourceLine)
const extraTags = {
[TEST_SOURCE_START]: testSourceLine,
[TEST_SOURCE_FILE]: testSourceFile
}
const testSpan = this.startTestSpan(testName, testSuite, extraTags)
this.enter(testSpan, store)

@@ -167,3 +179,3 @@ })

this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage }) => {
this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage, isNew, isEfdRetry }) => {
const span = storage.getStore().span

@@ -174,2 +186,9 @@ const statusTag = isStep ? 'step.status' : TEST_STATUS

if (isNew) {
span.setTag(TEST_IS_NEW, 'true')
if (isEfdRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
}
if (skipReason) {

@@ -202,3 +221,3 @@ span.setTag(TEST_SKIP_REASON, skipReason)

startTestSpan (testName, testSuite, testSourceFile, testSourceLine) {
startTestSpan (testName, testSuite, extraTags) {
return super.startTestSpan(

@@ -208,6 +227,3 @@ testName,

this.testSuiteSpan,
{
[TEST_SOURCE_START]: testSourceLine,
[TEST_SOURCE_FILE]: testSourceFile
}
extraTags
)

@@ -214,0 +230,0 @@ }

@@ -1,154 +0,4 @@

const {
TEST_STATUS,
TEST_IS_RUM_ACTIVE,
TEST_CODE_OWNERS,
getTestEnvironmentMetadata,
CI_APP_ORIGIN,
getTestParentSpan,
getCodeOwnersFileEntries,
getCodeOwnersForFilename,
getTestCommonTags,
getTestSessionCommonTags,
getTestModuleCommonTags,
getTestSuiteCommonTags,
TEST_SUITE_ID,
TEST_MODULE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_SOURCE_START,
finishAllTraceSpans,
getCoveredFilenamesFromCoverage,
getTestSuitePath,
addIntelligentTestRunnerSpanTags,
TEST_SKIPPED_BY_ITR,
TEST_ITR_UNSKIPPABLE,
TEST_ITR_FORCED_RUN,
ITR_CORRELATION_ID,
TEST_SOURCE_FILE
} = require('../../dd-trace/src/plugins/util/test')
const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants')
const log = require('../../dd-trace/src/log')
const NoopTracer = require('../../dd-trace/src/noop/tracer')
const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util')
const {
TELEMETRY_EVENT_CREATED,
TELEMETRY_EVENT_FINISHED,
TELEMETRY_ITR_FORCED_TO_RUN,
TELEMETRY_CODE_COVERAGE_EMPTY,
TELEMETRY_ITR_UNSKIPPABLE,
TELEMETRY_CODE_COVERAGE_NUM_FILES,
incrementCountMetric,
distributionMetric
} = require('../../dd-trace/src/ci-visibility/telemetry')
const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry')
const {
GIT_REPOSITORY_URL,
GIT_COMMIT_SHA,
GIT_BRANCH,
CI_PROVIDER_NAME,
CI_WORKSPACE_PATH
} = require('../../dd-trace/src/plugins/util/tags')
const {
OS_VERSION,
OS_PLATFORM,
OS_ARCHITECTURE,
RUNTIME_NAME,
RUNTIME_VERSION
} = require('../../dd-trace/src/plugins/util/env')
const cypressPlugin = require('./cypress-plugin')
const TEST_FRAMEWORK_NAME = 'cypress'
const CYPRESS_STATUS_TO_TEST_STATUS = {
passed: 'pass',
failed: 'fail',
pending: 'skip',
skipped: 'skip'
}
function getTestSpanMetadata (tracer, testName, testSuite, cypressConfig) {
const childOf = getTestParentSpan(tracer)
const commonTags = getTestCommonTags(testName, testSuite, cypressConfig.version, TEST_FRAMEWORK_NAME)
return {
childOf,
...commonTags
}
}
function getCypressVersion (details) {
if (details && details.cypressVersion) {
return details.cypressVersion
}
if (details && details.config && details.config.version) {
return details.config.version
}
return ''
}
function getRootDir (details) {
if (details && details.config) {
return details.config.projectRoot || details.config.repoRoot || process.cwd()
}
return process.cwd()
}
function getCypressCommand (details) {
if (!details) {
return TEST_FRAMEWORK_NAME
}
return `${TEST_FRAMEWORK_NAME} ${details.specPattern || ''}`
}
function getSessionStatus (summary) {
if (summary.totalFailed !== undefined && summary.totalFailed > 0) {
return 'fail'
}
if (summary.totalSkipped !== undefined && summary.totalSkipped === summary.totalTests) {
return 'skip'
}
return 'pass'
}
function getSuiteStatus (suiteStats) {
if (suiteStats.failures !== undefined && suiteStats.failures > 0) {
return 'fail'
}
if (suiteStats.tests !== undefined && suiteStats.tests === suiteStats.pending) {
return 'skip'
}
return 'pass'
}
function getLibraryConfiguration (tracer, testConfiguration) {
return new Promise(resolve => {
if (!tracer._tracer._exporter?.getLibraryConfiguration) {
return resolve({ err: new Error('CI Visibility was not initialized correctly') })
}
tracer._tracer._exporter.getLibraryConfiguration(testConfiguration, (err, libraryConfig) => {
resolve({ err, libraryConfig })
})
})
}
function getSkippableTests (isSuitesSkippingEnabled, tracer, testConfiguration) {
if (!isSuitesSkippingEnabled) {
return Promise.resolve({ skippableTests: [] })
}
return new Promise(resolve => {
if (!tracer._tracer._exporter?.getLibraryConfiguration) {
return resolve({ err: new Error('CI Visibility was not initialized correctly') })
}
tracer._tracer._exporter.getSkippableSuites(testConfiguration, (err, skippableTests, correlationId) => {
resolve({
err,
skippableTests,
correlationId
})
})
})
}
const noopTask = {

@@ -170,4 +20,2 @@ 'dd:testSuiteStart': () => {

module.exports = (on, config) => {
let isTestsSkipped = false
const skippedTests = []
const tracer = require('../../dd-trace')

@@ -181,383 +29,8 @@

const testEnvironmentMetadata = getTestEnvironmentMetadata(TEST_FRAMEWORK_NAME)
cypressPlugin.init(tracer, config)
const {
[GIT_REPOSITORY_URL]: repositoryUrl,
[GIT_COMMIT_SHA]: sha,
[OS_VERSION]: osVersion,
[OS_PLATFORM]: osPlatform,
[OS_ARCHITECTURE]: osArchitecture,
[RUNTIME_NAME]: runtimeName,
[RUNTIME_VERSION]: runtimeVersion,
[GIT_BRANCH]: branch,
[CI_PROVIDER_NAME]: ciProviderName,
[CI_WORKSPACE_PATH]: repositoryRoot
} = testEnvironmentMetadata
const isUnsupportedCIProvider = !ciProviderName
const finishedTestsByFile = {}
const testConfiguration = {
repositoryUrl,
sha,
osVersion,
osPlatform,
osArchitecture,
runtimeName,
runtimeVersion,
branch,
testLevel: 'test'
}
const codeOwnersEntries = getCodeOwnersFileEntries(repositoryRoot)
let activeSpan = null
let testSessionSpan = null
let testModuleSpan = null
let testSuiteSpan = null
let command = null
let frameworkVersion
let rootDir
let isSuitesSkippingEnabled = false
let isCodeCoverageEnabled = false
let testsToSkip = []
let itrCorrelationId = ''
const unskippableSuites = []
let hasForcedToRunSuites = false
let hasUnskippableSuites = false
function ciVisEvent (name, testLevel, tags = {}) {
incrementCountMetric(name, {
testLevel,
testFramework: 'cypress',
isUnsupportedCIProvider,
...tags
})
}
function getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) {
const testSuiteTags = {
[TEST_COMMAND]: command,
[TEST_MODULE]: TEST_FRAMEWORK_NAME
}
if (testSuiteSpan) {
testSuiteTags[TEST_SUITE_ID] = testSuiteSpan.context().toSpanId()
}
if (testSessionSpan && testModuleSpan) {
testSuiteTags[TEST_SESSION_ID] = testSessionSpan.context().toTraceId()
testSuiteTags[TEST_MODULE_ID] = testModuleSpan.context().toSpanId()
}
const {
childOf,
resource,
...testSpanMetadata
} = getTestSpanMetadata(tracer, testName, testSuite, config)
const codeOwners = getCodeOwnersForFilename(testSuite, codeOwnersEntries)
if (codeOwners) {
testSpanMetadata[TEST_CODE_OWNERS] = codeOwners
}
if (isUnskippable) {
hasUnskippableSuites = true
incrementCountMetric(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' })
testSpanMetadata[TEST_ITR_UNSKIPPABLE] = 'true'
}
if (isForcedToRun) {
hasForcedToRunSuites = true
incrementCountMetric(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' })
testSpanMetadata[TEST_ITR_FORCED_RUN] = 'true'
}
ciVisEvent(TELEMETRY_EVENT_CREATED, 'test', { hasCodeOwners: !!codeOwners })
return tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test`, {
childOf,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
[ORIGIN_KEY]: CI_APP_ORIGIN,
...testSpanMetadata,
...testEnvironmentMetadata,
...testSuiteTags
}
})
}
on('before:run', (details) => {
return getLibraryConfiguration(tracer, testConfiguration).then(({ err, libraryConfig }) => {
if (err) {
log.error(err)
} else {
isSuitesSkippingEnabled = libraryConfig.isSuitesSkippingEnabled
isCodeCoverageEnabled = libraryConfig.isCodeCoverageEnabled
}
return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration)
.then(({ err, skippableTests, correlationId }) => {
if (err) {
log.error(err)
} else {
testsToSkip = skippableTests || []
itrCorrelationId = correlationId
}
// `details.specs` are test files
details.specs.forEach(({ absolute, relative }) => {
const isUnskippableSuite = isMarkedAsUnskippable({ path: absolute })
if (isUnskippableSuite) {
unskippableSuites.push(relative)
}
})
const childOf = getTestParentSpan(tracer)
rootDir = getRootDir(details)
command = getCypressCommand(details)
frameworkVersion = getCypressVersion(details)
const testSessionSpanMetadata = getTestSessionCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME)
const testModuleSpanMetadata = getTestModuleCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME)
testSessionSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, {
childOf,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testSessionSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'session')
testModuleSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_module`, {
childOf: testSessionSpan,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testModuleSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'module')
return details
})
})
})
on('after:spec', (spec, { tests, stats }) => {
const cypressTests = tests || []
const finishedTests = finishedTestsByFile[spec.relative] || []
// Get tests that didn't go through `dd:afterEach`
// and create a skipped test span for each of them
cypressTests.filter(({ title }) => {
const cypressTestName = title.join(' ')
const isTestFinished = finishedTests.find(({ testName }) => cypressTestName === testName)
return !isTestFinished
}).forEach(({ title }) => {
const cypressTestName = title.join(' ')
const isSkippedByItr = testsToSkip.find(test =>
cypressTestName === test.name && spec.relative === test.suite
)
const skippedTestSpan = getTestSpan(cypressTestName, spec.relative)
if (spec.absolute && repositoryRoot) {
skippedTestSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, repositoryRoot))
} else {
skippedTestSpan.setTag(TEST_SOURCE_FILE, spec.relative)
}
skippedTestSpan.setTag(TEST_STATUS, 'skip')
if (isSkippedByItr) {
skippedTestSpan.setTag(TEST_SKIPPED_BY_ITR, 'true')
}
if (itrCorrelationId) {
skippedTestSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId)
}
skippedTestSpan.finish()
})
// Make sure that reported test statuses are the same as Cypress reports.
// This is not always the case, such as when an `after` hook fails:
// Cypress will report the last run test as failed, but we don't know that yet at `dd:afterEach`
let latestError
finishedTests.forEach((finishedTest) => {
const cypressTest = cypressTests.find(test => test.title.join(' ') === finishedTest.testName)
if (!cypressTest) {
return
}
if (cypressTest.displayError) {
latestError = new Error(cypressTest.displayError)
}
const cypressTestStatus = CYPRESS_STATUS_TO_TEST_STATUS[cypressTest.state]
// update test status
if (cypressTestStatus !== finishedTest.testStatus) {
finishedTest.testSpan.setTag(TEST_STATUS, cypressTestStatus)
finishedTest.testSpan.setTag('error', latestError)
}
if (itrCorrelationId) {
finishedTest.testSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId)
}
if (spec.absolute && repositoryRoot) {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, repositoryRoot))
} else {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, spec.relative)
}
finishedTest.testSpan.finish(finishedTest.finishTime)
})
if (testSuiteSpan) {
const status = getSuiteStatus(stats)
testSuiteSpan.setTag(TEST_STATUS, status)
if (latestError) {
testSuiteSpan.setTag('error', latestError)
}
testSuiteSpan.finish()
testSuiteSpan = null
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
}
})
on('after:run', (suiteStats) => {
if (testSessionSpan && testModuleSpan) {
const testStatus = getSessionStatus(suiteStats)
testModuleSpan.setTag(TEST_STATUS, testStatus)
testSessionSpan.setTag(TEST_STATUS, testStatus)
addIntelligentTestRunnerSpanTags(
testSessionSpan,
testModuleSpan,
{
isSuitesSkipped: isTestsSkipped,
isSuitesSkippingEnabled,
isCodeCoverageEnabled,
skippingType: 'test',
skippingCount: skippedTests.length,
hasForcedToRunSuites,
hasUnskippableSuites
}
)
testModuleSpan.finish()
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')
testSessionSpan.finish()
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session')
finishAllTraceSpans(testSessionSpan)
}
return new Promise(resolve => {
const exporter = tracer._tracer._exporter
if (!exporter) {
return resolve(null)
}
if (exporter.flush) {
exporter.flush(() => {
appClosingTelemetry()
resolve(null)
})
} else if (exporter._writer) {
exporter._writer.flush(() => {
appClosingTelemetry()
resolve(null)
})
}
})
})
on('task', {
'dd:testSuiteStart': (suite) => {
if (testSuiteSpan) {
return null
}
const testSuiteSpanMetadata = getTestSuiteCommonTags(command, frameworkVersion, suite, TEST_FRAMEWORK_NAME)
testSuiteSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_suite`, {
childOf: testModuleSpan,
tags: {
[COMPONENT]: TEST_FRAMEWORK_NAME,
...testEnvironmentMetadata,
...testSuiteSpanMetadata
}
})
ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')
return null
},
'dd:beforeEach': (test) => {
const { testName, testSuite } = test
const shouldSkip = !!testsToSkip.find(test => {
return testName === test.name && testSuite === test.suite
})
const isUnskippable = unskippableSuites.includes(testSuite)
const isForcedToRun = shouldSkip && isUnskippable
// skip test
if (shouldSkip && !isUnskippable) {
skippedTests.push(test)
isTestsSkipped = true
return { shouldSkip: true }
}
if (!activeSpan) {
activeSpan = getTestSpan(testName, testSuite, isUnskippable, isForcedToRun)
}
return activeSpan ? { traceId: activeSpan.context().toTraceId() } : {}
},
'dd:afterEach': ({ test, coverage }) => {
const { state, error, isRUMActive, testSourceLine, testSuite, testName } = test
if (activeSpan) {
if (coverage && isCodeCoverageEnabled && tracer._tracer._exporter && tracer._tracer._exporter.exportCoverage) {
const coverageFiles = getCoveredFilenamesFromCoverage(coverage)
const relativeCoverageFiles = coverageFiles.map(file => getTestSuitePath(file, rootDir))
if (!relativeCoverageFiles.length) {
incrementCountMetric(TELEMETRY_CODE_COVERAGE_EMPTY)
}
distributionMetric(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length)
const { _traceId, _spanId } = testSuiteSpan.context()
const formattedCoverage = {
sessionId: _traceId,
suiteId: _spanId,
testId: activeSpan.context()._spanId,
files: relativeCoverageFiles
}
tracer._tracer._exporter.exportCoverage(formattedCoverage)
}
const testStatus = CYPRESS_STATUS_TO_TEST_STATUS[state]
activeSpan.setTag(TEST_STATUS, testStatus)
if (error) {
activeSpan.setTag('error', error)
}
if (isRUMActive) {
activeSpan.setTag(TEST_IS_RUM_ACTIVE, 'true')
}
if (testSourceLine) {
activeSpan.setTag(TEST_SOURCE_START, testSourceLine)
}
const finishedTest = {
testName,
testStatus,
finishTime: activeSpan._getTime(), // we store the finish time here
testSpan: activeSpan
}
if (finishedTestsByFile[testSuite]) {
finishedTestsByFile[testSuite].push(finishedTest)
} else {
finishedTestsByFile[testSuite] = [finishedTest]
}
// test spans are finished at after:spec
}
activeSpan = null
ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test')
return null
},
'dd:addTags': (tags) => {
if (activeSpan) {
activeSpan.addTags(tags)
}
return null
}
})
on('before:run', cypressPlugin.beforeRun.bind(cypressPlugin))
on('after:spec', cypressPlugin.afterSpec.bind(cypressPlugin))
on('after:run', cypressPlugin.afterRun.bind(cypressPlugin))
on('task', cypressPlugin.getTasks())
}
/* eslint-disable */
let isEarlyFlakeDetectionEnabled = false
let knownTestsForSuite = []
let suiteTests = []
let earlyFlakeDetectionNumRetries = 0
function isNewTest (test) {
return !knownTestsForSuite.includes(test.fullTitle())
}
function retryTest (test, suiteTests) {
for (let retryIndex = 0; retryIndex < earlyFlakeDetectionNumRetries; retryIndex++) {
const clonedTest = test.clone()
// TODO: signal in framework logs that this is a retry.
// TODO: Change it so these tests are allowed to fail.
// TODO: figure out if reported duration is skewed.
suiteTests.unshift(clonedTest)
clonedTest._ddIsNew = true
clonedTest._ddIsEfdRetry = true
}
}
const oldRunTests = Cypress.mocha.getRunner().runTests
Cypress.mocha.getRunner().runTests = function (suite, fn) {
if (!isEarlyFlakeDetectionEnabled) {
return oldRunTests.apply(this, arguments)
}
// We copy the new tests at the beginning of the suite run (runTests), so that they're run
// multiple times.
suite.tests.forEach(test => {
if (!test._ddIsNew && !test.isPending() && isNewTest(test)) {
test._ddIsNew = true
retryTest(test, suite.tests)
}
})
return oldRunTests.apply(this, [suite, fn])
}
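In effect, with early flake detection enabled a new test runs `earlyFlakeDetectionNumRetries + 1` times. A hedged sketch of that expansion, using only the Mocha test methods shown above (the helper name is illustrative):

// Sketch: clone each new, non-pending test N times and prepend the clones,
// so the suite runs the original plus N early-flake-detection retries.
function expandNewTests (suite, isKnown, numRetries) {
  for (const test of [...suite.tests]) {
    if (!isKnown(test.fullTitle()) && !test.isPending()) {
      for (let i = 0; i < numRetries; i++) {
        suite.tests.unshift(test.clone())
      }
    }
  }
}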
beforeEach(function () {

@@ -14,4 +53,10 @@ cy.task('dd:beforeEach', {

before(() => {
cy.task('dd:testSuiteStart', Cypress.mocha.getRootSuite().file)
before(function () {
cy.task('dd:testSuiteStart', Cypress.mocha.getRootSuite().file).then((suiteConfig) => {
if (suiteConfig) {
isEarlyFlakeDetectionEnabled = suiteConfig.isEarlyFlakeDetectionEnabled
knownTestsForSuite = suiteConfig.knownTestsForSuite
earlyFlakeDetectionNumRetries = suiteConfig.earlyFlakeDetectionNumRetries
}
})
})

@@ -28,3 +73,3 @@

afterEach(() => {
afterEach(function () {
cy.window().then(win => {

@@ -37,2 +82,4 @@ const currentTest = Cypress.mocha.getRunner().suite.ctx.currentTest

error: currentTest.err,
isNew: currentTest._ddIsNew,
isEfdRetry: currentTest._ddIsEfdRetry
}

@@ -39,0 +86,0 @@ try {

@@ -7,3 +7,3 @@ 'use strict'

static get id () { return 'fetch' }
static get prefix () { return `apm:fetch:request` }
static get prefix () { return 'apm:fetch:request' }

@@ -10,0 +10,0 @@ addTraceSub (eventName, handler) {

'use strict'
const pick = require('../../utils/src/pick')
const pick = require('../../datadog-core/src/utils/src/pick')
const CompositePlugin = require('../../dd-trace/src/plugins/composite')

@@ -5,0 +5,0 @@ const log = require('../../dd-trace/src/log')

@@ -125,11 +125,13 @@ 'use strict'

const directives = info.fieldNodes[0].directives
for (const directive of directives) {
const argList = {}
for (const argument of directive['arguments']) {
argList[argument.name.value] = argument.value.value
}
const directives = info.fieldNodes?.[0]?.directives
if (Array.isArray(directives)) {
for (const directive of directives) {
const argList = {}
for (const argument of directive.arguments) {
argList[argument.name.value] = argument.value.value
}
if (Object.keys(argList).length) {
resolverVars[directive.name.value] = argList
if (Object.keys(argList).length) {
resolverVars[directive.name.value] = argList
}
}

@@ -136,0 +138,0 @@ }

@@ -11,3 +11,3 @@ 'use strict'

static get operation () { return 'client:request' }
static get prefix () { return `apm:grpc:client:request` }
static get prefix () { return 'apm:grpc:client:request' }
static get peerServicePrecursors () { return ['rpc.service'] }

@@ -34,3 +34,3 @@

meta: {
'component': 'grpc',
component: 'grpc',
'grpc.method.kind': method.kind,

@@ -37,0 +37,0 @@ 'grpc.method.path': method.path,

@@ -11,3 +11,3 @@ 'use strict'

static get operation () { return 'server:request' }
static get prefix () { return `apm:grpc:server:request` }
static get prefix () { return 'apm:grpc:server:request' }

@@ -43,3 +43,3 @@ constructor (...args) {

meta: {
'component': 'grpc',
component: 'grpc',
'grpc.method.kind': method.kind,

@@ -46,0 +46,0 @@ 'grpc.method.path': method.path,

'use strict'
const pick = require('../../utils/src/pick')
const pick = require('../../datadog-core/src/utils/src/pick')
const log = require('../../dd-trace/src/log')

@@ -5,0 +5,0 @@

@@ -20,3 +20,3 @@ 'use strict'

static get id () { return 'http' }
static get prefix () { return `apm:http:client:request` }
static get prefix () { return 'apm:http:client:request' }

@@ -231,3 +231,3 @@ bindStart (message) {

if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) {
if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) {
return true

@@ -234,0 +234,0 @@ }

@@ -28,3 +28,3 @@ 'use strict'

static get id () { return 'http2' }
static get prefix () { return `apm:http2:client:request` }
static get prefix () { return 'apm:http2:client:request' }

@@ -126,3 +126,4 @@ bindStart (message) {

let port = '' + (authority.port !== ''
? authority.port : (authority.protocol === 'http:' ? 80 : 443))
? authority.port
: (authority.protocol === 'http:' ? 80 : 443))
let host = authority.hostname || authority.host || 'localhost'

@@ -149,3 +150,3 @@

if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) {
if ([].concat(headers.authorization).some(startsWith('AWS4-HMAC-SHA256'))) {
return true

@@ -152,0 +153,0 @@ }

@@ -19,6 +19,6 @@ const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')

TEST_SOURCE_FILE,
getTestSuitePath,
TEST_IS_NEW,
TEST_EARLY_FLAKE_IS_RETRY,
TEST_EARLY_FLAKE_IS_ENABLED
TEST_IS_RETRY,
TEST_EARLY_FLAKE_IS_ENABLED,
JEST_DISPLAY_NAME
} = require('../../dd-trace/src/plugins/util/test')

@@ -55,2 +55,3 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

}
getForcedToRunSuites (forcedToRunSuitesList) {

@@ -146,6 +147,7 @@ if (!this.forcedToRunSuites) {

config._ddEarlyFlakeDetectionNumRetries = this.libraryConfig?.earlyFlakeDetectionNumRetries ?? 0
config._ddRepositoryRoot = this.repositoryRoot
})
})
this.addSub('ci:jest:test-suite:start', ({ testSuite, testEnvironmentOptions, frameworkVersion }) => {
this.addSub('ci:jest:test-suite:start', ({ testSuite, testEnvironmentOptions, frameworkVersion, displayName }) => {
const {

@@ -185,2 +187,5 @@ _ddTestSessionId: testSessionId,

}
if (displayName) {
testSuiteMetadata[JEST_DISPLAY_NAME] = displayName
}

@@ -315,6 +320,7 @@ this.testSuiteSpan = this.tracer.startSpan('jest.test_suite', {

runner,
displayName,
testParameters,
frameworkVersion,
testStartLine,
testFileAbsolutePath,
testSourceFile,
isNew,

@@ -332,7 +338,7 @@ isEfdRetry

}
if (testFileAbsolutePath) {
extraTags[TEST_SOURCE_FILE] = getTestSuitePath(testFileAbsolutePath, this.repositoryRoot)
} else {
// If for whatever we don't have the full path, we'll set the source file to the suite name
extraTags[TEST_SOURCE_FILE] = suite
// If for whatever reason we don't have the source file, we'll fall back to the suite name
extraTags[TEST_SOURCE_FILE] = testSourceFile || suite
if (displayName) {
extraTags[JEST_DISPLAY_NAME] = displayName
}

@@ -343,3 +349,3 @@

if (isEfdRetry) {
extraTags[TEST_EARLY_FLAKE_IS_RETRY] = 'true'
extraTags[TEST_IS_RETRY] = 'true'
}

@@ -346,0 +352,0 @@ }

'use strict'
const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const dc = require('dc-polyfill')
const { getMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer')
const afterStartCh = dc.channel('dd-trace:kafkajs:consumer:afterStart')
const beforeFinishCh = dc.channel('dd-trace:kafkajs:consumer:beforeFinish')
class KafkajsConsumerPlugin extends ConsumerPlugin {

@@ -68,3 +73,3 @@ static get id () { return 'kafkajs' }

meta: {
'component': 'kafkajs',
component: 'kafkajs',
'kafka.topic': topic,

@@ -77,9 +82,21 @@ 'kafka.message.offset': message.offset

})
if (this.config.dsmEnabled) {
if (this.config.dsmEnabled && message?.headers && DsmPathwayCodec.contextExists(message.headers)) {
const payloadSize = getMessageSize(message)
this.tracer.decodeDataStreamsContext(message.headers[CONTEXT_PROPAGATION_KEY])
this.tracer.decodeDataStreamsContext(message.headers)
this.tracer
.setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], span, payloadSize)
}
if (afterStartCh.hasSubscribers) {
afterStartCh.publish({ topic, partition, message, groupId })
}
}
finish () {
if (beforeFinishCh.hasSubscribers) {
beforeFinishCh.publish()
}
super.finish()
}
}

@@ -86,0 +103,0 @@
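
Taken together, the producer and consumer changes replace the raw pathway header with the codec. A hedged sketch of the round trip, using only calls that appear in these diffs and assuming `tracer`, `span`, `message`, `topic`, `groupId` and `payloadSize` are in scope as in the plugins above:

// Producer side: encode the pathway context into the message headers.
const dataStreamsContext = tracer
  .setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], span, payloadSize)
DsmPathwayCodec.encode(dataStreamsContext, message.headers)

// Consumer side: decode only if the codec finds a context in the headers.
if (message?.headers && DsmPathwayCodec.contextExists(message.headers)) {
  tracer.decodeDataStreamsContext(message.headers)
  tracer
    .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], span, getMessageSize(message))
}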

'use strict'
const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway')
const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getMessageSize } = require('../../dd-trace/src/datastreams/processor')

@@ -70,7 +70,6 @@ const BOOTSTRAP_SERVERS_KEY = 'messaging.kafka.bootstrap.servers'

start ({ topic, messages, bootstrapServers }) {
let pathwayCtx
const span = this.startSpan({
resource: topic,
meta: {
'component': 'kafkajs',
component: 'kafkajs',
'kafka.topic': topic

@@ -92,4 +91,3 @@ },

.setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], span, payloadSize)
pathwayCtx = encodePathwayContext(dataStreamsContext)
message.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx
DsmPathwayCodec.encode(dataStreamsContext, message.headers)
}

@@ -96,0 +94,0 @@ }

@@ -19,3 +19,7 @@ 'use strict'

ITR_CORRELATION_ID,
TEST_SOURCE_FILE
TEST_SOURCE_FILE,
removeEfdStringFromTestName,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_IS_ENABLED
} = require('../../dd-trace/src/plugins/util/test')

@@ -43,3 +47,3 @@ const { COMPONENT } = require('../../dd-trace/src/constants')

this._testSuites = new Map()
this._testNameToParams = {}
this._testTitleToParams = {}
this.sourceRoot = process.cwd()

@@ -136,5 +140,5 @@

this.addSub('ci:mocha:test:start', ({ test, testStartLine }) => {
this.addSub('ci:mocha:test:start', (testInfo) => {
const store = storage.getStore()
const span = this.startTestSpan(test, testStartLine)
const span = this.startTestSpan(testInfo)

@@ -162,3 +166,3 @@ this.enter(span, store)

this.addSub('ci:mocha:test:skip', (test) => {
this.addSub('ci:mocha:test:skip', (testInfo) => {
const store = storage.getStore()

@@ -168,3 +172,3 @@ // skipped through it.skip, so the span is not created yet

if (!store) {
const testSpan = this.startTestSpan(test)
const testSpan = this.startTestSpan(testInfo)
this.enter(testSpan, store)

@@ -187,4 +191,4 @@ }

this.addSub('ci:mocha:test:parameterize', ({ name, params }) => {
this._testNameToParams[name] = params
this.addSub('ci:mocha:test:parameterize', ({ title, params }) => {
this._testTitleToParams[title] = params
})

@@ -199,3 +203,4 @@

hasUnskippableSuites,
error
error,
isEarlyFlakeDetectionEnabled
}) => {

@@ -227,2 +232,6 @@ if (this.testSessionSpan) {

if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_IS_ENABLED, 'true')
}
this.testModuleSpan.finish()

@@ -239,8 +248,15 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')

startTestSpan (test, testStartLine) {
const testName = test.fullTitle()
const { file: testSuiteAbsolutePath, title } = test
startTestSpan (testInfo) {
const {
testSuiteAbsolutePath,
title,
isNew,
isEfdRetry,
testStartLine
} = testInfo
const testName = removeEfdStringFromTestName(testInfo.testName)
const extraTags = {}
const testParametersString = getTestParametersString(this._testNameToParams, title)
const testParametersString = getTestParametersString(this._testTitleToParams, title)
if (testParametersString) {

@@ -257,6 +273,4 @@ extraTags[TEST_PARAMETERS] = testParametersString

const testSourceFile = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot)
if (testSourceFile) {
extraTags[TEST_SOURCE_FILE] = testSourceFile
if (this.repositoryRoot !== this.sourceRoot && !!this.repositoryRoot) {
extraTags[TEST_SOURCE_FILE] = getTestSuitePath(testSuiteAbsolutePath, this.repositoryRoot)
} else {

@@ -266,2 +280,9 @@ extraTags[TEST_SOURCE_FILE] = testSuite

if (isNew) {
extraTags[TEST_IS_NEW] = 'true'
if (isEfdRetry) {
extraTags[TEST_IS_RETRY] = 'true'
}
}
return super.startTestSpan(testName, testSuite, testSuiteSpan, extraTags)

@@ -268,0 +289,0 @@ }

@@ -61,3 +61,3 @@ 'use strict'

const span = store.span
const error = span.context()._tags['error']
const error = span.context()._tags.error
const requestError = req.error || nextRequest.error

@@ -64,0 +64,0 @@

@@ -92,3 +92,3 @@ 'use strict'

// This is a single prompt, either String or [Number]
tags[`openai.request.prompt`] = normalizeStringOrTokenArray(prompt, true)
tags['openai.request.prompt'] = normalizeStringOrTokenArray(prompt, true)
} else if (Array.isArray(prompt)) {

@@ -105,3 +105,3 @@ // This is multiple prompts, either [String] or [[Number]]

const normalized = normalizeStringOrTokenArray(payload.input, false)
tags[`openai.request.input`] = truncateText(normalized)
tags['openai.request.input'] = truncateText(normalized)
store.input = normalized

@@ -208,3 +208,3 @@ }

const tags = [`error:1`]
const tags = ['error:1']
this.metrics.distribution('openai.request.duration', span._duration * 1000, tags)

@@ -221,3 +221,3 @@ this.metrics.increment('openai.request.error', 1, tags)

`model:${headers['openai-model']}`,
`error:0`
'error:0'
]

@@ -224,0 +224,0 @@

@@ -14,3 +14,6 @@ 'use strict'

TEST_SOURCE_FILE,
TEST_CONFIGURATION_BROWSER_NAME
TEST_CONFIGURATION_BROWSER_NAME,
TEST_IS_NEW,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_IS_ENABLED
} = require('../../dd-trace/src/plugins/util/test')

@@ -34,7 +37,23 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

this._testSuites = new Map()
this.numFailedTests = 0
this.numFailedSuites = 0
this.addSub('ci:playwright:session:finish', ({ status, onDone }) => {
this.addSub('ci:playwright:session:finish', ({ status, isEarlyFlakeDetectionEnabled, onDone }) => {
this.testModuleSpan.setTag(TEST_STATUS, status)
this.testSessionSpan.setTag(TEST_STATUS, status)
if (isEarlyFlakeDetectionEnabled) {
this.testSessionSpan.setTag(TEST_EARLY_FLAKE_IS_ENABLED, 'true')
}
if (this.numFailedSuites > 0) {
let errorMessage = `Test suites failed: ${this.numFailedSuites}.`
if (this.numFailedTests > 0) {
errorMessage += ` Tests failed: ${this.numFailedTests}`
}
const error = new Error(errorMessage)
this.testModuleSpan.setTag('error', error)
this.testSessionSpan.setTag('error', error)
}
this.testModuleSpan.finish()

@@ -47,2 +66,3 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module')

this.tracer._exporter.flush(onDone)
this.numFailedTests = 0
})

@@ -75,7 +95,17 @@

this.addSub('ci:playwright:test-suite:finish', (status) => {
this.addSub('ci:playwright:test-suite:finish', ({ status, error }) => {
const store = storage.getStore()
const span = store && store.span
if (!span) return
span.setTag(TEST_STATUS, status)
if (error) {
span.setTag('error', error)
span.setTag(TEST_STATUS, 'fail')
} else {
span.setTag(TEST_STATUS, status)
}
if (status === 'fail' || error) {
this.numFailedSuites++
}
span.finish()

@@ -93,3 +123,3 @@ this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')

})
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags }) => {
this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags, isNew, isEfdRetry }) => {
const store = storage.getStore()

@@ -107,2 +137,8 @@ const span = store && store.span

}
if (isNew) {
span.setTag(TEST_IS_NEW, 'true')
if (isEfdRetry) {
span.setTag(TEST_IS_RETRY, 'true')
}
}

@@ -123,3 +159,7 @@ steps.forEach(step => {

}
stepSpan.finish(stepStartTime + step.duration)
let stepDuration = step.duration
if (stepDuration <= 0 || isNaN(stepDuration)) {
stepDuration = 0
}
stepSpan.finish(stepStartTime + stepDuration)
})

@@ -129,2 +169,6 @@

if (testStatus === 'fail') {
this.numFailedTests++
}
this.telemetry.ciVisEvent(

@@ -131,0 +175,0 @@ TELEMETRY_EVENT_FINISHED,

@@ -5,3 +5,4 @@ 'use strict'

const { storage } = require('../../datadog-core')
const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')

@@ -29,3 +30,3 @@ class RheaConsumerPlugin extends ConsumerPlugin {

meta: {
'component': 'rhea',
component: 'rhea',
'amqp.link.source.address': name,

@@ -36,7 +37,11 @@ 'amqp.link.role': 'receiver'

if (this.config.dsmEnabled && msgObj.message) {
if (
this.config.dsmEnabled &&
msgObj?.message?.delivery_annotations &&
DsmPathwayCodec.contextExists(msgObj.message.delivery_annotations)
) {
const payloadSize = getAmqpMessageSize(
{ headers: msgObj.message.delivery_annotations, content: msgObj.message.body }
)
this.tracer.decodeDataStreamsContext(msgObj.message.delivery_annotations[CONTEXT_PROPAGATION_KEY])
this.tracer.decodeDataStreamsContext(msgObj.message.delivery_annotations)
this.tracer

@@ -43,0 +48,0 @@ .setCheckpoint(['direction:in', `topic:${name}`, 'type:rabbitmq'], span, payloadSize)

@@ -5,4 +5,4 @@ 'use strict'

const ProducerPlugin = require('../../dd-trace/src/plugins/producer')
const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway')
const { getAmqpMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor')
const { DsmPathwayCodec } = require('../../dd-trace/src/datastreams/pathway')
const { getAmqpMessageSize } = require('../../dd-trace/src/datastreams/processor')

@@ -23,3 +23,3 @@ class RheaProducerPlugin extends ProducerPlugin {

meta: {
'component': 'rhea',
component: 'rhea',
'amqp.link.target.address': name,

@@ -49,4 +49,3 @@ 'amqp.link.role': 'sender',

.setCheckpoint(['direction:out', `exchange:${targetName}`, 'type:rabbitmq'], span, payloadSize)
const pathwayCtx = encodePathwayContext(dataStreamsContext)
msg.delivery_annotations[CONTEXT_PROPAGATION_KEY] = pathwayCtx
DsmPathwayCodec.encode(dataStreamsContext, msg.delivery_annotations)
}

@@ -53,0 +52,0 @@ }

@@ -74,3 +74,3 @@ 'use strict'

this.addSub(`apm:http:server:request:finish`, ({ req }) => {
this.addSub('apm:http:server:request:finish', ({ req }) => {
const context = this._contexts.get(req)

@@ -77,0 +77,0 @@

@@ -19,3 +19,3 @@ 'use strict'

'db.type': 'mssql',
'component': 'tedious',
component: 'tedious',
'out.host': connectionConfig.server,

@@ -22,0 +22,0 @@ [CLIENT_PORT_KEY]: connectionConfig.options.port,

@@ -31,3 +31,3 @@ 'use strict'

const headers = {
'Location': blockingConfiguration.parameters.location
Location: blockingConfiguration.parameters.location
}

@@ -34,0 +34,0 @@

'use strict'
module.exports = {
'COMMAND_INJECTION_ANALYZER': require('./command-injection-analyzer'),
'HARCODED_SECRET_ANALYZER': require('./hardcoded-secret-analyzer'),
'HEADER_INJECTION_ANALYZER': require('./header-injection-analyzer'),
'HSTS_HEADER_MISSING_ANALYZER': require('./hsts-header-missing-analyzer'),
'INSECURE_COOKIE_ANALYZER': require('./insecure-cookie-analyzer'),
'LDAP_ANALYZER': require('./ldap-injection-analyzer'),
'NO_HTTPONLY_COOKIE_ANALYZER': require('./no-httponly-cookie-analyzer'),
'NO_SAMESITE_COOKIE_ANALYZER': require('./no-samesite-cookie-analyzer'),
'NOSQL_MONGODB_INJECTION': require('./nosql-injection-mongodb-analyzer'),
'PATH_TRAVERSAL_ANALYZER': require('./path-traversal-analyzer'),
'SQL_INJECTION_ANALYZER': require('./sql-injection-analyzer'),
'SSRF': require('./ssrf-analyzer'),
'UNVALIDATED_REDIRECT_ANALYZER': require('./unvalidated-redirect-analyzer'),
'WEAK_CIPHER_ANALYZER': require('./weak-cipher-analyzer'),
'WEAK_HASH_ANALYZER': require('./weak-hash-analyzer'),
'WEAK_RANDOMNESS_ANALYZER': require('./weak-randomness-analyzer'),
'XCONTENTTYPE_HEADER_MISSING_ANALYZER': require('./xcontenttype-header-missing-analyzer')
COMMAND_INJECTION_ANALYZER: require('./command-injection-analyzer'),
HARCODED_SECRET_ANALYZER: require('./hardcoded-secret-analyzer'),
HEADER_INJECTION_ANALYZER: require('./header-injection-analyzer'),
HSTS_HEADER_MISSING_ANALYZER: require('./hsts-header-missing-analyzer'),
INSECURE_COOKIE_ANALYZER: require('./insecure-cookie-analyzer'),
LDAP_ANALYZER: require('./ldap-injection-analyzer'),
NO_HTTPONLY_COOKIE_ANALYZER: require('./no-httponly-cookie-analyzer'),
NO_SAMESITE_COOKIE_ANALYZER: require('./no-samesite-cookie-analyzer'),
NOSQL_MONGODB_INJECTION: require('./nosql-injection-mongodb-analyzer'),
PATH_TRAVERSAL_ANALYZER: require('./path-traversal-analyzer'),
SQL_INJECTION_ANALYZER: require('./sql-injection-analyzer'),
SSRF: require('./ssrf-analyzer'),
UNVALIDATED_REDIRECT_ANALYZER: require('./unvalidated-redirect-analyzer'),
WEAK_CIPHER_ANALYZER: require('./weak-cipher-analyzer'),
WEAK_HASH_ANALYZER: require('./weak-hash-analyzer'),
WEAK_RANDOMNESS_ANALYZER: require('./weak-randomness-analyzer'),
XCONTENTTYPE_HEADER_MISSING_ANALYZER: require('./xcontenttype-header-missing-analyzer')
}

@@ -37,2 +37,3 @@ 'use strict'

}
_checkOCE (context, value) {

@@ -39,0 +40,0 @@ if (value && value.location) {

@@ -6,265 +6,265 @@ /* eslint-disable max-len */

{
'id': 'adobe-client-secret',
'regex': /\b((p8e-)[a-z0-9]{32})(?:['"\s\x60;]|$)/i
id: 'adobe-client-secret',
regex: /\b((p8e-)[a-z0-9]{32})(?:['"\s\x60;]|$)/i
},
{
'id': 'age-secret-key',
'regex': /AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}/
id: 'age-secret-key',
regex: /AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}/
},
{
'id': 'alibaba-access-key-id',
'regex': /\b((LTAI)[a-z0-9]{20})(?:['"\s\x60;]|$)/i
id: 'alibaba-access-key-id',
regex: /\b((LTAI)[a-z0-9]{20})(?:['"\s\x60;]|$)/i
},
{
'id': 'authress-service-client-access-key',
'regex': /\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc[_-][a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['"\s\x60;]|$)/i
id: 'authress-service-client-access-key',
regex: /\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc[_-][a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['"\s\x60;]|$)/i
},
{
'id': 'aws-access-token',
'regex': /\b((A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})(?:['"\s\x60;]|$)/
id: 'aws-access-token',
regex: /\b((A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})(?:['"\s\x60;]|$)/
},
{
'id': 'clojars-api-token',
'regex': /(CLOJARS_)[a-z0-9]{60}/i
id: 'clojars-api-token',
regex: /(CLOJARS_)[a-z0-9]{60}/i
},
{
'id': 'databricks-api-token',
'regex': /\b(dapi[a-h0-9]{32})(?:['"\s\x60;]|$)/i
id: 'databricks-api-token',
regex: /\b(dapi[a-h0-9]{32})(?:['"\s\x60;]|$)/i
},
{
'id': 'digitalocean-access-token',
'regex': /\b(doo_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'digitalocean-access-token',
regex: /\b(doo_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
},
{
'id': 'digitalocean-pat',
'regex': /\b(dop_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'digitalocean-pat',
regex: /\b(dop_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
},
{
'id': 'digitalocean-refresh-token',
'regex': /\b(dor_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
id: 'digitalocean-refresh-token',
regex: /\b(dor_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i
},
{
'id': 'doppler-api-token',
'regex': /(dp\.pt\.)[a-z0-9]{43}/i
id: 'doppler-api-token',
regex: /(dp\.pt\.)[a-z0-9]{43}/i
},
{
'id': 'duffel-api-token',
'regex': /duffel_(test|live)_[a-z0-9_\-=]{43}/i
id: 'duffel-api-token',
regex: /duffel_(test|live)_[a-z0-9_\-=]{43}/i
},
{
'id': 'dynatrace-api-token',
'regex': /dt0c01\.[a-z0-9]{24}\.[a-z0-9]{64}/i
id: 'dynatrace-api-token',
regex: /dt0c01\.[a-z0-9]{24}\.[a-z0-9]{64}/i
},
{
'id': 'easypost-api-token',
'regex': /\bEZAK[a-z0-9]{54}/i
id: 'easypost-api-token',
regex: /\bEZAK[a-z0-9]{54}/i
},
{
'id': 'flutterwave-public-key',
'regex': /FLWPUBK_TEST-[a-h0-9]{32}-X/i
id: 'flutterwave-public-key',
regex: /FLWPUBK_TEST-[a-h0-9]{32}-X/i
},
{
'id': 'frameio-api-token',
'regex': /fio-u-[a-z0-9\-_=]{64}/i
id: 'frameio-api-token',
regex: /fio-u-[a-z0-9\-_=]{64}/i
},
{
'id': 'gcp-api-key',
'regex': /\b(AIza[0-9a-z\-_]{35})(?:['"\s\x60;]|$)/i
id: 'gcp-api-key',
regex: /\b(AIza[0-9a-z\-_]{35})(?:['"\s\x60;]|$)/i
},
{
'id': 'github-app-token',
'regex': /(ghu|ghs)_[0-9a-zA-Z]{36}/
id: 'github-app-token',
regex: /(ghu|ghs)_[0-9a-zA-Z]{36}/
},
{
'id': 'github-fine-grained-pat',
'regex': /github_pat_[0-9a-zA-Z_]{82}/
id: 'github-fine-grained-pat',
regex: /github_pat_[0-9a-zA-Z_]{82}/
},
{
'id': 'github-oauth',
'regex': /gho_[0-9a-zA-Z]{36}/
id: 'github-oauth',
regex: /gho_[0-9a-zA-Z]{36}/
},
{
'id': 'github-pat',
'regex': /ghp_[0-9a-zA-Z]{36}/
id: 'github-pat',
regex: /ghp_[0-9a-zA-Z]{36}/
},
{
'id': 'gitlab-pat',
'regex': /glpat-[0-9a-zA-Z\-_]{20}/
id: 'gitlab-pat',
regex: /glpat-[0-9a-zA-Z\-_]{20}/
},
{
'id': 'gitlab-ptt',
'regex': /glptt-[0-9a-f]{40}/
id: 'gitlab-ptt',
regex: /glptt-[0-9a-f]{40}/
},
{
'id': 'gitlab-rrt',
'regex': /GR1348941[0-9a-zA-Z\-_]{20}/
id: 'gitlab-rrt',
regex: /GR1348941[0-9a-zA-Z\-_]{20}/
},
{
'id': 'grafana-api-key',
'regex': /\b(eyJrIjoi[a-z0-9]{70,400}={0,2})(?:['"\s\x60;]|$)/i
id: 'grafana-api-key',
regex: /\b(eyJrIjoi[a-z0-9]{70,400}={0,2})(?:['"\s\x60;]|$)/i
},
{
'id': 'grafana-cloud-api-token',
'regex': /\b(glc_[a-z0-9+/]{32,400}={0,2})(?:['"\s\x60;]|$)/i
id: 'grafana-cloud-api-token',
regex: /\b(glc_[a-z0-9+/]{32,400}={0,2})(?:['"\s\x60;]|$)/i
},
{
'id': 'grafana-service-account-token',
'regex': /\b(glsa_[a-z0-9]{32}_[a-f0-9]{8})(?:['"\s\x60;]|$)/i
id: 'grafana-service-account-token',
regex: /\b(glsa_[a-z0-9]{32}_[a-f0-9]{8})(?:['"\s\x60;]|$)/i
},
{
'id': 'hashicorp-tf-api-token',
'regex': /[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}/i
id: 'hashicorp-tf-api-token',
regex: /[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}/i
},
{
'id': 'jwt',
'regex': /\b(ey[a-zA-Z0-9]{17,}\.ey[a-zA-Z0-9/_-]{17,}\.(?:[a-zA-Z0-9/_-]{10,}={0,2})?)(?:['"\s\x60;]|$)/
id: 'jwt',
regex: /\b(ey[a-zA-Z0-9]{17,}\.ey[a-zA-Z0-9/_-]{17,}\.(?:[a-zA-Z0-9/_-]{10,}={0,2})?)(?:['"\s\x60;]|$)/
},
{
'id': 'linear-api-key',
'regex': /lin_api_[a-z0-9]{40}/i
id: 'linear-api-key',
regex: /lin_api_[a-z0-9]{40}/i
},
{
'id': 'npm-access-token',
'regex': /\b(npm_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
id: 'npm-access-token',
regex: /\b(npm_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
},
{
'id': 'openai-api-key',
'regex': /\b(sk-[a-z0-9]{20}T3BlbkFJ[a-z0-9]{20})(?:['"\s\x60;]|$)/i
id: 'openai-api-key',
regex: /\b(sk-[a-z0-9]{20}T3BlbkFJ[a-z0-9]{20})(?:['"\s\x60;]|$)/i
},
{
'id': 'planetscale-api-token',
'regex': /\b(pscale_tkn_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'planetscale-api-token',
regex: /\b(pscale_tkn_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
},
{
'id': 'planetscale-oauth-token',
'regex': /\b(pscale_oauth_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'planetscale-oauth-token',
regex: /\b(pscale_oauth_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
},
{
'id': 'planetscale-password',
'regex': /\b(pscale_pw_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
id: 'planetscale-password',
regex: /\b(pscale_pw_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i
},
{
'id': 'postman-api-token',
'regex': /\b(PMAK-[a-f0-9]{24}-[a-f0-9]{34})(?:['"\s\x60;]|$)/i
id: 'postman-api-token',
regex: /\b(PMAK-[a-f0-9]{24}-[a-f0-9]{34})(?:['"\s\x60;]|$)/i
},
{
'id': 'prefect-api-token',
'regex': /\b(pnu_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
id: 'prefect-api-token',
regex: /\b(pnu_[a-z0-9]{36})(?:['"\s\x60;]|$)/i
},
{
'id': 'private-key',
'regex': /-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S]*KEY( BLOCK)?----/i
id: 'private-key',
regex: /-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S]*KEY( BLOCK)?----/i
},
{
'id': 'pulumi-api-token',
'regex': /\b(pul-[a-f0-9]{40})(?:['"\s\x60;]|$)/i
id: 'pulumi-api-token',
regex: /\b(pul-[a-f0-9]{40})(?:['"\s\x60;]|$)/i
},
{
'id': 'pypi-upload-token',
'regex': /pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}/
id: 'pypi-upload-token',
regex: /pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}/
},
{
'id': 'readme-api-token',
'regex': /\b(rdme_[a-z0-9]{70})(?:['"\s\x60;]|$)/i
id: 'readme-api-token',
regex: /\b(rdme_[a-z0-9]{70})(?:['"\s\x60;]|$)/i
},
{
'id': 'rubygems-api-token',
'regex': /\b(rubygems_[a-f0-9]{48})(?:['"\s\x60;]|$)/i
id: 'rubygems-api-token',
regex: /\b(rubygems_[a-f0-9]{48})(?:['"\s\x60;]|$)/i
},
{
'id': 'scalingo-api-token',
'regex': /tk-us-[a-zA-Z0-9-_]{48}/
id: 'scalingo-api-token',
regex: /tk-us-[a-zA-Z0-9-_]{48}/
},
{
'id': 'sendgrid-api-token',
'regex': /\b(SG\.[a-z0-9=_\-.]{66})(?:['"\s\x60;]|$)/i
id: 'sendgrid-api-token',
regex: /\b(SG\.[a-z0-9=_\-.]{66})(?:['"\s\x60;]|$)/i
},
{
'id': 'sendinblue-api-token',
'regex': /\b(xkeysib-[a-f0-9]{64}-[a-z0-9]{16})(?:['"\s\x60;]|$)/i
id: 'sendinblue-api-token',
regex: /\b(xkeysib-[a-f0-9]{64}-[a-z0-9]{16})(?:['"\s\x60;]|$)/i
},
{
'id': 'shippo-api-token',
'regex': /\b(shippo_(live|test)_[a-f0-9]{40})(?:['"\s\x60;]|$)/i
id: 'shippo-api-token',
regex: /\b(shippo_(live|test)_[a-f0-9]{40})(?:['"\s\x60;]|$)/i
},
{
'id': 'shopify-access-token',
'regex': /shpat_[a-fA-F0-9]{32}/
id: 'shopify-access-token',
regex: /shpat_[a-fA-F0-9]{32}/
},
{
'id': 'shopify-custom-access-token',
'regex': /shpca_[a-fA-F0-9]{32}/
id: 'shopify-custom-access-token',
regex: /shpca_[a-fA-F0-9]{32}/
},
{
'id': 'shopify-private-app-access-token',
'regex': /shppa_[a-fA-F0-9]{32}/
id: 'shopify-private-app-access-token',
regex: /shppa_[a-fA-F0-9]{32}/
},
{
'id': 'shopify-shared-secret',
'regex': /shpss_[a-fA-F0-9]{32}/
id: 'shopify-shared-secret',
regex: /shpss_[a-fA-F0-9]{32}/
},
{
'id': 'slack-app-token',
'regex': /(xapp-\d-[A-Z0-9]+-\d+-[a-z0-9]+)/i
id: 'slack-app-token',
regex: /(xapp-\d-[A-Z0-9]+-\d+-[a-z0-9]+)/i
},
{
'id': 'slack-bot-token',
'regex': /(xoxb-[0-9]{10,13}-[0-9]{10,13}[a-zA-Z0-9-]*)/
id: 'slack-bot-token',
regex: /(xoxb-[0-9]{10,13}-[0-9]{10,13}[a-zA-Z0-9-]*)/
},
{
'id': 'slack-config-access-token',
'regex': /(xoxe.xox[bp]-\d-[A-Z0-9]{163,166})/i
id: 'slack-config-access-token',
regex: /(xoxe.xox[bp]-\d-[A-Z0-9]{163,166})/i
},
{
'id': 'slack-config-refresh-token',
'regex': /(xoxe-\d-[A-Z0-9]{146})/i
id: 'slack-config-refresh-token',
regex: /(xoxe-\d-[A-Z0-9]{146})/i
},
{
'id': 'slack-legacy-bot-token',
'regex': /(xoxb-[0-9]{8,14}-[a-zA-Z0-9]{18,26})/
id: 'slack-legacy-bot-token',
regex: /(xoxb-[0-9]{8,14}-[a-zA-Z0-9]{18,26})/
},
{
'id': 'slack-legacy-token',
'regex': /(xox[os]-\d+-\d+-\d+-[a-fA-F\d]+)/
id: 'slack-legacy-token',
regex: /(xox[os]-\d+-\d+-\d+-[a-fA-F\d]+)/
},
{
'id': 'slack-legacy-workspace-token',
'regex': /(xox[ar]-(?:\d-)?[0-9a-zA-Z]{8,48})/
id: 'slack-legacy-workspace-token',
regex: /(xox[ar]-(?:\d-)?[0-9a-zA-Z]{8,48})/
},
{
'id': 'slack-user-token',
'regex': /(xox[pe](?:-[0-9]{10,13}){3}-[a-zA-Z0-9-]{28,34})/
id: 'slack-user-token',
regex: /(xox[pe](?:-[0-9]{10,13}){3}-[a-zA-Z0-9-]{28,34})/
},
{
'id': 'slack-webhook-url',
'regex': /(https?:\/\/)?hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+/]{43,46}/
id: 'slack-webhook-url',
regex: /(https?:\/\/)?hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+/]{43,46}/
},
{
'id': 'square-access-token',
'regex': /\b(sq0atp-[0-9a-z\-_]{22})(?:['"\s\x60;]|$)/i
id: 'square-access-token',
regex: /\b(sq0atp-[0-9a-z\-_]{22})(?:['"\s\x60;]|$)/i
},
{
'id': 'square-secret',
'regex': /\b(sq0csp-[0-9a-z\-_]{43})(?:['"\s\x60;]|$)/i
id: 'square-secret',
regex: /\b(sq0csp-[0-9a-z\-_]{43})(?:['"\s\x60;]|$)/i
},
{
'id': 'stripe-access-token',
'regex': /(sk|pk)_(test|live)_[0-9a-z]{10,32}/i
id: 'stripe-access-token',
regex: /(sk|pk)_(test|live)_[0-9a-z]{10,32}/i
},
{
'id': 'telegram-bot-api-token',
'regex': /(?:^|[^0-9])([0-9]{5,16}:A[a-z0-9_-]{34})(?:$|[^a-z0-9_-])/i
id: 'telegram-bot-api-token',
regex: /(?:^|[^0-9])([0-9]{5,16}:A[a-z0-9_-]{34})(?:$|[^a-z0-9_-])/i
},
{
'id': 'twilio-api-key',
'regex': /SK[0-9a-fA-F]{32}/
id: 'twilio-api-key',
regex: /SK[0-9a-fA-F]{32}/
},
{
'id': 'vault-batch-token',
'regex': /\b(hvb\.[a-z0-9_-]{138,212})(?:['"\s\x60;]|$)/i
id: 'vault-batch-token',
regex: /\b(hvb\.[a-z0-9_-]{138,212})(?:['"\s\x60;]|$)/i
},
{
'id': 'vault-service-token',
'regex': /\b(hvs\.[a-z0-9_-]{90,100})(?:['"\s\x60;]|$)/i
id: 'vault-service-token',
regex: /\b(hvs\.[a-z0-9_-]{90,100})(?:['"\s\x60;]|$)/i
}
]
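
Each rule above pairs a stable `id` with a case-insensitive `regex`; a scanner applies every pattern to a candidate string and reports the matching rule ids. A minimal sketch of that loop, assuming rules shaped exactly like the entries above (`scanForSecrets` is a hypothetical name, not a dd-trace export):

// Sketch only: apply rules shaped like the entries above to one value.
function scanForSecrets (rules, value) {
  const findings = []
  for (const { id, regex } of rules) {
    regex.lastIndex = 0 // reset in case a pattern was built with the g flag
    if (regex.test(value)) findings.push(id)
  }
  return findings
}
// scanForSecrets(rules, 'token=pscale_tkn_' + 'a'.repeat(32))
// => ['planetscale-api-token']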

@@ -12,2 +12,3 @@ 'use strict'

}
_isVulnerableFromRequestAndResponse (req, res) {

@@ -14,0 +15,0 @@ const headerValues = this._getHeaderValues(res, HSTS_HEADER_NAME)

@@ -15,3 +15,3 @@ 'use strict'

function iterateObjectStrings (target, fn, levelKeys = [], depth = 50, visited = new Set()) {
function iterateObjectStrings (target, fn, levelKeys = [], depth = 20, visited = new Set()) {
if (target && typeof target === 'object') {

@@ -18,0 +18,0 @@ Object.keys(target).forEach((key) => {

@@ -23,7 +23,11 @@ 'use strict'

'sqreen/lib/package-reader/index.js',
'ws/lib/websocket-server.js'
'ws/lib/websocket-server.js',
'google-gax/build/src/grpc.js',
'cookie-signature/index.js'
)
const EXCLUDED_PATHS_FROM_STACK = [
path.join('node_modules', 'object-hash', path.sep)
path.join('node_modules', 'object-hash', path.sep),
path.join('node_modules', 'aws-sdk', 'lib', 'util.js'),
path.join('node_modules', 'keygrip', path.sep)
]

@@ -30,0 +34,0 @@ class WeakHashAnalyzer extends Analyzer {

@@ -8,3 +8,4 @@ 'use strict'

const iastTelemetry = require('./telemetry')
const { getInstrumentedMetric, getExecutedMetric, TagKey, EXECUTED_SOURCE } = require('./telemetry/iast-metric')
const { getInstrumentedMetric, getExecutedMetric, TagKey, EXECUTED_SOURCE, formatTags } =
require('./telemetry/iast-metric')
const { storage } = require('../../../../datadog-core')

@@ -24,9 +25,12 @@ const { getIastContext } = require('./iast-context')

class IastPluginSubscription {
constructor (moduleName, channelName, tag, tagKey = TagKey.VULNERABILITY_TYPE) {
constructor (moduleName, channelName, tagValues, tagKey = TagKey.VULNERABILITY_TYPE) {
this.moduleName = moduleName
this.channelName = channelName
this.tag = tag
this.tagKey = tagKey
this.executedMetric = getExecutedMetric(this.tagKey)
this.instrumentedMetric = getInstrumentedMetric(this.tagKey)
tagValues = Array.isArray(tagValues) ? tagValues : [tagValues]
this.tags = formatTags(tagValues, tagKey)
this.executedMetric = getExecutedMetric(tagKey)
this.instrumentedMetric = getInstrumentedMetric(tagKey)
this.moduleInstrumented = false

@@ -36,10 +40,11 @@ }

increaseInstrumented () {
if (this.moduleInstrumented) return
if (!this.moduleInstrumented) {
this.moduleInstrumented = true
this.moduleInstrumented = true
this.instrumentedMetric.inc(this.tag)
this.tags.forEach(tag => this.instrumentedMetric.inc(undefined, tag))
}
}
increaseExecuted (iastContext) {
this.executedMetric.inc(this.tag, iastContext)
this.tags.forEach(tag => this.executedMetric.inc(iastContext, tag))
}

@@ -82,6 +87,12 @@

_execHandlerAndIncMetric ({ handler, metric, tag, iastContext = getIastContext(storage.getStore()) }) {
_execHandlerAndIncMetric ({ handler, metric, tags, iastContext = getIastContext(storage.getStore()) }) {
try {
const result = handler()
iastTelemetry.isEnabled() && metric.inc(tag, iastContext)
if (iastTelemetry.isEnabled()) {
if (Array.isArray(tags)) {
tags.forEach(tag => metric.inc(iastContext, tag))
} else {
metric.inc(iastContext, tags)
}
}
return result
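
The refactor lets `_execHandlerAndIncMetric` accept either one tag entry or a precomputed array of them, so tag strings are built once at module load instead of on every request. A minimal sketch of the same pattern with illustrative names (`formatTags` here mirrors the helper added later in this diff):

// Sketch of the precomputed-tags pattern; names are illustrative.
const formatTags = (values, key) => values.map(v => [`${key}:${v.toLowerCase()}`])
const REQ_TAGS = formatTags(['HTTP.REQUEST.HEADER'], 'source_type') // built once

function incWithTags (metric, context, tags) {
  // accept a single tag entry or an array of them, as the code above does
  const list = Array.isArray(tags) ? tags : [tags]
  list.forEach(tag => metric.inc(context, tag))
}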

@@ -108,2 +119,10 @@ } catch (e) {

enable () {
this.configure(true)
}
disable () {
this.configure(false)
}
onConfigure () {}

@@ -110,0 +129,0 @@

@@ -24,4 +24,8 @@ const vulnerabilityReporter = require('./vulnerability-reporter')

let isEnabled = false
function enable (config, _tracer) {
iastTelemetry.configure(config, config.iast && config.iast.telemetryVerbosity)
if (isEnabled) return
iastTelemetry.configure(config, config.iast?.telemetryVerbosity)
enableAllAnalyzers(config)

@@ -34,5 +38,11 @@ enableTaintTracking(config.iast, iastTelemetry.verbosity)

vulnerabilityReporter.start(config, _tracer)
isEnabled = true
}
function disable () {
if (!isEnabled) return
isEnabled = false
iastTelemetry.stop()

@@ -48,3 +58,3 @@ disableAllAnalyzers()

function onIncomingHttpRequestStart (data) {
if (data && data.req) {
if (data?.req) {
const store = storage.getStore()

@@ -74,7 +84,7 @@ if (store) {

function onIncomingHttpRequestEnd (data) {
if (data && data.req) {
if (data?.req) {
const store = storage.getStore()
const topContext = web.getContext(data.req)
const iastContext = iastContextFunctions.getIastContext(store, topContext)
if (iastContext && iastContext.rootSpan) {
if (iastContext?.rootSpan) {
iastResponseEnd.publish(data)

@@ -81,0 +91,0 @@

@@ -55,5 +55,6 @@ 'use strict'

function acquireRequest (rootSpan) {
if (availableRequest > 0) {
if (availableRequest > 0 && rootSpan) {
const sampling = config && typeof config.requestSampling === 'number'
? config.requestSampling : 30
? config.requestSampling
: 30
if (rootSpan.context().toSpanId().slice(-2) <= sampling) {

@@ -60,0 +61,0 @@ availableRequest--
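
The sampling gate compares the last two characters of the root span id with the configured percentage; for decimal span-id strings those trailing digits are roughly uniform over 0-99, so about `requestSampling` percent of requests pass. A sketch of the check, assuming a decimal span-id string (note the implicit string-to-number coercion in `<=`):

// Sketch of the trailing-digits sampling check above.
function sampledIn (spanIdString, samplingPct = 30) {
  // '…13234'.slice(-2) => '34'; JS coerces the string for the <= comparison
  return spanIdString.slice(-2) <= samplingPct
}
// sampledIn('891349108901713234') => false (34 > 30)
// sampledIn('891349108901713204') => true (4 <= 30)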

@@ -32,9 +32,12 @@ 'use strict'

Error.stackTraceLimit = 100
Error.prepareStackTrace = function (_, callsites) {
callsiteList = callsites
try {
Error.prepareStackTrace = function (_, callsites) {
callsiteList = callsites
}
const e = new Error()
e.stack
} finally {
Error.prepareStackTrace = previousPrepareStackTrace
Error.stackTraceLimit = previousStackTraceLimit
}
const e = new Error()
e.stack
Error.prepareStackTrace = previousPrepareStackTrace
Error.stackTraceLimit = previousStackTraceLimit
return callsiteList
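
Wrapping the `Error.prepareStackTrace` override in `try`/`finally` guarantees the previous hook and `stackTraceLimit` are restored even if reading `e.stack` throws. The underlying idiom uses only standard V8 APIs; a self-contained sketch:

// Minimal sketch of the callsite-capture idiom shown above (standard V8 API).
function getCallsites () {
  const previousPrepare = Error.prepareStackTrace
  const previousLimit = Error.stackTraceLimit
  let callsites = []
  try {
    Error.stackTraceLimit = 100
    Error.prepareStackTrace = (_, sites) => { callsites = sites }
    const e = new Error()
    e.stack // reading .stack triggers prepareStackTrace lazily
  } finally {
    Error.prepareStackTrace = previousPrepare
    Error.stackTraceLimit = previousLimit
  }
  return callsites
}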

@@ -41,0 +44,0 @@ }

@@ -5,2 +5,3 @@ 'use strict'

{ src: 'concat' },
{ src: 'parse' },
{ src: 'plusOperator', operator: true },

@@ -7,0 +8,0 @@ { src: 'random' },

@@ -13,3 +13,6 @@ 'use strict'

const taintTrackingPlugin = require('./plugin')
const kafkaConsumerPlugin = require('./plugins/kafka')
const kafkaContextPlugin = require('../context/kafka-ctx-plugin')
module.exports = {

@@ -20,2 +23,6 @@ enableTaintTracking (config, telemetryVerbosity) {

taintTrackingPlugin.enable()
kafkaContextPlugin.enable()
kafkaConsumerPlugin.enable()
setMaxTransactions(config.maxConcurrentRequests)

@@ -27,7 +34,10 @@ },

taintTrackingPlugin.disable()
kafkaContextPlugin.disable()
kafkaConsumerPlugin.disable()
},
setMaxTransactions: setMaxTransactions,
createTransaction: createTransaction,
removeTransaction: removeTransaction,
setMaxTransactions,
createTransaction,
removeTransaction,
taintTrackingPlugin
}

@@ -5,3 +5,2 @@ 'use strict'

const { IAST_TRANSACTION_ID } = require('../iast-context')
const iastLog = require('../iast-log')
const iastTelemetry = require('../telemetry')

@@ -11,2 +10,3 @@ const { REQUEST_TAINTED } = require('../telemetry/iast-metric')

const { getTaintTrackingImpl, getTaintTrackingNoop } = require('./taint-tracking-impl')
const { taintObject } = require('./operations-taint-object')

@@ -24,3 +24,3 @@ function createTransaction (id, iastContext) {

if (metrics?.requestCount) {
REQUEST_TAINTED.add(metrics.requestCount, null, iastContext)
REQUEST_TAINTED.inc(iastContext, metrics.requestCount)
}

@@ -40,3 +40,3 @@ }

function newTaintedString (iastContext, string, name, type) {
let result = string
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -51,46 +51,9 @@ if (transactionId) {

function taintObject (iastContext, object, type, keyTainting, keyType) {
let result = object
function newTaintedObject (iastContext, obj, name, type) {
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]
if (transactionId) {
const queue = [{ parent: null, property: null, value: object }]
const visited = new WeakSet()
while (queue.length > 0) {
const { parent, property, value, key } = queue.pop()
if (value === null) {
continue
}
try {
if (typeof value === 'string') {
const tainted = TaintedUtils.newTaintedString(transactionId, value, property, type)
if (!parent) {
result = tainted
} else {
if (keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = tainted
} else {
parent[key] = tainted
}
}
} else if (typeof value === 'object' && !visited.has(value)) {
visited.add(value)
const keys = Object.keys(value)
for (let i = 0; i < keys.length; i++) {
const key = keys[i]
queue.push({ parent: value, property: property ? `${property}.${key}` : key, value: value[key], key })
}
if (parent && keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = value
}
}
} catch (e) {
iastLog.error(`Error visiting property : ${property}`).errorAndPublish(e)
}
}
result = TaintedUtils.newTaintedObject(transactionId, obj, name, type)
} else {
result = obj
}

@@ -101,3 +64,3 @@ return result

function isTainted (iastContext, string) {
let result = false
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -113,3 +76,3 @@ if (transactionId) {

function getRanges (iastContext, string) {
let result = []
let result
const transactionId = iastContext?.[IAST_TRANSACTION_ID]

@@ -158,2 +121,3 @@ if (transactionId) {

newTaintedString,
newTaintedObject,
taintObject,

@@ -160,0 +124,0 @@ isTainted,

@@ -6,3 +6,3 @@ 'use strict'

const { storage } = require('../../../../../datadog-core')
const { taintObject, newTaintedString } = require('./operations')
const { taintObject, newTaintedString, getRanges } = require('./operations')
const {

@@ -18,3 +18,7 @@ HTTP_REQUEST_BODY,

} = require('./source-types')
const { EXECUTED_SOURCE } = require('../telemetry/iast-metric')
const REQ_HEADER_TAGS = EXECUTED_SOURCE.formatTags(HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME)
const REQ_URI_TAGS = EXECUTED_SOURCE.formatTags(HTTP_REQUEST_URI)
class TaintTrackingPlugin extends SourceIastPlugin {

@@ -31,5 +35,5 @@ constructor () {

const iastContext = getIastContext(storage.getStore())
if (iastContext && iastContext['body'] !== req.body) {
if (iastContext && iastContext.body !== req.body) {
this._taintTrackingHandler(HTTP_REQUEST_BODY, req, 'body', iastContext)
iastContext['body'] = req.body
iastContext.body = req.body
}

@@ -49,5 +53,5 @@ }

const iastContext = getIastContext(storage.getStore())
if (iastContext && iastContext['body'] !== req.body) {
if (iastContext && iastContext.body !== req.body) {
this._taintTrackingHandler(HTTP_REQUEST_BODY, req, 'body', iastContext)
iastContext['body'] = req.body
iastContext.body = req.body
}

@@ -72,2 +76,14 @@ }

this.addSub(
{ channelName: 'apm:graphql:resolve:start', tag: HTTP_REQUEST_BODY },
(data) => {
const iastContext = getIastContext(storage.getStore())
const source = data.context?.source
const ranges = source && getRanges(iastContext, source)
if (ranges?.length) {
this._taintTrackingHandler(ranges[0].iinfo.type, data.args, null, iastContext)
}
}
)
// this is a special case to increment INSTRUMENTED_SOURCE metric for header

@@ -93,3 +109,3 @@ this.addInstrumentedSource('http', [HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME])

handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE, true, HTTP_REQUEST_HEADER_NAME),
tag: [HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME],
tags: REQ_HEADER_TAGS,
iastContext

@@ -104,3 +120,3 @@ })

},
tag: [HTTP_REQUEST_URI],
tags: REQ_URI_TAGS,
iastContext

@@ -114,12 +130,4 @@ })

}
enable () {
this.configure(true)
}
disable () {
this.configure(false)
}
}
module.exports = new TaintTrackingPlugin()

@@ -17,3 +17,3 @@ 'use strict'

if (metrics && metrics.instrumentedPropagation) {
INSTRUMENTED_PROPAGATION.add(metrics.instrumentedPropagation)
INSTRUMENTED_PROPAGATION.inc(undefined, metrics.instrumentedPropagation)
}

@@ -20,0 +20,0 @@

@@ -15,2 +15,3 @@ 'use strict'

let getPrepareStackTrace
let kSymbolPrepareStackTrace

@@ -48,2 +49,3 @@ let getRewriterOriginalPathAndLineFromSourceMap = function (path, line, column) {

getPrepareStackTrace = iastRewriter.getPrepareStackTrace
kSymbolPrepareStackTrace = iastRewriter.kSymbolPrepareStackTrace

@@ -70,4 +72,5 @@ const chainSourceMap = isFlagPresent('--enable-source-maps')

let originalPrepareStackTrace = Error.prepareStackTrace
let originalPrepareStackTrace
function getPrepareStackTraceAccessor () {
originalPrepareStackTrace = Error.prepareStackTrace
let actual = getPrepareStackTrace(originalPrepareStackTrace)

@@ -127,3 +130,12 @@ return {

shimmer.unwrap(Module.prototype, '_compile')
Error.prepareStackTrace = originalPrepareStackTrace
if (!Error.prepareStackTrace?.[kSymbolPrepareStackTrace]) return
try {
delete Error.prepareStackTrace
Error.prepareStackTrace = originalPrepareStackTrace
} catch (e) {
iastLog.warn(e)
}
}
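
Tagging the installed hook with a well-known symbol lets teardown tell its own `Error.prepareStackTrace` apart from one installed later by other code, and skip the restore in that case. A sketch of the pattern (`kMine` is an illustrative name, not the rewriter's actual symbol):

// Sketch of the symbol-guard pattern above; kMine is an illustrative name.
const kMine = Symbol('prepareStackTrace installed by this module')

function install (wrapped) {
  wrapped[kMine] = true
  Error.prepareStackTrace = wrapped
}

function uninstall (original) {
  // only restore if the currently installed hook is still ours
  if (!Error.prepareStackTrace?.[kMine]) return
  Error.prepareStackTrace = original
}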

@@ -130,0 +142,0 @@

@@ -12,3 +12,5 @@ 'use strict'

HTTP_REQUEST_PATH_PARAM: 'http.request.path.parameter',
HTTP_REQUEST_URI: 'http.request.uri'
HTTP_REQUEST_URI: 'http.request.uri',
KAFKA_MESSAGE_KEY: 'kafka.message.key',
KAFKA_MESSAGE_VALUE: 'kafka.message.value'
}

@@ -10,5 +10,8 @@ 'use strict'

const { isDebugAllowed } = require('../telemetry/verbosity')
const { taintObject } = require('./operations-taint-object')
const mathRandomCallCh = dc.channel('datadog:random:call')
const JSON_VALUE = 'json.value'
function noop (res) { return res }

@@ -18,4 +21,5 @@ // NOTE: methods of this object must be synchronized with csi-methods.js file definitions!

const TaintTrackingNoop = {
concat: noop,
parse: noop,
plusOperator: noop,
concat: noop,
random: noop,

@@ -31,3 +35,3 @@ replace: noop,

function getTransactionId (iastContext) {
return iastContext && iastContext[iastContextFunctions.IAST_TRANSACTION_ID]
return iastContext?.[iastContextFunctions.IAST_TRANSACTION_ID]
}

@@ -42,3 +46,3 @@

const iastContext = getContextDefault()
EXECUTED_PROPAGATION.inc(null, iastContext)
EXECUTED_PROPAGATION.inc(iastContext)
return iastContext

@@ -110,3 +114,3 @@ }

} catch (e) {
iastLog.error(`Error invoking CSI plusOperator`)
iastLog.error('Error invoking CSI plusOperator')
.errorAndPublish(e)

@@ -129,2 +133,25 @@ }

return res
},
parse: function (res, fn, target, json) {
if (fn === JSON.parse) {
try {
const iastContext = getContext()
const transactionId = getTransactionId(iastContext)
if (transactionId) {
const ranges = TaintedUtils.getRanges(transactionId, json)
// TODO: first version.
// here we lose the original source because taintObject always creates a new tainted object
if (ranges?.length > 0) {
const range = ranges.find(range => range.iinfo?.type)
res = taintObject(iastContext, res, range?.iinfo.type || JSON_VALUE)
}
}
} catch (e) {
iastLog.error(e)
}
}
return res
}
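
The new `parse` method only acts when the intercepted function is literally `JSON.parse`: it looks up the taint ranges of the input string and re-taints the parsed result with the original source type, falling back to a generic `json.value`. A simplified sketch with the native taint store mocked out:

// Simplified sketch; the Map and retaint() stand in for TaintedUtils.
const taintRanges = new Map() // tainted source string -> [{ iinfo: { type } }]
const retaint = (obj, type) => obj // no-op stand-in for taintObject()

function parseHandler (res, fn, target, json) {
  if (fn !== JSON.parse) return res
  const ranges = taintRanges.get(json)
  if (ranges?.length) {
    const type = ranges.find(r => r.iinfo?.type)?.iinfo.type || 'json.value'
    res = retaint(res, type)
  }
  return res
}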

@@ -131,0 +158,0 @@ }

@@ -22,2 +22,15 @@ 'use strict'

function formatTags (tags, tagKey) {
return tags.map(tagValue => tagValue ? [`${tagKey}:${tagValue.toLowerCase()}`] : undefined)
}
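
`formatTags` builds one single-element tag array per value and leaves `undefined` for empty values, so untagged points can still be counted. For example:

formatTags(['SQL_INJECTION', undefined], 'vulnerability_type')
// => [['vulnerability_type:sql_injection'], undefined]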
function getNamespace (scope, context) {
let namespace = globalNamespace
if (scope === Scope.REQUEST) {
namespace = getNamespaceFromContext(context) || globalNamespace
}
return namespace
}
class IastMetric {

@@ -30,26 +43,22 @@ constructor (name, scope, tagKey) {

getNamespace (context) {
return getNamespaceFromContext(context) || globalNamespace
formatTags (...tags) {
return formatTags(tags, this.tagKey)
}
getTag (tagValue) {
return tagValue ? { [this.tagKey]: tagValue } : undefined
inc (context, tags, value = 1) {
const namespace = getNamespace(this.scope, context)
namespace.count(this.name, tags).inc(value)
}
}
addValue (value, tagValue, context) {
this.getNamespace(context)
.count(this.name, this.getTag(tagValue))
.inc(value)
}
class NoTaggedIastMetric extends IastMetric {
constructor (name, scope) {
super(name, scope)
add (value, tagValue, context) {
if (Array.isArray(tagValue)) {
tagValue.forEach(tag => this.addValue(value, tag, context))
} else {
this.addValue(value, tagValue, context)
}
this.tags = []
}
inc (tagValue, context) {
this.add(1, tagValue, context)
inc (context, value = 1) {
const namespace = getNamespace(this.scope, context)
namespace.count(this.name, this.tags).inc(value)
}

@@ -66,3 +75,3 @@ }

const INSTRUMENTED_PROPAGATION = new IastMetric('instrumented.propagation', Scope.GLOBAL)
const INSTRUMENTED_PROPAGATION = new NoTaggedIastMetric('instrumented.propagation', Scope.GLOBAL)
const INSTRUMENTED_SOURCE = new IastMetric('instrumented.source', Scope.GLOBAL, TagKey.SOURCE_TYPE)

@@ -74,11 +83,8 @@ const INSTRUMENTED_SINK = new IastMetric('instrumented.sink', Scope.GLOBAL, TagKey.VULNERABILITY_TYPE)

const REQUEST_TAINTED = new IastMetric('request.tainted', Scope.REQUEST)
const REQUEST_TAINTED = new NoTaggedIastMetric('request.tainted', Scope.REQUEST)
// DEBUG using metrics
const EXECUTED_PROPAGATION = new IastMetric('executed.propagation', Scope.REQUEST)
const EXECUTED_TAINTED = new IastMetric('executed.tainted', Scope.REQUEST)
const EXECUTED_PROPAGATION = new NoTaggedIastMetric('executed.propagation', Scope.REQUEST)
const EXECUTED_TAINTED = new NoTaggedIastMetric('executed.tainted', Scope.REQUEST)
// DEBUG using distribution endpoint
const INSTRUMENTATION_TIME = new IastMetric('instrumentation.time', Scope.GLOBAL)
module.exports = {

@@ -96,4 +102,2 @@ INSTRUMENTED_PROPAGATION,

INSTRUMENTATION_TIME,
PropagationType,

@@ -103,5 +107,8 @@ TagKey,

IastMetric,
NoTaggedIastMetric,
getExecutedMetric,
getInstrumentedMetric
getInstrumentedMetric,
formatTags
}

@@ -5,4 +5,5 @@ 'use strict'

const { Namespace } = require('../../../telemetry/metrics')
const { addMetricsToSpan, filterTags } = require('./span-tags')
const { addMetricsToSpan } = require('./span-tags')
const { IAST_TRACE_METRIC_PREFIX } = require('../tags')
const iastLog = require('../iast-log')

@@ -14,3 +15,3 @@ const DD_IAST_METRICS_NAMESPACE = Symbol('_dd.iast.request.metrics.namespace')

const namespace = new Namespace('iast')
const namespace = new IastNamespace()
context[DD_IAST_METRICS_NAMESPACE] = namespace

@@ -21,3 +22,3 @@ return namespace

function getNamespaceFromContext (context) {
return context && context[DD_IAST_METRICS_NAMESPACE]
return context?.[DD_IAST_METRICS_NAMESPACE]
}

@@ -30,8 +31,7 @@

const metrics = [...namespace.metrics.values()]
namespace.metrics.clear()
addMetricsToSpan(rootSpan, [...namespace.metrics.values()], IAST_TRACE_METRIC_PREFIX)
addMetricsToSpan(rootSpan, metrics, IAST_TRACE_METRIC_PREFIX)
merge(namespace)
merge(metrics)
namespace.clear()
} catch (e) {

@@ -46,13 +46,12 @@ log.error(e)

function merge (metrics) {
metrics.forEach(metric => metric.points.forEach(point => {
globalNamespace
.count(metric.metric, getTagsObject(metric.tags))
.inc(point[1])
}))
}
function merge (namespace) {
for (const [metricName, metricsByTagMap] of namespace.iastMetrics) {
for (const [tags, metric] of metricsByTagMap) {
const { type, points } = metric
function getTagsObject (tags) {
if (tags && tags.length > 0) {
return filterTags(tags)
if (points?.length && type === 'count') {
const gMetric = globalNamespace.getMetric(metricName, tags)
points.forEach(point => gMetric.inc(point[1]))
}
}
}

@@ -62,9 +61,45 @@ }

class IastNamespace extends Namespace {
constructor () {
constructor (maxMetricTagsSize = 100) {
super('iast')
this.maxMetricTagsSize = maxMetricTagsSize
this.iastMetrics = new Map()
}
reset () {
getIastMetrics (name) {
let metrics = this.iastMetrics.get(name)
if (!metrics) {
metrics = new Map()
this.iastMetrics.set(name, metrics)
}
return metrics
}
getMetric (name, tags, type = 'count') {
const metrics = this.getIastMetrics(name)
let metric = metrics.get(tags)
if (!metric) {
metric = super[type](name, Array.isArray(tags) ? [...tags] : tags)
if (metrics.size === this.maxMetricTagsSize) {
metrics.clear()
iastLog.warnAndPublish(`Tags cache max size reached for metric ${name}`)
}
metrics.set(tags, metric)
}
return metric
}
count (name, tags) {
return this.getMetric(name, tags, 'count')
}
clear () {
this.iastMetrics.clear()
this.distributions.clear()
this.metrics.clear()
this.distributions.clear()
}
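
The per-request namespace memoizes metric instances by (name, tags) and clears a metric's cache once it reaches `maxMetricTagsSize`, bounding memory. Because the cache `Map` keys on the tags reference, the precomputed tag arrays elsewhere in this diff are what make it hit; a fresh array per call would always miss. A usage sketch under that reading:

// Usage sketch: reusing one tags array hits the cache (Map keys by reference).
const ns = new IastNamespace(100)
const tags = ['vulnerability_type:sql_injection']
ns.count('executed.sink', tags).inc(1)
ns.count('executed.sink', tags) // same metric instance, no new allocation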

@@ -81,3 +116,5 @@ }

DD_IAST_METRICS_NAMESPACE
DD_IAST_METRICS_NAMESPACE,
IastNamespace
}
'use strict'
function addMetricsToSpan (rootSpan, metrics, tagPrefix) {
if (!rootSpan || !rootSpan.addTags || !metrics) return
if (!rootSpan?.addTags || !metrics) return
const flattenMap = new Map()
metrics
.filter(data => data && data.metric)
.filter(data => data?.metric)
.forEach(data => {

@@ -30,3 +30,4 @@ const name = taggedMetricName(data)

function flatten (metricData) {
return metricData.points && metricData.points.map(point => point[1]).reduce((total, value) => total + value, 0)
const { points } = metricData
return points ? points.map(point => point[1]).reduce((total, value) => total + value, 0) : 0
}

@@ -36,4 +37,4 @@

const metric = data.metric
const tags = data.tags && filterTags(data.tags)
return !tags || !tags.length
const tags = filterTags(data.tags)
return !tags?.length
? metric

@@ -44,3 +45,3 @@ : `${metric}.${processTagValue(tags)}`

function filterTags (tags) {
return tags.filter(tag => !tag.startsWith('lib_language') && !tag.startsWith('version'))
return tags?.filter(tag => !tag.startsWith('version'))
}

@@ -47,0 +48,0 @@

@@ -71,3 +71,3 @@ 'use strict'

if (!pattern) {
pattern = patterns['ANSI']
pattern = patterns.ANSI
}

@@ -74,0 +74,0 @@ pattern.lastIndex = 0

@@ -16,3 +16,3 @@ 'use strict'

function iterateObject (target, fn, levelKeys = [], depth = 50) {
function iterateObject (target, fn, levelKeys = [], depth = 10, visited = new Set()) {
Object.keys(target).forEach((key) => {

@@ -22,6 +22,9 @@ const nextLevelKeys = [...levelKeys, key]

fn(val, nextLevelKeys, target, key)
if (typeof val !== 'object' || !visited.has(val)) {
visited.add(val)
fn(val, nextLevelKeys, target, key)
if (val !== null && typeof val === 'object') {
iterateObject(val, fn, nextLevelKeys, depth - 1)
if (val !== null && typeof val === 'object' && depth > 0) {
iterateObject(val, fn, nextLevelKeys, depth - 1, visited)
}
}
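
The reworked traversal threads a shared `visited` set and a depth budget through the recursion, so cyclic or very deep objects terminate. A self-contained sketch of the same guard:

// Self-contained sketch of the visited-set + depth-budget guard above.
function walkStrings (target, fn, depth = 10, visited = new Set()) {
  visited.add(target)
  for (const key of Object.keys(target)) {
    const val = target[key]
    if (typeof val === 'string') fn(val, key)
    else if (val !== null && typeof val === 'object' &&
             !visited.has(val) && depth > 0) {
      walkStrings(val, fn, depth - 1, visited)
    }
  }
}
const a = { name: 'x' }; a.self = a
walkStrings(a, v => console.log(v)) // logs 'x' once; the cycle is skipped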

@@ -28,0 +31,0 @@ })

@@ -17,3 +17,4 @@ 'use strict'

APM_TRACING_HTTP_HEADER_TAGS: 1n << 14n,
APM_TRACING_CUSTOM_TAGS: 1n << 15n
APM_TRACING_CUSTOM_TAGS: 1n << 15n,
APM_TRACING_ENABLED: 1n << 19n
}
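
Capabilities are single bits in a BigInt mask, so a new flag at bit 19 can never collide with existing ones and the mask can grow past 32 bits safely. Combining and testing works like any bitmask:

// Capabilities combine as a BigInt bitmask.
const APM_TRACING_CUSTOM_TAGS = 1n << 15n
const APM_TRACING_ENABLED = 1n << 19n
let mask = 0n
mask |= APM_TRACING_ENABLED
console.log((mask & APM_TRACING_ENABLED) !== 0n) // true
console.log((mask & APM_TRACING_CUSTOM_TAGS) !== 0n) // false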

@@ -17,2 +17,3 @@ 'use strict'

rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_SAMPLE_RATE, true)
rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_ENABLED, true)

@@ -63,3 +64,3 @@ const activation = Activation.fromConfig(config)

function enableWafUpdate (appsecConfig) {
if (rc && appsecConfig && !appsecConfig.customRulesProvided) {
if (rc && appsecConfig && !appsecConfig.rules) {
// dirty require to make startup faster for serverless

@@ -66,0 +67,0 @@ const RuleManager = require('../rule_manager')

@@ -20,21 +20,31 @@ 'use strict'

// the following header lists are ordered the same way the spec orders them; the order doesn't matter, but it makes them easier to compare
const contentHeaderList = [
'content-length',
'content-type',
'content-encoding',
'content-language',
'content-length',
'content-type'
'content-language'
]
const REQUEST_HEADERS_MAP = mapHeaderAndTags([
...ipHeaderList,
'forwarded',
'via',
...contentHeaderList,
'host',
'user-agent',
'accept',
'accept-encoding',
'accept-language',
'host',
'forwarded',
'user-agent',
'via',
'accept-language'
], 'http.request.headers.')
const IDENTIFICATION_HEADERS_MAP = mapHeaderAndTags([
'x-amzn-trace-id',
...ipHeaderList,
...contentHeaderList
'cloudfront-viewer-ja3-fingerprint',
'cf-ray',
'x-cloud-trace-context',
'x-appgw-trace-id',
'x-sigsci-requestid',
'x-sigsci-tags',
'akamai-user-risk'
], 'http.request.headers.')

@@ -175,2 +185,5 @@

// collect some headers even when no attack is detected
rootSpan.addTags(filterHeaders(req.headers, IDENTIFICATION_HEADERS_MAP))
if (!rootSpan.context()._tags['appsec.event']) return
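
The identification headers are now tagged on every request, ahead of the early return that previously limited collection to requests that had triggered an `appsec.event`. A rough sketch of the map-then-collect shape (`collectHeaders` and the map literal are hypothetical, standing in for `mapHeaderAndTags`/`filterHeaders`):

// Hypothetical stand-in for the mapHeaderAndTags/filterHeaders helpers.
const MAP = { 'cf-ray': 'http.request.headers.cf-ray' }
function collectHeaders (headers, map) {
  const tags = {}
  for (const [name, tag] of Object.entries(map)) {
    if (headers[name] !== undefined) tags[tag] = headers[name]
  }
  return tags
}
// rootSpan.addTags(collectHeaders(req.headers, MAP))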

@@ -177,0 +190,0 @@

@@ -42,3 +42,3 @@ 'use strict'

if (Object.keys(inputs).length) {
payload['persistent'] = inputs
payload.persistent = inputs
payloadHasData = true

@@ -48,3 +48,3 @@ }

if (ephemeral && Object.keys(ephemeral).length) {
payload['ephemeral'] = ephemeral
payload.ephemeral = ephemeral
payloadHasData = true

@@ -51,0 +51,0 @@ }

const request = require('../../exporters/common/request')
const id = require('../../id')
const log = require('../../log')

@@ -73,4 +72,3 @@ function getKnownTests ({

try {
const { data: { attributes: { test_full_names: knownTests } } } = JSON.parse(res)
log.debug(() => `Number of received known tests: ${Object.keys(knownTests).length}`)
const { data: { attributes: { tests: knownTests } } } = JSON.parse(res)
done(null, knownTests)

@@ -77,0 +75,0 @@ } catch (err) {

@@ -41,6 +41,11 @@ 'use strict'

this._isInitialized = true
const latestEvpProxyVersion = getLatestEvpProxyVersion(err, agentInfo)
let latestEvpProxyVersion = getLatestEvpProxyVersion(err, agentInfo)
const isEvpCompatible = latestEvpProxyVersion >= 2
const isGzipCompatible = latestEvpProxyVersion >= 4
// v3 does not work well with citestcycle, so we downgrade to v2
if (latestEvpProxyVersion === 3) {
latestEvpProxyVersion = 2
}
const evpProxyPrefix = `${AGENT_EVP_PROXY_PATH_PREFIX}${latestEvpProxyVersion}`
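
Version negotiation takes the newest EVP proxy version the agent advertises, derives compatibility flags from it, then special-cases v3 down to v2 because v3 misbehaves with the citestcycle payload. A compact sketch of that decision:

// Compact sketch of the version negotiation above.
function pickEvpVersion (latest) {
  const isEvpCompatible = latest >= 2
  const isGzipCompatible = latest >= 4
  if (latest === 3) latest = 2 // v3 misbehaves with the citestcycle payload
  return { version: latest, isEvpCompatible, isGzipCompatible }
}
// pickEvpVersion(3) => { version: 2, isEvpCompatible: true, isGzipCompatible: false }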

@@ -47,0 +52,0 @@ if (isEvpCompatible) {

@@ -1,2 +0,1 @@

const fs = require('fs')

@@ -3,0 +2,0 @@ const path = require('path')

@@ -105,3 +105,4 @@ const request = require('../../exporters/common/request')

testLevel === 'test'
? TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS : TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES,
? TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS
: TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES,
{},

@@ -108,0 +109,0 @@ skippableSuites.length

@@ -13,2 +13,3 @@ const CiPlugin = require('../../plugins/ci_plugin')

}
constructor (...args) {

@@ -15,0 +16,0 @@ super(...args)

@@ -11,2 +11,5 @@ 'use strict'

const tagger = require('./tagger')
const get = require('../../datadog-core/src/utils/src/get')
const has = require('../../datadog-core/src/utils/src/has')
const set = require('../../datadog-core/src/utils/src/set')
const { isTrue, isFalse } = require('./util')

@@ -24,2 +27,6 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags')

const qsRegex = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:"|%22)(?:%2[^2]|%[^2]|[^"%])+(?:"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}'
// eslint-disable-next-line max-len
const defaultWafObfuscatorKeyRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization'
// eslint-disable-next-line max-len
const defaultWafObfuscatorValueRegex = '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}'
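
These strings are handed to the WAF's obfuscator: the key regex flags sensitive parameter names, the value regex flags secret-shaped values for masking before any event leaves the process. `(?i)` is a PCRE inline flag the WAF understands but JavaScript does not, so a local illustration has to strip it and use the `i` flag instead:

// Illustration only; the WAF consumes the raw string, not a JS RegExp.
const valueRe = new RegExp(defaultWafObfuscatorValueRegex.replace('(?i)', ''), 'i')
valueRe.test('password=hunter2') // => true: this value would be obfuscated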

@@ -99,2 +106,7 @@ function maybeFile (filepath) {

options = options || {}
options = this.options = {
...options,
appsec: options.appsec != null ? options.appsec : options.experimental?.appsec,
iastOptions: options.experimental?.iast
}

@@ -116,66 +128,2 @@ // Configure the logger first so it can be used to warn about other configs

const DD_PROFILING_ENABLED = coalesce(
options.profiling, // TODO: remove when enabled by default
process.env.DD_EXPERIMENTAL_PROFILING_ENABLED,
process.env.DD_PROFILING_ENABLED,
false
)
const DD_PROFILING_EXPORTERS = coalesce(
process.env.DD_PROFILING_EXPORTERS,
'agent'
)
const DD_PROFILING_SOURCE_MAP = process.env.DD_PROFILING_SOURCE_MAP
const DD_RUNTIME_METRICS_ENABLED = coalesce(
options.runtimeMetrics, // TODO: remove when enabled by default
process.env.DD_RUNTIME_METRICS_ENABLED,
false
)
const DD_DBM_PROPAGATION_MODE = coalesce(
options.dbmPropagationMode,
process.env.DD_DBM_PROPAGATION_MODE,
'disabled'
)
const DD_DATA_STREAMS_ENABLED = coalesce(
options.dsmEnabled,
process.env.DD_DATA_STREAMS_ENABLED,
false
)
const DD_AGENT_HOST = coalesce(
options.hostname,
process.env.DD_AGENT_HOST,
process.env.DD_TRACE_AGENT_HOSTNAME,
'127.0.0.1'
)
const DD_TRACE_AGENT_PORT = coalesce(
options.port,
process.env.DD_TRACE_AGENT_PORT,
'8126'
)
const DD_TRACE_AGENT_URL = coalesce(
options.url,
process.env.DD_TRACE_AGENT_URL,
process.env.DD_TRACE_URL,
null
)
const DD_IS_CIVISIBILITY = coalesce(
options.isCiVisibility,
false
)
const DD_CIVISIBILITY_AGENTLESS_URL = process.env.DD_CIVISIBILITY_AGENTLESS_URL
const DD_CIVISIBILITY_ITR_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_ITR_ENABLED,
true
)
const DD_CIVISIBILITY_MANUAL_API_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED,
false
)
const DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED,
true
)
const DD_TRACE_MEMCACHED_COMMAND_ENABLED = coalesce(

@@ -188,18 +136,9 @@ process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED,

options.serviceMapping,
process.env.DD_SERVICE_MAPPING ? fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
) : {}
process.env.DD_SERVICE_MAPPING
? fromEntries(
process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
)
: {}
)
const DD_TRACE_STARTUP_LOGS = coalesce(
options.startupLogs,
process.env.DD_TRACE_STARTUP_LOGS,
false
)
const DD_OPENAI_LOGS_ENABLED = coalesce(
options.openAiLogsEnabled,
process.env.DD_OPENAI_LOGS_ENABLED,
false
)
const DD_API_KEY = coalesce(

@@ -210,52 +149,2 @@ process.env.DATADOG_API_KEY,

const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
const isGCPFunction = getIsGCPFunction()
const isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
const inServerlessEnvironment = inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan
const DD_INSTRUMENTATION_TELEMETRY_ENABLED = coalesce(
process.env.DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility
process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs
!inServerlessEnvironment
)
const DD_TELEMETRY_HEARTBEAT_INTERVAL = process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL
? Math.floor(parseFloat(process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL) * 1000)
: 60000
const DD_OPENAI_SPAN_CHAR_LIMIT = process.env.DD_OPENAI_SPAN_CHAR_LIMIT
? parseInt(process.env.DD_OPENAI_SPAN_CHAR_LIMIT)
: 128
const DD_TELEMETRY_DEBUG = coalesce(
process.env.DD_TELEMETRY_DEBUG,
false
)
const DD_TELEMETRY_METRICS_ENABLED = coalesce(
process.env.DD_TELEMETRY_METRICS_ENABLED,
true
)
const DD_TRACE_AGENT_PROTOCOL_VERSION = coalesce(
options.protocolVersion,
process.env.DD_TRACE_AGENT_PROTOCOL_VERSION,
'0.4'
)
const DD_TRACE_PARTIAL_FLUSH_MIN_SPANS = coalesce(
parseInt(options.flushMinSpans),
parseInt(process.env.DD_TRACE_PARTIAL_FLUSH_MIN_SPANS),
1000
)
const DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP = coalesce(
process.env.DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
qsRegex
)
const DD_TRACE_CLIENT_IP_ENABLED = coalesce(
options.clientIpEnabled,
process.env.DD_TRACE_CLIENT_IP_ENABLED && isTrue(process.env.DD_TRACE_CLIENT_IP_ENABLED),
false
)
const DD_TRACE_CLIENT_IP_HEADER = coalesce(
options.clientIpHeader,
process.env.DD_TRACE_CLIENT_IP_HEADER,
null
)
// TODO: Remove the experimental env vars as a major?

@@ -296,130 +185,17 @@ const DD_TRACE_B3_ENABLED = coalesce(

)
const DD_TRACE_RUNTIME_ID_ENABLED = coalesce(
options.experimental && options.experimental.runtimeId,
process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED,
false
)
const DD_TRACE_EXPORTER = coalesce(
options.experimental && options.experimental.exporter,
process.env.DD_TRACE_EXPERIMENTAL_EXPORTER
)
const DD_TRACE_GET_RUM_DATA_ENABLED = coalesce(
options.experimental && options.experimental.enableGetRumData,
process.env.DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED,
false
)
const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion(
coalesce(
options.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
)
)
const DD_TRACE_PEER_SERVICE_MAPPING = coalesce(
options.peerServiceMapping,
process.env.DD_TRACE_PEER_SERVICE_MAPPING ? fromEntries(
process.env.DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
) : {}
)
const peerServiceSet = (
options.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
options.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
)
const DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = (
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0'
// In v0, peer service is computed only if it is explicitly set to true
? peerServiceSet && isTrue(peerServiceValue)
// In >v0, peer service is false only if it is explicitly set to false
: (peerServiceSet ? !isFalse(peerServiceValue) : true)
)
const DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED = coalesce(
options.spanRemoveIntegrationFromService,
isTrue(process.env.DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
)
const DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH = coalesce(
process.env.DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH,
'512'
)
const DD_TRACE_STATS_COMPUTATION_ENABLED = coalesce(
options.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
isGCPFunction || isAzureFunctionConsumptionPlan
)
// the tracer generates 128 bit IDs by default as of v5
const DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = coalesce(
options.traceId128BitGenerationEnabled,
process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
true
)
const DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = coalesce(
options.traceId128BitLoggingEnabled,
process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED,
false
)
let appsec = options.appsec != null ? options.appsec : options.experimental && options.experimental.appsec
if (typeof appsec === 'boolean') {
appsec = {
enabled: appsec
if (typeof options.appsec === 'boolean') {
options.appsec = {
enabled: options.appsec
}
} else if (appsec == null) {
appsec = {}
} else if (options.appsec == null) {
options.appsec = {}
}
const DD_APPSEC_ENABLED = coalesce(
appsec.enabled,
process.env.DD_APPSEC_ENABLED && isTrue(process.env.DD_APPSEC_ENABLED)
)
const DD_APPSEC_RULES = coalesce(
appsec.rules,
process.env.DD_APPSEC_RULES
)
const DD_APPSEC_TRACE_RATE_LIMIT = coalesce(
parseInt(appsec.rateLimit),
parseInt(process.env.DD_APPSEC_TRACE_RATE_LIMIT),
100
)
const DD_APPSEC_WAF_TIMEOUT = coalesce(
parseInt(appsec.wafTimeout),
parseInt(process.env.DD_APPSEC_WAF_TIMEOUT),
5e3 // µs
)
const DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP = coalesce(
appsec.obfuscatorKeyRegex,
process.env.DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
`(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?(?:id|key|se\
cret)|sign(?:ed|ature)|bearer|authorization`
)
const DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = coalesce(
appsec.obfuscatorValueRegex,
process.env.DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
`(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|to\
ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:\\s*=[^;]|"\\s*:\\s*"[^"]+")|bearer\
\\s+[a-z0-9\\._\\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?\
|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*[a-z0-9\\/\\.+]{100,}`
)
const DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = coalesce(
maybeFile(appsec.blockedTemplateHtml),
maybeFile(process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML)
)
const DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = coalesce(
maybeFile(appsec.blockedTemplateJson),
maybeFile(process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON)
)
const DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = coalesce(
maybeFile(appsec.blockedTemplateGraphql),
maybeFile(options.appsec.blockedTemplateGraphql),
maybeFile(process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON)
)
const DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = coalesce(
appsec.eventTracking && appsec.eventTracking.mode,
options.appsec.eventTracking && options.appsec.eventTracking.mode,
process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING,

@@ -429,3 +205,3 @@ 'safe'

const DD_API_SECURITY_ENABLED = coalesce(
appsec?.apiSecurity?.enabled,
options.appsec?.apiSecurity?.enabled,
process.env.DD_API_SECURITY_ENABLED && isTrue(process.env.DD_API_SECURITY_ENABLED),

@@ -436,3 +212,3 @@ process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED && isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED),

const DD_API_SECURITY_REQUEST_SAMPLE_RATE = coalesce(
appsec?.apiSecurity?.requestSampling,
options.appsec?.apiSecurity?.requestSampling,
parseFloat(process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE),

@@ -442,91 +218,2 @@ 0.1

const remoteConfigOptions = options.remoteConfig || {}
const DD_REMOTE_CONFIGURATION_ENABLED = coalesce(
process.env.DD_REMOTE_CONFIGURATION_ENABLED && isTrue(process.env.DD_REMOTE_CONFIGURATION_ENABLED),
!inServerlessEnvironment
)
const DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = coalesce(
parseFloat(remoteConfigOptions.pollInterval),
parseFloat(process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS),
5 // seconds
)
const iastOptions = options?.experimental?.iast
const DD_IAST_ENABLED = coalesce(
iastOptions &&
(iastOptions === true || iastOptions.enabled === true),
process.env.DD_IAST_ENABLED,
false
)
const DD_TELEMETRY_LOG_COLLECTION_ENABLED = coalesce(
process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED,
DD_IAST_ENABLED
)
const DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED = coalesce(
process.env.DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED,
true
)
const defaultIastRequestSampling = 30
const iastRequestSampling = coalesce(
parseInt(iastOptions?.requestSampling),
parseInt(process.env.DD_IAST_REQUEST_SAMPLING),
defaultIastRequestSampling
)
const DD_IAST_REQUEST_SAMPLING = iastRequestSampling < 0 ||
iastRequestSampling > 100 ? defaultIastRequestSampling : iastRequestSampling
const DD_IAST_MAX_CONCURRENT_REQUESTS = coalesce(
parseInt(iastOptions?.maxConcurrentRequests),
parseInt(process.env.DD_IAST_MAX_CONCURRENT_REQUESTS),
2
)
const DD_IAST_MAX_CONTEXT_OPERATIONS = coalesce(
parseInt(iastOptions?.maxContextOperations),
parseInt(process.env.DD_IAST_MAX_CONTEXT_OPERATIONS),
2
)
const DD_IAST_DEDUPLICATION_ENABLED = coalesce(
iastOptions?.deduplicationEnabled,
process.env.DD_IAST_DEDUPLICATION_ENABLED && isTrue(process.env.DD_IAST_DEDUPLICATION_ENABLED),
true
)
const DD_IAST_REDACTION_ENABLED = coalesce(
iastOptions?.redactionEnabled,
!isFalse(process.env.DD_IAST_REDACTION_ENABLED),
true
)
const DD_IAST_REDACTION_NAME_PATTERN = coalesce(
iastOptions?.redactionNamePattern,
process.env.DD_IAST_REDACTION_NAME_PATTERN,
null
)
const DD_IAST_REDACTION_VALUE_PATTERN = coalesce(
iastOptions?.redactionValuePattern,
process.env.DD_IAST_REDACTION_VALUE_PATTERN,
null
)
const DD_IAST_TELEMETRY_VERBOSITY = coalesce(
iastOptions?.telemetryVerbosity,
process.env.DD_IAST_TELEMETRY_VERBOSITY,
'INFORMATION'
)
const DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = coalesce(
process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED,
true
)
const DD_TRACE_GIT_METADATA_ENABLED = coalesce(
process.env.DD_TRACE_GIT_METADATA_ENABLED,
true
)
// 0: disabled, 1: logging, 2: garbage collection + logging

@@ -551,6 +238,3 @@ const DD_TRACE_SPAN_LEAK_DEBUG = coalesce(

const ingestion = options.ingestion || {}
const dogstatsd = coalesce(options.dogstatsd, {})
const sampler = {
rateLimit: coalesce(options.rateLimit, process.env.DD_TRACE_RATE_LIMIT, ingestion.rateLimit),
rules: coalesce(

@@ -578,25 +262,5 @@ options.samplingRules,

const defaultFlushInterval = inAWSLambda ? 0 : 2000
this.dbmPropagationMode = DD_DBM_PROPAGATION_MODE
this.dsmEnabled = isTrue(DD_DATA_STREAMS_ENABLED)
this.openAiLogsEnabled = DD_OPENAI_LOGS_ENABLED
// TODO: refactor
this.apiKey = DD_API_KEY
this.url = DD_CIVISIBILITY_AGENTLESS_URL ? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(DD_TRACE_AGENT_URL, options)
this.site = coalesce(options.site, process.env.DD_SITE, 'datadoghq.com')
this.hostname = DD_AGENT_HOST || (this.url && this.url.hostname)
this.port = String(DD_TRACE_AGENT_PORT || (this.url && this.url.port))
this.flushInterval = coalesce(parseInt(options.flushInterval, 10), defaultFlushInterval)
this.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS
this.queryStringObfuscation = DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP
this.clientIpEnabled = DD_TRACE_CLIENT_IP_ENABLED
this.clientIpHeader = DD_TRACE_CLIENT_IP_HEADER
this.plugins = !!coalesce(options.plugins, true)
this.serviceMapping = DD_SERVICE_MAPPING
this.dogstatsd = {
hostname: coalesce(dogstatsd.hostname, process.env.DD_DOGSTATSD_HOSTNAME, this.hostname),
port: String(coalesce(dogstatsd.port, process.env.DD_DOGSTATSD_PORT, 8125))
}
this.runtimeMetrics = isTrue(DD_RUNTIME_METRICS_ENABLED)
this.tracePropagationStyle = {

@@ -607,42 +271,4 @@ inject: DD_TRACE_PROPAGATION_STYLE_INJECT,

this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST)
this.experimental = {
runtimeId: isTrue(DD_TRACE_RUNTIME_ID_ENABLED),
exporter: DD_TRACE_EXPORTER,
enableGetRumData: isTrue(DD_TRACE_GET_RUM_DATA_ENABLED)
}
this.sampler = sampler
this.reportHostname = isTrue(coalesce(options.reportHostname, process.env.DD_TRACE_REPORT_HOSTNAME, false))
this.scope = process.env.DD_TRACE_SCOPE
this.profiling = {
enabled: isTrue(DD_PROFILING_ENABLED),
sourceMap: !isFalse(DD_PROFILING_SOURCE_MAP),
exporters: DD_PROFILING_EXPORTERS
}
this.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
this.spanComputePeerService = DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
this.spanRemoveIntegrationFromService = DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED
this.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING
this.lookup = options.lookup
this.startupLogs = isTrue(DD_TRACE_STARTUP_LOGS)
// Disabled for CI Visibility's agentless
this.telemetry = {
enabled: isTrue(DD_INSTRUMENTATION_TELEMETRY_ENABLED),
heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL,
debug: isTrue(DD_TELEMETRY_DEBUG),
logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED),
metrics: isTrue(DD_TELEMETRY_METRICS_ENABLED),
dependencyCollection: DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED
}
this.protocolVersion = DD_TRACE_AGENT_PROTOCOL_VERSION
this.tagsHeaderMaxLength = parseInt(DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH)
this.appsec = {
enabled: DD_APPSEC_ENABLED,
rules: DD_APPSEC_RULES,
customRulesProvided: !!DD_APPSEC_RULES,
rateLimit: DD_APPSEC_TRACE_RATE_LIMIT,
wafTimeout: DD_APPSEC_WAF_TIMEOUT,
obfuscatorKeyRegex: DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
obfuscatorValueRegex: DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
blockedTemplateHtml: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
blockedTemplateJson: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
blockedTemplateGraphql: DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,

@@ -660,45 +286,6 @@ eventTracking: {

this.remoteConfig = {
enabled: DD_REMOTE_CONFIGURATION_ENABLED,
pollInterval: DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS
}
this.iast = {
enabled: isTrue(DD_IAST_ENABLED),
requestSampling: DD_IAST_REQUEST_SAMPLING,
maxConcurrentRequests: DD_IAST_MAX_CONCURRENT_REQUESTS,
maxContextOperations: DD_IAST_MAX_CONTEXT_OPERATIONS,
deduplicationEnabled: DD_IAST_DEDUPLICATION_ENABLED,
redactionEnabled: DD_IAST_REDACTION_ENABLED,
redactionNamePattern: DD_IAST_REDACTION_NAME_PATTERN,
redactionValuePattern: DD_IAST_REDACTION_VALUE_PATTERN,
telemetryVerbosity: DD_IAST_TELEMETRY_VERBOSITY
}
this.isCiVisibility = isTrue(DD_IS_CIVISIBILITY)
this.isIntelligentTestRunnerEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_ITR_ENABLED)
this.isGitUploadEnabled = this.isCiVisibility &&
(this.isIntelligentTestRunnerEnabled && !isFalse(DD_CIVISIBILITY_GIT_UPLOAD_ENABLED))
this.gitMetadataEnabled = isTrue(DD_TRACE_GIT_METADATA_ENABLED)
this.isManualApiEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_MANUAL_API_ENABLED)
this.isEarlyFlakeDetectionEnabled = this.isCiVisibility && isTrue(DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED)
this.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT
// Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED)
this.stats = {
enabled: isTrue(DD_TRACE_STATS_COMPUTATION_ENABLED)
}
this.traceId128BitGenerationEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
this.traceId128BitLoggingEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
this.isGCPFunction = isGCPFunction
this.isAzureFunctionConsumptionPlan = isAzureFunctionConsumptionPlan
this.isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG)
this.installSignature = {

@@ -713,2 +300,3 @@ id: DD_INSTRUMENTATION_INSTALL_ID,

this._applyOptions(options)
this._applyCalculated()
this._applyRemote({})

@@ -772,5 +360,15 @@ this._merge()

// TODO: test
this._applyCalculated()
this._merge()
}
_isInServerlessEnvironment () {
const inAWSLambda = process.env.AWS_LAMBDA_FUNCTION_NAME !== undefined
const isGCPFunction = getIsGCPFunction()
const isAzureFunctionConsumptionPlan = getIsAzureFunctionConsumptionPlan()
return inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan
}
// for _merge to work, every config value must have a default value
_applyDefaults () {

@@ -793,10 +391,79 @@ const {

this._setValue(defaults, 'appsec.blockedTemplateHtml', undefined)
this._setValue(defaults, 'appsec.blockedTemplateJson', undefined)
this._setValue(defaults, 'appsec.enabled', undefined)
this._setValue(defaults, 'appsec.obfuscatorKeyRegex', defaultWafObfuscatorKeyRegex)
this._setValue(defaults, 'appsec.obfuscatorValueRegex', defaultWafObfuscatorValueRegex)
this._setValue(defaults, 'appsec.rateLimit', 100)
this._setValue(defaults, 'appsec.rules', undefined)
this._setValue(defaults, 'appsec.wafTimeout', 5e3) // µs
this._setValue(defaults, 'clientIpEnabled', false)
this._setValue(defaults, 'clientIpHeader', null)
this._setValue(defaults, 'dbmPropagationMode', 'disabled')
this._setValue(defaults, 'dogstatsd.hostname', '127.0.0.1')
this._setValue(defaults, 'dogstatsd.port', '8125')
this._setValue(defaults, 'dsmEnabled', false)
this._setValue(defaults, 'env', undefined)
this._setValue(defaults, 'experimental.enableGetRumData', false)
this._setValue(defaults, 'experimental.exporter', undefined)
this._setValue(defaults, 'experimental.runtimeId', false)
this._setValue(defaults, 'flushInterval', 2000)
this._setValue(defaults, 'flushMinSpans', 1000)
this._setValue(defaults, 'gitMetadataEnabled', true)
this._setValue(defaults, 'headerTags', [])
this._setValue(defaults, 'hostname', '127.0.0.1')
this._setValue(defaults, 'iast.deduplicationEnabled', true)
this._setValue(defaults, 'iast.enabled', false)
this._setValue(defaults, 'iast.maxConcurrentRequests', 2)
this._setValue(defaults, 'iast.maxContextOperations', 2)
this._setValue(defaults, 'iast.redactionEnabled', true)
this._setValue(defaults, 'iast.redactionNamePattern', null)
this._setValue(defaults, 'iast.redactionValuePattern', null)
this._setValue(defaults, 'iast.requestSampling', 30)
this._setValue(defaults, 'iast.telemetryVerbosity', 'INFORMATION')
this._setValue(defaults, 'isCiVisibility', false)
this._setValue(defaults, 'isEarlyFlakeDetectionEnabled', false)
this._setValue(defaults, 'isGCPFunction', false)
this._setValue(defaults, 'isGitUploadEnabled', false)
this._setValue(defaults, 'isIntelligentTestRunnerEnabled', false)
this._setValue(defaults, 'isManualApiEnabled', false)
this._setValue(defaults, 'logInjection', false)
this._setValue(defaults, 'lookup', undefined)
this._setValue(defaults, 'openAiLogsEnabled', false)
this._setValue(defaults, 'openaiSpanCharLimit', 128)
this._setValue(defaults, 'peerServiceMapping', {})
this._setValue(defaults, 'plugins', true)
this._setValue(defaults, 'port', '8126')
this._setValue(defaults, 'profiling.enabled', false)
this._setValue(defaults, 'profiling.exporters', 'agent')
this._setValue(defaults, 'profiling.sourceMap', true)
this._setValue(defaults, 'protocolVersion', '0.4')
this._setValue(defaults, 'queryStringObfuscation', qsRegex)
this._setValue(defaults, 'remoteConfig.enabled', true)
this._setValue(defaults, 'remoteConfig.pollInterval', 5) // seconds
this._setValue(defaults, 'reportHostname', false)
this._setValue(defaults, 'runtimeMetrics', false)
this._setValue(defaults, 'sampleRate', undefined)
this._setValue(defaults, 'sampler.rateLimit', undefined)
this._setValue(defaults, 'scope', undefined)
this._setValue(defaults, 'service', service)
this._setValue(defaults, 'env', undefined)
this._setValue(defaults, 'site', 'datadoghq.com')
this._setValue(defaults, 'spanAttributeSchema', 'v0')
this._setValue(defaults, 'spanComputePeerService', false)
this._setValue(defaults, 'spanRemoveIntegrationFromService', false)
this._setValue(defaults, 'startupLogs', false)
this._setValue(defaults, 'stats.enabled', false)
this._setValue(defaults, 'tags', {})
this._setValue(defaults, 'tagsHeaderMaxLength', 512)
this._setValue(defaults, 'telemetry.debug', false)
this._setValue(defaults, 'telemetry.dependencyCollection', true)
this._setValue(defaults, 'telemetry.enabled', true)
this._setValue(defaults, 'telemetry.heartbeatInterval', 60000)
this._setValue(defaults, 'telemetry.logCollection', false)
this._setValue(defaults, 'telemetry.metrics', true)
this._setValue(defaults, 'traceId128BitGenerationEnabled', true)
this._setValue(defaults, 'traceId128BitLoggingEnabled', false)
this._setValue(defaults, 'tracing', true)
this._setValue(defaults, 'url', undefined)
this._setValue(defaults, 'version', pkg.version)
this._setUnit(defaults, 'sampleRate', undefined)
this._setBoolean(defaults, 'logInjection', false)
this._setArray(defaults, 'headerTags', [])
this._setValue(defaults, 'tags', {})
this._setBoolean(defaults, 'tracing', true)
}
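
The comment above `_applyDefaults` is the invariant behind the rewrite: configuration is resolved across layers (defaults, then environment, then options, then remote config), and `_merge` can only pick the highest-priority defined value if the defaults layer is complete. A toy sketch of layered resolution under that reading (not the actual `_merge` implementation):

// Toy sketch of layered config resolution; not the actual _merge code.
const layers = { defaults: {}, env: {}, opts: {} }
const priority = ['opts', 'env', 'defaults'] // highest priority first
function resolve (key) {
  for (const name of priority) {
    const value = layers[name][key]
    if (value !== undefined) return value
  }
}
layers.defaults.flushMinSpans = 1000
layers.env.flushMinSpans = 500
console.log(resolve('flushMinSpans')) // => 500: env overrides the default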

@@ -806,11 +473,73 @@

const {
AWS_LAMBDA_FUNCTION_NAME,
DD_AGENT_HOST,
DD_APPSEC_ENABLED,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
DD_APPSEC_RULES,
DD_APPSEC_TRACE_RATE_LIMIT,
DD_APPSEC_WAF_TIMEOUT,
DD_DATA_STREAMS_ENABLED,
DD_DBM_PROPAGATION_MODE,
DD_DOGSTATSD_HOSTNAME,
DD_DOGSTATSD_PORT,
DD_ENV,
DD_EXPERIMENTAL_PROFILING_ENABLED,
JEST_WORKER_ID,
DD_IAST_DEDUPLICATION_ENABLED,
DD_IAST_ENABLED,
DD_IAST_MAX_CONCURRENT_REQUESTS,
DD_IAST_MAX_CONTEXT_OPERATIONS,
DD_IAST_REDACTION_ENABLED,
DD_IAST_REDACTION_NAME_PATTERN,
DD_IAST_REDACTION_VALUE_PATTERN,
DD_IAST_REQUEST_SAMPLING,
DD_IAST_TELEMETRY_VERBOSITY,
DD_INSTRUMENTATION_TELEMETRY_ENABLED,
DD_LOGS_INJECTION,
DD_OPENAI_LOGS_ENABLED,
DD_OPENAI_SPAN_CHAR_LIMIT,
DD_PROFILING_ENABLED,
DD_PROFILING_EXPORTERS,
DD_PROFILING_SOURCE_MAP,
DD_REMOTE_CONFIGURATION_ENABLED,
DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS,
DD_RUNTIME_METRICS_ENABLED,
DD_SERVICE,
DD_SERVICE_NAME,
DD_SITE,
DD_TAGS,
DD_TELEMETRY_DEBUG,
DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED,
DD_TELEMETRY_HEARTBEAT_INTERVAL,
DD_TELEMETRY_LOG_COLLECTION_ENABLED,
DD_TELEMETRY_METRICS_ENABLED,
DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED,
DD_TRACE_AGENT_HOSTNAME,
DD_TRACE_AGENT_PORT,
DD_TRACE_AGENT_PROTOCOL_VERSION,
DD_TRACE_CLIENT_IP_ENABLED,
DD_TRACE_CLIENT_IP_HEADER,
DD_TRACE_EXPERIMENTAL_EXPORTER,
DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED,
DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED,
DD_TRACE_GIT_METADATA_ENABLED,
DD_TRACE_GLOBAL_TAGS,
DD_TRACE_HEADER_TAGS,
DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
DD_TRACE_PARTIAL_FLUSH_MIN_SPANS,
DD_TRACE_PEER_SERVICE_MAPPING,
DD_TRACE_RATE_LIMIT,
DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED,
DD_TRACE_REPORT_HOSTNAME,
DD_TRACE_SAMPLE_RATE,
DD_TRACE_SCOPE,
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA,
DD_TRACE_STARTUP_LOGS,
DD_TRACE_TAGS,
DD_TRACE_TELEMETRY_ENABLED,
DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH,
DD_TRACING_ENABLED,

@@ -827,10 +556,85 @@ DD_VERSION

this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service)
this._setValue(env, 'appsec.blockedTemplateHtml', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML))
this._setValue(env, 'appsec.blockedTemplateJson', maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON))
this._setBoolean(env, 'appsec.enabled', DD_APPSEC_ENABLED)
this._setString(env, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
this._setString(env, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP)
this._setValue(env, 'appsec.rateLimit', maybeInt(DD_APPSEC_TRACE_RATE_LIMIT))
this._setString(env, 'appsec.rules', DD_APPSEC_RULES)
this._setValue(env, 'appsec.wafTimeout', maybeInt(DD_APPSEC_WAF_TIMEOUT))
this._setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED)
this._setString(env, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER)
this._setString(env, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE)
this._setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOSTNAME)
this._setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT)
this._setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED)
this._setString(env, 'env', DD_ENV || tags.env)
this._setString(env, 'version', DD_VERSION || tags.version)
this._setBoolean(env, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED)
this._setString(env, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER)
this._setBoolean(env, 'experimental.runtimeId', DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED)
if (AWS_LAMBDA_FUNCTION_NAME) this._setValue(env, 'flushInterval', 0)
this._setValue(env, 'flushMinSpans', maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS))
this._setBoolean(env, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED)
this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS)
this._setString(env, 'hostname', coalesce(DD_AGENT_HOST, DD_TRACE_AGENT_HOSTNAME))
this._setBoolean(env, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED)
this._setBoolean(env, 'iast.enabled', DD_IAST_ENABLED)
this._setValue(env, 'iast.maxConcurrentRequests', maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS))
this._setValue(env, 'iast.maxContextOperations', maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS))
this._setBoolean(env, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED))
this._setString(env, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN)
this._setString(env, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN)
const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING)
if (iastRequestSampling > -1 && iastRequestSampling < 101) {
this._setValue(env, 'iast.requestSampling', iastRequestSampling)
}
this._setString(env, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY)
this._setBoolean(env, 'isGCPFunction', getIsGCPFunction())
this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION)
this._setBoolean(env, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED)
this._setValue(env, 'openaiSpanCharLimit', maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT))
if (DD_TRACE_PEER_SERVICE_MAPPING) {
this._setValue(env, 'peerServiceMapping', fromEntries(
process.env.DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
))
}
this._setString(env, 'port', DD_TRACE_AGENT_PORT)
this._setBoolean(env, 'profiling.enabled', coalesce(DD_EXPERIMENTAL_PROFILING_ENABLED, DD_PROFILING_ENABLED))
this._setString(env, 'profiling.exporters', DD_PROFILING_EXPORTERS)
this._setBoolean(env, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP))
this._setString(env, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION)
this._setString(env, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP)
this._setBoolean(env, 'remoteConfig.enabled', coalesce(
DD_REMOTE_CONFIGURATION_ENABLED,
!this._isInServerlessEnvironment()
))
this._setValue(env, 'remoteConfig.pollInterval', maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS))
this._setBoolean(env, 'reportHostname', DD_TRACE_REPORT_HOSTNAME)
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED)
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE)
this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION)
this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS)
this._setValue(env, 'sampler.rateLimit', DD_TRACE_RATE_LIMIT)
this._setString(env, 'scope', DD_TRACE_SCOPE)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service)
this._setString(env, 'site', DD_SITE)
if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) {
this._setString(env, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA))
}
this._setBoolean(env, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
this._setBoolean(env, 'startupLogs', DD_TRACE_STARTUP_LOGS)
this._setTags(env, 'tags', tags)
this._setValue(env, 'tagsHeaderMaxLength', DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH)
this._setBoolean(env, 'telemetry.enabled', coalesce(
DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility
DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs
!(this._isInServerlessEnvironment() || JEST_WORKER_ID)
))
this._setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG)
this._setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
this._setValue(env, 'telemetry.heartbeatInterval', maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)))
this._setBoolean(env, 'telemetry.logCollection', coalesce(DD_TELEMETRY_LOG_COLLECTION_ENABLED, DD_IAST_ENABLED))
this._setBoolean(env, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
this._setBoolean(env, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
this._setBoolean(env, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
this._setBoolean(env, 'tracing', DD_TRACING_ENABLED)
this._setString(env, 'version', DD_VERSION || tags.version)
}
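For illustration, here is a minimal sketch (not part of the diff) of how a few of the environment variables above land in the `env` container; the dotted keys are stored flat and only resolved against the config object when the containers are merged:

```js
// Hypothetical values; mappings as in the method above.
process.env.DD_TRACE_PARTIAL_FLUSH_MIN_SPANS = '500' // -> env.flushMinSpans === 500 (via maybeInt)
process.env.DD_IAST_REQUEST_SAMPLING = '30'          // -> env['iast.requestSampling'] === 30
process.env.DD_TRACE_SCOPE = ''                      // -> env.scope === undefined (empty strings unset)
```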

@@ -842,15 +646,191 @@

options = Object.assign({ ingestion: {} }, options, opts)
options = this.options = Object.assign({ ingestion: {} }, options, opts)
tagger.add(tags, options.tags)
this._setString(opts, 'service', options.service || tags.service)
this._setValue(opts, 'appsec.blockedTemplateHtml', maybeFile(options.appsec.blockedTemplateHtml))
this._setValue(opts, 'appsec.blockedTemplateJson', maybeFile(options.appsec.blockedTemplateJson))
this._setBoolean(opts, 'appsec.enabled', options.appsec.enabled)
this._setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec.obfuscatorKeyRegex)
this._setString(opts, 'appsec.obfuscatorValueRegex', options.appsec.obfuscatorValueRegex)
this._setValue(opts, 'appsec.rateLimit', maybeInt(options.appsec.rateLimit))
this._setString(opts, 'appsec.rules', options.appsec.rules)
this._setValue(opts, 'appsec.wafTimeout', maybeInt(options.appsec.wafTimeout))
this._setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled)
this._setString(opts, 'clientIpHeader', options.clientIpHeader)
this._setString(opts, 'dbmPropagationMode', options.dbmPropagationMode)
if (options.dogstatsd) {
this._setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname)
this._setString(opts, 'dogstatsd.port', options.dogstatsd.port)
}
this._setBoolean(opts, 'dsmEnabled', options.dsmEnabled)
this._setString(opts, 'env', options.env || tags.env)
this._setString(opts, 'version', options.version || tags.version)
this._setBoolean(opts, 'experimental.enableGetRumData',
options.experimental && options.experimental.enableGetRumData)
this._setString(opts, 'experimental.exporter', options.experimental && options.experimental.exporter)
this._setBoolean(opts, 'experimental.runtimeId', options.experimental && options.experimental.runtimeId)
this._setValue(opts, 'flushInterval', maybeInt(options.flushInterval))
this._setValue(opts, 'flushMinSpans', maybeInt(options.flushMinSpans))
this._setArray(opts, 'headerTags', options.headerTags)
this._setString(opts, 'hostname', options.hostname)
this._setBoolean(opts, 'iast.deduplicationEnabled', options.iastOptions && options.iastOptions.deduplicationEnabled)
this._setBoolean(opts, 'iast.enabled',
options.iastOptions && (options.iastOptions === true || options.iastOptions.enabled === true))
const iastRequestSampling = maybeInt(options.iastOptions?.requestSampling)
this._setValue(opts, 'iast.maxConcurrentRequests',
maybeInt(options.iastOptions?.maxConcurrentRequests))
this._setValue(opts, 'iast.maxContextOperations',
maybeInt(options.iastOptions && options.iastOptions.maxContextOperations))
this._setBoolean(opts, 'iast.redactionEnabled', options.iastOptions && options.iastOptions.redactionEnabled)
this._setString(opts, 'iast.redactionNamePattern', options.iastOptions?.redactionNamePattern)
this._setString(opts, 'iast.redactionValuePattern', options.iastOptions?.redactionValuePattern)
if (iastRequestSampling > -1 && iastRequestSampling < 101) {
this._setValue(opts, 'iast.requestSampling', iastRequestSampling)
}
this._setString(opts, 'iast.telemetryVerbosity', options.iastOptions && options.iastOptions.telemetryVerbosity)
this._setBoolean(opts, 'isCiVisibility', options.isCiVisibility)
this._setBoolean(opts, 'logInjection', options.logInjection)
this._setString(opts, 'lookup', options.lookup)
this._setBoolean(opts, 'openAiLogsEnabled', options.openAiLogsEnabled)
this._setValue(opts, 'peerServiceMapping', options.peerServiceMapping)
this._setBoolean(opts, 'plugins', options.plugins)
this._setString(opts, 'port', options.port)
this._setBoolean(opts, 'profiling.enabled', options.profiling)
this._setString(opts, 'protocolVersion', options.protocolVersion)
if (options.remoteConfig) {
this._setValue(opts, 'remoteConfig.pollInterval', maybeFloat(options.remoteConfig.pollInterval))
}
this._setBoolean(opts, 'reportHostname', options.reportHostname)
this._setBoolean(opts, 'runtimeMetrics', options.runtimeMetrics)
this._setUnit(opts, 'sampleRate', coalesce(options.sampleRate, options.ingestion.sampleRate))
this._setBoolean(opts, 'logInjection', options.logInjection)
this._setArray(opts, 'headerTags', options.headerTags)
const ingestion = options.ingestion || {}
this._setValue(opts, 'sampler.rateLimit', coalesce(options.rateLimit, ingestion.rateLimit))
this._setString(opts, 'service', options.service || tags.service)
this._setString(opts, 'site', options.site)
if (options.spanAttributeSchema) {
this._setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema))
}
this._setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService)
this._setBoolean(opts, 'startupLogs', options.startupLogs)
this._setTags(opts, 'tags', tags)
this._setBoolean(opts, 'telemetry.logCollection', options.iastOptions &&
(options.iastOptions === true || options.iastOptions.enabled === true))
this._setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled)
this._setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled)
this._setString(opts, 'version', options.version || tags.version)
}
_isCiVisibility () {
return coalesce(
this.options.isCiVisibility,
this._defaults.isCiVisibility
)
}
_isCiVisibilityItrEnabled () {
return coalesce(
process.env.DD_CIVISIBILITY_ITR_ENABLED,
true
)
}
_getHostname () {
const DD_CIVISIBILITY_AGENTLESS_URL = process.env.DD_CIVISIBILITY_AGENTLESS_URL
const url = DD_CIVISIBILITY_AGENTLESS_URL
? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(this._getTraceAgentUrl(), this.options)
const DD_AGENT_HOST = coalesce(
this.options.hostname,
process.env.DD_AGENT_HOST,
process.env.DD_TRACE_AGENT_HOSTNAME,
'127.0.0.1'
)
return DD_AGENT_HOST || (url && url.hostname)
}
_getSpanComputePeerService () {
const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion(
coalesce(
this.options.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
)
)
const peerServiceSet = (
this.options.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
this.options.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED
)
const spanComputePeerService = (
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0'
// In v0, peer service is computed only if it is explicitly set to true
? peerServiceSet && isTrue(peerServiceValue)
// In >v0, peer service is false only if it is explicitly set to false
: (peerServiceSet ? !isFalse(peerServiceValue) : true)
)
return spanComputePeerService
}
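As a hedged illustration of the decision above (the `isTrue`/`isFalse` stand-ins below are simplified versions of dd-trace's helpers):

```js
const isTrue = v => v === true || String(v).toLowerCase() === 'true' || v === '1'
const isFalse = v => v === false || String(v).toLowerCase() === 'false' || v === '0'

function computePeerService (schema, isSet, value) {
  return schema === 'v0'
    ? isSet && isTrue(value) // v0: off unless explicitly enabled
    : (isSet ? !isFalse(value) : true) // >v0: on unless explicitly disabled
}

computePeerService('v0', false, undefined) // false
computePeerService('v1', false, undefined) // true
```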
_isCiVisibilityGitUploadEnabled () {
return coalesce(
process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED,
true
)
}
_isCiVisibilityManualApiEnabled () {
return isTrue(coalesce(
process.env.DD_CIVISIBILITY_MANUAL_API_ENABLED,
false
))
}
_isTraceStatsComputationEnabled () {
return coalesce(
this.options.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,
getIsGCPFunction() || getIsAzureFunctionConsumptionPlan()
)
}
_getTraceAgentUrl () {
return coalesce(
this.options.url,
process.env.DD_TRACE_AGENT_URL,
process.env.DD_TRACE_URL,
null
)
}
// handles values calculated from a mixture of options and env vars
_applyCalculated () {
const calc = this._calculated = {}
const {
DD_CIVISIBILITY_AGENTLESS_URL
} = process.env
if (DD_CIVISIBILITY_AGENTLESS_URL) {
this._setValue(calc, 'url', new URL(DD_CIVISIBILITY_AGENTLESS_URL))
} else {
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this.options))
}
if (this._isCiVisibility()) {
this._setBoolean(calc, 'isEarlyFlakeDetectionEnabled',
coalesce(process.env.DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED, true))
this._setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(this._isCiVisibilityItrEnabled()))
this._setBoolean(calc, 'isManualApiEnabled', this._isCiVisibilityManualApiEnabled())
}
this._setString(calc, 'dogstatsd.hostname', this._getHostname())
this._setBoolean(calc, 'isGitUploadEnabled',
calc.isIntelligentTestRunnerEnabled && !isFalse(this._isCiVisibilityGitUploadEnabled()))
this._setBoolean(calc, 'spanComputePeerService', this._getSpanComputePeerService())
this._setBoolean(calc, 'stats.enabled', this._isTraceStatsComputationEnabled())
}
_applyRemote (options) {

@@ -912,3 +892,3 @@ const opts = this._remote = this._remote || {}

_setString (obj, name, value) {
obj[name] = value || undefined // unset for empty strings
obj[name] = value ? String(value) : undefined // unset for empty strings
}
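A quick sketch of what the added `String(value)` coercion changes in practice (hypothetical container):

```js
const obj = {}
const setString = (o, name, value) => { o[name] = value ? String(value) : undefined }

setString(obj, 'port', 8126) // obj.port === '8126' -- numbers are now normalized to strings
setString(obj, 'scope', '')  // obj.scope === undefined -- empty strings still unset the key
```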

@@ -931,5 +911,8 @@

// TODO: Move change tracking to telemetry.
// for telemetry reporting, `name`s in `containers` need to be keys from:
// eslint-disable-next-line max-len
// https://github.com/DataDog/dd-go/blob/prod/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json
_merge () {
const containers = [this._remote, this._options, this._env, this._defaults]
const origins = ['remote_config', 'code', 'env_var', 'default']
const containers = [this._remote, this._options, this._env, this._calculated, this._defaults]
const origins = ['remote_config', 'code', 'env_var', 'calculated', 'default']
const changes = []

@@ -943,5 +926,6 @@

if ((container[name] !== null && container[name] !== undefined) || container === this._defaults) {
if (this[name] === container[name] && this.hasOwnProperty(name)) break
if (get(this, name) === container[name] && has(this, name)) break
const value = this[name] = container[name]
const value = container[name]
set(this, name, value)

@@ -956,3 +940,2 @@ changes.push({ name, value, origin })

this.sampler.sampleRate = this.sampleRate
updateConfig(changes, this)

@@ -962,2 +945,11 @@ }

function maybeInt (number) {
const parsed = parseInt(number)
return isNaN(parsed) ? undefined : parsed
}
function maybeFloat (number) {
const parsed = parseFloat(number)
return isNaN(parsed) ? undefined : parsed
}
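For reference, a behavior sketch of the two helpers above:

```js
maybeInt('42')     // 42
maybeInt('abc')    // undefined -- NaN is mapped to undefined
maybeFloat('0.25') // 0.25
maybeFloat(null)   // undefined
```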
function getAgentUrl (url, options) {

@@ -964,0 +956,0 @@ if (url) return new URL(url)

@@ -9,3 +9,3 @@ const { storage } = require('../../datadog-core')

function setDataStreamsContext (dataStreamsContext) {
storage.enterWith({ ...(storage.getStore()), dataStreamsContext })
if (dataStreamsContext) storage.enterWith({ ...(storage.getStore()), dataStreamsContext })
}

@@ -12,0 +12,0 @@

@@ -11,2 +11,5 @@ // encoding used here is sha256

const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'
const CONTEXT_PROPAGATION_KEY_BASE64 = 'dd-pathway-ctx-base64'
function shaHash (checkpointString) {

@@ -23,3 +26,3 @@ const hash = crypto.createHash('md5').update(checkpointString).digest('hex').slice(0, 16)

const currentHash = shaHash(`${service}${env}` + edgeTags.join(''))
const buf = Buffer.concat([ currentHash, parentHash ], 16)
const buf = Buffer.concat([currentHash, parentHash], 16)
const val = shaHash(buf.toString())

@@ -38,2 +41,7 @@ cache.set(key, val)

function encodePathwayContextBase64 (dataStreamsContext) {
const encodedPathway = encodePathwayContext(dataStreamsContext)
return encodedPathway.toString('base64')
}
function decodePathwayContext (pathwayContext) {

@@ -57,6 +65,55 @@ if (pathwayContext == null || pathwayContext.length < 8) {

function decodePathwayContextBase64 (pathwayContext) {
if (pathwayContext == null || pathwayContext.length < 8) {
return
}
if (Buffer.isBuffer(pathwayContext)) {
pathwayContext = pathwayContext.toString()
}
const encodedPathway = Buffer.from(pathwayContext, 'base64')
return decodePathwayContext(encodedPathway)
}
class DsmPathwayCodec {
// We use a class for encoding/decoding so that future changes to the encoding
// are easier to roll out than they would be with free-standing functions.
static encode (dataStreamsContext, carrier) {
if (!dataStreamsContext || !dataStreamsContext.hash) {
return
}
carrier[CONTEXT_PROPAGATION_KEY_BASE64] = encodePathwayContextBase64(dataStreamsContext)
}
static decode (carrier) {
if (carrier == null) return
let ctx
if (CONTEXT_PROPAGATION_KEY_BASE64 in carrier) {
// decode v2 encoding of base64
ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY_BASE64])
} else if (CONTEXT_PROPAGATION_KEY in carrier) {
try {
// decode v1 encoding
ctx = decodePathwayContext(carrier[CONTEXT_PROPAGATION_KEY])
} catch {
// pass
}
// cover case where base64 context was received under wrong key
if (!ctx) ctx = decodePathwayContextBase64(carrier[CONTEXT_PROPAGATION_KEY])
}
return ctx
}
static contextExists (carrier) {
return CONTEXT_PROPAGATION_KEY_BASE64 in carrier || CONTEXT_PROPAGATION_KEY in carrier
}
}
module.exports = {
computePathwayHash: computeHash,
encodePathwayContext,
decodePathwayContext
decodePathwayContext,
encodePathwayContextBase64,
decodePathwayContextBase64,
DsmPathwayCodec
}
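A round-trip sketch for `DsmPathwayCodec`; the `dataStreamsContext` is assumed to come from the processor's checkpoint logic, and the carrier can be any string-keyed map such as message headers:

```js
const carrier = {} // e.g. Kafka message headers
DsmPathwayCodec.encode(dataStreamsContext, carrier)
// carrier['dd-pathway-ctx-base64'] now holds the base64-encoded pathway

if (DsmPathwayCodec.contextExists(carrier)) {
  const ctx = DsmPathwayCodec.decode(carrier) // handles both the base64 and legacy v1 keys
}
```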

@@ -7,3 +7,3 @@ const os = require('os')

const { LogCollapsingLowestDenseDDSketch } = require('@datadog/sketches-js')
const { encodePathwayContext } = require('./pathway')
const { DsmPathwayCodec } = require('./pathway')
const { DataStreamsWriter } = require('./writer')

@@ -17,3 +17,2 @@ const { computePathwayHash } = require('./pathway')

const HIGH_ACCURACY_DISTRIBUTION = 0.0075
const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx'

@@ -279,8 +278,8 @@ class StatsPoint {

const dataStreamsContext = {
hash: hash,
edgeStartNs: edgeStartNs,
pathwayStartNs: pathwayStartNs,
hash,
edgeStartNs,
pathwayStartNs,
previousDirection: direction,
closestOppositeDirectionHash: closestOppositeDirectionHash,
closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart
closestOppositeDirectionHash,
closestOppositeDirectionEdgeStart
}

@@ -291,3 +290,3 @@ if (direction === 'direction:out') {

const ddInfoContinued = {}
ddInfoContinued[CONTEXT_PROPAGATION_KEY] = encodePathwayContext(dataStreamsContext).toJSON()
DsmPathwayCodec.encode(dataStreamsContext, ddInfoContinued)
payloadSize += getSizeOrZero(JSON.stringify(ddInfoContinued)) - 1

@@ -297,8 +296,8 @@ }

currentTimestamp: nowNs,
parentHash: parentHash,
hash: hash,
edgeTags: edgeTags,
edgeLatencyNs: edgeLatencyNs,
pathwayLatencyNs: pathwayLatencyNs,
payloadSize: payloadSize
parentHash,
hash,
edgeTags,
edgeLatencyNs,
pathwayLatencyNs,
payloadSize
}

@@ -329,3 +328,3 @@ this.recordCheckpoint(checkpoint, span)

for (const [ timeNs, bucket ] of this.buckets.entries()) {
for (const [timeNs, bucket] of this.buckets.entries()) {
const points = []

@@ -364,5 +363,5 @@

module.exports = {
DataStreamsProcessor: DataStreamsProcessor,
StatsPoint: StatsPoint,
StatsBucket: StatsBucket,
DataStreamsProcessor,
StatsPoint,
StatsBucket,
Backlog,

@@ -374,4 +373,3 @@ TimeBuckets,

getAmqpMessageSize,
ENTRY_PARENT_HASH,
CONTEXT_PROPAGATION_KEY
ENTRY_PARENT_HASH
}

@@ -331,4 +331,4 @@ 'use strict'

'*': {
'language': 'javascript',
'library_version': ddTraceVersion
language: 'javascript',
library_version: ddTraceVersion
}

@@ -335,0 +335,0 @@ },

@@ -111,3 +111,3 @@ 'use strict'

{
filename: `coverage1.msgpack`,
filename: 'coverage1.msgpack',
contentType: 'application/msgpack'

@@ -114,0 +114,0 @@ }

@@ -9,2 +9,3 @@ 'use strict'

const https = require('https')
// eslint-disable-next-line n/no-deprecated-api
const { parse: urlParse } = require('url')

@@ -11,0 +12,0 @@ const zlib = require('zlib')

@@ -1,2 +0,1 @@

const { SpanStatsEncoder } = require('../../encode/span-stats')

@@ -3,0 +2,0 @@

@@ -54,7 +54,7 @@ const tracerLogger = require('../../log') // path to require tracer logger

...log,
'timestamp': Date.now(),
'hostname': log.hostname || this.hostname,
'ddsource': log.ddsource || this.ddsource,
'service': log.service || this.service,
'ddtags': logTags || undefined
timestamp: Date.now(),
hostname: log.hostname || this.hostname,
ddsource: log.ddsource || this.ddsource,
service: log.service || this.service,
ddtags: logTags || undefined
}

@@ -61,0 +61,0 @@

@@ -111,3 +111,2 @@ 'use strict'

switch (tag) {
case 'operation.name':
case 'service.name':

@@ -114,0 +113,0 @@ case 'span.type':

@@ -90,3 +90,3 @@ /**

if (originalLambdaHandler !== undefined) {
if (originalLambdaHandler !== undefined && lambdaTaskRoot !== undefined) {
const [moduleRoot, moduleAndHandler] = _extractModuleRootAndHandler(originalLambdaHandler)

@@ -93,0 +93,0 @@ const [_module] = _extractModuleNameAndHandlerPath(moduleAndHandler)

@@ -164,5 +164,7 @@ 'use strict'

}
get instrumentationLibrary () {
return this._parentTracer.instrumentationLibrary
}
get _spanProcessor () {

@@ -169,0 +171,0 @@ return this._parentTracer.getActiveSpanProcessor()

'use strict'
const pick = require('../../../../utils/src/pick')
const pick = require('../../../../datadog-core/src/utils/src/pick')
const id = require('../../id')

@@ -174,5 +174,13 @@ const DatadogSpanContext = require('../span_context')

ts.forVendor('dd', state => {
if (!spanContext._isRemote) {
// SpanContext was created by a ddtrace span.
// Last datadog span id should be set to the current span.
state.set('p', spanContext._spanId)
} else if (spanContext._trace.tags['_dd.parent_id']) {
// Propagate the last Datadog span id set on the remote span.
state.set('p', spanContext._trace.tags['_dd.parent_id'])
}
state.set('s', priority)
if (mechanism) {
state.set('t.dm', mechanism)
state.set('t.dm', `-${mechanism}`)
}

@@ -283,3 +291,4 @@

spanId: null,
sampling: { priority }
sampling: { priority },
isRemote: true
})

@@ -317,3 +326,3 @@ }

if (matches.length) {
const [ version, traceId, spanId, flags, tail ] = matches.slice(1)
const [version, traceId, spanId, flags, tail] = matches.slice(1)
const traceparent = { version }

@@ -333,2 +342,3 @@ const tracestate = TraceState.fromString(carrier.tracestate)

spanId: id(spanId, 16),
isRemote: true,
sampling: { priority: parseInt(flags, 10) & 1 ? 1 : 0 },

@@ -344,2 +354,6 @@ traceparent,

switch (key) {
case 'p': {
spanContext._trace.tags['_dd.parent_id'] = value
break
}
case 's': {

@@ -375,2 +389,6 @@ const priority = parseInt(value, 10)

if (!spanContext._trace.tags['_dd.parent_id']) {
spanContext._trace.tags['_dd.parent_id'] = '0000000000000000'
}
this._extractBaggageItems(carrier, spanContext)

@@ -388,3 +406,4 @@ return spanContext

traceId: id(carrier[traceKey], radix),
spanId: id(carrier[spanKey], radix)
spanId: id(carrier[spanKey], radix),
isRemote: true
})

@@ -391,0 +410,0 @@ }
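To illustrate the new `p` subkey (all ids below are made up): a W3C header pair carrying Datadog state would look roughly like this, with `p` holding the last Datadog parent span id next to the sampling (`s`) and mechanism (`t.dm`) subkeys:

```js
const headers = {
  traceparent: '00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01',
  tracestate: 'dd=s:1;p:00f067aa0ba902b7;t.dm:-4'
}
// On extraction, `p` is stored as spanContext._trace.tags['_dd.parent_id'];
// when the subkey is missing it defaults to '0000000000000000'.
```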

@@ -14,2 +14,3 @@ 'use strict'

this._spanId = props.spanId
this._isRemote = props.isRemote ?? true
this._parentId = props.parentId || null

@@ -16,0 +17,0 @@ this._name = props.name

@@ -32,4 +32,4 @@ 'use strict'

const integrationCounters = {
span_created: {},
span_finished: {}
spans_created: {},
spans_finished: {}
}

@@ -76,3 +76,3 @@

getIntegrationCounter('span_created', this._integrationName).inc()
getIntegrationCounter('spans_created', this._integrationName).inc()

@@ -177,3 +177,3 @@ this._spanContext = this._createContext(parent, fields)

getIntegrationCounter('span_finished', this._integrationName).inc()
getIntegrationCounter('spans_finished', this._integrationName).inc()

@@ -216,3 +216,3 @@ if (DD_TRACE_EXPERIMENTAL_SPAN_COUNTS && finishedRegistry) {

} else {
log.warn(`Dropping span link attribute. It is not of an allowed type`)
log.warn('Dropping span link attribute. It is not of an allowed type')
}

@@ -273,2 +273,4 @@ }

}
// SpanContext was NOT propagated from a remote parent
spanContext._isRemote = false

@@ -275,0 +277,0 @@ return spanContext

@@ -7,3 +7,2 @@ 'use strict'

const log = require('./log')
const Nomenclature = require('./service-naming')

@@ -106,3 +105,3 @@ const loadChannel = channel('dd-trace:instrumentation:load')

this._tracerConfig = config
Nomenclature.configure(config)
this._tracer._nomenclature.configure(config)

@@ -109,0 +108,0 @@ for (const name in pluginClasses) {

@@ -45,3 +45,3 @@ const {

if (err) {
log.error(`Intelligent Test Runner configuration could not be fetched. ${err.message}`)
log.error(`Library configuration could not be fetched. ${err.message}`)
} else {

@@ -55,3 +55,3 @@ this.libraryConfig = libraryConfig

this.addSub(`ci:${this.constructor.id}:test-suite:skippable`, ({ onDone }) => {
if (!this.tracer._exporter || !this.tracer._exporter.getSkippableSuites) {
if (!this.tracer._exporter?.getSkippableSuites) {
return onDone({ err: new Error('CI Visibility was not initialized correctly') })

@@ -128,2 +128,3 @@ }

log.error(`Known tests could not be fetched. ${err.message}`)
this.libraryConfig.isEarlyFlakeDetectionEnabled = false
}

@@ -130,0 +131,0 @@ onDone({ err, knownTests })

@@ -15,7 +15,7 @@ 'use strict'

configure (config) {
super.configure(config)
for (const name in this.constructor.plugins) {
const pluginConfig = config[name] === false ? false : {
...config,
...config[name]
}
const pluginConfig = config[name] === false
? false
: { ...config, ...config[name] }

@@ -22,0 +22,0 @@ this[name].configure(pluginConfig)

@@ -23,2 +23,3 @@ 'use strict'

}
encodingServiceTags (serviceTag, encodeATag, spanConfig) {

@@ -25,0 +26,0 @@ if (serviceTag !== spanConfig) {

'use strict'
module.exports = {
get '@apollo/gateway' () { return require('../../../datadog-plugin-apollo/src') },
get '@aws-sdk/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },

@@ -20,24 +21,24 @@ get '@cucumber/cucumber' () { return require('../../../datadog-plugin-cucumber/src') },

get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') },
get 'aerospike' () { return require('../../../datadog-plugin-aerospike/src') },
get 'amqp10' () { return require('../../../datadog-plugin-amqp10/src') },
get 'amqplib' () { return require('../../../datadog-plugin-amqplib/src') },
get aerospike () { return require('../../../datadog-plugin-aerospike/src') },
get amqp10 () { return require('../../../datadog-plugin-amqp10/src') },
get amqplib () { return require('../../../datadog-plugin-amqplib/src') },
get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') },
get 'bunyan' () { return require('../../../datadog-plugin-bunyan/src') },
get bunyan () { return require('../../../datadog-plugin-bunyan/src') },
get 'cassandra-driver' () { return require('../../../datadog-plugin-cassandra-driver/src') },
get 'child_process' () { return require('../../../datadog-plugin-child_process/src') },
get 'connect' () { return require('../../../datadog-plugin-connect/src') },
get 'couchbase' () { return require('../../../datadog-plugin-couchbase/src') },
get 'cypress' () { return require('../../../datadog-plugin-cypress/src') },
get 'dns' () { return require('../../../datadog-plugin-dns/src') },
get 'elasticsearch' () { return require('../../../datadog-plugin-elasticsearch/src') },
get 'express' () { return require('../../../datadog-plugin-express/src') },
get 'fastify' () { return require('../../../datadog-plugin-fastify/src') },
get child_process () { return require('../../../datadog-plugin-child_process/src') },
get connect () { return require('../../../datadog-plugin-connect/src') },
get couchbase () { return require('../../../datadog-plugin-couchbase/src') },
get cypress () { return require('../../../datadog-plugin-cypress/src') },
get dns () { return require('../../../datadog-plugin-dns/src') },
get elasticsearch () { return require('../../../datadog-plugin-elasticsearch/src') },
get express () { return require('../../../datadog-plugin-express/src') },
get fastify () { return require('../../../datadog-plugin-fastify/src') },
get 'find-my-way' () { return require('../../../datadog-plugin-find-my-way/src') },
get 'graphql' () { return require('../../../datadog-plugin-graphql/src') },
get 'grpc' () { return require('../../../datadog-plugin-grpc/src') },
get 'hapi' () { return require('../../../datadog-plugin-hapi/src') },
get 'http' () { return require('../../../datadog-plugin-http/src') },
get 'http2' () { return require('../../../datadog-plugin-http2/src') },
get 'https' () { return require('../../../datadog-plugin-http/src') },
get 'ioredis' () { return require('../../../datadog-plugin-ioredis/src') },
get graphql () { return require('../../../datadog-plugin-graphql/src') },
get grpc () { return require('../../../datadog-plugin-grpc/src') },
get hapi () { return require('../../../datadog-plugin-hapi/src') },
get http () { return require('../../../datadog-plugin-http/src') },
get http2 () { return require('../../../datadog-plugin-http2/src') },
get https () { return require('../../../datadog-plugin-http/src') },
get ioredis () { return require('../../../datadog-plugin-ioredis/src') },
get 'jest-circus' () { return require('../../../datadog-plugin-jest/src') },

@@ -49,17 +50,17 @@ get 'jest-config' () { return require('../../../datadog-plugin-jest/src') },

get 'jest-worker' () { return require('../../../datadog-plugin-jest/src') },
get 'koa' () { return require('../../../datadog-plugin-koa/src') },
get koa () { return require('../../../datadog-plugin-koa/src') },
get 'koa-router' () { return require('../../../datadog-plugin-koa/src') },
get 'kafkajs' () { return require('../../../datadog-plugin-kafkajs/src') },
get 'mariadb' () { return require('../../../datadog-plugin-mariadb/src') },
get 'memcached' () { return require('../../../datadog-plugin-memcached/src') },
get kafkajs () { return require('../../../datadog-plugin-kafkajs/src') },
get mariadb () { return require('../../../datadog-plugin-mariadb/src') },
get memcached () { return require('../../../datadog-plugin-memcached/src') },
get 'microgateway-core' () { return require('../../../datadog-plugin-microgateway-core/src') },
get 'mocha' () { return require('../../../datadog-plugin-mocha/src') },
get mocha () { return require('../../../datadog-plugin-mocha/src') },
get 'mocha-each' () { return require('../../../datadog-plugin-mocha/src') },
get 'moleculer' () { return require('../../../datadog-plugin-moleculer/src') },
get 'mongodb' () { return require('../../../datadog-plugin-mongodb-core/src') },
get moleculer () { return require('../../../datadog-plugin-moleculer/src') },
get mongodb () { return require('../../../datadog-plugin-mongodb-core/src') },
get 'mongodb-core' () { return require('../../../datadog-plugin-mongodb-core/src') },
get 'mysql' () { return require('../../../datadog-plugin-mysql/src') },
get 'mysql2' () { return require('../../../datadog-plugin-mysql2/src') },
get 'net' () { return require('../../../datadog-plugin-net/src') },
get 'next' () { return require('../../../datadog-plugin-next/src') },
get mysql () { return require('../../../datadog-plugin-mysql/src') },
get mysql2 () { return require('../../../datadog-plugin-mysql2/src') },
get net () { return require('../../../datadog-plugin-net/src') },
get next () { return require('../../../datadog-plugin-next/src') },
get 'node:dns' () { return require('../../../datadog-plugin-dns/src') },

@@ -70,16 +71,16 @@ get 'node:http' () { return require('../../../datadog-plugin-http/src') },

get 'node:net' () { return require('../../../datadog-plugin-net/src') },
get 'oracledb' () { return require('../../../datadog-plugin-oracledb/src') },
get 'openai' () { return require('../../../datadog-plugin-openai/src') },
get 'paperplane' () { return require('../../../datadog-plugin-paperplane/src') },
get 'pg' () { return require('../../../datadog-plugin-pg/src') },
get 'pino' () { return require('../../../datadog-plugin-pino/src') },
get oracledb () { return require('../../../datadog-plugin-oracledb/src') },
get openai () { return require('../../../datadog-plugin-openai/src') },
get paperplane () { return require('../../../datadog-plugin-paperplane/src') },
get pg () { return require('../../../datadog-plugin-pg/src') },
get pino () { return require('../../../datadog-plugin-pino/src') },
get 'pino-pretty' () { return require('../../../datadog-plugin-pino/src') },
get 'playwright' () { return require('../../../datadog-plugin-playwright/src') },
get 'redis' () { return require('../../../datadog-plugin-redis/src') },
get 'restify' () { return require('../../../datadog-plugin-restify/src') },
get 'rhea' () { return require('../../../datadog-plugin-rhea/src') },
get 'router' () { return require('../../../datadog-plugin-router/src') },
get 'sharedb' () { return require('../../../datadog-plugin-sharedb/src') },
get 'tedious' () { return require('../../../datadog-plugin-tedious/src') },
get 'winston' () { return require('../../../datadog-plugin-winston/src') }
get playwright () { return require('../../../datadog-plugin-playwright/src') },
get redis () { return require('../../../datadog-plugin-redis/src') },
get restify () { return require('../../../datadog-plugin-restify/src') },
get rhea () { return require('../../../datadog-plugin-rhea/src') },
get router () { return require('../../../datadog-plugin-router/src') },
get sharedb () { return require('../../../datadog-plugin-sharedb/src') },
get tedious () { return require('../../../datadog-plugin-tedious/src') },
get winston () { return require('../../../datadog-plugin-winston/src') }
}

@@ -87,3 +87,3 @@ 'use strict'

if (!store.span._spanContext._tags['error']) {
if (!store.span._spanContext._tags.error) {
store.span.setTag('error', error || 1)

@@ -90,0 +90,0 @@ }

@@ -7,3 +7,2 @@ 'use strict'

const { COMPONENT } = require('../constants')
const Nomenclature = require('../service-naming')

@@ -33,3 +32,3 @@ class TracingPlugin extends Plugin {

return Nomenclature.serviceName(type, kind, id, opts)
return this._tracer._nomenclature.serviceName(type, kind, id, opts)
}

@@ -44,3 +43,3 @@

return Nomenclature.opName(type, kind, id, opts)
return this._tracer._nomenclature.opName(type, kind, id, opts)
}

@@ -64,4 +63,8 @@

error (error) {
this.addError(error)
error (ctxOrError) {
if (ctxOrError?.currentStore) {
ctxOrError.currentStore?.span.setTag('error', ctxOrError?.error)
return
}
this.addError(ctxOrError)
}

@@ -98,3 +101,3 @@

addError (error, span = this.activeSpan) {
if (!span._spanContext._tags['error']) {
if (!span._spanContext._tags.error) {
// Errors may be wrapped in a context.

@@ -101,0 +104,0 @@ error = (error && error.error) || error

@@ -55,3 +55,3 @@ const path = require('path')

const TEST_IS_NEW = 'test.is_new'
const TEST_EARLY_FLAKE_IS_RETRY = 'test.early_flake.is_retry'
const TEST_IS_RETRY = 'test.is_retry'
const TEST_EARLY_FLAKE_IS_ENABLED = 'test.early_flake.is_enabled'

@@ -62,2 +62,3 @@

const JEST_TEST_RUNNER = 'test.jest.test_runner'
const JEST_DISPLAY_NAME = 'test.jest.display_name'

@@ -79,2 +80,6 @@ const TEST_ITR_TESTS_SKIPPED = '_dd.ci.itr.tests_skipped'

// Early flake detection util strings
const EFD_STRING = "Retried by Datadog's Early Flake Detection"
const EFD_TEST_NAME_REGEX = new RegExp(EFD_STRING + ' \\(#\\d+\\): ', 'g')
module.exports = {

@@ -85,2 +90,3 @@ TEST_CODE_OWNERS,

JEST_TEST_RUNNER,
JEST_DISPLAY_NAME,
TEST_TYPE,

@@ -102,3 +108,3 @@ TEST_NAME,

TEST_IS_NEW,
TEST_EARLY_FLAKE_IS_RETRY,
TEST_IS_RETRY,
TEST_EARLY_FLAKE_IS_ENABLED,

@@ -139,3 +145,7 @@ getTestEnvironmentMetadata,

removeInvalidMetadata,
parseAnnotations
parseAnnotations,
EFD_STRING,
EFD_TEST_NAME_REGEX,
removeEfdStringFromTestName,
addEfdStringToTestName
}

@@ -287,3 +297,4 @@

const testSuitePath = testSuiteAbsolutePath === sourceRoot
? testSuiteAbsolutePath : path.relative(sourceRoot, testSuiteAbsolutePath)
? testSuiteAbsolutePath
: path.relative(sourceRoot, testSuiteAbsolutePath)

@@ -562,1 +573,9 @@ return testSuitePath.replace(path.sep, '/')

}
function addEfdStringToTestName (testName, numAttempt) {
return `${EFD_STRING} (#${numAttempt}): ${testName}`
}
function removeEfdStringFromTestName (testName) {
return testName.replace(EFD_TEST_NAME_REGEX, '')
}
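Usage sketch for the EFD helpers above:

```js
addEfdStringToTestName('adds two numbers', 3)
// -> "Retried by Datadog's Early Flake Detection (#3): adds two numbers"

removeEfdStringFromTestName("Retried by Datadog's Early Flake Detection (#3): adds two numbers")
// -> 'adds two numbers'
```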
'use strict'
const uniq = require('../../../../utils/src/uniq')
const uniq = require('../../../../datadog-core/src/utils/src/uniq')
const analyticsSampler = require('../../analytics_sampler')

@@ -66,3 +66,3 @@ const FORMAT_HTTP_HEADERS = 'http_headers'

span.context()._name = `${name}.request`
span.context()._tags['component'] = name
span.context()._tags.component = name

@@ -267,3 +267,3 @@ web.setConfig(req, config)

const error = context.error
const hasExistingError = span.context()._tags['error'] || span.context()._tags[ERROR_MESSAGE]
const hasExistingError = span.context()._tags.error || span.context()._tags[ERROR_MESSAGE]

@@ -410,3 +410,3 @@ if (!hasExistingError && !context.config.validateStatus(statusCode)) {

function isOriginAllowed (req, headers) {
const origin = req.headers['origin']
const origin = req.headers.origin
const allowOrigin = headers['access-control-allow-origin']

@@ -504,3 +504,3 @@

const protocol = getProtocol(req)
return `${protocol}://${req.headers['host']}${req.originalUrl || req.url}`
return `${protocol}://${req.headers.host}${req.originalUrl || req.url}`
}

@@ -507,0 +507,0 @@ }

'use strict'
const RateLimiter = require('./rate_limiter')
const Sampler = require('./sampler')

@@ -46,2 +47,3 @@ const { setSamplingRules } = require('./startup-log')

this._rules = this._normalizeRules(rules, sampleRate, rateLimit)
this._limiter = new RateLimiter(rateLimit)

@@ -140,10 +142,13 @@ setSamplingRules(this._rules)

const sampled = rule.sample()
const priority = sampled ? USER_KEEP : USER_REJECT
return rule.sample() && this._isSampledByRateLimit(context)
? USER_KEEP
: USER_REJECT
}
if (sampled) {
context._trace[SAMPLING_LIMIT_DECISION] = rule.effectiveRate
}
_isSampledByRateLimit (context) {
const allowed = this._limiter.isAllowed()
return priority
context._trace[SAMPLING_LIMIT_DECISION] = this._limiter.effectiveRate()
return allowed
}

@@ -150,0 +155,0 @@

@@ -240,3 +240,3 @@ 'use strict'

return [ ...new Set(strategies) ]
return [...new Set(strategies)]
}

@@ -243,0 +243,0 @@

@@ -12,2 +12,4 @@ 'use strict'

const version = require('../../../../../package.json').version
const os = require('os')
const perf = require('perf_hooks').performance

@@ -54,3 +56,3 @@ const containerId = docker.id()

class AgentExporter {
constructor ({ url, logger, uploadTimeout } = {}) {
constructor ({ url, logger, uploadTimeout, env, host, service, version } = {}) {
this._url = url

@@ -63,34 +65,74 @@ this._logger = logger

this._backoffTries = backoffTries
this._env = env
this._host = host
this._service = service
this._appVersion = version
}
export ({ profiles, start, end, tags }) {
const types = Object.keys(profiles)
const fields = []
const fields = [
['recording-start', start.toISOString()],
['recording-end', end.toISOString()],
['language', 'javascript'],
['runtime', 'nodejs'],
['runtime_version', process.version],
['profiler_version', version],
['format', 'pprof'],
function typeToFile (type) {
return `${type}.pprof`
}
['tags[]', 'language:javascript'],
['tags[]', 'runtime:nodejs'],
['tags[]', `runtime_version:${process.version}`],
['tags[]', `process_id:${process.pid}`],
['tags[]', `profiler_version:${version}`],
['tags[]', 'format:pprof'],
...Object.entries(tags).map(([key, value]) => ['tags[]', `${key}:${value}`])
]
const event = JSON.stringify({
attachments: Object.keys(profiles).map(typeToFile),
start: start.toISOString(),
end: end.toISOString(),
family: 'node',
version: '4',
tags_profiler: [
'language:javascript',
'runtime:nodejs',
`runtime_arch:${process.arch}`,
`runtime_os:${process.platform}`,
`runtime_version:${process.version}`,
`process_id:${process.pid}`,
`profiler_version:${version}`,
'format:pprof',
...Object.entries(tags).map(([key, value]) => `${key}:${value}`)
].join(','),
info: {
application: {
env: this._env,
service: this._service,
start_time: new Date(perf.nodeTiming.nodeStart + perf.timeOrigin).toISOString(),
version: this._appVersion
},
platform: {
hostname: this._host,
kernel_name: os.type(),
kernel_release: os.release(),
kernel_version: os.version()
},
profiler: {
version
},
runtime: {
// Using `nodejs` for consistency with the existing `runtime` tag.
// Note that the event `family` property uses `node`, as that's what's
// prescribed by the Intake API, but that's an internal enum and is
// not customer visible.
engine: 'nodejs',
// strip off leading 'v'. This makes the format consistent with other
// runtimes (e.g. Ruby) but not with the existing `runtime_version` tag.
// We'll keep it like this as we want cross-engine consistency. We
// also aren't changing the format of the existing tag as we don't want
// to break it.
version: process.version.substring(1)
}
}
})
fields.push(['event', event, {
filename: 'event.json',
contentType: 'application/json'
}])
this._logger.debug(() => {
const body = fields.map(([key, value]) => ` ${key}: ${value}`).join('\n')
return `Building agent export report: ${'\n' + body}`
return `Building agent export report:\n${event}`
})
for (let index = 0; index < types.length; index++) {
const type = types[index]
const buffer = profiles[type]
for (const [type, buffer] of Object.entries(profiles)) {
this._logger.debug(() => {

@@ -101,7 +143,6 @@ const bytes = buffer.toString('hex').match(/../g).join(' ')

fields.push([`types[${index}]`, type])
fields.push([`data[${index}]`, buffer, {
filename: `${type}.pb.gz`,
contentType: 'application/octet-stream',
knownLength: buffer.length
const filename = typeToFile(type)
fields.push([filename, buffer, {
filename,
contentType: 'application/octet-stream'
}])

@@ -128,3 +169,7 @@ }

path: '/profiling/v1/input',
headers: form.getHeaders(),
headers: {
'DD-EVP-ORIGIN': 'dd-trace-js',
'DD-EVP-ORIGIN-VERSION': version,
...form.getHeaders()
},
timeout: this._backoffTime * Math.pow(2, attempt)

@@ -131,0 +176,0 @@ }
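Put together, the exporter's multipart upload now carries one `event.json` attachment describing the recording plus one attachment per profile type; a sketch of the field list with illustrative values, in push order:

```js
// [
//   ['recording-start', '2024-01-01T00:00:00.000Z'],
//   ['recording-end', '2024-01-01T00:01:00.000Z'],
//   ['tags[]', 'language:javascript'], /* ...more tags... */
//   ['event', '{"attachments":["wall.pprof"],"family":"node","version":"4",...}',
//     { filename: 'event.json', contentType: 'application/json' }],
//   ['wall.pprof', Buffer, { filename: 'wall.pprof', contentType: 'application/octet-stream' }]
// ]
```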

@@ -15,3 +15,3 @@ 'use strict'

constructor (options = {}) {
this._level = mapping[options.level] || mapping['error']
this._level = mapping[options.level] || mapping.error
}

@@ -18,0 +18,0 @@

@@ -18,2 +18,4 @@ const { performance, constants, PerformanceObserver } = require('perf_hooks')

const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS))
function labelFromStr (stringTable, key, valStr) {

@@ -150,2 +152,72 @@ return new Label({ key, str: stringTable.dedup(valStr) })

// Translates performance entries into pprof samples.
class EventSerializer {
constructor () {
this.stringTable = new StringTable()
this.samples = []
this.locations = []
this.functions = []
this.decorators = {}
// A synthetic single-frame location to serve as the location for timeline
// samples. We need it because the profiling backend (mimicking the official
// pprof tool's behavior) ignores samples that have no location.
const fn = new Function({ id: this.functions.length + 1, name: this.stringTable.dedup('') })
this.functions.push(fn)
const line = new Line({ functionId: fn.id })
const location = new Location({ id: this.locations.length + 1, line: [line] })
this.locations.push(location)
this.locationId = [location.id]
this.timestampLabelKey = this.stringTable.dedup(END_TIMESTAMP_LABEL)
}
addEvent (item) {
const { entryType, startTime, duration } = item
let decorator = this.decorators[entryType]
if (!decorator) {
const DecoratorCtor = decoratorTypes[entryType]
if (DecoratorCtor) {
decorator = new DecoratorCtor(this.stringTable)
decorator.eventTypeLabel = labelFromStrStr(this.stringTable, 'event', entryType)
this.decorators[entryType] = decorator
} else {
// Shouldn't happen but it's better to not rely on observer only getting
// requested event types.
return
}
}
const endTime = startTime + duration
const sampleInput = {
value: [Math.round(duration * MS_TO_NS)],
locationId: this.locationId,
label: [
decorator.eventTypeLabel,
new Label({ key: this.timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) })
]
}
decorator.decorateSample(sampleInput, item)
this.samples.push(new Sample(sampleInput))
}
createProfile (startDate, endDate) {
const timeValueType = new ValueType({
type: this.stringTable.dedup(pprofValueType),
unit: this.stringTable.dedup(pprofValueUnit)
})
return new Profile({
sampleType: [timeValueType],
timeNanos: endDate.getTime() * MS_TO_NS,
periodType: timeValueType,
period: 1,
durationNanos: (endDate.getTime() - startDate.getTime()) * MS_TO_NS,
sample: this.samples,
location: this.locations,
function: this.functions,
stringTable: this.stringTable
})
}
}
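A usage sketch for `EventSerializer`, assuming the pprof-format classes imported at the top of the file and `startDate`/`endDate` as `Date` instances delimiting the profiling period:

```js
const { PerformanceObserver } = require('perf_hooks')

const serializer = new EventSerializer()
const observer = new PerformanceObserver(list => {
  for (const entry of list.getEntries()) {
    serializer.addEvent(entry) // each entry becomes one pprof Sample
  }
})
observer.observe({ entryTypes: ['gc'] })

// ...later, when the profiling period ends:
const profile = serializer.createProfile(startDate, endDate)
```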
/**

@@ -160,3 +232,3 @@ * This class generates pprof files with timeline events sourced from Node.js

this._observer = undefined
this.entries = []
this.eventSerializer = new EventSerializer()
}

@@ -169,3 +241,5 @@

function add (items) {
this.entries.push(...items.getEntries())
for (const item of items.getEntries()) {
this.eventSerializer.addEvent(item)
}
}

@@ -184,85 +258,8 @@ this._observer = new PerformanceObserver(add.bind(this))

profile (restart, startDate, endDate) {
if (this.entries.length === 0) {
// No events in the period; don't produce a profile
return null
}
const stringTable = new StringTable()
const locations = []
const functions = []
// A synthetic single-frame location to serve as the location for timeline
// samples. We need it because the profiling backend (mimicking the official
// pprof tool's behavior) ignores samples that have no location.
const locationId = (() => {
const fn = new Function({ id: functions.length + 1, name: stringTable.dedup('') })
functions.push(fn)
const line = new Line({ functionId: fn.id })
const location = new Location({ id: locations.length + 1, line: [line] })
locations.push(location)
return [location.id]
})()
const decorators = {}
for (const [eventType, DecoratorCtor] of Object.entries(decoratorTypes)) {
const decorator = new DecoratorCtor(stringTable)
decorator.eventTypeLabel = labelFromStrStr(stringTable, 'event', eventType)
decorators[eventType] = decorator
}
const timestampLabelKey = stringTable.dedup(END_TIMESTAMP_LABEL)
const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS))
const lateEntries = []
const perfEndDate = endDate.getTime() - performance.timeOrigin
const samples = this.entries.map((item) => {
const decorator = decorators[item.entryType]
if (!decorator) {
// Shouldn't happen, but it's better not to rely on the observer only
// delivering the requested event types.
return null
}
const { startTime, duration } = item
if (startTime >= perfEndDate) {
// An event past the current recording end date; save it for the next
// profile. Not supposed to happen as long as there's no async activity
// between capture of the endDate value in profiler.js _collect() and
// here, but better be safe than sorry.
lateEntries.push(item)
return null
}
const endTime = startTime + duration
const sampleInput = {
value: [Math.round(duration * MS_TO_NS)],
locationId,
label: [
decorator.eventTypeLabel,
new Label({ key: timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) })
]
}
decorator.decorateSample(sampleInput, item)
return new Sample(sampleInput)
}).filter(v => v)
this.entries = lateEntries
const timeValueType = new ValueType({
type: stringTable.dedup(pprofValueType),
unit: stringTable.dedup(pprofValueUnit)
})
if (!restart) {
this.stop()
}
return new Profile({
sampleType: [timeValueType],
timeNanos: endDate.getTime() * MS_TO_NS,
periodType: timeValueType,
period: 1,
durationNanos: (endDate.getTime() - startDate.getTime()) * MS_TO_NS,
sample: samples,
location: locations,
function: functions,
stringTable: stringTable
})
const profile = this.eventSerializer.createProfile(startDate, endDate)
this.eventSerializer = new EventSerializer()
return profile
}

@@ -269,0 +266,0 @@

@@ -9,2 +9,3 @@ 'use strict'

const telemetry = require('./telemetry')
const nomenclature = require('./service-naming')
const PluginManager = require('./plugin_manager')

@@ -21,2 +22,3 @@ const remoteConfig = require('./appsec/remote_config')

this._initialized = false
this._nomenclature = nomenclature
this._pluginManager = new PluginManager(this)

@@ -122,6 +124,7 @@ this.dogstatsd = new dogstatsd.NoopDogStatsDClient()

}
} else {
} else if (this._tracingInitialized) {
require('./appsec').disable()
require('./appsec/iast').disable()
}
if (this._tracingInitialized) {

@@ -128,0 +131,0 @@ this._tracer.configure(config)

@@ -241,8 +241,11 @@ 'use strict'

*/
const captureELU = ('eventLoopUtilization' in performance) ? () => {
// if elu is undefined (first run) the measurement is from start of process
elu = performance.eventLoopUtilization(elu)
let captureELU = () => {}
if ('eventLoopUtilization' in performance) {
captureELU = () => {
// if elu is undefined (first run) the measurement is from start of process
elu = performance.eventLoopUtilization(elu)
client.gauge('runtime.node.event_loop.utilization', elu.utilization)
} : () => {}
client.gauge('runtime.node.event_loop.utilization', elu.utilization)
}
}
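The refactor above keeps the standard `perf_hooks` delta pattern: passing the previous measurement back in makes each reading cover only the interval since the last one. A standalone sketch:

```js
const { performance } = require('perf_hooks')

let elu // undefined on the first call, so the first reading covers process start
setInterval(() => {
  elu = performance.eventLoopUtilization(elu) // delta since the previous reading
  console.log('event loop utilization:', elu.utilization)
}, 10000).unref()
```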

@@ -249,0 +252,0 @@ function captureCommonMetrics () {

@@ -7,3 +7,3 @@ 'use strict'

if (process.platform !== 'win32' && process.platform !== 'linux') {
log.error(`Serverless Mini Agent is only supported on Windows and Linux.`)
log.error('Serverless Mini Agent is only supported on Windows and Linux.')
return

@@ -38,3 +38,4 @@ }

const rustBinaryPathOsFolder = process.platform === 'win32'
? 'datadog-serverless-agent-windows-amd64' : 'datadog-serverless-agent-linux-amd64'
? 'datadog-serverless-agent-windows-amd64'
: 'datadog-serverless-agent-linux-amd64'

@@ -41,0 +42,0 @@ const rustBinaryExtension = process.platform === 'win32' ? '.exe' : ''

@@ -36,2 +36,26 @@ const { identityService, httpPluginClientService, awsServiceV0 } = require('../util')

server: {
'apollo.gateway.request': {
opName: () => 'apollo.gateway.request',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.plan': {
opName: () => 'apollo.gateway.plan',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.validate': {
opName: () => 'apollo.gateway.validate',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.execute': {
opName: () => 'apollo.gateway.execute',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.fetch': {
opName: () => 'apollo.gateway.fetch',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.postprocessing': {
opName: () => 'apollo.gateway.postprocessing',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
grpc: {

@@ -38,0 +62,0 @@ opName: () => DD_MAJOR <= 2 ? 'grpc.request' : 'grpc.server',

@@ -1,2 +0,1 @@

function configWithFallback ({ tracerService, pluginConfig }) {

@@ -3,0 +2,0 @@ return pluginConfig.service || tracerService

@@ -35,2 +35,26 @@ const { identityService, httpPluginClientService } = require('../util')

server: {
'apollo.gateway.request': {
opName: () => 'apollo.gateway.request',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.plan': {
opName: () => 'apollo.gateway.plan',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.validate': {
opName: () => 'apollo.gateway.validate',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.execute': {
opName: () => 'apollo.gateway.execute',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.fetch': {
opName: () => 'apollo.gateway.fetch',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
'apollo.gateway.postprocessing': {
opName: () => 'apollo.gateway.postprocessing',
serviceName: ({ pluginConfig, tracerService }) => pluginConfig.service || tracerService
},
grpc: {

@@ -37,0 +61,0 @@ opName: () => 'grpc.server.request',

@@ -61,7 +61,7 @@ 'use strict'

if (this._killAll) {
started.map(startedSpan => {
for (const startedSpan of started) {
if (!startedSpan._finished) {
startedSpan.finish()
}
})
}
}

@@ -68,0 +68,0 @@ }

@@ -184,3 +184,3 @@ const os = require('os')

for (const [ timeNs, bucket ] of this.buckets.entries()) {
for (const [timeNs, bucket] of this.buckets.entries()) {
const bucketAggStats = []

@@ -187,0 +187,0 @@

@@ -15,3 +15,3 @@ 'use strict'

const FILE_URI_START = `file://`
const FILE_URI_START = 'file://'
const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart')

@@ -25,8 +25,7 @@

function createBatchPayload (payload) {
const batchPayload = []
payload.map(item => {
batchPayload.push({
const batchPayload = payload.map(item => {
return {
request_type: item.reqType,
payload: item.payload
})
}
})

@@ -33,0 +32,0 @@

@@ -9,2 +9,3 @@ 'use strict'

const { manager: metricsManager } = require('./metrics')
const logs = require('./logs')

@@ -23,2 +24,3 @@ const telemetryStartChannel = dc.channel('datadog:telemetry:start')

let integrations
let configWithOrigin = []
let retryData = null

@@ -38,3 +40,3 @@ const extendedHeartbeatPayload = {}

const reqType = retryObj.payload[0].request_type
retryData = { payload: payload, reqType: reqType }
retryData = { payload, reqType }

@@ -47,13 +49,13 @@ // Since this payload failed twice it now gets saved into the extended heartbeat

if (failedReqType === 'app-integrations-change') {
if (extendedHeartbeatPayload['integrations']) {
extendedHeartbeatPayload['integrations'].push(failedPayload)
if (extendedHeartbeatPayload.integrations) {
extendedHeartbeatPayload.integrations.push(failedPayload)
} else {
extendedHeartbeatPayload['integrations'] = [failedPayload]
extendedHeartbeatPayload.integrations = [failedPayload]
}
}
if (failedReqType === 'app-dependencies-loaded') {
if (extendedHeartbeatPayload['dependencies']) {
extendedHeartbeatPayload['dependencies'].push(failedPayload)
if (extendedHeartbeatPayload.dependencies) {
extendedHeartbeatPayload.dependencies.push(failedPayload)
} else {
extendedHeartbeatPayload['dependencies'] = [failedPayload]
extendedHeartbeatPayload.dependencies = [failedPayload]
}

@@ -102,19 +104,2 @@ }

function flatten (input, result = [], prefix = [], traversedObjects = null) {
traversedObjects = traversedObjects || new WeakSet()
if (traversedObjects.has(input)) {
return
}
traversedObjects.add(input)
for (const [key, value] of Object.entries(input)) {
if (typeof value === 'object' && value !== null) {
flatten(value, result, [...prefix, key], traversedObjects)
} else {
// TODO: add correct origin value
result.push({ name: [...prefix, key].join('.'), value, origin: 'unknown' })
}
}
return result
}
function getInstallSignature (config) {

@@ -134,3 +119,3 @@ const { installSignature: sig } = config

products: getProducts(config),
configuration: flatten(config)
configuration: configWithOrigin
}

@@ -211,8 +196,7 @@ const installSignature = getInstallSignature(config)

function createBatchPayload (payload) {
const batchPayload = []
payload.map(item => {
batchPayload.push({
const batchPayload = payload.map(item => {
return {
request_type: item.reqType,
payload: item.payload
})
}
})

@@ -227,6 +211,6 @@

const batchPayload = createBatchPayload([payload, retryData])
return { 'reqType': 'message-batch', 'payload': batchPayload }
return { reqType: 'message-batch', payload: batchPayload }
}
return { 'reqType': currReqType, 'payload': currPayload }
return { reqType: currReqType, payload: currPayload }
}

@@ -237,2 +221,3 @@

metricsManager.send(config, application, host)
logs.send(config, application, host)

@@ -271,2 +256,3 @@ const { reqType, payload } = createPayload('app-heartbeat')

dependencies.start(config, application, host, getRetryData, updateRetryData)
logs.start(config)

@@ -315,2 +301,10 @@ sendData(config, application, host, 'app-started', appStarted(config))

function formatMapForTelemetry (map) {
// format from an object to a string map in order for
// telemetry intake to accept the configuration
return map
? Object.entries(map).map(([key, value]) => `${key}:${value}`).join(',')
: ''
}
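Behavior sketch for the helper above:

```js
formatMapForTelemetry({ env: 'prod', team: 'apm' }) // 'env:prod,team:apm'
formatMapForTelemetry(undefined)                    // ''
```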
function updateConfig (changes, config) {

@@ -320,9 +314,6 @@ if (!config.telemetry.enabled) return

// Hack to make system tests happy until we ship telemetry v2
if (process.env.DD_INTERNAL_TELEMETRY_V2_ENABLED !== '1') return
const application = createAppObject(config)
const host = createHostObject()
const names = {
const nameMapping = {
sampleRate: 'DD_TRACE_SAMPLE_RATE',

@@ -334,16 +325,16 @@ logInjection: 'DD_LOG_INJECTION',

const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping'])
const configuration = []
const names = [] // list of config names whose values have been changed
for (const change of changes) {
if (!names.hasOwnProperty(change.name)) continue
const name = names[change.name]
const name = nameMapping[change.name] || change.name
names.push(name)
const { origin, value } = change
const entry = { name, origin, value }
const entry = { name, value, origin }
if (Array.isArray(value)) {
entry.value = value.join(',')
} else if (name === 'DD_TAGS') {
entry.value = Object.entries(value).map(([key, value]) => `${key}:${value}`)
}
if (Array.isArray(value)) entry.value = value.join(',')
if (namesNeedFormatting.has(entry.name)) entry.value = formatMapForTelemetry(entry.value)
if (entry.name === 'url' && entry.value) entry.value = entry.value.toString()

@@ -353,5 +344,15 @@ configuration.push(entry)

const { reqType, payload } = createPayload('app-client-configuration-change', { configuration })
function isNotModified (entry) {
return !names.includes(entry.name)
}
sendData(config, application, host, reqType, payload, updateRetryData)
if (!configWithOrigin.length) {
configWithOrigin = configuration
} else {
// update configWithOrigin to contain up-to-date full list of config values for app-extended-heartbeat
configWithOrigin = configWithOrigin.filter(isNotModified)
configWithOrigin = configWithOrigin.concat(configuration)
const { reqType, payload } = createPayload('app-client-configuration-change', { configuration })
sendData(config, application, host, reqType, payload, updateRetryData)
}
}
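Entries produced by the logic above end up shaped like this (illustrative values): names are mapped to their canonical telemetry keys, and map-like values such as `DD_TAGS` are flattened with `formatMapForTelemetry`:

```js
// { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'env_var' }
// { name: 'DD_TAGS', value: 'env:prod,team:apm', origin: 'code' }
```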

@@ -358,0 +359,0 @@

@@ -55,5 +55,5 @@ 'use strict'

const logs = { 'logs': logCollector.drain() }
const logs = logCollector.drain()
if (logs) {
sendData(config, application, host, 'logs', logs)
sendData(config, application, host, 'logs', { logs })
}

@@ -60,0 +60,0 @@ }

@@ -1,2 +0,1 @@

const request = require('../exporters/common/request')

@@ -30,5 +29,2 @@ const log = require('../log')

}
if (site === 'datadoghq.eu') {
return 'https://instrumentation-telemetry-intake.eu1.datadoghq.com'
}
return `https://instrumentation-telemetry-intake.${site}`

@@ -35,0 +31,0 @@ }

@@ -11,3 +11,3 @@ 'use strict'

const { DataStreamsProcessor } = require('./datastreams/processor')
const { decodePathwayContext } = require('./datastreams/pathway')
const { DsmPathwayCodec } = require('./datastreams/pathway')
const { DD_MAJOR } = require('../../../version')

@@ -43,4 +43,4 @@ const DataStreamsContext = require('./data_streams_context')

decodeDataStreamsContext (data) {
const ctx = decodePathwayContext(data)
decodeDataStreamsContext (carrier) {
const ctx = DsmPathwayCodec.decode(carrier)
// we erase the previous context every time we decode a new one

@@ -47,0 +47,0 @@ DataStreamsContext.setDataStreamsContext(ctx)

@@ -35,9 +35,2 @@ 'use strict'

switch (c) {
default: // ordinary character
if (sx < subject.length && subject[sx] === c) {
px++
sx++
continue
}
break
case '?':

@@ -55,2 +48,9 @@ if (sx < subject.length) {

continue
default: // ordinary character
if (sx < subject.length && subject[sx] === c) {
px++
sx++
continue
}
break
}

@@ -57,0 +57,0 @@ }
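
The switch hunks above only move the `default` clause after the explicit cases, presumably to satisfy a default-case-last style rule (an assumption; the diff itself just reorders). Since every branch ends in `continue` or `break`, matching behavior is unchanged: a `default` clause only runs when no case matches, wherever it sits. A tiny standalone switch, with invented labels, shows the reordered shape:

```js
function classify (c) {
  switch (c) {
    case '?':
      return 'match any single character'
    case '*':
      return 'match any run of characters'
    default:
      return 'ordinary character, must match literally'
  }
}

classify('?') // => 'match any single character'
classify('x') // => 'ordinary character, must match literally'
```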

@@ -65,92 +65,9 @@ # `dd-trace`: Node.js APM Tracer Library

## Development
## Development and Contribution
Before contributing to this open source project, read our [CONTRIBUTING.md](https://github.com/DataDog/dd-trace-js/blob/master/CONTRIBUTING.md).
Please read the [CONTRIBUTING.md](https://github.com/DataDog/dd-trace-js/blob/master/CONTRIBUTING.md) document before contributing to this open source project.
## Requirements
## Experimental ESM Support
Since this project supports multiple Node versions, using a version
manager such as [nvm](https://github.com/creationix/nvm) is recommended.
We use [yarn](https://yarnpkg.com/) for its workspace functionality, so make sure to install that as well.
To install dependencies once you have Node and yarn installed, run:
```sh
$ yarn
```
## Testing
Before running _plugin_ tests, the data stores need to be running.
The easiest way to start all of them is to use the provided
docker-compose configuration:
```sh
$ docker-compose up -d -V --remove-orphans --force-recreate
$ yarn services
```
> **Note**
> The `couchbase`, `grpc` and `oracledb` instrumentations rely on native modules
> that do not compile on ARM64 devices (for example M1/M2 Mac) - their tests
> cannot be run locally on these devices.
### Unit Tests
There are several types of unit tests, for various types of components. The
following commands may be useful:
```sh
# Tracer core tests (i.e. testing `packages/dd-trace`)
$ yarn test:trace:core
# "Core" library tests (i.e. testing `packages/datadog-core`
$ yarn test:core
# Instrumentations tests (i.e. testing `packages/datadog-instrumentations`
$ yarn test:instrumentations
```
Several other components have test commands as well. See `package.json` for
details.
To test _plugins_ (i.e. components in `packages/datadog-plugin-XXXX`
directories), set the `PLUGINS` environment variable to the plugin you're
interested in, and use `yarn test:plugins`. If you need to test multiple
plugins you may separate them with a pipe (`|`) delimiter. Here's an
example testing the `express` and `bluebird` plugins:
```sh
PLUGINS="express|bluebird" yarn test:plugins
```
### Memory Leaks
To run the memory leak tests, use:
```sh
$ yarn leak:core
# or
$ yarn leak:plugins
```
### Linting
We use [ESLint](https://eslint.org) to make sure that new code
conforms to our coding standards.
To run the linter, use:
```sh
$ yarn lint
```
### Experimental ESM Support
> **Warning**

@@ -167,2 +84,4 @@ >

Node.js < v20.6
```sh

@@ -172,18 +91,8 @@ node --loader dd-trace/loader-hook.mjs entrypoint.js

Node.js >= v20.6
### Benchmarks
Our microbenchmarks live in `benchmark/sirun`. Each directory in there
corresponds to a specific benchmark test and its variants, which are used to
track regressions and improvements over time.
In addition to those, when two or more approaches must be compared, please write
a benchmark in the `benchmark/index.js` module so that we can keep track of the
most efficient algorithm. To run your benchmark, use:
```sh
$ yarn bench
node --import dd-trace/register.js entrypoint.js
```
## Serverless / Lambda

@@ -205,2 +114,2 @@

If you have found a security issue, please contact the security team directly at [security@datadoghq.com](mailto:security@datadoghq.com).
Please refer to the [SECURITY.md](https://github.com/DataDog/dd-trace-js/blob/master/SECURITY.md) document if you have found a security issue.
