dd-trace - npm Package Compare versions

Comparing version 6.0.0-pre-c879030 to 6.0.0-pre-ca25754

packages/datadog-instrumentations/src/selenium.js


ci/init.js

@@ -6,2 +6,3 @@ /* eslint-disable no-console */

const isJestWorker = !!process.env.JEST_WORKER_ID
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID

@@ -41,2 +42,8 @@ const options = {

if (isCucumberWorker) {
options.experimental = {
exporter: 'cucumber_worker'
}
}
if (shouldInit) {

@@ -43,0 +50,0 @@ tracer.init(options)
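
Taken together, the ci/init.js and exporter changes above route traces from Cucumber parallel workers through a dedicated exporter. A minimal sketch of the resulting init flow (the surrounding CI Visibility option handling is omitted and the shouldInit guard is simplified):

const tracer = require('dd-trace')

const options = { /* CI Visibility defaults from ci/init.js */ }
if (process.env.CUCUMBER_WORKER_ID) {
  // the process is a cucumber parallel worker: report through the new worker exporter
  options.experimental = { exporter: 'cucumber_worker' }
}
tracer.init(options)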


ext/exporters.d.ts

@@ -6,5 +6,6 @@ declare const exporters: {

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker'
}
export = exporters

@@ -7,3 +7,4 @@ 'use strict'

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker'
}
{
"name": "dd-trace",
"version": "6.0.0-pre-c879030",
"version": "6.0.0-pre-ca25754",
"description": "Datadog APM tracing client for JavaScript",

@@ -39,2 +39,3 @@ "main": "index.js",

"test:integration:playwright": "mocha --colors --timeout 30000 \"integration-tests/playwright/*.spec.js\"",
"test:integration:selenium": "mocha --colors --timeout 30000 \"integration-tests/selenium/*.spec.js\"",
"test:integration:profiler": "mocha --colors --timeout 90000 \"integration-tests/profiler/*.spec.js\"",

@@ -41,0 +42,0 @@ "test:integration:serverless": "mocha --colors --timeout 30000 \"integration-tests/serverless/*.spec.js\"",

@@ -165,4 +165,7 @@ 'use strict'

's3',
'sfn',
'sns',
'sqs'
'sqs',
'states',
'stepfunctions'
].includes(name)

@@ -169,0 +172,0 @@ ? name

@@ -25,2 +25,4 @@ 'use strict'

const workerReportTraceCh = channel('ci:cucumber:worker-report:trace')
const itrSkippedSuitesCh = channel('ci:cucumber:itr:skipped-suites')

@@ -33,3 +35,4 @@

fromCoverageMapToCoverage,
getTestSuitePath
getTestSuitePath,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE
} = require('../../dd-trace/src/plugins/util/test')

@@ -52,2 +55,5 @@

const pickleResultByFile = {}
const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
let skippableSuites = []

@@ -57,5 +63,8 @@ let itrCorrelationId = ''

let isUnskippable = false
let isSuitesSkippingEnabled = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let knownTests = []
let skippedSuites = []
let isSuitesSkipped = false

@@ -113,2 +122,39 @@ function getSuiteStatusFromTestStatuses (testStatuses) {

function getChannelPromise (channelToPublishTo) {
return new Promise(resolve => {
sessionAsyncResource.runInAsyncScope(() => {
channelToPublishTo.publish({ onDone: resolve })
})
})
}
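
getChannelPromise turns the tracer's publish/subscribe channels into awaitable requests: it publishes an object whose onDone callback resolves the promise. A minimal sketch of the subscriber side, assuming Node's diagnostics_channel semantics (the channel name and response shape are illustrative, not the exact plugin code):

const { channel } = require('diagnostics_channel')

// Hypothetical subscriber (the plugin side of a channel such as ci:cucumber:library-configuration):
// it receives { onDone } from the publish in getChannelPromise and resolves the awaited
// promise by calling it with a response payload.
const libraryConfigurationCh = channel('ci:cucumber:library-configuration')
libraryConfigurationCh.subscribe(({ onDone }) => {
  onDone({ libraryConfig: { isSuitesSkippingEnabled: true, isEarlyFlakeDetectionEnabled: false } })
})
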
function getFilteredPickles (runtime, suitesToSkip) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
const testSuitePath = getTestSuitePath(test.uri, process.cwd())
const isUnskippable = isMarkedAsUnskippable(test)
const isSkipped = suitesToSkip.includes(testSuitePath)
if (isSkipped && !isUnskippable) {
acc.skippedSuites.add(testSuitePath)
} else {
acc.picklesToRun.push(pickleId)
}
return acc
}, { skippedSuites: new Set(), picklesToRun: [] })
}
function getPickleByFile (runtime) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
if (acc[test.uri]) {
acc[test.uri].push(test)
} else {
acc[test.uri] = [test]
}
return acc
}, {})
}
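
For illustration, the shapes these two helpers produce (values are made up):

// getFilteredPickles(runtime, ['features/checkout.feature']) =>
//   { skippedSuites: Set { 'features/checkout.feature' }, picklesToRun: ['pickle-2', 'pickle-3'] }
// getPickleByFile(runtime) =>
//   { 'features/login.feature': [pickle1, pickle2], 'features/checkout.feature': [pickle3] }
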
function wrapRun (pl, isLatestVersion) {

@@ -133,3 +179,4 @@ if (patched.has(pl)) return

testFileAbsolutePath,
testSourceLine
testSourceLine,
isParallel: !!process.env.CUCUMBER_WORKER_ID
})

@@ -202,8 +249,2 @@ try {

function pickleHook (PickleRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return PickleRunner
}
const pl = PickleRunner.default

@@ -217,8 +258,2 @@

function testCaseHook (TestCaseRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return TestCaseRunner
}
const pl = TestCaseRunner.default

@@ -231,44 +266,3 @@

addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
function getFilteredPickles (runtime, suitesToSkip) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
const testSuitePath = getTestSuitePath(test.uri, process.cwd())
const isUnskippable = isMarkedAsUnskippable(test)
const isSkipped = suitesToSkip.includes(testSuitePath)
if (isSkipped && !isUnskippable) {
acc.skippedSuites.add(testSuitePath)
} else {
acc.picklesToRun.push(pickleId)
}
return acc
}, { skippedSuites: new Set(), picklesToRun: [] })
}
function getPickleByFile (runtime) {
return runtime.pickleIds.reduce((acc, pickleId) => {
const test = runtime.eventDataCollector.getPickle(pickleId)
if (acc[test.uri]) {
acc[test.uri].push(test)
} else {
acc[test.uri] = [test]
}
return acc
}, {})
}
function getWrappedStart (start, frameworkVersion) {
function getWrappedStart (start, frameworkVersion, isParallel = false) {
return async function () {

@@ -278,26 +272,12 @@ if (!libraryConfigurationCh.hasSubscribers) {

}
const asyncResource = new AsyncResource('bound-anonymous-fn')
let onDone
let errorSkippableRequest
const configPromise = new Promise(resolve => {
onDone = resolve
})
const configurationResponse = await getChannelPromise(libraryConfigurationCh)
asyncResource.runInAsyncScope(() => {
libraryConfigurationCh.publish({ onDone })
})
const configurationResponse = await configPromise
isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled
if (isEarlyFlakeDetectionEnabled) {
const knownTestsPromise = new Promise(resolve => {
onDone = resolve
})
asyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
const knownTestsResponse = await knownTestsPromise
const knownTestsResponse = await getChannelPromise(knownTestsCh)
if (!knownTestsResponse.err) {

@@ -310,31 +290,22 @@ knownTests = knownTestsResponse.knownTests

const skippableSuitesPromise = new Promise(resolve => {
onDone = resolve
})
if (isSuitesSkippingEnabled) {
const skippableResponse = await getChannelPromise(skippableSuitesCh)
asyncResource.runInAsyncScope(() => {
skippableSuitesCh.publish({ onDone })
})
errorSkippableRequest = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
const skippableResponse = await skippableSuitesPromise
if (!errorSkippableRequest) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
const err = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
let skippedSuites = []
let isSuitesSkipped = false
this.pickleIds = picklesToRun
if (!err) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
this.pickleIds = picklesToRun
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
}
}

@@ -347,7 +318,7 @@

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionStartCh.publish({ command, frameworkVersion })
})
if (!err && skippedSuites.length) {
if (!errorSkippableRequest && skippedSuites.length) {
itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })

@@ -370,3 +341,3 @@ }

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionFinishCh.publish({

@@ -379,3 +350,4 @@ status: success ? 'pass' : 'fail',

hasForcedToRunSuites: isForcedToRun,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
})

@@ -449,3 +421,4 @@ })

coverageFiles,
suiteFile: testFileAbsolutePath
suiteFile: testFileAbsolutePath,
testSuitePath
})

@@ -458,3 +431,3 @@ // We need to reset coverage to get a code coverage per suite

testSuiteFinishCh.publish(testSuiteStatus)
testSuiteFinishCh.publish({ status: testSuiteStatus, testSuitePath })
}

@@ -466,6 +439,89 @@

// From 7.3.0 onwards, runPickle becomes runTestCase
function getWrappedParseWorkerMessage (parseWorkerMessageFunction) {
return function (worker, message) {
// If the message is an array, it's a dd-trace message, so we need to stop cucumber processing,
// or cucumber will throw an error
// TODO: identify the message better
if (Array.isArray(message)) {
const [messageCode, payload] = message
if (messageCode === CUCUMBER_WORKER_TRACE_PAYLOAD_CODE) {
sessionAsyncResource.runInAsyncScope(() => {
workerReportTraceCh.publish(payload)
})
return
}
}
const { jsonEnvelope } = message
if (!jsonEnvelope) {
return parseWorkerMessageFunction.apply(this, arguments)
}
let parsed = jsonEnvelope
if (typeof parsed === 'string') {
try {
parsed = JSON.parse(jsonEnvelope)
} catch (e) {
// ignore errors and continue
return parseWorkerMessageFunction.apply(this, arguments)
}
}
if (parsed.testCaseStarted) {
const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId]
const pickle = this.eventDataCollector.getPickle(pickleId)
const testFileAbsolutePath = pickle.uri
// First test in suite
if (!pickleResultByFile[testFileAbsolutePath]) {
pickleResultByFile[testFileAbsolutePath] = []
testSuiteStartCh.publish({
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
const parseWorkerResponse = parseWorkerMessageFunction.apply(this, arguments)
// after calling `parseWorkerMessageFunction`, the test status can already be read
if (parsed.testCaseFinished) {
const { pickle, worstTestStepResult } =
this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId)
const { status } = getStatusFromResultLatest(worstTestStepResult)
const testFileAbsolutePath = pickle.uri
const finished = pickleResultByFile[testFileAbsolutePath]
finished.push(status)
if (finished.length === pickleByFile[testFileAbsolutePath].length) {
testSuiteFinishCh.publish({
status: getSuiteStatusFromTestStatuses(finished),
testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
})
}
}
return parseWorkerResponse
}
}
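
getWrappedParseWorkerMessage intercepts coordinator-side messages, so a worker has to send its traces in the array shape checked above. A hypothetical worker-side counterpart (the worker/exporter diff is not shown on this page; this assumes the worker is a forked child process):

// CUCUMBER_WORKER_TRACE_PAYLOAD_CODE is 70, per util/test.js further down in this diff.
const CUCUMBER_WORKER_TRACE_PAYLOAD_CODE = 70
const traces = [[/* serialized spans for one trace */]]
// Anything that is not this array shape falls through to cucumber's own envelope handling.
process.send([CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, JSON.stringify(traces)])
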
// Test start / finish for older versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
// Test start / finish for newer versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
// From 7.3.0 onwards, runPickle becomes runTestCase. Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/index.js'

@@ -479,2 +535,5 @@ }, (runtimePackage, frameworkVersion) => {

// Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({

@@ -490,1 +549,19 @@ name: '@cucumber/cucumber',

})
// Only executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedGiveWork` generates suite start events and sets pickleResultByFile (used by suite finish events)
// `getWrappedParseWorkerMessage` generates suite finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=8.0.0'],
file: 'lib/runtime/parallel/coordinator.js'
}, (coordinatorPackage, frameworkVersion) => {
shimmer.wrap(coordinatorPackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion, true))
shimmer.wrap(
coordinatorPackage.default.prototype,
'parseWorkerMessage',
parseWorkerMessage => getWrappedParseWorkerMessage(parseWorkerMessage)
)
return coordinatorPackage
})

@@ -10,9 +10,14 @@ 'use strict'

const req = new Request(input, init)
const headers = req.headers
const ctx = { req, headers }
if (input instanceof Request) {
const ctx = { req: input }
return ch.tracePromise(() => fetch.call(this, req, { headers: ctx.headers }), ctx)
return ch.tracePromise(() => fetch.call(this, input, init), ctx)
} else {
const req = new Request(input, init)
const ctx = { req }
return ch.tracePromise(() => fetch.call(this, req), ctx)
}
}
}
}
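
The effect of the fetch change above, sketched from the caller's point of view (not the exact instrumentation code): a pre-built Request is forwarded untouched instead of being re-wrapped, while string or URL inputs are still normalized into a Request for the tracing context.

// Both call styles end up traced; only the second one constructs a new Request internally.
fetch(new Request('https://example.com/api', { method: 'POST' })) // passed through as-is
fetch('https://example.com/api')                                   // normalized into a Request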

@@ -61,2 +61,3 @@ 'use strict'

'jest-jasmine2': () => require('../jest'),
'jest-runtime': () => require('../jest'),
'jest-worker': () => require('../jest'),

@@ -107,4 +108,5 @@ knex: () => require('../knex'),

router: () => require('../router'),
'selenium-webdriver': () => require('../selenium'),
sequelize: () => require('../sequelize'),
sharedb: () => require('../sharedb'),
sequelize: () => require('../sequelize'),
tedious: () => require('../tedious'),

@@ -111,0 +113,0 @@ when: () => require('../when'),

@@ -879,3 +879,31 @@ 'use strict'

const LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE = [
'selenium-webdriver'
]
function shouldBypassJestRequireEngine (moduleName) {
return (
LIBRARIES_BYPASSING_JEST_REQUIRE_ENGINE.some(library => moduleName.includes(library))
)
}
addHook({
name: 'jest-runtime',
versions: ['>=24.8.0']
}, (runtimePackage) => {
const Runtime = runtimePackage.default ? runtimePackage.default : runtimePackage
shimmer.wrap(Runtime.prototype, 'requireModuleOrMock', requireModuleOrMock => function (from, moduleName) {
// TODO: do this for every library that we instrument
if (shouldBypassJestRequireEngine(moduleName)) {
// To bypass jest's own require engine
return this._requireCoreModule(moduleName)
}
return requireModuleOrMock.apply(this, arguments)
})
return runtimePackage
})
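
What the jest-runtime hook above changes, per its own comments (a sketch of the intent, not a verified behaviour description): module names on the bypass list skip Jest's module registry and are loaded via this._requireCoreModule, so requiring selenium-webdriver inside a Jest test resolves to the copy dd-trace instruments.

// The match is a substring check (moduleName.includes(library)):
shouldBypassJestRequireEngine('selenium-webdriver/lib/webdriver') // true
shouldBypassJestRequireEngine('jest-circus')                      // false
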
addHook({
name: 'jest-worker',

@@ -882,0 +910,0 @@ versions: ['>=24.9.0'],

@@ -13,2 +13,94 @@ 'use strict'

const V4_PACKAGE_SHIMS = [
{
file: 'resources/chat/completions.js',
targetClass: 'Completions',
baseResource: 'chat.completions',
methods: ['create']
},
{
file: 'resources/completions.js',
targetClass: 'Completions',
baseResource: 'completions',
methods: ['create']
},
{
file: 'resources/embeddings.js',
targetClass: 'Embeddings',
baseResource: 'embeddings',
methods: ['create']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['create', 'del', 'list', 'retrieve']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['retrieveContent'],
versions: ['>=4.0.0 <4.17.1']
},
{
file: 'resources/files.js',
targetClass: 'Files',
baseResource: 'files',
methods: ['content'], // replaced `retrieveContent` in v4.17.1
versions: ['>=4.17.1']
},
{
file: 'resources/images.js',
targetClass: 'Images',
baseResource: 'images',
methods: ['createVariation', 'edit', 'generate']
},
{
file: 'resources/fine-tuning/jobs/jobs.js',
targetClass: 'Jobs',
baseResource: 'fine_tuning.jobs',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.34.0'] // file location changed in 4.34.0
},
{
file: 'resources/fine-tuning/jobs.js',
targetClass: 'Jobs',
baseResource: 'fine_tuning.jobs',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.1.0 <4.34.0']
},
{
file: 'resources/fine-tunes.js', // deprecated after 4.1.0
targetClass: 'FineTunes',
baseResource: 'fine-tune',
methods: ['cancel', 'create', 'list', 'listEvents', 'retrieve'],
versions: ['>=4.0.0 <4.1.0']
},
{
file: 'resources/models.js',
targetClass: 'Models',
baseResource: 'models',
methods: ['del', 'list', 'retrieve']
},
{
file: 'resources/moderations.js',
targetClass: 'Moderations',
baseResource: 'moderations',
methods: ['create']
},
{
file: 'resources/audio/transcriptions.js',
targetClass: 'Transcriptions',
baseResource: 'audio.transcriptions',
methods: ['create']
},
{
file: 'resources/audio/translations.js',
targetClass: 'Translations',
baseResource: 'audio.translations',
methods: ['create']
}
]
addHook({ name: 'openai', file: 'dist/api.js', versions: ['>=3.0.0 <4'] }, exports => {

@@ -52,1 +144,58 @@ const methodNames = Object.getOwnPropertyNames(exports.OpenAIApi.prototype)

})
for (const shim of V4_PACKAGE_SHIMS) {
const { file, targetClass, baseResource, methods } = shim
addHook({ name: 'openai', file, versions: shim.versions || ['>=4'] }, exports => {
const targetPrototype = exports[targetClass].prototype
for (const methodName of methods) {
shimmer.wrap(targetPrototype, methodName, methodFn => function () {
if (!startCh.hasSubscribers) {
return methodFn.apply(this, arguments)
}
const client = this._client || this.client
startCh.publish({
methodName: `${baseResource}.${methodName}`,
args: arguments,
basePath: client.baseURL,
apiKey: client.apiKey
})
const apiProm = methodFn.apply(this, arguments)
// wrapping `parse` avoids problematic wrapping of `then` when trying to call
// `withResponse` in userland code after. This way, we can return the whole `APIPromise`
shimmer.wrap(apiProm, 'parse', origApiPromParse => function () {
return origApiPromParse.apply(this, arguments)
// the original response is wrapped in a promise, so we need to unwrap it
.then(body => Promise.all([this.responsePromise, body]))
.then(([{ response, options }, body]) => {
finishCh.publish({
headers: response.headers,
body,
path: response.url,
method: options.method
})
return body
})
.catch(err => {
errorCh.publish({ err })
throw err
})
.finally(() => {
// maybe we don't want to unwrap here in case the promise is re-used?
// on the other hand: we want to avoid resource leakage
shimmer.unwrap(apiProm, 'parse')
})
})
return apiProm
})
}
return exports
})
}
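
The `parse` wrapping above is specifically meant to keep APIPromise helpers such as withResponse() usable in user code. A usage sketch against the openai v4 client (model name and prompt are placeholders):

const OpenAI = require('openai')

async function main () {
  const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
  const messages = [{ role: 'user', content: 'hello' }]

  // A plain await still resolves to the parsed body...
  const completion = await client.chat.completions.create({ model: 'gpt-4o-mini', messages })

  // ...and withResponse() keeps working, because `parse` is wrapped instead of `then`.
  const { data, response } = await client.chat.completions
    .create({ model: 'gpt-4o-mini', messages })
    .withResponse()

  console.log(completion.id, data.id, response.status)
}

main()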

@@ -7,3 +7,8 @@ 'use strict'

if (process.env.DD_TRACE_OTEL_ENABLED) {
const otelSdkEnabled = process.env.DD_TRACE_OTEL_ENABLED ||
process.env.OTEL_SDK_DISABLED
? !process.env.OTEL_SDK_DISABLED
: undefined
if (otelSdkEnabled) {
addHook({

@@ -10,0 +15,0 @@ name: '@opentelemetry/sdk-trace-node',

@@ -10,4 +10,7 @@ 'use strict'

exports.s3 = require('./s3')
exports.sfn = require('./sfn')
exports.sns = require('./sns')
exports.sqs = require('./sqs')
exports.states = require('./states')
exports.stepfunctions = require('./stepfunctions')
exports.default = require('./default')

@@ -21,3 +21,10 @@ 'use strict'

TEST_IS_NEW,
TEST_IS_RETRY
TEST_IS_RETRY,
TEST_SUITE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_MODULE_ID,
TEST_SUITE,
CUCUMBER_IS_PARALLEL
} = require('../../dd-trace/src/plugins/util/test')

@@ -36,3 +43,19 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
function getTestSuiteTags (testSuiteSpan) {
const suiteTags = {
[TEST_SUITE_ID]: testSuiteSpan.context().toSpanId(),
[TEST_SESSION_ID]: testSuiteSpan.context().toTraceId(),
[TEST_COMMAND]: testSuiteSpan.context()._tags[TEST_COMMAND],
[TEST_MODULE]: 'cucumber'
}
if (testSuiteSpan.context()._parentId) {
suiteTags[TEST_MODULE_ID] = testSuiteSpan.context()._parentId.toString(10)
}
return suiteTags
}
class CucumberPlugin extends CiPlugin {

@@ -48,2 +71,4 @@ static get id () {

this.testSuiteSpanByPath = {}
this.addSub('ci:cucumber:session:finish', ({

@@ -56,3 +81,4 @@ status,

hasForcedToRunSuites,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
}) => {

@@ -77,2 +103,5 @@ const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}

}
if (isParallel) {
this.testSessionSpan.setTag(CUCUMBER_IS_PARALLEL, 'true')
}

@@ -109,3 +138,3 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

}
this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
const testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
childOf: this.testModuleSpan,

@@ -118,2 +147,4 @@ tags: {

})
this.testSuiteSpanByPath[testSuitePath] = testSuiteSpan
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')

@@ -125,9 +156,10 @@ if (this.libraryConfig?.isCodeCoverageEnabled) {

this.addSub('ci:cucumber:test-suite:finish', status => {
this.testSuiteSpan.setTag(TEST_STATUS, status)
this.testSuiteSpan.finish()
this.addSub('ci:cucumber:test-suite:finish', ({ status, testSuitePath }) => {
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]
testSuiteSpan.setTag(TEST_STATUS, status)
testSuiteSpan.finish()
this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
})
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => {
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile, testSuitePath }) => {
if (!this.libraryConfig?.isCodeCoverageEnabled) {

@@ -139,2 +171,3 @@ return

}
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]

@@ -147,4 +180,4 @@ const relativeCoverageFiles = [...coverageFiles, suiteFile]

const formattedCoverage = {
sessionId: this.testSuiteSpan.context()._traceId,
suiteId: this.testSuiteSpan.context()._spanId,
sessionId: testSuiteSpan.context()._traceId,
suiteId: testSuiteSpan.context()._spanId,
files: relativeCoverageFiles

@@ -157,3 +190,3 @@ }

this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine }) => {
this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine, isParallel }) => {
const store = storage.getStore()

@@ -167,2 +200,6 @@ const testSuite = getTestSuitePath(testFileAbsolutePath, this.sourceRoot)

}
if (isParallel) {
extraTags[CUCUMBER_IS_PARALLEL] = 'true'
}
const testSpan = this.startTestSpan(testName, testSuite, extraTags)

@@ -187,2 +224,32 @@

this.addSub('ci:cucumber:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => ({
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}))
)
// We have to update the test session, test module and test suite ids
// before we export them in the main process
formattedTraces.forEach(trace => {
trace.forEach(span => {
if (span.name === 'cucumber.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
const testSuiteTags = getTestSuiteTags(testSuiteSpan)
span.meta = {
...span.meta,
...testSuiteTags
}
}
})
this.tracer._exporter.export(trace)
})
})
this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage, isNew, isEfdRetry }) => {

@@ -217,2 +284,6 @@ const span = storage.getStore().span

finishAllTraceSpans(span)
// If it's a worker, flushing is cheap, as it's just sending data to the main process
if (isCucumberWorker) {
this.tracer._exporter.flush()
}
}

@@ -230,6 +301,7 @@ })

startTestSpan (testName, testSuite, extraTags) {
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
return super.startTestSpan(
testName,
testSuite,
this.testSuiteSpan,
testSuiteSpan,
extraTags

@@ -236,0 +308,0 @@ )

@@ -20,4 +20,7 @@ 'use strict'

ctx.headers = headers
ctx.req = new globalThis.Request(req, { headers })
for (const name in headers) {
if (!req.headers.has(name)) {
req.headers.set(name, headers[name])
}
}

@@ -24,0 +27,0 @@ return store

@@ -147,6 +147,5 @@ 'use strict'

const store = storage.getStore()
const span = store?.span
if (store && store.span) {
const span = store.span
if (span) {
span.setTag(TEST_STATUS, status)

@@ -153,0 +152,0 @@

@@ -87,3 +87,3 @@ 'use strict'

// createChatCompletion, createCompletion, createImage, createImageEdit, createTranscription, createTranslation
if ('prompt' in payload) {
if (payload.prompt) {
const prompt = payload.prompt

@@ -103,3 +103,3 @@ store.prompt = prompt

// createEdit, createEmbedding, createModeration
if ('input' in payload) {
if (payload.input) {
const normalized = normalizeStringOrTokenArray(payload.input, false)

@@ -119,2 +119,4 @@ tags['openai.request.input'] = truncateText(normalized)

case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
createFineTuneRequestExtraction(tags, payload)

@@ -124,4 +126,7 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
case 'images.createVariation':
commonCreateImageRequestExtraction(tags, payload, store)

@@ -131,2 +136,3 @@ break

case 'createChatCompletion':
case 'chat.completions.create':
createChatCompletionRequestExtraction(tags, payload, store)

@@ -136,3 +142,5 @@ break

case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
commonFileRequestExtraction(tags, payload)

@@ -142,3 +150,5 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
case 'audio.translations.create':
commonCreateAudioRequestExtraction(tags, payload, store)

@@ -148,2 +158,3 @@ break

case 'retrieveModel':
case 'models.retrieve':
retrieveModelRequestExtraction(tags, payload)

@@ -153,5 +164,12 @@ break

case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonLookupFineTuneRequestExtraction(tags, payload)

@@ -161,2 +179,3 @@ break

case 'createEdit':
case 'edits.create':
createEditRequestExtraction(tags, payload, store)

@@ -170,2 +189,6 @@ break

finish ({ headers, body, method, path }) {
if (headers.constructor.name === 'Headers') {
headers = Object.fromEntries(headers)
}
const span = this.activeSpan

@@ -179,2 +202,7 @@ const methodName = span._spanContext._tags['resource.name']

if (path.startsWith('https://') || path.startsWith('http://')) {
// basic checking for if the path was set as a full URL
// not using a full regex as it will likely be "https://api.openai.com/..."
path = new URL(path).pathname
}
const endpoint = lookupOperationEndpoint(methodName, path)

@@ -184,3 +212,3 @@

'openai.request.endpoint': endpoint,
'openai.request.method': method,
'openai.request.method': method.toUpperCase(),

@@ -236,3 +264,3 @@ 'openai.organization.id': body.organization_id, // only available in fine-tunes endpoints

if (body && ('usage' in body)) {
if (body && body.usage) {
const promptTokens = body.usage.prompt_tokens

@@ -245,15 +273,15 @@ const completionTokens = body.usage.completion_tokens

if ('x-ratelimit-limit-requests' in headers) {
if (headers['x-ratelimit-limit-requests']) {
this.metrics.gauge('openai.ratelimit.requests', Number(headers['x-ratelimit-limit-requests']), tags)
}
if ('x-ratelimit-remaining-requests' in headers) {
if (headers['x-ratelimit-remaining-requests']) {
this.metrics.gauge('openai.ratelimit.remaining.requests', Number(headers['x-ratelimit-remaining-requests']), tags)
}
if ('x-ratelimit-limit-tokens' in headers) {
if (headers['x-ratelimit-limit-tokens']) {
this.metrics.gauge('openai.ratelimit.tokens', Number(headers['x-ratelimit-limit-tokens']), tags)
}
if ('x-ratelimit-remaining-tokens' in headers) {
if (headers['x-ratelimit-remaining-tokens']) {
this.metrics.gauge('openai.ratelimit.remaining.tokens', Number(headers['x-ratelimit-remaining-tokens']), tags)

@@ -293,6 +321,6 @@ }

const message = payload.messages[i]
tags[`openai.request.${i}.content`] = truncateText(message.content)
tags[`openai.request.${i}.role`] = message.role
tags[`openai.request.${i}.name`] = message.name
tags[`openai.request.${i}.finish_reason`] = message.finish_reason
tags[`openai.request.messages.${i}.content`] = truncateText(message.content)
tags[`openai.request.messages.${i}.role`] = message.role
tags[`openai.request.messages.${i}.name`] = message.name
tags[`openai.request.messages.${i}.finish_reason`] = message.finish_reason
}

@@ -303,4 +331,5 @@ }

// createImageEdit, createImageVariation
if (payload.file && typeof payload.file === 'object' && payload.file.path) {
const file = path.basename(payload.file.path)
const img = payload.file || payload.image
if (img && typeof img === 'object' && img.path) {
const file = path.basename(img.path)
tags['openai.request.image'] = file

@@ -325,2 +354,3 @@ store.file = file

case 'createModeration':
case 'moderations.create':
createModerationResponseExtraction(tags, body)

@@ -330,4 +360,7 @@ break

case 'createCompletion':
case 'completions.create':
case 'createChatCompletion':
case 'chat.completions.create':
case 'createEdit':
case 'edits.create':
commonCreateResponseExtraction(tags, body, store)

@@ -337,4 +370,9 @@ break

case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
commonListCountResponseExtraction(tags, body)

@@ -344,2 +382,3 @@ break

case 'createEmbedding':
case 'embeddings.create':
createEmbeddingResponseExtraction(tags, body)

@@ -349,3 +388,5 @@ break

case 'createFile':
case 'files.create':
case 'retrieveFile':
case 'files.retrieve':
createRetrieveFileResponseExtraction(tags, body)

@@ -355,2 +396,3 @@ break

case 'deleteFile':
case 'files.del':
deleteFileResponseExtraction(tags, body)

@@ -360,2 +402,4 @@ break

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
downloadFileResponseExtraction(tags, body)

@@ -365,4 +409,10 @@ break

case 'createFineTune':
case 'fine_tuning.jobs.create':
case 'fine-tune.create':
case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
commonFineTuneResponseExtraction(tags, body)

@@ -372,3 +422,5 @@ break

case 'createTranscription':
case 'audio.transcriptions.create':
case 'createTranslation':
case 'audio.translations.create':
createAudioResponseExtraction(tags, body)

@@ -378,4 +430,7 @@ break

case 'createImage':
case 'images.generate':
case 'createImageEdit':
case 'images.edit':
case 'createImageVariation':
case 'images.createVariation':
commonImageResponseExtraction(tags, body)

@@ -385,2 +440,3 @@ break

case 'listModels':
case 'models.list':
listModelsResponseExtraction(tags, body)

@@ -390,2 +446,3 @@ break

case 'retrieveModel':
case 'models.retrieve':
retrieveModelResponseExtraction(tags, body)

@@ -463,11 +520,15 @@ break

tags['openai.response.fine_tuned_model'] = body.fine_tuned_model
if (body.hyperparams) {
tags['openai.response.hyperparams.n_epochs'] = body.hyperparams.n_epochs
tags['openai.response.hyperparams.batch_size'] = body.hyperparams.batch_size
tags['openai.response.hyperparams.prompt_loss_weight'] = body.hyperparams.prompt_loss_weight
tags['openai.response.hyperparams.learning_rate_multiplier'] = body.hyperparams.learning_rate_multiplier
const hyperparams = body.hyperparams || body.hyperparameters
const hyperparamsKey = body.hyperparams ? 'hyperparams' : 'hyperparameters'
if (hyperparams) {
tags[`openai.response.${hyperparamsKey}.n_epochs`] = hyperparams.n_epochs
tags[`openai.response.${hyperparamsKey}.batch_size`] = hyperparams.batch_size
tags[`openai.response.${hyperparamsKey}.prompt_loss_weight`] = hyperparams.prompt_loss_weight
tags[`openai.response.${hyperparamsKey}.learning_rate_multiplier`] = hyperparams.learning_rate_multiplier
}
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files)
tags['openai.response.training_files_count'] = defensiveArrayLength(body.training_files || body.training_file)
tags['openai.response.result_files_count'] = defensiveArrayLength(body.result_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files)
tags['openai.response.validation_files_count'] = defensiveArrayLength(body.validation_files || body.validation_file)
tags['openai.response.updated_at'] = body.updated_at

@@ -561,14 +622,27 @@ tags['openai.response.status'] = body.status

for (let i = 0; i < body.choices.length; i++) {
const choice = body.choices[i]
tags[`openai.response.choices.${i}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${i}.logprobs`] = ('logprobs' in choice) ? 'returned' : undefined
tags[`openai.response.choices.${i}.text`] = truncateText(choice.text)
for (let choiceIdx = 0; choiceIdx < body.choices.length; choiceIdx++) {
const choice = body.choices[choiceIdx]
// logprobs can be null, and we still want to tag it as 'returned' even when set to 'null'
const specifiesLogProb = Object.keys(choice).indexOf('logprobs') !== -1
tags[`openai.response.choices.${choiceIdx}.finish_reason`] = choice.finish_reason
tags[`openai.response.choices.${choiceIdx}.logprobs`] = specifiesLogProb ? 'returned' : undefined
tags[`openai.response.choices.${choiceIdx}.text`] = truncateText(choice.text)
// createChatCompletion only
if ('message' in choice) {
if (choice.message) {
const message = choice.message
tags[`openai.response.choices.${i}.message.role`] = message.role
tags[`openai.response.choices.${i}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${i}.message.name`] = truncateText(message.name)
tags[`openai.response.choices.${choiceIdx}.message.role`] = message.role
tags[`openai.response.choices.${choiceIdx}.message.content`] = truncateText(message.content)
tags[`openai.response.choices.${choiceIdx}.message.name`] = truncateText(message.name)
if (message.tool_calls) {
const toolCalls = message.tool_calls
for (let toolIdx = 0; toolIdx < toolCalls.length; toolIdx++) {
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.name`] =
toolCalls[toolIdx].function.name
tags[`openai.response.choices.${choiceIdx}.message.tool_calls.${toolIdx}.arguments`] =
toolCalls[toolIdx].function.arguments
}
}
}

@@ -611,6 +685,19 @@ }

case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file: body }
}
return typeof body === 'object' ? body : {}
const type = typeof body
if (type === 'string') {
try {
return JSON.parse(body)
} catch {
return body
}
} else if (type === 'object') {
return body
} else {
return {}
}
}

@@ -622,20 +709,35 @@

case 'deleteModel':
case 'models.del':
case 'retrieveModel':
case 'models.retrieve':
return '/v1/models/*'
case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
return '/v1/files/*'
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return '/v1/files/*/content'
case 'retrieveFineTune':
case 'fine-tune.retrieve':
return '/v1/fine-tunes/*'
case 'fine_tuning.jobs.retrieve':
return '/v1/fine_tuning/jobs/*'
case 'listFineTuneEvents':
case 'fine-tune.listEvents':
return '/v1/fine-tunes/*/events'
case 'fine_tuning.jobs.listEvents':
return '/v1/fine_tuning/jobs/*/events'
case 'cancelFineTune':
case 'fine-tune.cancel':
return '/v1/fine-tunes/*/cancel'
case 'fine_tuning.jobs.cancel':
return '/v1/fine_tuning/jobs/*/cancel'
}

@@ -654,4 +756,8 @@

case 'listModels':
case 'models.list':
case 'listFiles':
case 'files.list':
case 'listFineTunes':
case 'fine_tuning.jobs.list':
case 'fine-tune.list':
// no argument

@@ -661,2 +767,3 @@ return {}

case 'retrieveModel':
case 'models.retrieve':
return { id: args[0] }

@@ -671,7 +778,13 @@

case 'deleteFile':
case 'files.del':
case 'retrieveFile':
case 'files.retrieve':
case 'downloadFile':
case 'files.retrieveContent':
case 'files.content':
return { file_id: args[0] }
case 'listFineTuneEvents':
case 'fine_tuning.jobs.listEvents':
case 'fine-tune.listEvents':
return {

@@ -683,4 +796,9 @@ fine_tune_id: args[0],

case 'retrieveFineTune':
case 'fine_tuning.jobs.retrieve':
case 'fine-tune.retrieve':
case 'deleteModel':
case 'models.del':
case 'cancelFineTune':
case 'fine_tuning.jobs.cancel':
case 'fine-tune.cancel':
return { fine_tune_id: args[0] }

@@ -742,5 +860,14 @@

function defensiveArrayLength (maybeArray) {
return Array.isArray(maybeArray) ? maybeArray.length : undefined
if (maybeArray) {
if (Array.isArray(maybeArray)) {
return maybeArray.length
} else {
// case of a singular item (ie body.training_file vs body.training_files)
return 1
}
}
return undefined
}
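
Behaviour of the updated helper, for reference (illustrative values):

defensiveArrayLength(['file-1', 'file-2']) // 2
defensiveArrayLength('ft-abc123')          // 1  (singular field, e.g. body.training_file)
defensiveArrayLength(undefined)            // undefined
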
module.exports = OpenApiPlugin

@@ -7,3 +7,3 @@ 'use strict'

function taintObject (iastContext, object, type, keyTainting, keyType) {
function taintObject (iastContext, object, type) {
let result = object

@@ -26,5 +26,2 @@ const transactionId = iastContext?.[IAST_TRANSACTION_ID]

result = tainted
} else if (keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = tainted
} else {

@@ -39,7 +36,2 @@ parent[key] = tainted

}
if (parent && keyTainting && key) {
const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType)
parent[taintedProperty] = value
}
}

@@ -46,0 +38,0 @@ } catch (e) {

@@ -98,8 +98,10 @@ 'use strict'

const iastContext = getIastContext(storage.getStore())
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE, true, HTTP_REQUEST_COOKIE_NAME)
// Prevent tainting cookie names, since that would taint literal strings with the same value.
taintObject(iastContext, target, HTTP_REQUEST_COOKIE_VALUE)
}
taintHeaders (headers, iastContext) {
// Prevent tainting header names, since that would taint literal strings with the same value.
this.execSource({
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE, true, HTTP_REQUEST_HEADER_NAME),
handler: () => taintObject(iastContext, headers, HTTP_REQUEST_HEADER_VALUE),
tags: REQ_HEADER_TAGS,

@@ -106,0 +108,0 @@ iastContext

@@ -11,3 +11,3 @@ 'use strict'

function enable (config) {
function enable (config, appsec) {
rc = new RemoteConfigManager(config)

@@ -35,3 +35,3 @@ rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_CUSTOM_TAGS, true)

if (activation === Activation.ONECLICK) {
enableOrDisableAppsec(action, rcConfig, config)
enableOrDisableAppsec(action, rcConfig, config, appsec)
}

@@ -46,3 +46,3 @@

function enableOrDisableAppsec (action, rcConfig, config) {
function enableOrDisableAppsec (action, rcConfig, config, appsec) {
if (typeof rcConfig.asm?.enabled === 'boolean') {

@@ -58,5 +58,5 @@ let shouldEnable

if (shouldEnable) {
require('..').enable(config)
appsec.enable(config)
} else {
require('..').disable()
appsec.disable()
}

@@ -63,0 +63,0 @@ }

@@ -18,5 +18,61 @@ 'use strict'

const { updateConfig } = require('./telemetry')
const telemetryMetrics = require('./telemetry/metrics')
const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless')
const { ORIGIN_KEY } = require('./constants')
const tracerMetrics = telemetryMetrics.manager.namespace('tracers')
const telemetryCounters = {
'otel.env.hiding': {},
'otel.env.invalid': {}
}
function getCounter (event, ddVar, otelVar, otelTracesSamplerArg) {
const counters = telemetryCounters[event]
const tags = []
if (ddVar) tags.push(ddVar)
if (otelVar) tags.push(otelVar)
if (otelTracesSamplerArg) tags.push(otelTracesSamplerArg)
if (!(ddVar in counters)) counters[ddVar] = {}
const counter = tracerMetrics.count(event, tags)
counters[ddVar][otelVar] = counter
return counter
}
const otelDdEnvMapping = {
DD_TRACE_LOG_LEVEL: 'OTEL_LOG_LEVEL',
DD_TRACE_PROPAGATION_STYLE: 'OTEL_PROPAGATORS',
DD_SERVICE: 'OTEL_SERVICE_NAME',
DD_TRACE_SAMPLE_RATE: 'OTEL_TRACES_SAMPLER',
DD_TRACE_ENABLED: 'OTEL_TRACES_EXPORTER',
DD_RUNTIME_METRICS_ENABLED: 'OTEL_METRICS_EXPORTER',
DD_TAGS: 'OTEL_RESOURCE_ATTRIBUTES',
DD_TRACE_OTEL_ENABLED: 'OTEL_SDK_DISABLED'
}
const otelInvalidEnv = ['OTEL_LOGS_EXPORTER']
function checkIfBothOtelAndDdEnvVarSet () {
for (const [ddVar, otelVar] of Object.entries(otelDdEnvMapping)) {
if (process.env[ddVar] && process.env[otelVar]) {
log.warn(`both ${ddVar} and ${otelVar} environment variables are set`)
getCounter('otel.env.hiding', ddVar, otelVar,
otelVar === 'OTEL_TRACES_SAMPLER' &&
process.env.OTEL_TRACES_SAMPLER_ARG
? 'OTEL_TRACES_SAMPLER_ARG'
: undefined).inc()
}
}
for (const otelVar of otelInvalidEnv) {
if (process.env[otelVar]) {
log.warn(`${otelVar} is not supported by the Datadog SDK`)
getCounter('otel.env.invalid', otelVar).inc()
}
}
}
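
The observable effect of the mapping above, as a sketch (not dd-trace internals): when a DD_* variable and its OTEL_* counterpart are both set, the DD_* value wins, a warning is logged and an otel.env.hiding counter is incremented.

process.env.DD_SERVICE = 'checkout'
process.env.OTEL_SERVICE_NAME = 'checkout-otel'

const tracer = require('dd-trace').init()
// logs: "both DD_SERVICE and OTEL_SERVICE_NAME environment variables are set"
// resulting service name: 'checkout' (see the DD_SERVICE || ... || OTEL_SERVICE_NAME fallback below)
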
const fromEntries = Object.fromEntries || (entries =>

@@ -93,3 +149,4 @@ entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {}))

const envKey = `DD_TRACE_PROPAGATION_STYLE_${key.toUpperCase()}`
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE)
const envVar = coalesce(process.env[envKey], process.env.DD_TRACE_PROPAGATION_STYLE, process.env.OTEL_PROPAGATORS)
if (typeof envVar !== 'undefined') {

@@ -113,2 +170,4 @@ return envVar.split(',')

checkIfBothOtelAndDdEnvVarSet()
// Configure the logger first so it can be used to warn about other configs

@@ -120,5 +179,7 @@ this.debug = isTrue(coalesce(

this.logger = options.logger
this.logLevel = coalesce(
options.logLevel,
process.env.DD_TRACE_LOG_LEVEL,
process.env.OTEL_LOG_LEVEL,
'debug'

@@ -170,3 +231,3 @@ )

}
const DD_TRACE_PROPAGATION_STYLE_INJECT = propagationStyle(
const PROPAGATION_STYLE_INJECT = propagationStyle(
'inject',

@@ -176,3 +237,3 @@ options.tracePropagationStyle,

)
const DD_TRACE_PROPAGATION_STYLE_EXTRACT = propagationStyle(
const PROPAGATION_STYLE_EXTRACT = propagationStyle(
'extract',

@@ -262,4 +323,9 @@ options.tracePropagationStyle,

this.tracePropagationStyle = {
inject: DD_TRACE_PROPAGATION_STYLE_INJECT,
extract: DD_TRACE_PROPAGATION_STYLE_EXTRACT
inject: PROPAGATION_STYLE_INJECT,
extract: PROPAGATION_STYLE_EXTRACT,
otelPropagators: process.env.DD_TRACE_PROPAGATION_STYLE ||
process.env.DD_TRACE_PROPAGATION_STYLE_INJECT ||
process.env.DD_TRACE_PROPAGATION_STYLE_EXTRACT
? false
: !!process.env.OTEL_PROPAGATORS
}

@@ -536,3 +602,8 @@ this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST)

DD_TRACING_ENABLED,
DD_VERSION
DD_VERSION,
OTEL_SERVICE_NAME,
OTEL_RESOURCE_ATTRIBUTES,
OTEL_TRACES_SAMPLER,
OTEL_TRACES_SAMPLER_ARG,
OTEL_METRICS_EXPORTER
} = process.env

@@ -543,2 +614,3 @@

tagger.add(tags, OTEL_RESOURCE_ATTRIBUTES, true)
tagger.add(tags, DD_TAGS)

@@ -604,7 +676,20 @@ tagger.add(tags, DD_TRACE_TAGS)

this._setBoolean(env, 'reportHostname', DD_TRACE_REPORT_HOSTNAME)
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED)
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE)
// only used to explicitly set runtimeMetrics to false
const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none'
? false
: undefined
this._setBoolean(env, 'runtimeMetrics', DD_RUNTIME_METRICS_ENABLED ||
otelSetRuntimeMetrics)
const OTEL_TRACES_SAMPLER_MAPPING = {
always_on: '1.0',
always_off: '0.0',
traceidratio: OTEL_TRACES_SAMPLER_ARG,
parentbased_always_on: '1.0',
parentbased_always_off: '0.0',
parentbased_traceidratio: OTEL_TRACES_SAMPLER_ARG
}
this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE || OTEL_TRACES_SAMPLER_MAPPING[OTEL_TRACES_SAMPLER])
this._setValue(env, 'sampler.rateLimit', DD_TRACE_RATE_LIMIT)
this._setString(env, 'scope', DD_TRACE_SCOPE)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service)
this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service || OTEL_SERVICE_NAME)
this._setString(env, 'site', DD_SITE)

@@ -611,0 +696,0 @@ if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) {
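
For reference, how the OTEL_TRACES_SAMPLER mapping above resolves (a sketch of the resulting configuration, assuming no programmatic options override it):

// OTEL_TRACES_SAMPLER=parentbased_always_on                                  -> sampleRate 1.0
// OTEL_TRACES_SAMPLER=parentbased_traceidratio OTEL_TRACES_SAMPLER_ARG=0.25  -> sampleRate 0.25
// DD_TRACE_SAMPLE_RATE=0.5 plus any OTEL_TRACES_SAMPLER                      -> sampleRate 0.5 (DD_* wins)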

@@ -21,3 +21,4 @@ 'use strict'

case exporters.JEST_WORKER:
return require('./ci-visibility/exporters/jest-worker')
case exporters.CUCUMBER_WORKER:
return require('./ci-visibility/exporters/test-worker')
default:

@@ -24,0 +25,0 @@ return inAWSLambda && !usingLambdaExtension ? require('./exporters/log') : require('./exporters/agent')

@@ -8,4 +8,8 @@ 'use strict'

module.exports = isFalse(process.env.DD_TRACE_ENABLED) || inJestWorker
const ddTraceDisabled = process.env.DD_TRACE_ENABLED
? isFalse(process.env.DD_TRACE_ENABLED)
: String(process.env.OTEL_TRACES_EXPORTER).toLowerCase() === 'none'
module.exports = ddTraceDisabled || inJestWorker
? require('./noop/proxy')
: require('./proxy')
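
A sketch of the resulting behaviour: tracing can now also be disabled the OpenTelemetry way.

// DD_TRACE_ENABLED=false                           -> noop proxy (unchanged behaviour)
// OTEL_TRACES_EXPORTER=none                        -> noop proxy (new; only when DD_TRACE_ENABLED is unset)
// DD_TRACE_ENABLED=true OTEL_TRACES_EXPORTER=none  -> real tracer (the DD_* variable wins)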

@@ -227,9 +227,15 @@ 'use strict'

break
case 'b3': // TODO: should match "b3 single header" in next major
case 'b3' && this
._config
.tracePropagationStyle
.otelPropagators: // TODO: should match "b3 single header" in next major
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
case 'b3':
case 'b3multi':
spanContext = this._extractB3MultiContext(carrier)
break
case 'b3 single header': // TODO: delete in major after singular "b3"
spanContext = this._extractB3SingleContext(carrier)
break
default:
log.warn(`Unknown propagation style: ${extractor}`)
}

@@ -236,0 +242,0 @@

@@ -48,2 +48,3 @@ 'use strict'

get 'jest-jasmine2' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-runtime' () { return require('../../../datadog-plugin-jest/src') },
get 'jest-worker' () { return require('../../../datadog-plugin-jest/src') },

@@ -81,2 +82,3 @@ get koa () { return require('../../../datadog-plugin-koa/src') },

get router () { return require('../../../datadog-plugin-router/src') },
get 'selenium-webdriver' () { return require('../../../datadog-plugin-selenium/src') },
get sharedb () { return require('../../../datadog-plugin-sharedb/src') },

@@ -83,0 +85,0 @@ get tedious () { return require('../../../datadog-plugin-tedious/src') },

@@ -64,2 +64,4 @@ const path = require('path')

const CUCUMBER_IS_PARALLEL = 'test.cucumber.is_parallel'
const TEST_ITR_TESTS_SKIPPED = '_dd.ci.itr.tests_skipped'

@@ -76,2 +78,8 @@ const TEST_ITR_SKIPPING_ENABLED = 'test.itr.tests_skipping.enabled'

// selenium tags
const TEST_BROWSER_DRIVER = 'test.browser.driver'
const TEST_BROWSER_DRIVER_VERSION = 'test.browser.driver_version'
const TEST_BROWSER_NAME = 'test.browser.name'
const TEST_BROWSER_VERSION = 'test.browser.version'
// jest worker variables

@@ -81,2 +89,5 @@ const JEST_WORKER_TRACE_PAYLOAD_CODE = 60

// cucumber worker variables
const CUCUMBER_WORKER_TRACE_PAYLOAD_CODE = 70
// Early flake detection util strings

@@ -92,2 +103,3 @@ const EFD_STRING = "Retried by Datadog's Early Flake Detection"

JEST_DISPLAY_NAME,
CUCUMBER_IS_PARALLEL,
TEST_TYPE,

@@ -105,2 +117,3 @@ TEST_NAME,

JEST_WORKER_COVERAGE_PAYLOAD_CODE,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE,
TEST_SOURCE_START,

@@ -152,3 +165,7 @@ TEST_SKIPPED_BY_ITR,

addEfdStringToTestName,
getIsFaultyEarlyFlakeDetection
getIsFaultyEarlyFlakeDetection,
TEST_BROWSER_DRIVER,
TEST_BROWSER_DRIVER_VERSION,
TEST_BROWSER_NAME,
TEST_BROWSER_VERSION
}

@@ -155,0 +172,0 @@

@@ -18,2 +18,17 @@ 'use strict'

class LazyModule {
constructor (provider) {
this.provider = provider
}
enable (...args) {
this.module = this.provider()
this.module.enable(...args)
}
disable () {
this.module?.disable()
}
}
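
Usage sketch for LazyModule (mirroring the constructor and enable/disable calls below): the provider callback is not evaluated until enable() is first called, so appsec/iast code is never loaded unless it is turned on.

const appsec = new LazyModule(() => require('./appsec'))

if (config.appsec.enabled) {
  appsec.enable(config) // require('./appsec') happens here, once
}
appsec.disable()        // safe even if enable() never ran, thanks to this.module?.disable()
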
class Tracer extends NoopProxy {

@@ -28,2 +43,8 @@ constructor () {

this._tracingInitialized = false
// these requires must work with esm bundler
this._modules = {
appsec: new LazyModule(() => require('./appsec')),
iast: new LazyModule(() => require('./appsec/iast'))
}
}

@@ -63,3 +84,3 @@

if (config.remoteConfig.enabled && !config.isCiVisibility) {
const rc = remoteConfig.enable(config)
const rc = remoteConfig.enable(config, this._modules.appsec)

@@ -119,5 +140,4 @@ rc.on('APM_TRACING', (action, conf) => {

if (config.tracing !== false) {
// dirty require for now so zero appsec code is executed unless explicitly enabled
if (config.appsec.enabled) {
require('./appsec').enable(config)
this._modules.appsec.enable(config)
}

@@ -130,7 +150,7 @@ if (!this._tracingInitialized) {

if (config.iast.enabled) {
require('./appsec/iast').enable(config, this._tracer)
this._modules.iast.enable(config, this._tracer)
}
} else if (this._tracingInitialized) {
require('./appsec').disable()
require('./appsec/iast').disable()
this._modules.appsec.disable()
this._modules.iast.disable()
}

@@ -137,0 +157,0 @@

@@ -5,3 +5,9 @@ 'use strict'

function add (carrier, keyValuePairs) {
const otelTagMap = {
'deployment.environment': 'env',
'service.name': 'service',
'service.version': 'version'
}
function add (carrier, keyValuePairs, parseOtelTags = false) {
if (!carrier || !keyValuePairs) return

@@ -17,8 +23,12 @@

for (const segment of segments) {
const separatorIndex = segment.indexOf(':')
const separatorIndex = parseOtelTags ? segment.indexOf('=') : segment.indexOf(':')
if (separatorIndex === -1) continue
const key = segment.slice(0, separatorIndex)
let key = segment.slice(0, separatorIndex)
const value = segment.slice(separatorIndex + 1)
if (parseOtelTags && key in otelTagMap) {
key = otelTagMap[key]
}
carrier[key.trim()] = value.trim()

@@ -25,0 +35,0 @@ }
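
An illustration of the new parseOtelTags path, using the add() function from this file (input values are made up): OTEL_RESOURCE_ATTRIBUTES-style pairs use '=' as the separator, and well-known keys are remapped to Datadog tag names via otelTagMap.

const tags = {}
add(tags, 'deployment.environment=prod,service.name=checkout,service.version=1.2.3,team=payments', true)
// tags => { env: 'prod', service: 'checkout', version: '1.2.3', team: 'payments' }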

Sorry, the diff of this file is too big to display
