dd-trace - npm Package Compare versions

Comparing version 4.43.0 to 4.44.0

packages/datadog-instrumentations/src/process.js

ext/formats.d.ts

@@ -8,4 +8,5 @@ import * as opentracing from 'opentracing'

LOG: 'log'
TEXT_MAP_DSM: 'text_map_dsm'
}
export = formats

ext/formats.js

@@ -7,3 +7,4 @@ 'use strict'

BINARY: 'binary',
LOG: 'log'
LOG: 'log',
TEXT_MAP_DSM: 'text_map_dsm'
}

@@ -5,19 +5,7 @@ 'use strict'

const Module = require('module')
const semver = require('semver')
const log = require('./packages/dd-trace/src/log')
const { isTrue } = require('./packages/dd-trace/src/util')
const telemetry = require('./packages/dd-trace/src/telemetry/init-telemetry')
const semver = require('semver')
function isTrue (envVar) {
return ['1', 'true', 'True'].includes(envVar)
}
// eslint-disable-next-line no-console
let log = { info: isTrue(process.env.DD_TRACE_DEBUG) ? console.log : () => {} }
if (semver.satisfies(process.versions.node, '>=16')) {
const Config = require('./packages/dd-trace/src/config')
log = require('./packages/dd-trace/src/log')
// eslint-disable-next-line no-new
new Config() // we need this to initialize the logger
}
let initBailout = false

@@ -24,0 +12,0 @@ let clobberBailout = false

{
"name": "dd-trace",
"version": "4.43.0",
"version": "4.44.0",
"description": "Datadog APM tracing client for JavaScript",

@@ -36,2 +36,3 @@ "main": "index.js",

"test:integration": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/*.spec.js\"",
"test:integration:appsec": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/appsec/*.spec.js\"",
"test:integration:cucumber": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cucumber/*.spec.js\"",

@@ -76,3 +77,3 @@ "test:integration:cypress": "mocha --timeout 60000 -r \"packages/dd-trace/test/setup/core.js\" \"integration-tests/cypress/*.spec.js\"",

"@datadog/native-appsec": "8.0.1",
"@datadog/native-iast-rewriter": "2.3.1",
"@datadog/native-iast-rewriter": "2.4.0",
"@datadog/native-iast-taint-tracking": "3.0.0",

@@ -79,0 +80,0 @@ "@datadog/native-metrics": "^2.0.0",

@@ -32,2 +32,6 @@ 'use strict'

if (!disabledInstrumentations.has('process')) {
require('../process')
}
const HOOK_SYMBOL = Symbol('hookExportsMap')

@@ -92,6 +96,3 @@

if (!Object.hasOwnProperty(namesAndSuccesses, name)) {
namesAndSuccesses[name] = {
success: false,
version
}
namesAndSuccesses[`${name}@${version}`] = false
}

@@ -102,3 +103,3 @@

if (hook[HOOK_SYMBOL].has(moduleExports)) {
namesAndSuccesses[name].success = true
namesAndSuccesses[`${name}@${version}`] = true
return moduleExports

@@ -123,9 +124,10 @@ }

}
namesAndSuccesses[name].success = true
namesAndSuccesses[`${name}@${version}`] = true
}
}
}
for (const name of Object.keys(namesAndSuccesses)) {
const { success, version } = namesAndSuccesses[name]
if (!success && !seenCombo.has(`${name}@${version}`)) {
for (const nameVersion of Object.keys(namesAndSuccesses)) {
const [name, version] = nameVersion.split('@')
const success = namesAndSuccesses[nameVersion]
if (!success && !seenCombo.has(nameVersion)) {
telemetry('abort.integration', [

@@ -135,4 +137,4 @@ `integration:${name}`,

])
log.info(`Found incompatible integration version: ${name}@${version}`)
seenCombo.add(`${name}@${version}`)
log.info(`Found incompatible integration version: ${nameVersion}`)
seenCombo.add(nameVersion)
}

@@ -139,0 +141,0 @@ }

@@ -46,4 +46,6 @@ 'use strict'

const ctx = { args, http }
const abortController = new AbortController()
const ctx = { args, http, abortController }
return startChannel.runStores(ctx, () => {

@@ -111,2 +113,6 @@ let finished = false

if (abortController.signal.aborted) {
req.destroy(abortController.signal.reason || new Error('Aborted'))
}
return req

@@ -113,0 +119,0 @@ } catch (e) {
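
A minimal sketch of how the new abortController on the HTTP client start channel can be used (the channel name appears elsewhere in this diff; the `args` shape and the blocking policy are assumptions for illustration): if a subscriber aborts, the instrumentation above destroys the request with the abort reason.

'use strict'

const dc = require('dc-polyfill')

const startChannel = dc.channel('apm:http:client:request:start')

startChannel.subscribe(({ args, abortController }) => {
  // Hypothetical policy: veto outbound requests to a denied host.
  if (args?.options?.hostname === 'internal.blocked.example') {
    abortController.abort(new Error('Outbound request blocked'))
  }
})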

@@ -15,2 +15,3 @@ 'use strict'

const finishSetHeaderCh = channel('datadog:http:server:response:set-header:finish')
const startSetHeaderCh = channel('datadog:http:server:response:set-header:start')

@@ -28,2 +29,8 @@ const requestFinishedSet = new WeakSet()

shimmer.wrap(http.ServerResponse.prototype, 'end', wrapEnd)
shimmer.wrap(http.ServerResponse.prototype, 'setHeader', wrapSetHeader)
shimmer.wrap(http.ServerResponse.prototype, 'removeHeader', wrapAppendOrRemoveHeader)
// Added in node v16.17.0
if (http.ServerResponse.prototype.appendHeader) {
shimmer.wrap(http.ServerResponse.prototype, 'appendHeader', wrapAppendOrRemoveHeader)
}
return http

@@ -70,5 +77,3 @@ })

}
if (finishSetHeaderCh.hasSubscribers) {
wrapSetHeader(res)
}
return emit.apply(this, arguments)

@@ -87,12 +92,2 @@ } catch (err) {

function wrapSetHeader (res) {
shimmer.wrap(res, 'setHeader', setHeader => {
return function (name, value) {
const setHeaderResult = setHeader.apply(this, arguments)
finishSetHeaderCh.publish({ name, value, res })
return setHeaderResult
}
})
}
function wrapWriteHead (writeHead) {

@@ -166,2 +161,44 @@ return function wrappedWriteHead (statusCode, reason, obj) {

function wrapSetHeader (setHeader) {
return function wrappedSetHeader (name, value) {
if (!startSetHeaderCh.hasSubscribers && !finishSetHeaderCh.hasSubscribers) {
return setHeader.apply(this, arguments)
}
if (startSetHeaderCh.hasSubscribers) {
const abortController = new AbortController()
startSetHeaderCh.publish({ res: this, abortController })
if (abortController.signal.aborted) {
return
}
}
const setHeaderResult = setHeader.apply(this, arguments)
if (finishSetHeaderCh.hasSubscribers) {
finishSetHeaderCh.publish({ name, value, res: this })
}
return setHeaderResult
}
}
function wrapAppendOrRemoveHeader (originalMethod) {
return function wrappedAppendOrRemoveHeader () {
if (!startSetHeaderCh.hasSubscribers) {
return originalMethod.apply(this, arguments)
}
const abortController = new AbortController()
startSetHeaderCh.publish({ res: this, abortController })
if (abortController.signal.aborted) {
return this
}
return originalMethod.apply(this, arguments)
}
}
function wrapEnd (end) {

@@ -168,0 +205,0 @@ return function wrappedEnd () {
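
A minimal sketch of a consumer of the new set-header start channel (the channel name comes from this diff; the check below is hypothetical, the real appsec code tracks blocked responses in a WeakSet): when a subscriber aborts, wrappedSetHeader and wrappedAppendOrRemoveHeader return early and the header is never written.

'use strict'

const dc = require('dc-polyfill')

const startSetHeaderCh = dc.channel('datadog:http:server:response:set-header:start')

startSetHeaderCh.subscribe(({ res, abortController }) => {
  // Hypothetical stand-in for the appsec isBlocked(res) lookup.
  if (res.writableEnded) {
    abortController.abort()
  }
})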

@@ -398,5 +398,9 @@ 'use strict'

const asyncResource = testFileToSuiteAr.get(suite.file)
asyncResource.runInAsyncScope(() => {
testSuiteFinishCh.publish(status)
})
if (asyncResource) {
asyncResource.runInAsyncScope(() => {
testSuiteFinishCh.publish(status)
})
} else {
log.warn(() => `No AsyncResource found for suite ${suite.file}`)
}
})

@@ -428,12 +432,17 @@

}, (workerHandlerPackage) => {
shimmer.wrap(workerHandlerPackage.prototype, 'exec', exec => function (message, [testSuiteAbsolutePath]) {
shimmer.wrap(workerHandlerPackage.prototype, 'exec', exec => function (_, path) {
if (!testStartCh.hasSubscribers) {
return exec.apply(this, arguments)
}
if (!path?.length) {
return exec.apply(this, arguments)
}
const [testSuiteAbsolutePath] = path
const testSuiteAsyncResource = new AsyncResource('bound-anonymous-fn')
this.worker.on('message', function (message) {
function onMessage (message) {
if (Array.isArray(message)) {
const [messageCode, payload] = message
if (messageCode === MOCHA_WORKER_TRACE_PAYLOAD_CODE) {
testSessionAsyncResource.runInAsyncScope(() => {
testSuiteAsyncResource.runInAsyncScope(() => {
workerReportTraceCh.publish(payload)

@@ -443,5 +452,6 @@ })

}
})
}
const testSuiteAsyncResource = new AsyncResource('bound-anonymous-fn')
this.worker.on('message', onMessage)
testSuiteAsyncResource.runInAsyncScope(() => {

@@ -461,2 +471,3 @@ testSuiteStartCh.publish({

})
this.worker.off('message', onMessage)
},

@@ -468,2 +479,3 @@ (err) => {

})
this.worker.off('message', onMessage)
}

@@ -477,2 +489,3 @@ )

})
this.worker.off('message', onMessage)
throw err

@@ -479,0 +492,0 @@ }

@@ -35,2 +35,6 @@ const { addHook, channel, AsyncResource } = require('./helpers/instrument')

function isReporterPackageNewest (vitestPackage) {
return vitestPackage.h?.name === 'BaseSequencer'
}
function getChannelPromise (channelToPublishTo) {

@@ -150,2 +154,17 @@ return new Promise(resolve => {

function getCreateCliWrapper (vitestPackage, frameworkVersion) {
shimmer.wrap(vitestPackage, 'c', oldCreateCli => function () {
if (!testSessionStartCh.hasSubscribers) {
return oldCreateCli.apply(this, arguments)
}
sessionAsyncResource.runInAsyncScope(() => {
const processArgv = process.argv.slice(2).join(' ')
testSessionStartCh.publish({ command: `vitest ${processArgv}`, frameworkVersion })
})
return oldCreateCli.apply(this, arguments)
})
return vitestPackage
}
addHook({

@@ -211,8 +230,21 @@ name: 'vitest',

// There are multiple index* files across different versions of vitest,
// so we check for the existence of BaseSequencer to determine if we are in the right file
addHook({
name: 'vitest',
versions: ['>=2.0.0'],
versions: ['>=1.6.0 <2.0.0'],
filePattern: 'dist/vendor/index.*'
}, (vitestPackage) => {
// there are multiple index* files so we have to check the exported values
if (isReporterPackage(vitestPackage)) {
shimmer.wrap(vitestPackage.B.prototype, 'sort', getSortWrapper)
}
return vitestPackage
})
addHook({
name: 'vitest',
versions: ['>=2.0.0 <2.0.5'],
filePattern: 'dist/vendor/index.*'
}, (vitestPackage) => {
if (isReporterPackageNew(vitestPackage)) {

@@ -227,8 +259,7 @@ shimmer.wrap(vitestPackage.e.prototype, 'sort', getSortWrapper)

name: 'vitest',
versions: ['>=1.6.0'],
filePattern: 'dist/vendor/index.*'
versions: ['>=2.0.5'],
filePattern: 'dist/chunks/index.*'
}, (vitestPackage) => {
// there are multiple index* files so we have to check the exported values
if (isReporterPackage(vitestPackage)) {
shimmer.wrap(vitestPackage.B.prototype, 'sort', getSortWrapper)
if (isReporterPackageNewest(vitestPackage)) {
shimmer.wrap(vitestPackage.h.prototype, 'sort', getSortWrapper)
}

@@ -242,18 +273,11 @@

name: 'vitest',
versions: ['>=1.6.0'],
versions: ['>=1.6.0 <2.0.5'],
filePattern: 'dist/vendor/cac.*'
}, (vitestPackage, frameworkVersion) => {
shimmer.wrap(vitestPackage, 'c', oldCreateCli => function () {
if (!testSessionStartCh.hasSubscribers) {
return oldCreateCli.apply(this, arguments)
}
sessionAsyncResource.runInAsyncScope(() => {
const processArgv = process.argv.slice(2).join(' ')
testSessionStartCh.publish({ command: `vitest ${processArgv}`, frameworkVersion })
})
return oldCreateCli.apply(this, arguments)
})
}, getCreateCliWrapper)
return vitestPackage
})
addHook({
name: 'vitest',
versions: ['>=2.0.5'],
filePattern: 'dist/chunks/cac.*'
}, getCreateCliWrapper)

@@ -266,3 +290,3 @@ // test suite start and finish

file: 'dist/index.js'
}, vitestPackage => {
}, (vitestPackage, frameworkVersion) => {
shimmer.wrap(vitestPackage, 'startTests', startTests => async function (testPath) {

@@ -276,3 +300,3 @@ let testSuiteError = null

testSuiteAsyncResource.runInAsyncScope(() => {
testSuiteStartCh.publish(testPath[0])
testSuiteStartCh.publish({ testSuiteAbsolutePath: testPath[0], frameworkVersion })
})

@@ -279,0 +303,0 @@ const startTestsResponse = await startTests.apply(this, arguments)

@@ -7,2 +7,3 @@ 'use strict'

const { isTrue } = require('../../dd-trace/src/util')
const coalesce = require('koalas')

@@ -167,5 +168,18 @@ class BaseAwsSdkPlugin extends ClientPlugin {

// check if AWS batch propagation or AWS_[SERVICE] batch propagation is enabled via env variable
const serviceId = serviceIdentifier.toUpperCase()
const batchPropagationEnabled = isTrue(
coalesce(
specificConfig.batchPropagationEnabled,
process.env[`DD_TRACE_AWS_SDK_${serviceId}_BATCH_PROPAGATION_ENABLED`],
config.batchPropagationEnabled,
process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED,
false
)
)
// Merge the specific config back into the main config
return Object.assign({}, config, specificConfig, {
splitByAwsService: config.splitByAwsService !== false,
batchPropagationEnabled: config.batchPropagationEnabled !== false,
batchPropagationEnabled,
hooks

@@ -172,0 +186,0 @@ })
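
A sketch of the precedence the new batchPropagationEnabled resolution follows, assembled from the lines above (koalas' coalesce returns the first defined value; the local isTrue stands in for dd-trace's util helper): per-service option, then the per-service env var, then the global option, then the global env var, then false.

'use strict'

const coalesce = require('koalas')

// Simplified stand-in for require('../../dd-trace/src/util').isTrue.
function isTrue (value) {
  return [true, 'true', 'TRUE', 1, '1'].includes(value)
}

function resolveBatchPropagation (serviceIdentifier, specificConfig, config) {
  const serviceId = serviceIdentifier.toUpperCase() // e.g. 'SQS', 'SNS', 'KINESIS'
  return isTrue(
    coalesce(
      specificConfig.batchPropagationEnabled,
      process.env[`DD_TRACE_AWS_SDK_${serviceId}_BATCH_PROPAGATION_ENABLED`],
      config.batchPropagationEnabled,
      process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED,
      false
    )
  )
}

// Example: the global env var enables batching everywhere unless a per-service option overrides it.
process.env.DD_TRACE_AWS_SDK_BATCH_PROPAGATION_ENABLED = 'true'
console.log(resolveBatchPropagation('sqs', {}, {}))                                 // true
console.log(resolveBatchPropagation('sns', { batchPropagationEnabled: false }, {})) // false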

@@ -159,3 +159,3 @@ 'use strict'

stream,
i === 0 || (this.config.kinesis && this.config.kinesis.batchPropagationEnabled)
i === 0 || (this.config.batchPropagationEnabled)
)

@@ -162,0 +162,0 @@ }

@@ -66,3 +66,3 @@ 'use strict'

params.TopicArn,
i === 0 || (this.config.sns && this.config.sns.batchPropagationEnabled)
i === 0 || (this.config.batchPropagationEnabled)
)

@@ -69,0 +69,0 @@ }

@@ -160,4 +160,4 @@ 'use strict'

return JSON.parse(textMap)
} else if (attributes.Type === 'Binary') {
const buffer = Buffer.from(attributes.Value, 'base64')
} else if (attributes.Type === 'Binary' || attributes.DataType === 'Binary') {
const buffer = Buffer.from(attributes.Value ?? attributes.BinaryValue, 'base64')
return JSON.parse(buffer)
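
A minimal sketch of the decode path this change widens (the function is adapted from the lines above; the message attribute shape is assumed for illustration): binary trace context is now accepted both as { Type: 'Binary', Value } and as { DataType: 'Binary', BinaryValue }.

'use strict'

function decodeBinaryTraceContext (attributes) {
  if (attributes.Type === 'Binary' || attributes.DataType === 'Binary') {
    const buffer = Buffer.from(attributes.Value ?? attributes.BinaryValue, 'base64')
    return JSON.parse(buffer)
  }
}

// Example: an SNS-style attribute carrying a base64-encoded Datadog context.
const decoded = decodeBinaryTraceContext({
  DataType: 'Binary',
  BinaryValue: Buffer.from(JSON.stringify({ 'x-datadog-trace-id': '123' })).toString('base64')
})
console.log(decoded) // { 'x-datadog-trace-id': '123' }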

@@ -226,3 +226,3 @@ }

params.QueueUrl,
i === 0 || (this.config.sqs && this.config.sqs.batchPropagationEnabled)
i === 0 || (this.config.batchPropagationEnabled)
)

@@ -229,0 +229,0 @@ }

@@ -261,3 +261,3 @@ const {

getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) {
getTestSpan ({ testName, testSuite, isUnskippable, isForcedToRun, testSourceFile }) {
const testSuiteTags = {

@@ -286,4 +286,7 @@ [TEST_COMMAND]: this.command,

const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
if (testSourceFile) {
testSpanMetadata[TEST_SOURCE_FILE] = testSourceFile
}
const codeOwners = this.getTestCodeOwners({ testSuite, testSourceFile })
if (codeOwners) {

@@ -485,8 +488,12 @@ testSpanMetadata[TEST_CODE_OWNERS] = codeOwners

)
const skippedTestSpan = this.getTestSpan(cypressTestName, spec.relative)
let testSourceFile
if (spec.absolute && this.repositoryRoot) {
skippedTestSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
} else {
skippedTestSpan.setTag(TEST_SOURCE_FILE, spec.relative)
testSourceFile = spec.relative
}
const skippedTestSpan = this.getTestSpan({ testName: cypressTestName, testSuite: spec.relative, testSourceFile })
skippedTestSpan.setTag(TEST_STATUS, 'skip')

@@ -544,7 +551,17 @@ if (isSkippedByItr) {

}
let testSourceFile
if (spec.absolute && this.repositoryRoot) {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, getTestSuitePath(spec.absolute, this.repositoryRoot))
testSourceFile = getTestSuitePath(spec.absolute, this.repositoryRoot)
} else {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, spec.relative)
testSourceFile = spec.relative
}
if (testSourceFile) {
finishedTest.testSpan.setTag(TEST_SOURCE_FILE, testSourceFile)
}
const codeOwners = this.getTestCodeOwners({ testSuite: spec.relative, testSourceFile })
if (codeOwners) {
finishedTest.testSpan.setTag(TEST_CODE_OWNERS, codeOwners)
}
finishedTest.testSpan.finish(finishedTest.finishTime)

@@ -598,3 +615,8 @@ })

if (!this.activeTestSpan) {
this.activeTestSpan = this.getTestSpan(testName, testSuite, isUnskippable, isForcedToRun)
this.activeTestSpan = this.getTestSpan({
testName,
testSuite,
isUnskippable,
isForcedToRun
})
}

@@ -666,4 +688,11 @@

}
getTestCodeOwners ({ testSuite, testSourceFile }) {
if (testSourceFile) {
return getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
}
return getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
}
}
module.exports = new CypressPlugin()

@@ -105,3 +105,4 @@ const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')

this.addSub('ci:vitest:test-suite:start', (testSuiteAbsolutePath) => {
this.addSub('ci:vitest:test-suite:start', ({ testSuiteAbsolutePath, frameworkVersion }) => {
this.frameworkVersion = frameworkVersion
const testSessionSpanContext = this.tracer.extract('text_map', {

@@ -108,0 +109,0 @@ 'x-datadog-trace-id': process.env.DD_CIVISIBILITY_TEST_SESSION_ID,

@@ -12,2 +12,4 @@ 'use strict'

const responseBlockedSet = new WeakSet()
const specificBlockingTypes = {

@@ -121,2 +123,4 @@ GRAPHQL: 'graphql'

responseBlockedSet.add(res)
abortController?.abort()

@@ -149,2 +153,6 @@ }

function isBlocked (res) {
return responseBlockedSet.has(res)
}
module.exports = {

@@ -156,3 +164,4 @@ addSpecificEndpoint,

getBlockingAction,
setTemplates
setTemplates,
isBlocked
}

@@ -22,3 +22,6 @@ 'use strict'

responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'),
httpClientRequestStart: dc.channel('apm:http:client:request:start')
httpClientRequestStart: dc.channel('apm:http:client:request:start'),
responseSetHeader: dc.channel('datadog:http:server:response:set-header:start'),
setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start')
}

'use strict'
module.exports = {
CODE_INJECTION_ANALYZER: require('./code-injection-analyzer'),
COMMAND_INJECTION_ANALYZER: require('./command-injection-analyzer'),

@@ -5,0 +6,0 @@ HARCODED_PASSWORD_ANALYZER: require('./hardcoded-password-analyzer'),

@@ -50,2 +50,4 @@ 'use strict'

_isExcluded (location) {
if (!location) return false
return EXCLUDED_LOCATIONS.some(excludedLocation => {

@@ -52,0 +54,0 @@ return location.path.includes(excludedLocation)

@@ -17,3 +17,4 @@ 'use strict'

{ src: 'trimEnd' },
{ src: 'trimStart', dst: 'trim' }
{ src: 'trimStart', dst: 'trim' },
{ src: 'eval', allowedWithoutCallee: true }
]

@@ -20,0 +21,0 @@

@@ -13,2 +13,3 @@ 'use strict'

const mathRandomCallCh = dc.channel('datadog:random:call')
const evalCallCh = dc.channel('datadog:eval:call')

@@ -22,2 +23,3 @@ const JSON_VALUE = 'json.value'

concat: noop,
eval: noop,
join: noop,

@@ -141,2 +143,11 @@ parse: noop,

eval: function (res, fn, target, script) {
// eslint-disable-next-line no-eval
if (evalCallCh.hasSubscribers && fn === globalThis.eval) {
evalCallCh.publish({ script })
}
return res
},
parse: function (res, fn, target, json) {

@@ -143,0 +154,0 @@ if (fn === JSON.parse) {
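
A minimal sketch of how the new datadog:eval:call channel can be observed (the channel name and the published { script } payload come from this diff; the subscriber body is hypothetical): the taint-tracking handler publishes the script whenever instrumented code calls the global eval, so an analyzer can inspect it.

'use strict'

const dc = require('dc-polyfill')

dc.channel('datadog:eval:call').subscribe(({ script }) => {
  // Hypothetical analyzer: log a short preview of every dynamically evaluated script.
  console.log('eval() called with:', String(script).slice(0, 80))
})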

@@ -8,2 +8,3 @@ 'use strict'

const codeInjectionSensitiveAnalyzer = require('./sensitive-analyzers/code-injection-sensitive-analyzer')
const commandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer')

@@ -27,2 +28,3 @@ const hardcodedPasswordAnalyzer = require('./sensitive-analyzers/hardcoded-password-analyzer')

this._sensitiveAnalyzers = new Map()
this._sensitiveAnalyzers.set(vulnerabilities.CODE_INJECTION, codeInjectionSensitiveAnalyzer)
this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, commandSensitiveAnalyzer)

@@ -29,0 +31,0 @@ this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, jsonSensitiveAnalyzer)

// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)'
const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)'
// eslint-disable-next-line max-len
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,}))'
const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})'

@@ -6,0 +6,0 @@ module.exports = {

module.exports = {
COMMAND_INJECTION: 'COMMAND_INJECTION',
CODE_INJECTION: 'CODE_INJECTION',
HARDCODED_PASSWORD: 'HARDCODED_PASSWORD',

@@ -4,0 +5,0 @@ HARDCODED_SECRET: 'HARDCODED_SECRET',

@@ -16,3 +16,4 @@ 'use strict'

responseBody,
responseWriteHead
responseWriteHead,
responseSetHeader
} = require('./channels')

@@ -27,3 +28,3 @@ const waf = require('./waf')

const { HTTP_CLIENT_IP } = require('../../../../ext/tags')
const { block, setTemplates, getBlockingAction } = require('./blocking')
const { isBlocked, block, setTemplates, getBlockingAction } = require('./blocking')
const { passportTrackEvent } = require('./passport')

@@ -67,2 +68,3 @@ const { storage } = require('../../../datadog-core')

responseWriteHead.subscribe(onResponseWriteHead)
responseSetHeader.subscribe(onResponseSetHeader)

@@ -229,7 +231,6 @@ if (_config.appsec.eventTracking.enabled) {

const responseAnalyzedSet = new WeakSet()
const responseBlockedSet = new WeakSet()
function onResponseWriteHead ({ req, res, abortController, statusCode, responseHeaders }) {
// avoid "write after end" error
if (responseBlockedSet.has(res)) {
if (isBlocked(res)) {
abortController?.abort()

@@ -262,2 +263,8 @@ return

function onResponseSetHeader ({ res, abortController }) {
if (isBlocked(res)) {
abortController?.abort()
}
}
function handleResults (actions, req, res, rootSpan, abortController) {

@@ -269,5 +276,2 @@ if (!actions || !req || !res || !rootSpan || !abortController) return

block(req, res, rootSpan, abortController, blockingAction)
if (!abortController.signal || abortController.signal.aborted) {
responseBlockedSet.add(res)
}
}

@@ -299,2 +303,3 @@ }

if (responseWriteHead.hasSubscribers) responseWriteHead.unsubscribe(onResponseWriteHead)
if (responseSetHeader.hasSubscribers) responseSetHeader.unsubscribe(onResponseSetHeader)
}

@@ -301,0 +306,0 @@

@@ -6,5 +6,7 @@ 'use strict'

const addresses = require('./addresses')
const { httpClientRequestStart } = require('./channels')
const { httpClientRequestStart, setUncaughtExceptionCaptureCallbackStart } = require('./channels')
const { reportStackTrace } = require('./stack_trace')
const waf = require('./waf')
const { getBlockingAction, block } = require('./blocking')
const log = require('../log')

@@ -15,7 +17,98 @@ const RULE_TYPES = {

let config
class DatadogRaspAbortError extends Error {
constructor (req, res, blockingAction) {
super('DatadogRaspAbortError')
this.name = 'DatadogRaspAbortError'
this.req = req
this.res = res
this.blockingAction = blockingAction
}
}
let config, abortOnUncaughtException
function removeAllListeners (emitter, event) {
const listeners = emitter.listeners(event)
emitter.removeAllListeners(event)
let cleaned = false
return function () {
if (cleaned === true) {
return
}
cleaned = true
for (let i = 0; i < listeners.length; ++i) {
emitter.on(event, listeners[i])
}
}
}
function findDatadogRaspAbortError (err, deep = 10) {
if (err instanceof DatadogRaspAbortError) {
return err
}
if (err.cause && deep > 0) {
return findDatadogRaspAbortError(err.cause, deep - 1)
}
}
function handleUncaughtExceptionMonitor (err) {
const abortError = findDatadogRaspAbortError(err)
if (!abortError) return
const { req, res, blockingAction } = abortError
block(req, res, web.root(req), null, blockingAction)
if (!process.hasUncaughtExceptionCaptureCallback()) {
const cleanUp = removeAllListeners(process, 'uncaughtException')
const handler = () => {
process.removeListener('uncaughtException', handler)
}
setTimeout(() => {
process.removeListener('uncaughtException', handler)
cleanUp()
})
process.on('uncaughtException', handler)
} else {
// uncaughtException event is not executed when hasUncaughtExceptionCaptureCallback is true
let previousCb
const cb = ({ currentCallback, abortController }) => {
setUncaughtExceptionCaptureCallbackStart.unsubscribe(cb)
if (!currentCallback) {
abortController.abort()
return
}
previousCb = currentCallback
}
setUncaughtExceptionCaptureCallbackStart.subscribe(cb)
process.setUncaughtExceptionCaptureCallback(null)
// For some reason, previous callback was defined before the instrumentation
// We can not restore it, so we let the app decide
if (previousCb) {
process.setUncaughtExceptionCaptureCallback(() => {
process.setUncaughtExceptionCaptureCallback(null)
process.setUncaughtExceptionCaptureCallback(previousCb)
})
}
}
}
function enable (_config) {
config = _config
httpClientRequestStart.subscribe(analyzeSsrf)
process.on('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor)
abortOnUncaughtException = process.execArgv?.includes('--abort-on-uncaught-exception')
if (abortOnUncaughtException) {
log.warn('The --abort-on-uncaught-exception flag is enabled. The RASP module will not block operations.')
}
}

@@ -25,2 +118,4 @@

if (httpClientRequestStart.hasSubscribers) httpClientRequestStart.unsubscribe(analyzeSsrf)
process.off('uncaughtExceptionMonitor', handleUncaughtExceptionMonitor)
}

@@ -38,6 +133,7 @@

}
// TODO: Currently this is only monitoring, we should
// block the request if SSRF attempt
const result = waf.run({ persistent }, req, RULE_TYPES.SSRF)
handleResult(result, req)
const res = store?.res
handleResult(result, req, res, ctx.abortController)
}

@@ -49,3 +145,3 @@

function handleResult (actions, req) {
function handleResult (actions, req, res, abortController) {
const generateStackTraceAction = getGenerateStackTraceAction(actions)

@@ -61,2 +157,19 @@ if (generateStackTraceAction && config.appsec.stackTrace.enabled) {

}
if (!abortController || abortOnUncaughtException) return
const blockingAction = getBlockingAction(actions)
if (blockingAction) {
const rootSpan = web.root(req)
// Should block only in express
if (rootSpan?.context()._name === 'express.request') {
const abortError = new DatadogRaspAbortError(req, res, blockingAction)
abortController.abort(abortError)
// TODO Delete this when support for node 16 is removed
if (!abortController.signal.reason) {
abortController.signal.reason = abortError
}
}
}
}

@@ -67,3 +180,4 @@

disable,
handleResult
handleResult,
handleUncaughtExceptionMonitor // exported only for testing purpose
}
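
A self-contained sketch of the cause-chain walk the new uncaught-exception handling relies on (the class and function mirror the lines above): an abort error thrown deep inside user code, possibly re-wrapped with `new Error(msg, { cause })`, is still recognisable by walking err.cause up to a fixed depth.

'use strict'

class DatadogRaspAbortError extends Error {}

function findDatadogRaspAbortError (err, deep = 10) {
  if (err instanceof DatadogRaspAbortError) {
    return err
  }
  if (err.cause && deep > 0) {
    return findDatadogRaspAbortError(err.cause, deep - 1)
  }
}

const inner = new DatadogRaspAbortError('DatadogRaspAbortError')
const wrapped = new Error('request handler failed', {
  cause: new Error('middleware failed', { cause: inner })
})
console.log(findDatadogRaspAbortError(wrapped) === inner) // true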

@@ -32,10 +32,10 @@ 'use strict'

const tags = []
const ddVarPrefix = 'config.datadog:'
const otelVarPrefix = 'config.opentelemetry:'
const ddVarPrefix = 'config_datadog:'
const otelVarPrefix = 'config_opentelemetry:'
if (ddVar) {
ddVar = ddVarPrefix + ddVar
ddVar = ddVarPrefix + ddVar.toLowerCase()
tags.push(ddVar)
}
if (otelVar) {
otelVar = otelVarPrefix + otelVar
otelVar = otelVarPrefix + otelVar.toLowerCase()
tags.push(otelVar)

@@ -215,5 +215,4 @@ }

class Config {
constructor (options) {
options = options || {}
options = this.options = {
constructor (options = {}) {
options = {
...options,

@@ -224,22 +223,13 @@ appsec: options.appsec != null ? options.appsec : options.experimental?.appsec,

checkIfBothOtelAndDdEnvVarSet()
// Configure the logger first so it can be used to warn about other configs
this.debug = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
process.env.OTEL_LOG_LEVEL && process.env.OTEL_LOG_LEVEL === 'debug',
false
))
this.logger = options.logger
const logConfig = log.getConfig()
this.debug = logConfig.enabled
this.logger = coalesce(options.logger, logConfig.logger)
this.logLevel = coalesce(options.logLevel, logConfig.logLevel)
this.logLevel = coalesce(
options.logLevel,
process.env.DD_TRACE_LOG_LEVEL,
process.env.OTEL_LOG_LEVEL,
'debug'
)
log.use(this.logger)
log.toggle(this.debug, this.logLevel, this)
log.toggle(this.debug, this.logLevel)
checkIfBothOtelAndDdEnvVarSet()
const DD_TRACE_MEMCACHED_COMMAND_ENABLED = coalesce(

@@ -492,3 +482,3 @@ process.env.DD_TRACE_MEMCACHED_COMMAND_ENABLED,

const defaults = this._defaults = {}
const defaults = setHiddenProperty(this, '_defaults', {})

@@ -500,3 +490,3 @@ this._setValue(defaults, 'appsec.blockedTemplateHtml', undefined)

this._setValue(defaults, 'appsec.obfuscatorValueRegex', defaultWafObfuscatorValueRegex)
this._setValue(defaults, 'appsec.rasp.enabled', false)
this._setValue(defaults, 'appsec.rasp.enabled', true)
this._setValue(defaults, 'appsec.rateLimit', 100)

@@ -679,4 +669,4 @@ this._setValue(defaults, 'appsec.rules', undefined)

const tags = {}
const env = this._env = {}
this._envUnprocessed = {}
const env = setHiddenProperty(this, '_env', {})
setHiddenProperty(this, '_envUnprocessed', {})

@@ -821,7 +811,7 @@ tagger.add(tags, OTEL_RESOURCE_ATTRIBUTES, true)

_applyOptions (options) {
const opts = this._options = this._options || {}
const opts = setHiddenProperty(this, '_options', this._options || {})
const tags = {}
this._optsUnprocessed = {}
setHiddenProperty(this, '_optsUnprocessed', {})
options = this.options = Object.assign({ ingestion: {} }, options, opts)
options = setHiddenProperty(this, '_optionsArg', Object.assign({ ingestion: {} }, options, opts))

@@ -924,3 +914,3 @@ tagger.add(tags, options.tags)

return coalesce(
this.options.isCiVisibility,
this._optionsArg.isCiVisibility,
this._defaults.isCiVisibility

@@ -941,5 +931,5 @@ )

? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
: getAgentUrl(this._getTraceAgentUrl(), this.options)
: getAgentUrl(this._getTraceAgentUrl(), this._optionsArg)
const DD_AGENT_HOST = coalesce(
this.options.hostname,
this._optionsArg.hostname,
process.env.DD_AGENT_HOST,

@@ -955,3 +945,3 @@ process.env.DD_TRACE_AGENT_HOSTNAME,

coalesce(
this.options.spanAttributeSchema,
this._optionsArg.spanAttributeSchema,
process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA

@@ -962,7 +952,7 @@ )

const peerServiceSet = (
this.options.hasOwnProperty('spanComputePeerService') ||
this._optionsArg.hasOwnProperty('spanComputePeerService') ||
process.env.hasOwnProperty('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
)
const peerServiceValue = coalesce(
this.options.spanComputePeerService,
this._optionsArg.spanComputePeerService,
process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED

@@ -998,3 +988,3 @@ )

return coalesce(
this.options.stats,
this._optionsArg.stats,
process.env.DD_TRACE_STATS_COMPUTATION_ENABLED,

@@ -1007,3 +997,3 @@ getIsGCPFunction() || getIsAzureFunction()

return coalesce(
this.options.url,
this._optionsArg.url,
process.env.DD_TRACE_AGENT_URL,

@@ -1017,3 +1007,3 @@ process.env.DD_TRACE_URL,

_applyCalculated () {
const calc = this._calculated = {}
const calc = setHiddenProperty(this, '_calculated', {})

@@ -1027,3 +1017,3 @@ const {

} else {
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this.options))
this._setValue(calc, 'url', getAgentUrl(this._getTraceAgentUrl(), this._optionsArg))
}

@@ -1044,4 +1034,4 @@ if (this._isCiVisibility()) {

_applyRemote (options) {
const opts = this._remote = this._remote || {}
this._remoteUnprocessed = {}
const opts = setHiddenProperty(this, '_remote', this._remote || {})
setHiddenProperty(this, '_remoteUnprocessed', {})
const tags = {}

@@ -1215,2 +1205,11 @@ const headerTags = options.tracing_header_tags

function setHiddenProperty (obj, name, value) {
Object.defineProperty(obj, name, {
value,
enumerable: false,
writable: true
})
return obj[name]
}
module.exports = Config
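
A self-contained sketch of what setHiddenProperty (defined above) buys: bookkeeping objects such as _defaults, _env and _optionsArg become non-enumerable, so they stay out of Object.keys() and JSON.stringify() of a Config instance while remaining readable and writable.

'use strict'

function setHiddenProperty (obj, name, value) {
  Object.defineProperty(obj, name, {
    value,
    enumerable: false,
    writable: true
  })
  return obj[name]
}

const config = { service: 'my-service' }
setHiddenProperty(config, '_defaults', { sampleRate: 1 })

console.log(Object.keys(config))    // [ 'service' ] — '_defaults' is hidden
console.log(JSON.stringify(config)) // {"service":"my-service"}
console.log(config._defaults)       // { sampleRate: 1 } — still accessible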

@@ -20,7 +20,9 @@ // encoding used here is sha256

function computeHash (service, env, edgeTags, parentHash) {
const key = `${service}${env}` + edgeTags.join('') + parentHash.toString()
const hashableEdgeTags = edgeTags.filter(item => item !== 'manual_checkpoint:true')
const key = `${service}${env}` + hashableEdgeTags.join('') + parentHash.toString()
if (cache.get(key)) {
return cache.get(key)
}
const currentHash = shaHash(`${service}${env}` + edgeTags.join(''))
const currentHash = shaHash(`${service}${env}` + hashableEdgeTags.join(''))
const buf = Buffer.concat([currentHash, parentHash], 16)

@@ -27,0 +29,0 @@ const val = shaHash(buf.toString())
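
A self-contained sketch of the data streams hashing change (computeHash is adapted from the lines above; shaHash is a simplified stand-in for the module's own sha256-based helper, and the cache is omitted): the 'manual_checkpoint:true' edge tag is filtered out before hashing, so a manually set checkpoint now produces the same pathway hash as an automatic one.

'use strict'

const crypto = require('crypto')

// Simplified stand-in returning the first 8 bytes of a sha256 digest.
function shaHash (value) {
  return crypto.createHash('sha256').update(value).digest().subarray(0, 8)
}

function computeHash (service, env, edgeTags, parentHash) {
  const hashableEdgeTags = edgeTags.filter(item => item !== 'manual_checkpoint:true')
  const currentHash = shaHash(`${service}${env}` + hashableEdgeTags.join(''))
  const buf = Buffer.concat([currentHash, parentHash], 16)
  return shaHash(buf.toString())
}

const parentHash = Buffer.alloc(8)
const auto = computeHash('svc', 'prod', ['direction:out', 'topic:orders'], parentHash)
const manual = computeHash('svc', 'prod', ['direction:out', 'topic:orders', 'manual_checkpoint:true'], parentHash)
console.log(auto.equals(manual)) // true — the manual tag no longer changes the hash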

'use strict'
const coalesce = require('koalas')
const { isTrue } = require('../util')
const { debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')

@@ -23,4 +25,18 @@ const logWriter = require('./writer')

const config = {
enabled: false,
logger: undefined,
logLevel: 'debug'
}
const log = {
/**
* @returns Read-only version of logging config. To modify config, call `log.use` and `log.toggle`
*/
getConfig () {
return { ...config }
},
use (logger) {
config.logger = logger
logWriter.use(logger)

@@ -31,2 +47,4 @@ return this

toggle (enabled, logLevel) {
config.enabled = enabled
config.logLevel = logLevel
logWriter.toggle(enabled, logLevel)

@@ -81,2 +99,16 @@ return this

const enabled = isTrue(coalesce(
process.env.DD_TRACE_DEBUG,
process.env.OTEL_LOG_LEVEL === 'debug',
config.enabled
))
const logLevel = coalesce(
process.env.DD_TRACE_LOG_LEVEL,
process.env.OTEL_LOG_LEVEL,
config.logLevel
)
log.toggle(enabled, logLevel)
module.exports = log

@@ -8,2 +8,3 @@ 'use strict'

const TextMapPropagator = require('./propagation/text_map')
const DSMTextMapPropagator = require('./propagation/text_map_dsm')
const HttpPropagator = require('./propagation/http')

@@ -42,3 +43,4 @@ const BinaryPropagator = require('./propagation/binary')

[formats.BINARY]: new BinaryPropagator(config),
[formats.LOG]: new LogPropagator(config)
[formats.LOG]: new LogPropagator(config),
[formats.TEXT_MAP_DSM]: new DSMTextMapPropagator(config)
}

@@ -76,10 +78,12 @@ if (config.reportHostname) {

inject (spanContext, format, carrier) {
if (spanContext instanceof Span) {
spanContext = spanContext.context()
inject (context, format, carrier) {
if (context instanceof Span) {
context = context.context()
}
try {
this._prioritySampler.sample(spanContext)
this._propagators[format].inject(spanContext, carrier)
if (format !== 'text_map_dsm') {
this._prioritySampler.sample(context)
}
this._propagators[format].inject(context, carrier)
} catch (e) {

@@ -86,0 +90,0 @@ log.error(e)

@@ -19,3 +19,4 @@ const {

TEST_SKIPPED_BY_ITR,
ITR_CORRELATION_ID
ITR_CORRELATION_ID,
TEST_SOURCE_FILE
} = require('./util/test')

@@ -211,3 +212,9 @@ const Plugin = require('./plugin')

const codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
const { [TEST_SOURCE_FILE]: testSourceFile } = extraTags
// We'll try with the test source file if available (it could be different from the test suite)
let codeOwners = getCodeOwnersForFilename(testSourceFile, this.codeOwnersEntries)
if (!codeOwners) {
codeOwners = getCodeOwnersForFilename(testSuite, this.codeOwnersEntries)
}
if (codeOwners) {

@@ -214,0 +221,0 @@ testTags[TEST_CODE_OWNERS] = codeOwners

@@ -6,2 +6,3 @@ 'use strict'

const dc = require('dc-polyfill')
const logger = require('../log')
const { storage } = require('../../../datadog-core')

@@ -76,3 +77,13 @@

addSub (channelName, handler) {
this._subscriptions.push(new Subscription(channelName, handler))
const plugin = this
const wrappedHandler = function () {
try {
return handler.apply(this, arguments)
} catch (e) {
logger.error('Error in plugin handler:', e)
logger.info('Disabling plugin:', plugin.id)
plugin.configure(false)
}
}
this._subscriptions.push(new Subscription(channelName, wrappedHandler))
}

@@ -79,0 +90,0 @@

@@ -184,2 +184,3 @@ 'use strict'

this._tracer = new DatadogTracer(config, prioritySampler)
this.dataStreamsCheckpointer = this._tracer.dataStreamsCheckpointer
this.appsec = new AppsecSdk(this._tracer, config)

@@ -186,0 +187,0 @@ this._tracingInitialized = true

@@ -14,2 +14,3 @@ 'use strict'

const DataStreamsContext = require('./data_streams_context')
const { DataStreamsCheckpointer } = require('./data_streams')
const { flushStartupLogs } = require('../../datadog-instrumentations/src/check_require_cache')

@@ -27,2 +28,3 @@ const log = require('./log/writer')

this._dataStreamsProcessor = new DataStreamsProcessor(config)
this.dataStreamsCheckpointer = new DataStreamsCheckpointer(this)
this._scope = new Scope()

@@ -29,0 +31,0 @@ setStartupLogConfig(config)

Sorry, the diff of this file is too big to display
