Socket
Socket
Sign in | Demo | Install

dd-trace

Package Overview
Dependencies
Maintainers
1
Versions
574
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

dd-trace - npm Package Compare versions

Comparing version 5.12.0 to 5.13.0

packages/datadog-instrumentations/src/lodash.js

7

ci/init.js

@@ -6,2 +6,3 @@ /* eslint-disable no-console */

const isJestWorker = !!process.env.JEST_WORKER_ID
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID

@@ -41,2 +42,8 @@ const options = {

if (isCucumberWorker) {
options.experimental = {
exporter: 'cucumber_worker'
}
}
if (shouldInit) {

@@ -43,0 +50,0 @@ tracer.init(options)

3

ext/exporters.d.ts

@@ -6,5 +6,6 @@ declare const exporters: {

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker'
}
export = exporters

@@ -7,3 +7,4 @@ 'use strict'

AGENT_PROXY: 'agent_proxy',
JEST_WORKER: 'jest_worker'
JEST_WORKER: 'jest_worker',
CUCUMBER_WORKER: 'cucumber_worker'
}
{
"name": "dd-trace",
"version": "5.12.0",
"version": "5.13.0",
"description": "Datadog APM tracing client for JavaScript",

@@ -74,4 +74,4 @@ "main": "index.js",

"@datadog/native-appsec": "7.1.1",
"@datadog/native-iast-rewriter": "2.3.0",
"@datadog/native-iast-taint-tracking": "1.7.0",
"@datadog/native-iast-rewriter": "2.3.1",
"@datadog/native-iast-taint-tracking": "2.1.0",
"@datadog/native-metrics": "^2.0.0",

@@ -85,3 +85,3 @@ "@datadog/pprof": "5.2.0",

"ignore": "^5.2.4",
"import-in-the-middle": "^1.7.3",
"import-in-the-middle": "^1.7.4",
"int64-buffer": "^0.1.9",

@@ -88,0 +88,0 @@ "ipaddr.js": "^2.1.0",

@@ -78,3 +78,6 @@ 'use strict'

} catch (err) {
console.warn(`MISSING: Unable to find "${args.path}". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${args.path}".` +
"Unless it's dead code this could cause a problem at runtime.")
}
return

@@ -97,3 +100,6 @@ }

if (!internal) {
console.warn(`MISSING: Unable to find "${extracted.pkg}/package.json". Is the package dead code?`)
if (DEBUG) {
console.warn(`Warning: Unable to find "${extracted.pkg}/package.json".` +
"Unless it's dead code this could cause a problem at runtime.")
}
}

@@ -100,0 +106,0 @@ return

@@ -165,4 +165,7 @@ 'use strict'

's3',
'sfn',
'sns',
'sqs'
'sqs',
'states',
'stepfunctions'
].includes(name)

@@ -169,0 +172,0 @@ ? name

@@ -25,2 +25,4 @@ 'use strict'

const workerReportTraceCh = channel('ci:cucumber:worker-report:trace')
const itrSkippedSuitesCh = channel('ci:cucumber:itr:skipped-suites')

@@ -33,3 +35,4 @@

fromCoverageMapToCoverage,
getTestSuitePath
getTestSuitePath,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE
} = require('../../dd-trace/src/plugins/util/test')

@@ -52,2 +55,5 @@

const pickleResultByFile = {}
const sessionAsyncResource = new AsyncResource('bound-anonymous-fn')
let skippableSuites = []

@@ -57,5 +63,8 @@ let itrCorrelationId = ''

let isUnskippable = false
let isSuitesSkippingEnabled = false
let isEarlyFlakeDetectionEnabled = false
let earlyFlakeDetectionNumRetries = 0
let knownTests = []
let skippedSuites = []
let isSuitesSkipped = false

@@ -113,2 +122,39 @@ function getSuiteStatusFromTestStatuses (testStatuses) {

// Publishes `{ onDone }` on the given diagnostics channel from within the
// session async scope, and resolves with whatever the subscriber passes to
// `onDone`. Used to turn channel request/response pairs into awaitable calls.
function getChannelPromise (channelToPublishTo) {
  return new Promise((resolve) => {
    const publishRequest = () => {
      channelToPublishTo.publish({ onDone: resolve })
    }
    sessionAsyncResource.runInAsyncScope(publishRequest)
  })
}
// Splits the runtime's pickles into those that should run and the suites that
// ITR marked as skippable. A suite explicitly marked as unskippable always
// runs, even when it appears in `suitesToSkip`.
// Returns { skippedSuites: Set<string>, picklesToRun: pickleId[] }.
function getFilteredPickles (runtime, suitesToSkip) {
  const skippedSuites = new Set()
  const picklesToRun = []
  for (const pickleId of runtime.pickleIds) {
    const pickle = runtime.eventDataCollector.getPickle(pickleId)
    const suitePath = getTestSuitePath(pickle.uri, process.cwd())
    const shouldSkip = suitesToSkip.includes(suitePath) && !isMarkedAsUnskippable(pickle)
    if (shouldSkip) {
      skippedSuites.add(suitePath)
    } else {
      picklesToRun.push(pickleId)
    }
  }
  return { skippedSuites, picklesToRun }
}
// Groups the runtime's pickles by the file (uri) they belong to.
// Returns a plain object mapping uri -> array of pickle objects.
function getPickleByFile (runtime) {
  const picklesByFile = {}
  for (const pickleId of runtime.pickleIds) {
    const pickle = runtime.eventDataCollector.getPickle(pickleId)
    if (picklesByFile[pickle.uri]) {
      picklesByFile[pickle.uri].push(pickle)
    } else {
      picklesByFile[pickle.uri] = [pickle]
    }
  }
  return picklesByFile
}
function wrapRun (pl, isLatestVersion) {

@@ -133,3 +179,4 @@ if (patched.has(pl)) return

testFileAbsolutePath,
testSourceLine
testSourceLine,
isParallel: !!process.env.CUCUMBER_WORKER_ID
})

@@ -202,8 +249,2 @@ try {

function pickleHook (PickleRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return PickleRunner
}
const pl = PickleRunner.default

@@ -217,8 +258,2 @@

function testCaseHook (TestCaseRunner) {
if (process.env.CUCUMBER_WORKER_ID) {
// Parallel mode is not supported
log.warn('Unable to initialize CI Visibility because Cucumber is running in parallel mode.')
return TestCaseRunner
}
const pl = TestCaseRunner.default

@@ -231,44 +266,3 @@

addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
// Partitions the runtime's pickle ids into the ones to execute and the suite
// paths skipped by ITR. Suites marked as unskippable are kept even if they
// are present in `suitesToSkip`.
function getFilteredPickles (runtime, suitesToSkip) {
  const result = { skippedSuites: new Set(), picklesToRun: [] }
  runtime.pickleIds.forEach((pickleId) => {
    const pickle = runtime.eventDataCollector.getPickle(pickleId)
    const suitePath = getTestSuitePath(pickle.uri, process.cwd())
    if (suitesToSkip.includes(suitePath) && !isMarkedAsUnskippable(pickle)) {
      result.skippedSuites.add(suitePath)
    } else {
      result.picklesToRun.push(pickleId)
    }
  })
  return result
}
// Builds a uri -> pickles[] index for every pickle known to the runtime.
function getPickleByFile (runtime) {
  return runtime.pickleIds.reduce((grouped, pickleId) => {
    const pickle = runtime.eventDataCollector.getPickle(pickleId)
    // Create the bucket on first sight of the file, then append.
    ;(grouped[pickle.uri] ??= []).push(pickle)
    return grouped
  }, {})
}
function getWrappedStart (start, frameworkVersion) {
function getWrappedStart (start, frameworkVersion, isParallel = false) {
return async function () {

@@ -278,26 +272,12 @@ if (!libraryConfigurationCh.hasSubscribers) {

}
const asyncResource = new AsyncResource('bound-anonymous-fn')
let onDone
let errorSkippableRequest
const configPromise = new Promise(resolve => {
onDone = resolve
})
const configurationResponse = await getChannelPromise(libraryConfigurationCh)
asyncResource.runInAsyncScope(() => {
libraryConfigurationCh.publish({ onDone })
})
const configurationResponse = await configPromise
isEarlyFlakeDetectionEnabled = configurationResponse.libraryConfig?.isEarlyFlakeDetectionEnabled
earlyFlakeDetectionNumRetries = configurationResponse.libraryConfig?.earlyFlakeDetectionNumRetries
isSuitesSkippingEnabled = configurationResponse.libraryConfig?.isSuitesSkippingEnabled
if (isEarlyFlakeDetectionEnabled) {
const knownTestsPromise = new Promise(resolve => {
onDone = resolve
})
asyncResource.runInAsyncScope(() => {
knownTestsCh.publish({ onDone })
})
const knownTestsResponse = await knownTestsPromise
const knownTestsResponse = await getChannelPromise(knownTestsCh)
if (!knownTestsResponse.err) {

@@ -310,31 +290,22 @@ knownTests = knownTestsResponse.knownTests

const skippableSuitesPromise = new Promise(resolve => {
onDone = resolve
})
if (isSuitesSkippingEnabled) {
const skippableResponse = await getChannelPromise(skippableSuitesCh)
asyncResource.runInAsyncScope(() => {
skippableSuitesCh.publish({ onDone })
})
errorSkippableRequest = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
const skippableResponse = await skippableSuitesPromise
if (!errorSkippableRequest) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
const err = skippableResponse.err
skippableSuites = skippableResponse.skippableSuites
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
let skippedSuites = []
let isSuitesSkipped = false
this.pickleIds = picklesToRun
if (!err) {
const filteredPickles = getFilteredPickles(this, skippableSuites)
const { picklesToRun } = filteredPickles
isSuitesSkipped = picklesToRun.length !== this.pickleIds.length
log.debug(
() => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.`
)
this.pickleIds = picklesToRun
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
skippedSuites = Array.from(filteredPickles.skippedSuites)
itrCorrelationId = skippableResponse.itrCorrelationId
}
}

@@ -347,7 +318,7 @@

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionStartCh.publish({ command, frameworkVersion })
})
if (!err && skippedSuites.length) {
if (!errorSkippableRequest && skippedSuites.length) {
itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion })

@@ -370,3 +341,3 @@ }

asyncResource.runInAsyncScope(() => {
sessionAsyncResource.runInAsyncScope(() => {
sessionFinishCh.publish({

@@ -379,3 +350,4 @@ status: success ? 'pass' : 'fail',

hasForcedToRunSuites: isForcedToRun,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
})

@@ -449,3 +421,4 @@ })

coverageFiles,
suiteFile: testFileAbsolutePath
suiteFile: testFileAbsolutePath,
testSuitePath
})

@@ -458,3 +431,3 @@ // We need to reset coverage to get a code coverage per suite

testSuiteFinishCh.publish(testSuiteStatus)
testSuiteFinishCh.publish({ status: testSuiteStatus, testSuitePath })
}

@@ -466,6 +439,89 @@

// From 7.3.0 onwards, runPickle becomes runTestCase
// Wraps cucumber's parallel coordinator `parseWorkerMessage` so the main
// process can react to messages coming from worker processes:
// - dd-trace messages (arrays tagged with CUCUMBER_WORKER_TRACE_PAYLOAD_CODE)
//   are forwarded to the worker-report trace channel and hidden from cucumber,
//   which would otherwise throw on the unknown shape.
// - `testCaseStarted` / `testCaseFinished` envelopes are inspected to publish
//   suite start/finish events, since workers only report test-level data.
function getWrappedParseWorkerMessage (parseWorkerMessageFunction) {
  return function (worker, message, ...extraArgs) {
    const forward = () => parseWorkerMessageFunction.call(this, worker, message, ...extraArgs)

    // If the message is an array, it's a dd-trace message, so we need to stop
    // cucumber processing, or cucumber will throw an error.
    // TODO: identify the message better
    if (Array.isArray(message)) {
      const [messageCode, payload] = message
      if (messageCode === CUCUMBER_WORKER_TRACE_PAYLOAD_CODE) {
        sessionAsyncResource.runInAsyncScope(() => {
          workerReportTraceCh.publish(payload)
        })
        return
      }
    }

    const { jsonEnvelope } = message
    if (!jsonEnvelope) {
      return forward()
    }

    let parsed = jsonEnvelope
    if (typeof parsed === 'string') {
      try {
        parsed = JSON.parse(jsonEnvelope)
      } catch (e) {
        // Not parseable JSON: hand the message to cucumber untouched.
        return forward()
      }
    }

    if (parsed.testCaseStarted) {
      const { pickleId } = this.eventDataCollector.testCaseMap[parsed.testCaseStarted.testCaseId]
      const pickle = this.eventDataCollector.getPickle(pickleId)
      const testFileAbsolutePath = pickle.uri
      // First test seen for this file: open the test suite.
      if (!pickleResultByFile[testFileAbsolutePath]) {
        pickleResultByFile[testFileAbsolutePath] = []
        testSuiteStartCh.publish({
          testSuitePath: getTestSuitePath(testFileAbsolutePath, process.cwd())
        })
      }
    }

    const parseWorkerResponse = forward()

    // Only after `parseWorkerMessageFunction` has run can the status be read.
    if (parsed.testCaseFinished) {
      const { pickle, worstTestStepResult } =
        this.eventDataCollector.getTestCaseAttempt(parsed.testCaseFinished.testCaseStartedId)
      const { status } = getStatusFromResultLatest(worstTestStepResult)
      const finished = pickleResultByFile[pickle.uri]
      finished.push(status)
      // Last test of the file just finished: close the test suite.
      if (finished.length === pickleByFile[pickle.uri].length) {
        testSuiteFinishCh.publish({
          status: getSuiteStatusFromTestStatuses(finished),
          testSuitePath: getTestSuitePath(pickle.uri, process.cwd())
        })
      }
    }
    return parseWorkerResponse
  }
}
// Test start / finish for older versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['7.0.0 - 7.2.1'],
file: 'lib/runtime/pickle_runner.js'
}, pickleHook)
// Test start / finish for newer versions. The only hook executed in workers when in parallel mode
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/test_case_runner.js'
}, testCaseHook)
// From 7.3.0 onwards, runPickle becomes runTestCase. Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=7.3.0'],
file: 'lib/runtime/index.js'

@@ -479,2 +535,5 @@ }, (runtimePackage, frameworkVersion) => {

// Not executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedRunTest` generates suite start and finish events
addHook({

@@ -490,1 +549,19 @@ name: '@cucumber/cucumber',

})
// Only executed in parallel mode.
// `getWrappedStart` generates session start and finish events
// `getWrappedGiveWork` generates suite start events and sets pickleResultByFile (used by suite finish events)
// `getWrappedParseWorkerMessage` generates suite finish events
addHook({
name: '@cucumber/cucumber',
versions: ['>=8.0.0'],
file: 'lib/runtime/parallel/coordinator.js'
}, (coordinatorPackage, frameworkVersion) => {
shimmer.wrap(coordinatorPackage.default.prototype, 'start', start => getWrappedStart(start, frameworkVersion, true))
shimmer.wrap(
coordinatorPackage.default.prototype,
'parseWorkerMessage',
parseWorkerMessage => getWrappedParseWorkerMessage(parseWorkerMessage)
)
return coordinatorPackage
})

@@ -69,2 +69,3 @@ 'use strict'

'limitd-client': () => require('../limitd-client'),
lodash: () => require('../lodash'),
mariadb: () => require('../mariadb'),

@@ -71,0 +72,0 @@ memcached: () => require('../memcached'),

@@ -300,3 +300,8 @@ const semver = require('semver')

function dispatcherRunWrapperNew (run) {
return function () {
return function (testGroups) {
if (!this._allTests) {
// Removed in https://github.com/microsoft/playwright/commit/1e52c37b254a441cccf332520f60225a5acc14c7
// Not available from >=1.44.0
this._allTests = testGroups.map(g => g.tests).flat()
}
remainingTestsByFile = getTestsBySuiteFromTestGroups(arguments[0])

@@ -303,0 +308,0 @@ return run.apply(this, arguments)

@@ -10,4 +10,7 @@ 'use strict'

exports.s3 = require('./s3')
exports.sfn = require('./sfn')
exports.sns = require('./sns')
exports.sqs = require('./sqs')
exports.states = require('./states')
exports.stepfunctions = require('./stepfunctions')
exports.default = require('./default')

@@ -21,3 +21,10 @@ 'use strict'

TEST_IS_NEW,
TEST_IS_RETRY
TEST_IS_RETRY,
TEST_SUITE_ID,
TEST_SESSION_ID,
TEST_COMMAND,
TEST_MODULE,
TEST_MODULE_ID,
TEST_SUITE,
CUCUMBER_IS_PARALLEL
} = require('../../dd-trace/src/plugins/util/test')

@@ -36,3 +43,19 @@ const { RESOURCE_NAME } = require('../../../ext/tags')

} = require('../../dd-trace/src/ci-visibility/telemetry')
const id = require('../../dd-trace/src/id')
const isCucumberWorker = !!process.env.CUCUMBER_WORKER_ID
// Extracts the suite/session/module identification tags from a test-suite
// span, so they can be stamped onto test spans reported by worker processes.
function getTestSuiteTags (testSuiteSpan) {
  const context = testSuiteSpan.context()
  const tags = {
    [TEST_SUITE_ID]: context.toSpanId(),
    [TEST_SESSION_ID]: context.toTraceId(),
    [TEST_COMMAND]: context._tags[TEST_COMMAND],
    [TEST_MODULE]: 'cucumber'
  }
  // The module id is only available when the suite span has a parent.
  const parentId = context._parentId
  if (parentId) {
    tags[TEST_MODULE_ID] = parentId.toString(10)
  }
  return tags
}
class CucumberPlugin extends CiPlugin {

@@ -48,2 +71,4 @@ static get id () {

this.testSuiteSpanByPath = {}
this.addSub('ci:cucumber:session:finish', ({

@@ -56,3 +81,4 @@ status,

hasForcedToRunSuites,
isEarlyFlakeDetectionEnabled
isEarlyFlakeDetectionEnabled,
isParallel
}) => {

@@ -77,2 +103,5 @@ const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.libraryConfig || {}

}
if (isParallel) {
this.testSessionSpan.setTag(CUCUMBER_IS_PARALLEL, 'true')
}

@@ -109,3 +138,3 @@ this.testSessionSpan.setTag(TEST_STATUS, status)

}
this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
const testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', {
childOf: this.testModuleSpan,

@@ -118,2 +147,4 @@ tags: {

})
this.testSuiteSpanByPath[testSuitePath] = testSuiteSpan
this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite')

@@ -125,9 +156,10 @@ if (this.libraryConfig?.isCodeCoverageEnabled) {

this.addSub('ci:cucumber:test-suite:finish', status => {
this.testSuiteSpan.setTag(TEST_STATUS, status)
this.testSuiteSpan.finish()
this.addSub('ci:cucumber:test-suite:finish', ({ status, testSuitePath }) => {
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]
testSuiteSpan.setTag(TEST_STATUS, status)
testSuiteSpan.finish()
this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite')
})
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => {
this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile, testSuitePath }) => {
if (!this.libraryConfig?.isCodeCoverageEnabled) {

@@ -139,2 +171,3 @@ return

}
const testSuiteSpan = this.testSuiteSpanByPath[testSuitePath]

@@ -147,4 +180,4 @@ const relativeCoverageFiles = [...coverageFiles, suiteFile]

const formattedCoverage = {
sessionId: this.testSuiteSpan.context()._traceId,
suiteId: this.testSuiteSpan.context()._spanId,
sessionId: testSuiteSpan.context()._traceId,
suiteId: testSuiteSpan.context()._spanId,
files: relativeCoverageFiles

@@ -157,3 +190,3 @@ }

this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine }) => {
this.addSub('ci:cucumber:test:start', ({ testName, testFileAbsolutePath, testSourceLine, isParallel }) => {
const store = storage.getStore()

@@ -167,2 +200,6 @@ const testSuite = getTestSuitePath(testFileAbsolutePath, this.sourceRoot)

}
if (isParallel) {
extraTags[CUCUMBER_IS_PARALLEL] = 'true'
}
const testSpan = this.startTestSpan(testName, testSuite, extraTags)

@@ -187,2 +224,32 @@

this.addSub('ci:cucumber:worker-report:trace', (traces) => {
const formattedTraces = JSON.parse(traces).map(trace =>
trace.map(span => ({
...span,
span_id: id(span.span_id),
trace_id: id(span.trace_id),
parent_id: id(span.parent_id)
}))
)
// We have to update the test session, test module and test suite ids
// before we export them in the main process
formattedTraces.forEach(trace => {
trace.forEach(span => {
if (span.name === 'cucumber.test') {
const testSuite = span.meta[TEST_SUITE]
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
const testSuiteTags = getTestSuiteTags(testSuiteSpan)
span.meta = {
...span.meta,
...testSuiteTags
}
}
})
this.tracer._exporter.export(trace)
})
})
this.addSub('ci:cucumber:test:finish', ({ isStep, status, skipReason, errorMessage, isNew, isEfdRetry }) => {

@@ -217,2 +284,6 @@ const span = storage.getStore().span

finishAllTraceSpans(span)
// If it's a worker, flushing is cheap, as it's just sending data to the main process
if (isCucumberWorker) {
this.tracer._exporter.flush()
}
}

@@ -230,6 +301,7 @@ })

startTestSpan (testName, testSuite, extraTags) {
const testSuiteSpan = this.testSuiteSpanByPath[testSuite]
return super.startTestSpan(
testName,
testSuite,
this.testSuiteSpan,
testSuiteSpan,
extraTags

@@ -236,0 +308,0 @@ )

@@ -51,2 +51,3 @@ 'use strict'

column: match.location.column,
ident: match.location.ident,
data: match.ruleId

@@ -53,0 +54,0 @@ }))

@@ -12,4 +12,8 @@ 'use strict'

}
_getEvidence (value) {
return { value: `${value.ident}` }
}
}
module.exports = new HardcodedPasswordAnalyzer()

@@ -5,2 +5,3 @@ 'use strict'

{ src: 'concat' },
{ src: 'join' },
{ src: 'parse' },

@@ -13,2 +14,4 @@ { src: 'plusOperator', operator: true },

{ src: 'substring' },
{ src: 'toLowerCase', dst: 'stringCase' },
{ src: 'toUpperCase', dst: 'stringCase' },
{ src: 'trim' },

@@ -15,0 +18,0 @@ { src: 'trimEnd' },

'use strict'
const dc = require('dc-polyfill')
const TaintedUtils = require('@datadog/native-iast-taint-tracking')

@@ -8,5 +9,11 @@ const { IAST_TRANSACTION_ID } = require('../iast-context')

const { isInfoAllowed } = require('../telemetry/verbosity')
const { getTaintTrackingImpl, getTaintTrackingNoop } = require('./taint-tracking-impl')
const {
getTaintTrackingImpl,
getTaintTrackingNoop,
lodashTaintTrackingHandler
} = require('./taint-tracking-impl')
const { taintObject } = require('./operations-taint-object')
const lodashOperationCh = dc.channel('datadog:lodash:operation')
function createTransaction (id, iastContext) {

@@ -96,2 +103,3 @@ if (id && iastContext) {

global._ddiast = getTaintTrackingImpl(telemetryVerbosity)
lodashOperationCh.subscribe(lodashTaintTrackingHandler)
}

@@ -101,2 +109,3 @@

global._ddiast = getTaintTrackingNoop()
lodashOperationCh.unsubscribe(lodashTaintTrackingHandler)
}

@@ -103,0 +112,0 @@

@@ -21,2 +21,3 @@ 'use strict'

concat: noop,
join: noop,
parse: noop,

@@ -29,2 +30,3 @@ plusOperator: noop,

substring: noop,
stringCase: noop,
trim: noop,

@@ -118,2 +120,9 @@ trimEnd: noop

stringCase: getCsiFn(
(transactionId, res, target) => TaintedUtils.stringCase(transactionId, res, target),
getContext,
String.prototype.toLowerCase,
String.prototype.toUpperCase
),
trim: getCsiFn(

@@ -154,2 +163,18 @@ (transactionId, res, target) => TaintedUtils.trim(transactionId, res, target),

return res
},
join: function (res, fn, target, separator) {
if (fn === Array.prototype.join) {
try {
const iastContext = getContext()
const transactionId = getTransactionId(iastContext)
if (transactionId) {
res = TaintedUtils.arrayJoin(transactionId, res, target, separator)
}
} catch (e) {
iastLog.error(e)
}
}
return res
}

@@ -183,5 +208,34 @@ }

// Maps a lodash operation name to the native taint-tracking routine that
// re-applies taint ranges to that operation's result. Note that `toLower` and
// `toUpper` share `stringCase`, and `trimStart` reuses `trim`.
const lodashFns = {
  join: TaintedUtils.arrayJoin,
  toLower: TaintedUtils.stringCase,
  toUpper: TaintedUtils.stringCase,
  trim: TaintedUtils.trim,
  trimEnd: TaintedUtils.trimEnd,
  trimStart: TaintedUtils.trim
}

// Returns the taint-propagation function for `lodashFn`, or a pass-through
// `(transactionId, result) => result` when the operation is not tracked.
function getLodashTaintedUtilFn (lodashFn) {
  return lodashFns[lodashFn] || ((transactionId, result) => result)
}
// Channel handler for 'datadog:lodash:operation': re-applies taint to the
// result of a lodash call when an IAST transaction is active. Mutates
// `message.result` in place; errors are logged, never propagated.
function lodashTaintTrackingHandler (message) {
  try {
    if (!message.result) return
    const iastContext = getContextDefault()
    const transactionId = getTransactionId(iastContext)
    if (!transactionId) return
    const taintFn = getLodashTaintedUtilFn(message.operation)
    message.result = taintFn(transactionId, message.result, ...message.arguments)
  } catch (e) {
    iastLog.error(`Error invoking CSI lodash ${message.operation}`).errorAndPublish(e)
  }
}
module.exports = {
getTaintTrackingImpl,
getTaintTrackingNoop
getTaintTrackingNoop,
lodashTaintTrackingHandler
}

@@ -9,2 +9,3 @@ 'use strict'

const commandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer')
const hardcodedPasswordAnalyzer = require('./sensitive-analyzers/hardcoded-password-analyzer')
const headerSensitiveAnalyzer = require('./sensitive-analyzers/header-sensitive-analyzer')

@@ -35,2 +36,5 @@ const jsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer')

})
this._sensitiveAnalyzers.set(vulnerabilities.HARDCODED_PASSWORD, (evidence) => {
return hardcodedPasswordAnalyzer(evidence, this._valuePattern)
})
}

@@ -56,3 +60,5 @@

const sensitiveRanges = sensitiveAnalyzer(evidence)
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
if (evidence.ranges || sensitiveRanges?.length) {
return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources)
}
}

@@ -73,3 +79,3 @@ return null

let nextTainted = ranges.shift()
let nextTainted = ranges?.shift()
let nextSensitive = sensitive.shift()

@@ -76,0 +82,0 @@

@@ -52,2 +52,6 @@ 'use strict'

if (!evidence.ranges) {
return { value: evidence.value }
}
evidence.ranges.forEach((range, rangeIndex) => {

@@ -69,8 +73,4 @@ if (fromIndex < range.start) {

formatEvidence (type, evidence, sourcesIndexes, sources) {
if (!evidence.ranges && !evidence.rangesToApply) {
if (typeof evidence.value === 'undefined') {
return undefined
} else {
return { value: evidence.value }
}
if (typeof evidence.value === 'undefined') {
return undefined
}

@@ -77,0 +77,0 @@

@@ -449,2 +449,3 @@ 'use strict'

this._setValue(defaults, 'appsec.rules', undefined)
this._setValue(defaults, 'appsec.sca.enabled', null)
this._setValue(defaults, 'appsec.wafTimeout', 5e3) // µs

@@ -520,2 +521,3 @@ this._setValue(defaults, 'clientIpEnabled', false)

this._setValue(defaults, 'version', pkg.version)
this._setValue(defaults, 'instrumentation_config_id', undefined)
}

@@ -533,2 +535,3 @@

DD_APPSEC_RULES,
DD_APPSEC_SCA_ENABLED,
DD_APPSEC_TRACE_RATE_LIMIT,

@@ -553,2 +556,3 @@ DD_APPSEC_WAF_TIMEOUT,

DD_INSTRUMENTATION_TELEMETRY_ENABLED,
DD_INSTRUMENTATION_CONFIG_ID,
DD_LOGS_INJECTION,

@@ -622,2 +626,4 @@ DD_OPENAI_LOGS_ENABLED,

this._setString(env, 'appsec.rules', DD_APPSEC_RULES)
// DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend
this._setBoolean(env, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED)
this._setValue(env, 'appsec.wafTimeout', maybeInt(DD_APPSEC_WAF_TIMEOUT))

@@ -703,2 +709,3 @@ this._setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED)

))
this._setString(env, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID)
this._setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG)

@@ -705,0 +712,0 @@ this._setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)

@@ -21,3 +21,4 @@ 'use strict'

case exporters.JEST_WORKER:
return require('./ci-visibility/exporters/jest-worker')
case exporters.CUCUMBER_WORKER:
return require('./ci-visibility/exporters/test-worker')
default:

@@ -24,0 +25,0 @@ return inAWSLambda && !usingLambdaExtension ? require('./exporters/log') : require('./exporters/agent')

'use strict'
const StoragePlugin = require('./storage')
const { PEER_SERVICE_KEY } = require('../constants')
const { PEER_SERVICE_KEY, PEER_SERVICE_SOURCE_KEY } = require('../constants')

@@ -31,3 +31,3 @@ class DatabasePlugin extends StoragePlugin {

createDBMPropagationCommentService (serviceName) {
createDBMPropagationCommentService (serviceName, span) {
this.encodingServiceTags('dddbs', 'encodedDddbs', serviceName)

@@ -37,7 +37,22 @@ this.encodingServiceTags('dde', 'encodedDde', this.tracer._env)

this.encodingServiceTags('ddpv', 'encodedDdpv', this.tracer._version)
if (span.context()._tags['out.host']) {
this.encodingServiceTags('ddh', 'encodedDdh', span._spanContext._tags['out.host'])
}
if (span.context()._tags['db.name']) {
this.encodingServiceTags('dddb', 'encodedDddb', span._spanContext._tags['db.name'])
}
const { encodedDddbs, encodedDde, encodedDdps, encodedDdpv } = this.serviceTags
const { encodedDddb, encodedDddbs, encodedDde, encodedDdh, encodedDdps, encodedDdpv } = this.serviceTags
return `dddbs='${encodedDddbs}',dde='${encodedDde}',` +
let dbmComment = `dddb='${encodedDddb}',dddbs='${encodedDddbs}',dde='${encodedDde}',ddh='${encodedDdh}',` +
`ddps='${encodedDdps}',ddpv='${encodedDdpv}'`
const peerData = this.getPeerService(span.context()._tags)
if (peerData !== undefined && peerData[PEER_SERVICE_SOURCE_KEY] === PEER_SERVICE_KEY) {
this.encodingServiceTags('ddprs', 'encodedDdprs', peerData[PEER_SERVICE_KEY])
const { encodedDdprs } = this.serviceTags
dbmComment += `,ddprs='${encodedDdprs}'`
}
return dbmComment
}

@@ -61,3 +76,3 @@

const servicePropagation = this.createDBMPropagationCommentService(dbmService)
const servicePropagation = this.createDBMPropagationCommentService(dbmService, span)

@@ -64,0 +79,0 @@ if (isPreparedStatement || mode === 'service') {

@@ -64,2 +64,4 @@ const path = require('path')

const CUCUMBER_IS_PARALLEL = 'test.cucumber.is_parallel'
const TEST_ITR_TESTS_SKIPPED = '_dd.ci.itr.tests_skipped'

@@ -86,2 +88,5 @@ const TEST_ITR_SKIPPING_ENABLED = 'test.itr.tests_skipping.enabled'

// cucumber worker variables
const CUCUMBER_WORKER_TRACE_PAYLOAD_CODE = 70
// Early flake detection util strings

@@ -97,2 +102,3 @@ const EFD_STRING = "Retried by Datadog's Early Flake Detection"

JEST_DISPLAY_NAME,
CUCUMBER_IS_PARALLEL,
TEST_TYPE,

@@ -110,2 +116,3 @@ TEST_NAME,

JEST_WORKER_COVERAGE_PAYLOAD_CODE,
CUCUMBER_WORKER_TRACE_PAYLOAD_CODE,
TEST_SOURCE_START,

@@ -112,0 +119,0 @@ TEST_SKIPPED_BY_ITR,

@@ -9,3 +9,4 @@ 'use strict'

const { manager: metricsManager } = require('./metrics')
const logs = require('./logs')
const telemetryLogger = require('./logs')
const logger = require('../log')

@@ -215,3 +216,3 @@ const telemetryStartChannel = dc.channel('datadog:telemetry:start')

metricsManager.send(config, application, host)
logs.send(config, application, host)
telemetryLogger.send(config, application, host)

@@ -240,2 +241,6 @@ const { reqType, payload } = createPayload('app-heartbeat')

if (!aConfig.telemetry.enabled) {
if (aConfig.sca?.enabled) {
logger.warn('DD_APPSEC_SCA_ENABLED requires enabling telemetry to work.')
}
return

@@ -251,3 +256,3 @@ }

dependencies.start(config, application, host, getRetryData, updateRetryData)
logs.start(config)
telemetryLogger.start(config)

@@ -325,2 +330,3 @@ sendData(config, application, host, 'app-started', appStarted(config))

const name = nameMapping[change.name] || change.name
names.push(name)

@@ -327,0 +333,0 @@ const { origin, value } = change

Sorry, the diff of this file is too big to display

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc