| 'use strict' | ||
| const log = require('../log') | ||
| /** | ||
| * Extracts the context from the given Lambda handler arguments. | ||
| * | ||
| * It is possible for users to define a lambda function without specifying a | ||
| * context arg. In these cases, this function returns null instead of throwing | ||
| * an error. | ||
| * | ||
| * @param {unknown[]} args any amount of arguments | ||
| * @returns {object | null} | ||
| */ | ||
| exports.extractContext = function extractContext (args) { | ||
| let context = null | ||
| for (let i = 0; i < args.length && i < 3; i++) { | ||
| if (args[i] && typeof args[i].getRemainingTimeInMillis === 'function') { | ||
| context = args[i] | ||
| break | ||
| } | ||
| } | ||
| if (!context) { | ||
| log.debug('Unable to extract context object from Lambda handler arguments') | ||
| } | ||
| return context | ||
| } |
+1
-1
| { | ||
| "name": "dd-trace", | ||
| "version": "5.85.0", | ||
| "version": "5.86.0", | ||
| "description": "Datadog APM tracing client for JavaScript", | ||
@@ -5,0 +5,0 @@ "main": "index.js", |
@@ -62,19 +62,37 @@ 'use strict' | ||
| } | ||
| } | ||
// Node may or may not run AsyncLocalStorage on top of AsyncContextFrame
// (ACF). Feature-detect it at load time: with ACF, `run` delegates to
// `enterWith`; without ACF it does not. Stubbing `enterWith` on a
// throwaway instance and calling `run` tells us which world we are in.
const isACFActive = (() => {
  const probe = new AsyncLocalStorage()
  let delegated = false
  probe.enterWith = () => {
    delegated = true
  }
  probe.run(1, () => {})
  probe.disable()
  return delegated
})()
| if (!isACFActive) { | ||
| const superGetStore = AsyncLocalStorage.prototype.getStore | ||
| const superEnterWith = AsyncLocalStorage.prototype.enterWith | ||
| /** | ||
| * Here, we replicate the behavior of the original `run()` method. We ensure | ||
| * that our `enterWith()` is called internally, so that the handle to the | ||
| * store is set. As an optimization, we use super for getStore and enterWith | ||
| * when dealing with the parent store, so that we don't have to access the | ||
| * WeakMap. | ||
| * Override the `run` method to manually call `enterWith` and `getStore` | ||
| * when not using AsyncContextFrame. | ||
| * | ||
| * Without ACF, super.run() won't call this.enterWith(), so the WeakMap handle | ||
| * is never created and getStore() would fail. | ||
| * | ||
| * @template R | ||
| * @template TArgs = unknown[] | ||
| * @template {unknown[]} TArgs | ||
| * @param {Store<unknown>} store | ||
| * @param {() => R} fn | ||
| * @param {...TArgs} args | ||
| * @param {(...args: TArgs) => R} fn | ||
| * @param {TArgs} args | ||
| * @returns {R} | ||
| * @override | ||
| */ | ||
| run (store, fn, ...args) { | ||
| const prior = super.getStore() | ||
| DatadogStorage.prototype.run = function run (store, fn, ...args) { | ||
| const prior = superGetStore.call(this) | ||
| this.enterWith(store) | ||
@@ -84,3 +102,3 @@ try { | ||
| } finally { | ||
| super.enterWith(prior) | ||
| superEnterWith.call(this, prior) | ||
| } | ||
@@ -116,2 +134,2 @@ } | ||
| module.exports = { storage } | ||
| module.exports = { storage, isACFActive } |
@@ -10,2 +10,3 @@ 'use strict' | ||
| JEST_WORKER_COVERAGE_PAYLOAD_CODE, | ||
| JEST_WORKER_TELEMETRY_PAYLOAD_CODE, | ||
| getTestLineStart, | ||
@@ -20,2 +21,3 @@ getTestSuitePath, | ||
| const { | ||
| SEED_SUFFIX_RE, | ||
| getFormattedJestTestParameters, | ||
@@ -40,2 +42,3 @@ getJestTestName, | ||
| const workerReportLogsCh = channel('ci:jest:worker-report:logs') | ||
| const workerReportTelemetryCh = channel('ci:jest:worker-report:telemetry') | ||
@@ -61,2 +64,3 @@ const testSuiteCodeCoverageCh = channel('ci:jest:test-suite:code-coverage') | ||
| const CHILD_MESSAGE_CALL = 1 | ||
| // Maximum time we'll wait for the tracer to flush | ||
@@ -522,2 +526,3 @@ const FLUSH_TIMEOUT = 10_000 | ||
| if (event.name === 'test_done') { | ||
| const originalError = event.test?.errors?.[0] | ||
| let status = 'pass' | ||
@@ -602,3 +607,3 @@ if (event.test.errors && event.test.errors.length) { | ||
| ...ctx.currentStore, | ||
| error: formatJestError(event.test.errors[0]), | ||
| error: formatJestError(originalError), | ||
| shouldSetProbe, | ||
@@ -963,3 +968,3 @@ promises, | ||
| } else { | ||
| testManagementTests = receivedTestManagementTests | ||
| testManagementTests = receivedTestManagementTests || {} | ||
| } | ||
@@ -1022,12 +1027,12 @@ } catch (err) { | ||
| */ | ||
| let numEfdFailedTestsToIgnore = 0 | ||
| if (isEarlyFlakeDetectionEnabled) { | ||
| let numFailedTestsToIgnore = 0 | ||
| for (const testStatuses of newTestsTestStatuses.values()) { | ||
| const { pass, fail } = getTestStats(testStatuses) | ||
| if (pass > 0) { // as long as one passes, we'll consider the test passed | ||
| numFailedTestsToIgnore += fail | ||
| numEfdFailedTestsToIgnore += fail | ||
| } | ||
| } | ||
| // If every test that failed was an EFD retry, we'll consider the suite passed | ||
| if (numFailedTestsToIgnore !== 0 && result.results.numFailedTests === numFailedTestsToIgnore) { | ||
| if (numEfdFailedTestsToIgnore !== 0 && result.results.numFailedTests === numEfdFailedTestsToIgnore) { | ||
| result.results.success = true | ||
@@ -1037,2 +1042,4 @@ } | ||
| let numFailedQuarantinedTests = 0 | ||
| let numFailedQuarantinedOrDisabledAttemptedToFixTests = 0 | ||
| if (isTestManagementTestsEnabled) { | ||
@@ -1043,3 +1050,10 @@ const failedTests = result | ||
| testResults.map(({ fullName: testName, status }) => ( | ||
| { testName, testSuiteAbsolutePath, status } | ||
| { | ||
| // Strip @fast-check/jest seed suffix so the name matches what was reported via TEST_NAME | ||
| testName: testSuiteAbsolutePathsWithFastCheck.has(testSuiteAbsolutePath) | ||
| ? testName.replace(SEED_SUFFIX_RE, '') | ||
| : testName, | ||
| testSuiteAbsolutePath, | ||
| status, | ||
| } | ||
| )) | ||
@@ -1049,5 +1063,2 @@ )) | ||
| let numFailedQuarantinedTests = 0 | ||
| let numFailedQuarantinedOrDisabledAttemptedToFixTests = 0 | ||
| for (const { testName, testSuiteAbsolutePath } of failedTests) { | ||
@@ -1083,2 +1094,12 @@ const testSuite = getTestSuitePath(testSuiteAbsolutePath, result.globalConfig.rootDir) | ||
| // Combined check: if all failed tests are accounted for by EFD (flaky retries) and/or quarantine, | ||
| // we should consider the suite passed even when neither check alone covers all failures. | ||
| if (!result.results.success && (isEarlyFlakeDetectionEnabled || isTestManagementTestsEnabled)) { | ||
| const totalIgnoredFailures = | ||
| numEfdFailedTestsToIgnore + numFailedQuarantinedTests + numFailedQuarantinedOrDisabledAttemptedToFixTests | ||
| if (totalIgnoredFailures !== 0 && result.results.numFailedTests === totalIgnoredFailures) { | ||
| result.results.success = true | ||
| } | ||
| } | ||
| // Determine session status after EFD and quarantine checks have potentially modified success | ||
@@ -1549,2 +1570,6 @@ let status, error | ||
| } | ||
| if (code === JEST_WORKER_TELEMETRY_PAYLOAD_CODE) { // datadog telemetry payload | ||
| workerReportTelemetryCh.publish(data) | ||
| return | ||
| } | ||
| return onMessage.apply(this, arguments) | ||
@@ -1551,0 +1576,0 @@ } |
@@ -168,3 +168,12 @@ 'use strict' | ||
| // Recompute status after EFD and quarantine adjustments have reduced failure counts | ||
| if (status === 'fail') { | ||
| if (this.stats) { | ||
| status = this.stats.failures === 0 ? 'pass' : 'fail' | ||
| } else { | ||
| status = this.failures === 0 ? 'pass' : 'fail' | ||
| } | ||
| } | ||
| if (status === 'fail') { | ||
| error = new Error(`Failed tests: ${this.failures}.`) | ||
@@ -171,0 +180,0 @@ } |
@@ -7,25 +7,157 @@ 'use strict' | ||
| /** | ||
| * @typedef {object} EnvValue | ||
| * @property {string|undefined} [value] | ||
| * @property {string|null|undefined} [fromEnvVar] | ||
| */ | ||
| /** | ||
| * @typedef {object} DatasourceConfig | ||
| * @property {string|EnvValue|undefined} [url] | ||
| */ | ||
| /** | ||
| * @typedef {object} PrismaRuntimeConfig | ||
| * @property {Record<string, DatasourceConfig>|undefined} [inlineDatasources] | ||
| * @property {Record<string, { url?: string }>|undefined} [overrideDatasources] | ||
| * @property {Record<string, { url?: string }>|undefined} [datasources] | ||
| * @property {string[]|undefined} [datasourceNames] | ||
| */ | ||
| /** | ||
| * @typedef {object} DbConfig | ||
| * @property {string|undefined} [user] | ||
| * @property {string|undefined} [password] | ||
| * @property {string|undefined} [host] | ||
| * @property {string|number|undefined} [port] | ||
| * @property {string|undefined} [database] | ||
| */ | ||
| /** | ||
| * @typedef {object} AdapterConfig | ||
| * @property {string|undefined} [connectionString] | ||
| * @property {string|undefined} [user] | ||
| * @property {string|undefined} [password] | ||
| * @property {string|undefined} [host] | ||
| * @property {string|number|undefined} [port] | ||
| * @property {string|undefined} [database] | ||
| */ | ||
| /** | ||
| * @typedef {object} Adapter | ||
| * @property {AdapterConfig|undefined} [config] | ||
| * @property {{ options?: AdapterConfig }|undefined} [externalPool] | ||
| */ | ||
| /** | ||
| * @typedef {object} PrismaClientConfig | ||
| * @property {string|undefined} [datasourceUrl] | ||
| * @property {Record<string, { url?: string }>|undefined} [datasources] | ||
| * @property {Adapter|undefined} [adapter] | ||
| */ | ||
| /** | ||
| * @typedef {object} PrismaHelperCtx | ||
| * @property {DbConfig} [dbConfig] | ||
| * @property {import('../../datadog-plugin-prisma/src/datadog-tracing-helper')} [helper] | ||
| */ | ||
/**
 * Resolves a Prisma datasource url entry to a plain string. Newer Prisma
 * versions store urls as `{ value, fromEnvVar }` objects; older ones use
 * bare strings.
 *
 * @param {string|EnvValue|undefined} envValue
 * @returns {string|undefined}
 */
function resolveEnvValue (envValue) {
  if (envValue && typeof envValue === 'object') {
    // Prefer the inline value; fall back to the referenced env var.
    return envValue.value || getEnvironmentVariable(envValue.fromEnvVar ?? '')
  }
  return envValue
}
/**
 * Resolves the connection url for a named datasource from the Prisma
 * runtime config, checking the known config shapes in priority order and
 * finally falling back to the DATABASE_URL environment variable.
 *
 * @param {PrismaRuntimeConfig|undefined} config
 * @param {string} datasourceName
 * @returns {string|undefined}
 */
function resolveDatasourceUrl (config, datasourceName) {
  const inlineUrl = resolveEnvValue(config?.inlineDatasources?.[datasourceName]?.url)
  return inlineUrl ??
    config?.overrideDatasources?.[datasourceName]?.url ??
    config?.datasources?.[datasourceName]?.url ??
    getEnvironmentVariable('DATABASE_URL')
}
/**
 * Coerces the port to a string (in place) and returns the config, or
 * undefined when every connection field is empty.
 *
 * @param {DbConfig} dbConfig
 * @returns {DbConfig|undefined}
 */
function normalizeDbConfig (dbConfig) {
  // Normalize the port in place so downstream consumers always see a string.
  dbConfig.port = dbConfig.port == null ? undefined : String(dbConfig.port)
  const { user, password, host, port, database } = dbConfig
  if (user || password || host || port || database) {
    return dbConfig
  }
}
/**
 * Derives database connection details from a Prisma driver adapter, if one
 * is configured. Tries the adapter's connection string first and falls back
 * to its discrete connection fields.
 *
 * @param {Adapter|undefined} adapter
 * @returns {DbConfig|undefined}
 */
function resolveAdapterDbConfig (adapter) {
  const config = adapter?.config || adapter?.externalPool?.options
  if (!config) {
    return undefined
  }
  // Some adapters expose the connection string directly instead of a
  // config object.
  if (typeof config === 'string') {
    return parseDBString(config)
  }
  return parseDBString(config.connectionString) ?? normalizeDbConfig({
    user: config.user,
    password: config.password,
    host: config.host,
    port: config.port,
    database: config.database,
  })
}
/**
 * Picks the effective db config for a PrismaClient instance. Client-level
 * settings (driver adapter, explicit datasource url) take precedence over
 * the config resolved from the Prisma runtime.
 *
 * @param {PrismaClientConfig|undefined} clientConfig
 * @param {string} datasourceName
 * @param {DbConfig|undefined} runtimeDbConfig
 * @returns {DbConfig|undefined}
 */
function resolveClientDbConfig (clientConfig, datasourceName, runtimeDbConfig) {
  const fromAdapter = resolveAdapterDbConfig(clientConfig?.adapter)
  if (fromAdapter) {
    return fromAdapter
  }
  const clientUrl = clientConfig?.datasources?.[datasourceName]?.url ?? clientConfig?.datasourceUrl
  return parseDBString(clientUrl) || runtimeDbConfig
}
| /** | ||
| * @param {unknown} runtime | ||
| * @param {string} versions | ||
| * @param {string} [name] | ||
| * @param {boolean} [isIitm] | ||
| * @returns {object} | ||
| */ | ||
| const prismaHook = (runtime, versions, name, isIitm) => { | ||
| const originalGetPrismaClient = runtime.getPrismaClient | ||
| /** | ||
| * @typedef {{ getPrismaClient?: (config: PrismaRuntimeConfig, ...args: unknown[]) => Function }} PrismaRuntime | ||
| */ | ||
| const prismaRuntime = /** @type {PrismaRuntime} */ (runtime) | ||
| const originalGetPrismaClient = prismaRuntime.getPrismaClient | ||
| if (!originalGetPrismaClient) return runtime | ||
| if (!originalGetPrismaClient) { | ||
| return runtime | ||
| } | ||
| const prismaHelperCtx = {} | ||
| /** | ||
| * @param {PrismaRuntimeConfig|undefined} config | ||
| */ | ||
| const wrappedGetPrismaClient = function (config) { | ||
| // Prisma config shapes vary by version/runtime entrypoint. We try a few known locations | ||
| // and fall back to DATABASE_URL when present. | ||
| const datasourceUrl = | ||
| config?.inlineDatasources?.db?.url?.value ?? | ||
| config?.inlineDatasources?.db?.url ?? | ||
| config?.overrideDatasources?.db?.url ?? | ||
| config?.datasources?.db?.url ?? | ||
| config?.datasourceUrl ?? | ||
| getEnvironmentVariable('DATABASE_URL') | ||
| const datasourceName = config?.datasourceNames?.[0] || 'db' | ||
| const runtimeDatasourceUrl = resolveDatasourceUrl(config, datasourceName) | ||
| const runtimeDbConfig = parseDBString(runtimeDatasourceUrl) | ||
| if (datasourceUrl && !prismaHelperCtx.dbConfig) { | ||
| prismaHelperCtx.dbConfig = parseDBString(datasourceUrl) | ||
| } | ||
| prismaHelperInit.publish(prismaHelperCtx) | ||
| const PrismaClient = originalGetPrismaClient.call(this, config) | ||
@@ -35,4 +167,13 @@ return class WrappedPrismaClientClass extends PrismaClient { | ||
| super(clientConfig) | ||
| this._tracingHelper = prismaHelperCtx.helper | ||
| this._engine.tracingHelper = prismaHelperCtx.helper | ||
| /** | ||
| * @type {PrismaHelperCtx} | ||
| */ | ||
| const prismaHelperCtx = { | ||
| dbConfig: resolveClientDbConfig(clientConfig, datasourceName, runtimeDbConfig), | ||
| } | ||
| prismaHelperInit.publish(prismaHelperCtx) | ||
| const helper = prismaHelperCtx.helper | ||
| this._tracingHelper = helper | ||
| this._engine.tracingHelper = helper | ||
| } | ||
@@ -43,7 +184,7 @@ } | ||
| if (isIitm) { | ||
| runtime.getPrismaClient = wrappedGetPrismaClient | ||
| prismaRuntime.getPrismaClient = wrappedGetPrismaClient | ||
| return runtime | ||
| } | ||
| return new Proxy(runtime, { | ||
| return new Proxy(prismaRuntime, { | ||
| get (target, prop) { | ||
@@ -68,12 +209,66 @@ if (prop === 'getPrismaClient') { | ||
| /** | ||
| * @param {string|undefined} dbString | ||
| * @returns {DbConfig|undefined} | ||
| */ | ||
| function parseDBString (dbString) { | ||
| const url = new URL(dbString) | ||
| const dbConfig = { | ||
| user: url.username, | ||
| password: url.password, | ||
| host: url.hostname, | ||
| port: url.port, | ||
| database: url.pathname.slice(1), // Remove leading slash | ||
| if (!dbString || typeof dbString !== 'string') { | ||
| return | ||
| } | ||
| return dbConfig | ||
| const sqlServerConfig = parseSqlServerConnectionString(dbString) | ||
| if (sqlServerConfig) { | ||
| return sqlServerConfig | ||
| } | ||
| try { | ||
| const url = new URL(dbString) | ||
| return normalizeDbConfig({ | ||
| user: url.username, | ||
| password: url.password, | ||
| host: url.hostname, | ||
| port: url.port, | ||
| database: url.pathname?.slice(1) || undefined, | ||
| }) | ||
| } catch {} | ||
| } | ||
/**
 * Parses a Prisma SQL Server connection string of the form
 * `sqlserver://[user[:password]@]host[:port][;key=value;...]` into a db
 * config. Returns undefined when the string is not a sqlserver url or
 * contains no usable fields.
 *
 * @param {string} dbString
 * @returns {DbConfig|undefined}
 */
function parseSqlServerConnectionString (dbString) {
  const PREFIX = 'sqlserver://'
  if (!dbString.startsWith(PREFIX)) {
    return
  }
  const segments = dbString.slice(PREFIX.length).split(';').filter(Boolean)
  if (!segments.length) {
    return
  }
  let hostPart = segments.shift()
  let user
  let password
  // Split credentials on the LAST '@' so passwords containing '@' keep the
  // real host, and on the FIRST ':' so passwords containing ':' are not
  // truncated (a plain split('@')/split(':') destructure loses both).
  const atIndex = hostPart?.lastIndexOf('@') ?? -1
  if (atIndex !== -1) {
    const userInfo = hostPart.slice(0, atIndex)
    hostPart = hostPart.slice(atIndex + 1)
    const colonIndex = userInfo.indexOf(':')
    if (colonIndex === -1) {
      user = userInfo
    } else {
      user = userInfo.slice(0, colonIndex)
      password = userInfo.slice(colonIndex + 1)
    }
  }
  let database
  // Key=value segments override any credentials from the authority part.
  for (const segment of segments) {
    const [rawKey, ...rawValue] = segment.split('=')
    const value = rawValue.join('=').trim() // values may themselves contain '='
    const key = rawKey?.trim().toLowerCase()
    if (!key || !value) {
      continue
    }
    if (key === 'database' || key === 'databasename' || key === 'db') database = value
    else if (key === 'user' || key === 'username' || key === 'uid') user = value
    else if (key === 'password' || key === 'pwd') password = value
  }
  const [host, port] = hostPart?.split(':') ?? []
  return normalizeDbConfig({ user, password, host, port, database })
}
@@ -16,2 +16,3 @@ 'use strict' | ||
| const emitCh = channel('tracing:ws:server:connect:emit') | ||
| const setSocketCh = channel('tracing:ws:server:connect:setSocket') | ||
| // TODO: Add a error channel / handle error events properly. | ||
@@ -185,2 +186,22 @@ | ||
/**
 * Wraps `WebSocket#setSocket` so that the internal event handlers the ws
 * library registers there (data, close, etc.) do not capture the connection
 * span in their async context. Without this, the finished connection span
 * would be retained for the entire lifetime of the WebSocket
 * (via ACF -> handle -> WeakMap).
 *
 * @param {Function} setSocket
 * @returns {(...args: unknown[]) => unknown}
 */
function wrapSetSocket (setSocket) {
  return function wrappedSetSocket (...args) {
    const invoke = () => setSocket.apply(this, args)
    if (setSocketCh.hasSubscribers) {
      // Run inside an empty store so the handlers bind to no active span.
      return setSocketCh.runStores({}, invoke)
    }
    return invoke()
  }
}
| addHook({ | ||
@@ -192,2 +213,3 @@ name: 'ws', | ||
| const ws = /** @type {WebSocketClass} */ (moduleExports) | ||
| shimmer.wrap(ws.prototype, 'setSocket', wrapSetSocket) | ||
| shimmer.wrap(ws.prototype, 'send', wrapSend) | ||
@@ -194,0 +216,0 @@ shimmer.wrap(ws.prototype, 'close', wrapClose) |
@@ -16,3 +16,2 @@ 'use strict' | ||
| ITR_CORRELATION_ID, | ||
| TEST_BROWSER_DRIVER, | ||
| TEST_CODE_OWNERS, | ||
@@ -25,3 +24,2 @@ TEST_EARLY_FLAKE_ABORT_REASON, | ||
| TEST_IS_RETRY, | ||
| TEST_IS_RUM_ACTIVE, | ||
| TEST_ITR_FORCED_RUN, | ||
@@ -113,3 +111,5 @@ TEST_ITR_UNSKIPPABLE, | ||
| this.testSessionSpan.finish() | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session', { | ||
| hasFailedTestReplay: this.libraryConfig?.isDiEnabled || undefined, | ||
| }) | ||
| finishAllTraceSpans(this.testSessionSpan) | ||
@@ -368,9 +368,3 @@ this.telemetry.count(TELEMETRY_TEST_SESSION, { | ||
| const spanTags = span.context()._tags | ||
| const telemetryTags = { | ||
| hasCodeOwners: !!spanTags[TEST_CODE_OWNERS], | ||
| isNew, | ||
| isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true', | ||
| browserDriver: spanTags[TEST_BROWSER_DRIVER], | ||
| } | ||
| const telemetryTags = this.getTestTelemetryTags(span) | ||
| span.finish() | ||
@@ -377,0 +371,0 @@ if (!isStep) { |
@@ -979,7 +979,11 @@ 'use strict' | ||
| // test spans are finished at after:spec | ||
| const activeSpanTags = this.activeTestSpan.context()._tags | ||
| this.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', { | ||
| hasCodeOwners: !!this.activeTestSpan.context()._tags[TEST_CODE_OWNERS], | ||
| hasCodeOwners: !!activeSpanTags[TEST_CODE_OWNERS], | ||
| isNew, | ||
| isRum: isRUMActive, | ||
| browserDriver: 'cypress', | ||
| isQuarantined: isQuarantinedFromSupport, | ||
| isModified, | ||
| isDisabled: activeSpanTags[TEST_MANAGEMENT_IS_DISABLED] === 'true', | ||
| }) | ||
@@ -986,0 +990,0 @@ this.activeTestSpan = null |
@@ -130,5 +130,6 @@ 'use strict' | ||
| 'pubsub.topic': topic, | ||
| 'pubsub.message_id': message.messageId, | ||
| 'span.kind': 'consumer', | ||
| 'pubsub.subscription': subscription.name, | ||
| 'pubsub.subscription_type': 'pull', | ||
| 'pubsub.span_type': 'message_processing', | ||
| 'messaging.operation': 'receive', | ||
@@ -175,5 +176,2 @@ base_service: baseService, | ||
| if (message.id) { | ||
| span.setTag('pubsub.message_id', message.id) | ||
| } | ||
| if (message.publishTime) { | ||
@@ -180,0 +178,0 @@ span.setTag('pubsub.publish_time', message.publishTime.toISOString()) |
@@ -16,3 +16,2 @@ 'use strict' | ||
| super(...args) | ||
| this._parentStore = undefined | ||
| this.addTraceSub('exit', message => this.exit(message)) | ||
@@ -22,3 +21,3 @@ } | ||
| start ({ req, res, abortController }) { | ||
| const store = storage('legacy').getStore() | ||
| let store = storage('legacy').getStore() | ||
| const span = web.startSpan( | ||
@@ -37,7 +36,17 @@ this.tracer, | ||
| this._parentStore = store | ||
| this.enter(span, { ...store, req, res }) | ||
| const context = web.getContext(req) | ||
| if (context) { | ||
| context.parentStore = store | ||
| } | ||
| // Only AppSec needs the request scope to be active for any async work that | ||
| // may be scheduled after the synchronous `request` event returns (e.g. | ||
| // Fastify). | ||
| if (incomingHttpRequestStart.hasSubscribers) { | ||
| store = { ...store, req, res } | ||
| } | ||
| this.enter(span, store) | ||
| if (!context.instrumented) { | ||
@@ -70,5 +79,11 @@ context.res.writeHead = web.wrapWriteHead(context) | ||
| exit ({ req }) { | ||
| const span = this._parentStore && this._parentStore.span | ||
| this.enter(span, this._parentStore) | ||
| this._parentStore = undefined | ||
| const context = web.getContext(req) | ||
| const parentStore = context?.parentStore | ||
| const span = parentStore?.span | ||
| this.enter(span, parentStore) | ||
| if (context) { | ||
| context.parentStore = undefined | ||
| } | ||
| } | ||
@@ -75,0 +90,0 @@ |
@@ -6,2 +6,3 @@ 'use strict' | ||
| const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper') | ||
| const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry') | ||
@@ -29,4 +30,2 @@ const { | ||
| JEST_DISPLAY_NAME, | ||
| TEST_IS_RUM_ACTIVE, | ||
| TEST_BROWSER_DRIVER, | ||
| getFormattedError, | ||
@@ -162,3 +161,5 @@ TEST_RETRY_REASON, | ||
| this.testSessionSpan.finish() | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session', { | ||
| hasFailedTestReplay: this.libraryConfig?.isDiEnabled || undefined, | ||
| }) | ||
| finishAllTraceSpans(this.testSessionSpan) | ||
@@ -171,2 +172,3 @@ | ||
| appClosingTelemetry() | ||
| this.tracer._exporter.flush(() => { | ||
@@ -284,2 +286,19 @@ if (onDone) { | ||
| this.addSub('ci:jest:worker-report:telemetry', data => { | ||
| const telemetryEvents = JSON.parse(data) | ||
| for (const event of telemetryEvents) { | ||
| if (event.type === 'ciVisEvent') { | ||
| this.telemetry.ciVisEvent(event.name, event.testLevel, { | ||
| ...event.tags, | ||
| testFramework: event.testFramework, | ||
| isUnsupportedCIProvider: event.isUnsupportedCIProvider, | ||
| }) | ||
| } else if (event.type === 'count') { | ||
| this.telemetry.count(event.name, event.tags, event.value) | ||
| } else if (event.type === 'distribution') { | ||
| this.telemetry.distribution(event.name, event.tags, event.measure) | ||
| } | ||
| } | ||
| }) | ||
| this.addSub('ci:jest:test-suite:finish', ({ status, errorMessage, error, testSuiteAbsolutePath }) => { | ||
@@ -399,12 +418,6 @@ const testSuiteSpan = this.testSuiteSpanPerTestSuiteAbsolutePath.get(testSuiteAbsolutePath) | ||
| const spanTags = span.context()._tags | ||
| this.telemetry.ciVisEvent( | ||
| TELEMETRY_EVENT_FINISHED, | ||
| 'test', | ||
| { | ||
| hasCodeOwners: !!spanTags[TEST_CODE_OWNERS], | ||
| isNew: spanTags[TEST_IS_NEW] === 'true', | ||
| isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true', | ||
| browserDriver: spanTags[TEST_BROWSER_DRIVER], | ||
| } | ||
| this.getTestTelemetryTags(span) | ||
| ) | ||
@@ -442,2 +455,8 @@ | ||
| this.telemetry.ciVisEvent( | ||
| TELEMETRY_EVENT_FINISHED, | ||
| 'test', | ||
| this.getTestTelemetryTags(span) | ||
| ) | ||
| span.finish() | ||
@@ -444,0 +463,0 @@ }) |
@@ -169,2 +169,8 @@ 'use strict' | ||
| module.exports = { getFormattedJestTestParameters, getJestTestName, getJestSuitesToRun, isMarkedAsUnskippable } | ||
| module.exports = { | ||
| SEED_SUFFIX_RE, | ||
| getFormattedJestTestParameters, | ||
| getJestTestName, | ||
| getJestSuitesToRun, | ||
| isMarkedAsUnskippable, | ||
| } |
@@ -26,4 +26,2 @@ 'use strict' | ||
| MOCHA_IS_PARALLEL, | ||
| TEST_IS_RUM_ACTIVE, | ||
| TEST_BROWSER_DRIVER, | ||
| TEST_RETRY_REASON, | ||
@@ -240,12 +238,6 @@ TEST_MANAGEMENT_ENABLED, | ||
| const spanTags = span.context()._tags | ||
| this.telemetry.ciVisEvent( | ||
| TELEMETRY_EVENT_FINISHED, | ||
| 'test', | ||
| { | ||
| hasCodeOwners: !!spanTags[TEST_CODE_OWNERS], | ||
| isNew: spanTags[TEST_IS_NEW] === 'true', | ||
| isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true', | ||
| browserDriver: spanTags[TEST_BROWSER_DRIVER], | ||
| } | ||
| this.getTestTelemetryTags(span) | ||
| ) | ||
@@ -315,12 +307,6 @@ | ||
| const spanTags = span.context()._tags | ||
| this.telemetry.ciVisEvent( | ||
| TELEMETRY_EVENT_FINISHED, | ||
| 'test', | ||
| { | ||
| hasCodeOwners: !!spanTags[TEST_CODE_OWNERS], | ||
| isNew: spanTags[TEST_IS_NEW] === 'true', | ||
| isRum: spanTags[TEST_IS_RUM_ACTIVE] === 'true', | ||
| browserDriver: spanTags[TEST_BROWSER_DRIVER], | ||
| } | ||
| this.getTestTelemetryTags(span) | ||
| ) | ||
@@ -408,3 +394,5 @@ if (isFirstAttempt && willBeRetried && this.di && this.libraryConfig?.isDiEnabled) { | ||
| this.testSessionSpan.finish() | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session', { | ||
| hasFailedTestReplay: this.libraryConfig?.isDiEnabled || undefined, | ||
| }) | ||
| finishAllTraceSpans(this.testSessionSpan) | ||
@@ -411,0 +399,0 @@ this.telemetry.count(TELEMETRY_TEST_SESSION, { |
@@ -391,2 +391,5 @@ 'use strict' | ||
| browserDriver: 'playwright', | ||
| isQuarantined, | ||
| isDisabled, | ||
| isModified, | ||
| } | ||
@@ -393,0 +396,0 @@ ) |
| 'use strict' | ||
| const tracingChannel = require('dc-polyfill').tracingChannel | ||
| const { tracingChannel } = /** @type {import('node:diagnostics_channel')} */ (require('dc-polyfill')) | ||
| const clientCH = tracingChannel('apm:prisma') | ||
@@ -13,11 +13,2 @@ const { storage } = require('../../datadog-core') | ||
| /** | ||
| * @typedef {object} DbConfig | ||
| * @property {string} [user] | ||
| * @property {string} [password] | ||
| * @property {string} [host] | ||
| * @property {string} [port] | ||
| * @property {string} [database] | ||
| */ | ||
| class DatadogTracingHelper { | ||
@@ -28,3 +19,3 @@ #prismaClient | ||
| /** | ||
| * @param {DbConfig|undefined} dbConfig | ||
| * @param {import('../../datadog-instrumentations/src/prisma').DbConfig|undefined} dbConfig | ||
| * @param {import('./index')} prismaClient | ||
@@ -41,3 +32,8 @@ */ | ||
| // needs a sampled tracecontext to generate engine spans | ||
| /** | ||
| * Needs a sampled tracecontext to generate engine spans | ||
| * | ||
| * @param {object} [context] | ||
| * @returns {string} | ||
| */ | ||
| getTraceParent (context) { | ||
@@ -63,8 +59,27 @@ const store = storage('legacy').getStore() | ||
| /** | ||
| * @param {object[]} spans | ||
| */ | ||
| dispatchEngineSpans (spans) { | ||
| if (!spans?.length) { | ||
| return | ||
| } | ||
| const childrenByParent = new Map() | ||
| for (const span of spans) { | ||
| if (span.parentId === null) { | ||
| this.#prismaClient.startEngineSpan({ engineSpan: span, allEngineSpans: spans, dbConfig: this.#dbConfig }) | ||
| const parentId = span.parentId | ||
| const children = childrenByParent.get(parentId) | ||
| if (children) { | ||
| children.push(span) | ||
| } else { | ||
| childrenByParent.set(parentId, [span]) | ||
| } | ||
| } | ||
| const roots = childrenByParent.get(null) | ||
| if (!roots) { | ||
| return | ||
| } | ||
| for (const span of roots) { | ||
| this.#prismaClient.startEngineSpan({ engineSpan: span, childrenByParent, dbConfig: this.#dbConfig }) | ||
| } | ||
| } | ||
@@ -77,3 +92,11 @@ | ||
| /** | ||
| * @param {object} options | ||
| * @param {Function} callback | ||
| * @returns {unknown} | ||
| */ | ||
| runInChildSpan (options, callback) { | ||
| if (!clientCH.start?.hasSubscribers) { | ||
| return callback.apply(this, arguments) | ||
| } | ||
| if (typeof options === 'string') { | ||
@@ -80,0 +103,0 @@ options = { |
@@ -35,3 +35,5 @@ 'use strict' | ||
| // Subscribe to helper initialization to inject callbacks | ||
| this.addSub('apm:prisma:helper:init', (prismaHelperCtx) => { | ||
| this.addSub('apm:prisma:helper:init', (ctx) => { | ||
| const prismaHelperCtx = | ||
| /** @type {import('../../datadog-instrumentations/src/prisma').PrismaHelperCtx} */ (ctx) | ||
| prismaHelperCtx.helper = new DatadogTracingHelper(prismaHelperCtx.dbConfig, this) | ||
@@ -42,3 +44,3 @@ }) | ||
| startEngineSpan (ctx) { | ||
| const { engineSpan, allEngineSpans, childOf, dbConfig } = ctx | ||
| const { engineSpan, childrenByParent, childOf, dbConfig } = ctx | ||
| const service = this.serviceName({ pluginConfig: this.config, system: this.system }) | ||
@@ -76,5 +78,6 @@ const spanName = engineSpan.name.slice(14) // remove 'prisma:engine:' prefix | ||
| activeSpan._startTime = hrTimeToUnixTimeMs(engineSpan.startTime) | ||
| for (const span of allEngineSpans) { | ||
| if (span.parentId === engineSpan.id) { | ||
| const startCtx = { engineSpan: span, allEngineSpans, childOf: activeSpan, dbConfig } | ||
| const children = childrenByParent.get(engineSpan.id) | ||
| if (children) { | ||
| for (const span of children) { | ||
| const startCtx = { engineSpan: span, childrenByParent, childOf: activeSpan, dbConfig } | ||
| this.startEngineSpan(startCtx) | ||
@@ -81,0 +84,0 @@ } |
@@ -12,15 +12,15 @@ 'use strict' | ||
| #storeStacks = new WeakMap() | ||
| #contexts = new WeakMap() | ||
| constructor (...args) { | ||
| super(...args) | ||
| this._storeStack = [] | ||
| this._contexts = new WeakMap() | ||
| this.addSub(`apm:${this.constructor.id}:middleware:enter`, ({ req, name, route }) => { | ||
| const childOf = this._getActive(req) || this._getStoreSpan() | ||
| const childOf = this.#getActive(req) || this.#getStoreSpan() | ||
| if (!childOf) return | ||
| const span = this._getMiddlewareSpan(name, childOf) | ||
| const context = this._createContext(req, route, childOf) | ||
| const span = this.#getMiddlewareSpan(name, childOf) | ||
| const context = this.#createContext(req, route, childOf) | ||
@@ -32,3 +32,8 @@ if (childOf !== span) { | ||
| const store = storage('legacy').getStore() | ||
| this._storeStack.push(store) | ||
| let storeStack = this.#storeStacks.get(req) | ||
| if (!storeStack) { | ||
| storeStack = [] | ||
| this.#storeStacks.set(req, storeStack) | ||
| } | ||
| storeStack.push(store) | ||
| this.enter(span, store) | ||
@@ -41,3 +46,3 @@ | ||
| this.addSub(`apm:${this.constructor.id}:middleware:next`, ({ req }) => { | ||
| const context = this._contexts.get(req) | ||
| const context = this.#contexts.get(req) | ||
@@ -50,3 +55,3 @@ if (!context) return | ||
| this.addSub(`apm:${this.constructor.id}:middleware:finish`, ({ req }) => { | ||
| const context = this._contexts.get(req) | ||
| const context = this.#contexts.get(req) | ||
@@ -59,3 +64,7 @@ if (!context || context.middleware.length === 0) return | ||
| this.addSub(`apm:${this.constructor.id}:middleware:exit`, ({ req }) => { | ||
| const savedStore = this._storeStack.pop() | ||
| const storeStack = this.#storeStacks.get(req) | ||
| const savedStore = storeStack && storeStack.pop() | ||
| if (storeStack && storeStack.length === 0) { | ||
| this.#storeStacks.delete(req) | ||
| } | ||
| const span = savedStore && savedStore.span | ||
@@ -70,3 +79,3 @@ this.enter(span, savedStore) | ||
| const span = this._getActive(req) | ||
| const span = this.#getActive(req) | ||
@@ -79,3 +88,3 @@ if (!span) return | ||
| this.addSub('apm:http:server:request:finish', ({ req }) => { | ||
| const context = this._contexts.get(req) | ||
| const context = this.#contexts.get(req) | ||
@@ -92,4 +101,4 @@ if (!context) return | ||
| _getActive (req) { | ||
| const context = this._contexts.get(req) | ||
| #getActive (req) { | ||
| const context = this.#contexts.get(req) | ||
@@ -102,3 +111,3 @@ if (!context) return | ||
| _getStoreSpan () { | ||
| #getStoreSpan () { | ||
| const store = storage('legacy').getStore() | ||
@@ -109,3 +118,3 @@ | ||
| _getMiddlewareSpan (name, childOf) { | ||
| #getMiddlewareSpan (name, childOf) { | ||
| if (this.config.middleware === false) { | ||
@@ -129,4 +138,4 @@ return childOf | ||
| _createContext (req, route, span) { | ||
| let context = this._contexts.get(req) | ||
| #createContext (req, route, span) { | ||
| let context = this.#contexts.get(req) | ||
@@ -154,3 +163,3 @@ if (!route || route === '/' || route === '*') { | ||
| this._contexts.set(req, context) | ||
| this.#contexts.set(req, context) | ||
| } | ||
@@ -157,0 +166,0 @@ |
@@ -209,5 +209,3 @@ 'use strict' | ||
| if (span) { | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', { | ||
| hasCodeowners: !!span.context()._tags[TEST_CODE_OWNERS], | ||
| }) | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', this.getTestTelemetryTags(span)) | ||
| span.setTag(TEST_STATUS, 'pass') | ||
@@ -240,5 +238,3 @@ span.finish(this.taskToFinishTime.get(task)) | ||
| } | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', { | ||
| hasCodeowners: !!span.context()._tags[TEST_CODE_OWNERS], | ||
| }) | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', this.getTestTelemetryTags(span)) | ||
| span.setTag(TEST_STATUS, 'fail') | ||
@@ -277,5 +273,3 @@ | ||
| ) | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', { | ||
| hasCodeowners: !!testSpan.context()._tags[TEST_CODE_OWNERS], | ||
| }) | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test', this.getTestTelemetryTags(testSpan)) | ||
| testSpan.finish() | ||
@@ -407,3 +401,5 @@ }) | ||
| this.testSessionSpan.finish() | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') | ||
| this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session', { | ||
| hasFailedTestReplay: this.libraryConfig?.isDiEnabled || undefined, | ||
| }) | ||
| finishAllTraceSpans(this.testSessionSpan) | ||
@@ -410,0 +406,0 @@ this.telemetry.count(TELEMETRY_TEST_SESSION, { |
@@ -20,2 +20,10 @@ 'use strict' | ||
| constructor (...args) { | ||
| super(...args) | ||
| // Bind the setSocket channel so internal ws event handlers (data, close) | ||
| // don't capture their async context. | ||
| this.addBind('tracing:ws:server:connect:setSocket', () => {}) | ||
| } | ||
| bindStart (ctx) { | ||
@@ -22,0 +30,0 @@ const req = ctx.req |
@@ -25,2 +25,3 @@ 'use strict' | ||
| 'async_hooks', | ||
| 'node:internal/async_local_storage', | ||
| ] | ||
@@ -27,0 +28,0 @@ |
@@ -98,3 +98,3 @@ 'use strict' | ||
| incrementCountMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS, {}, numTests) | ||
| distributionMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_TESTS, {}, numTests) | ||
| distributionMetric(TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES, {}, res.length) | ||
@@ -101,0 +101,0 @@ |
@@ -6,2 +6,3 @@ 'use strict' | ||
| JEST_WORKER_TRACE_PAYLOAD_CODE, | ||
| JEST_WORKER_TELEMETRY_PAYLOAD_CODE, | ||
| CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, | ||
@@ -60,2 +61,9 @@ MOCHA_WORKER_TRACE_PAYLOAD_CODE, | ||
| function getInterprocessTelemetryCode () { | ||
| if (getEnvironmentVariable('JEST_WORKER_ID')) { | ||
| return JEST_WORKER_TELEMETRY_PAYLOAD_CODE | ||
| } | ||
| return null | ||
| } | ||
| /** | ||
@@ -71,2 +79,3 @@ * Lightweight exporter whose writers only do simple JSON serialization | ||
| const interprocessLogsCode = getInterprocessLogsCode() | ||
| const interprocessTelemetryCode = getInterprocessTelemetryCode() | ||
@@ -76,2 +85,9 @@ this._writer = new Writer(interprocessTraceCode) | ||
| this._logsWriter = new Writer(interprocessLogsCode) | ||
| // TODO: add support for test workers other than Jest | ||
| if (interprocessTelemetryCode) { | ||
| this._telemetryWriter = new Writer(interprocessTelemetryCode) | ||
| this.exportTelemetry = function (telemetryEvent) { | ||
| this._telemetryWriter.append(telemetryEvent) | ||
| } | ||
| } | ||
| } | ||
@@ -96,2 +112,5 @@ | ||
| this._logsWriter.flush() | ||
| if (this._telemetryWriter) { | ||
| this._telemetryWriter.flush() | ||
| } | ||
| } | ||
@@ -98,0 +117,0 @@ } |
@@ -10,2 +10,10 @@ 'use strict' | ||
| const { getValueFromEnvSources } = require('../../config/helper') | ||
| const { | ||
| incrementCountMetric, | ||
| distributionMetric, | ||
| TELEMETRY_COVERAGE_UPLOAD, | ||
| TELEMETRY_COVERAGE_UPLOAD_MS, | ||
| TELEMETRY_COVERAGE_UPLOAD_ERRORS, | ||
| TELEMETRY_COVERAGE_UPLOAD_BYTES, | ||
| } = require('../telemetry') | ||
@@ -83,4 +91,11 @@ const UPLOAD_TIMEOUT_MS = 30_000 | ||
| incrementCountMetric(TELEMETRY_COVERAGE_UPLOAD) | ||
| distributionMetric(TELEMETRY_COVERAGE_UPLOAD_BYTES, {}, compressedCoverage.length) | ||
| const startTime = Date.now() | ||
| request(form, options, (err, res, statusCode) => { | ||
| distributionMetric(TELEMETRY_COVERAGE_UPLOAD_MS, {}, Date.now() - startTime) | ||
| if (err) { | ||
| incrementCountMetric(TELEMETRY_COVERAGE_UPLOAD_ERRORS, { statusCode }) | ||
| log.error('Error uploading coverage report: %s', err.message) | ||
@@ -87,0 +102,0 @@ return callback(err) |
@@ -20,2 +20,18 @@ 'use strict' | ||
| autoInjected: 'auto_injected', | ||
| isQuarantined: 'is_quarantined', | ||
| isDisabled: 'is_disabled', | ||
| isTestManagementEnabled: 'test_management_enabled', | ||
| isItrEnabled: 'itr_enabled', | ||
| isEarlyFlakeDetectionEnabled: 'early_flake_detection_enabled', | ||
| isFlakyTestRetriesEnabled: 'flaky_test_retries_enabled', | ||
| isKnownTestsEnabled: 'known_tests_enabled', | ||
| isImpactedTestsEnabled: 'impacted_tests_enabled', | ||
| hasFailedTestReplay: 'has_failed_test_replay', | ||
| isFailedTestReplayEnabled: 'is_failed_test_replay_enabled', | ||
| // isDiEnabled is specifically for the settings endpoint telemetry | ||
| isDiEnabled: 'failed_test_replay_enabled', | ||
| requireGit: 'require_git', | ||
| isModified: 'is_modified', | ||
| isRetry: 'is_retry', | ||
| retryReason: 'retry_reason', | ||
| } | ||
@@ -105,2 +121,13 @@ | ||
| const TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES = 'early_flake_detection.response_bytes' | ||
| // coverage upload | ||
| const TELEMETRY_COVERAGE_UPLOAD = 'coverage_upload.request' | ||
| const TELEMETRY_COVERAGE_UPLOAD_MS = 'coverage_upload.request_ms' | ||
| const TELEMETRY_COVERAGE_UPLOAD_ERRORS = 'coverage_upload.request_errors' | ||
| const TELEMETRY_COVERAGE_UPLOAD_BYTES = 'coverage_upload.request_bytes' | ||
| // test management | ||
| const TELEMETRY_TEST_MANAGEMENT_TESTS = 'test_management_tests.request' | ||
| const TELEMETRY_TEST_MANAGEMENT_TESTS_MS = 'test_management_tests.request_ms' | ||
| const TELEMETRY_TEST_MANAGEMENT_TESTS_ERRORS = 'test_management_tests.request_errors' | ||
| const TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_TESTS = 'test_management_tests.response_tests' | ||
| const TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_BYTES = 'test_management_tests.response_bytes' | ||
@@ -170,2 +197,11 @@ function isStatusCode400 (statusCode) { | ||
| TELEMETRY_KNOWN_TESTS_RESPONSE_BYTES, | ||
| TELEMETRY_COVERAGE_UPLOAD, | ||
| TELEMETRY_COVERAGE_UPLOAD_MS, | ||
| TELEMETRY_COVERAGE_UPLOAD_ERRORS, | ||
| TELEMETRY_COVERAGE_UPLOAD_BYTES, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_MS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_ERRORS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_TESTS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_BYTES, | ||
| } |
@@ -8,2 +8,34 @@ 'use strict' | ||
| const { | ||
| incrementCountMetric, | ||
| distributionMetric, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_MS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_ERRORS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_TESTS, | ||
| TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_BYTES, | ||
| } = require('../telemetry') | ||
| // Calculate the number of tests from the test management tests response, which has a shape like: | ||
| // { module: { suites: { suite: { tests: { testName: { properties: {...} } } } } } } | ||
| function getNumFromTestManagementTests (testManagementTests) { | ||
| if (!testManagementTests) { | ||
| return 0 | ||
| } | ||
| let totalNumTests = 0 | ||
| for (const testModule of Object.values(testManagementTests)) { | ||
| const { suites } = testModule | ||
| if (!suites) continue | ||
| for (const testSuite of Object.values(suites)) { | ||
| const { tests } = testSuite | ||
| if (!tests) continue | ||
| totalNumTests += Object.keys(tests).length | ||
| } | ||
| } | ||
| return totalNumTests | ||
| } | ||
| function getTestManagementTests ({ | ||
@@ -62,4 +94,10 @@ url, | ||
| request(data, options, (err, res) => { | ||
| incrementCountMetric(TELEMETRY_TEST_MANAGEMENT_TESTS) | ||
| const startTime = Date.now() | ||
| request(data, options, (err, res, statusCode) => { | ||
| distributionMetric(TELEMETRY_TEST_MANAGEMENT_TESTS_MS, {}, Date.now() - startTime) | ||
| if (err) { | ||
| incrementCountMetric(TELEMETRY_TEST_MANAGEMENT_TESTS_ERRORS, { statusCode }) | ||
| done(err) | ||
@@ -70,2 +108,7 @@ } else { | ||
| const numTests = getNumFromTestManagementTests(testManagementTests) | ||
| distributionMetric(TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_TESTS, {}, numTests) | ||
| distributionMetric(TELEMETRY_TEST_MANAGEMENT_TESTS_RESPONSE_BYTES, {}, res.length) | ||
| log.debug('Test management tests received: %j', testManagementTests) | ||
@@ -72,0 +115,0 @@ |
@@ -131,46 +131,46 @@ 'use strict' | ||
| const store = storage('legacy').getStore() | ||
| storage('legacy').enterWith({ noop: true }) | ||
| storage('legacy').run({ noop: true }, () => { | ||
| let finished = false | ||
| const finalize = () => { | ||
| if (finished) return | ||
| finished = true | ||
| activeRequests-- | ||
| } | ||
| let finished = false | ||
| const finalize = () => { | ||
| if (finished) return | ||
| finished = true | ||
| activeRequests-- | ||
| } | ||
| const req = client.request(options, (res) => onResponse(res, finalize)) | ||
| const req = client.request(options, (res) => onResponse(res, finalize)) | ||
| req.once('close', finalize) | ||
| req.once('timeout', finalize) | ||
| req.once('close', finalize) | ||
| req.once('timeout', finalize) | ||
| req.once('error', err => { | ||
| finalize() | ||
| onError(err) | ||
| }) | ||
| req.once('error', err => { | ||
| finalize() | ||
| onError(err) | ||
| }) | ||
| req.setTimeout(timeout, () => { | ||
| try { | ||
| if (typeof req.abort === 'function') { | ||
| req.abort() | ||
| } else { | ||
| req.destroy() | ||
| } | ||
| } catch { | ||
| // ignore | ||
| } | ||
| }) | ||
| req.setTimeout(timeout, () => { | ||
| try { | ||
| if (typeof req.abort === 'function') { | ||
| req.abort() | ||
| } else { | ||
| req.destroy() | ||
| } | ||
| } catch { | ||
| // ignore | ||
| if (isReadable) { | ||
| data.pipe(req) // TODO: Validate whether this is actually retriable. | ||
| } else { | ||
| for (const buffer of dataArray) req.write(buffer) | ||
| req.end() | ||
| } | ||
| }) | ||
| if (isReadable) { | ||
| data.pipe(req) // TODO: Validate whether this is actually retriable. | ||
| } else { | ||
| for (const buffer of dataArray) req.write(buffer) | ||
| req.end() | ||
| } | ||
| storage('legacy').enterWith(store) | ||
| } | ||
| // TODO: Figure out why setTimeout is needed to avoid losing the async context | ||
| // in the retry request before socket.connect() is called. | ||
| // The setTimeout is needed to avoid losing the async context in the retry | ||
| // request before socket.connect() is called. This is a workaround for the | ||
| // issue that the AsyncLocalStorage.run() method does not call the | ||
| // AsyncLocalStorage.enterWith() method when not using AsyncContextFrame. | ||
| // | ||
| // TODO: Test that this doesn't trace itself on retry when the diagnostics | ||
@@ -177,0 +177,0 @@ // channel events are available in the agent exporter. |
@@ -246,3 +246,3 @@ 'use strict' | ||
| module.exports = function createIdentifier (value, radix) { | ||
| return new Identifier(value, radix) | ||
| return new Identifier(value ?? '', radix) | ||
| } |
@@ -8,2 +8,3 @@ 'use strict' | ||
| const { ImpendingTimeout } = require('./runtime/errors') | ||
| const { extractContext } = require('./context') | ||
@@ -65,19 +66,2 @@ const globalTracer = global._ddtrace | ||
| /** | ||
| * Extracts the context from the given Lambda handler arguments. | ||
| * | ||
| * @param {unknown[]} args any amount of arguments | ||
| * @returns the context, if extraction was succesful. | ||
| */ | ||
| function extractContext (args) { | ||
| let context = args.length > 1 ? args[1] : undefined | ||
| if (context === undefined || context.getRemainingTimeInMillis === undefined) { | ||
| context = args.length > 2 ? args[2] : undefined | ||
| if (context === undefined || context.getRemainingTimeInMillis === undefined) { | ||
| throw new Error('Could not extract context') | ||
| } | ||
| } | ||
| return context | ||
| } | ||
| /** | ||
| * Patches your AWS Lambda handler function to add some tracing support. | ||
@@ -91,3 +75,6 @@ * | ||
| checkTimeout(context) | ||
| if (context) { | ||
| checkTimeout(context) | ||
| } | ||
| const result = lambdaHandler.apply(this, args) | ||
@@ -94,0 +81,0 @@ if (result && typeof result.then === 'function') { |
@@ -17,7 +17,3 @@ 'use strict' | ||
| function withNoop (fn) { | ||
| const store = storage('legacy').getStore() | ||
| storage('legacy').enterWith({ noop: true }) | ||
| fn() | ||
| storage('legacy').enterWith(store) | ||
| storage('legacy').run({ noop: true }, fn) | ||
| } | ||
@@ -24,0 +20,0 @@ |
@@ -67,2 +67,10 @@ 'use strict' | ||
| TEST_IS_TEST_FRAMEWORK_WORKER, | ||
| TEST_IS_NEW, | ||
| TEST_IS_RUM_ACTIVE, | ||
| TEST_BROWSER_DRIVER, | ||
| TEST_MANAGEMENT_IS_QUARANTINED, | ||
| TEST_MANAGEMENT_IS_DISABLED, | ||
| TEST_IS_MODIFIED, | ||
| TEST_IS_RETRY, | ||
| TEST_RETRY_REASON, | ||
| } = require('./util/test') | ||
@@ -358,2 +366,40 @@ | ||
| const testFramework = this.constructor.id | ||
| const exporter = this.tracer?._exporter | ||
| // TODO: only jest worker supported yet | ||
| const isSupportedWorker = exporter && typeof exporter.exportTelemetry === 'function' | ||
| const ciProviderName = this.ciProviderName | ||
| if (isSupportedWorker) { | ||
| // In supported worker: send telemetry events to main process | ||
| return { | ||
| ciVisEvent: function (name, testLevel, tags = {}) { | ||
| exporter.exportTelemetry({ | ||
| type: 'ciVisEvent', | ||
| name, | ||
| testLevel, | ||
| testFramework, | ||
| isUnsupportedCIProvider: !ciProviderName, | ||
| tags, | ||
| }) | ||
| }, | ||
| count: function (name, tags, value = 1) { | ||
| exporter.exportTelemetry({ | ||
| type: 'count', | ||
| name, | ||
| tags, | ||
| value, | ||
| }) | ||
| }, | ||
| distribution: function (name, tags, measure) { | ||
| exporter.exportTelemetry({ | ||
| type: 'distribution', | ||
| name, | ||
| tags, | ||
| measure, | ||
| }) | ||
| }, | ||
| } | ||
| } | ||
| // In main process or unsupported worker: execute telemetry directly | ||
| return { | ||
@@ -364,3 +410,3 @@ ciVisEvent: function (name, testLevel, tags = {}) { | ||
| testFramework, | ||
| isUnsupportedCIProvider: !this.ciProviderName, | ||
| isUnsupportedCIProvider: !ciProviderName, | ||
| ...tags, | ||
@@ -675,2 +721,18 @@ }) | ||
| } | ||
| getTestTelemetryTags (testSpan) { | ||
| const activeSpanTags = testSpan.context()._tags | ||
| return { | ||
| hasCodeOwners: !!activeSpanTags[TEST_CODE_OWNERS] || undefined, | ||
| isNew: activeSpanTags[TEST_IS_NEW] === 'true' || undefined, | ||
| isRum: activeSpanTags[TEST_IS_RUM_ACTIVE] === 'true' || undefined, | ||
| browserDriver: activeSpanTags[TEST_BROWSER_DRIVER], | ||
| isQuarantined: activeSpanTags[TEST_MANAGEMENT_IS_QUARANTINED] === 'true' || undefined, | ||
| isDisabled: activeSpanTags[TEST_MANAGEMENT_IS_DISABLED] === 'true' || undefined, | ||
| isModified: activeSpanTags[TEST_IS_MODIFIED] === 'true' || undefined, | ||
| isRetry: activeSpanTags[TEST_IS_RETRY] === 'true' || undefined, | ||
| retryReason: activeSpanTags[TEST_RETRY_REASON], | ||
| isFailedTestReplayEnabled: activeSpanTags[DI_ERROR_DEBUG_INFO_CAPTURED] === 'true' || undefined, | ||
| } | ||
| } | ||
| } |
@@ -50,36 +50,33 @@ 'use strict' | ||
| ) { | ||
| const store = storage('legacy').getStore() | ||
| storage('legacy').enterWith({ noop: true }) | ||
| let startTime | ||
| if (operationMetric) { | ||
| incrementCountMetric(operationMetric.name, operationMetric.tags) | ||
| } | ||
| if (durationMetric) { | ||
| startTime = Date.now() | ||
| } | ||
| try { | ||
| let result = cachedExec(cmd, flags, { stdio: 'pipe' }).toString() | ||
| if (shouldTrim) { | ||
| result = result.replaceAll(/(\r\n|\n|\r)/gm, '') | ||
| return storage('legacy').run({ noop: true }, () => { | ||
| let startTime | ||
| if (operationMetric) { | ||
| incrementCountMetric(operationMetric.name, operationMetric.tags) | ||
| } | ||
| if (durationMetric) { | ||
| distributionMetric(durationMetric.name, durationMetric.tags, Date.now() - startTime) | ||
| startTime = Date.now() | ||
| } | ||
| return result | ||
| } catch (err) { | ||
| if (errorMetric) { | ||
| incrementCountMetric(errorMetric.name, { | ||
| ...errorMetric.tags, | ||
| errorType: err.code, | ||
| exitCode: err.status || err.errno, | ||
| }) | ||
| try { | ||
| let result = cachedExec(cmd, flags, { stdio: 'pipe' }).toString() | ||
| if (shouldTrim) { | ||
| result = result.replaceAll(/(\r\n|\n|\r)/gm, '') | ||
| } | ||
| if (durationMetric) { | ||
| distributionMetric(durationMetric.name, durationMetric.tags, Date.now() - startTime) | ||
| } | ||
| return result | ||
| } catch (err) { | ||
| if (errorMetric) { | ||
| incrementCountMetric(errorMetric.name, { | ||
| ...errorMetric.tags, | ||
| errorType: err.code, | ||
| exitCode: err.status || err.errno, | ||
| }) | ||
| } | ||
| log.error('Git plugin error executing command', err) | ||
| return '' | ||
| } | ||
| log.error('Git plugin error executing command', err) | ||
| return '' | ||
| } finally { | ||
| storage('legacy').enterWith(store) | ||
| } | ||
| }) | ||
| } | ||
@@ -86,0 +83,0 @@ |
@@ -122,2 +122,3 @@ 'use strict' | ||
| const JEST_WORKER_LOGS_PAYLOAD_CODE = 62 | ||
| const JEST_WORKER_TELEMETRY_PAYLOAD_CODE = 63 | ||
@@ -229,2 +230,3 @@ // cucumber worker variables | ||
| JEST_WORKER_LOGS_PAYLOAD_CODE, | ||
| JEST_WORKER_TELEMETRY_PAYLOAD_CODE, | ||
| CUCUMBER_WORKER_TRACE_PAYLOAD_CODE, | ||
@@ -578,3 +580,3 @@ MOCHA_WORKER_TRACE_PAYLOAD_CODE, | ||
| return testSuitePath.replace(path.sep, '/') | ||
| return testSuitePath.replaceAll(path.sep, '/') | ||
| } | ||
@@ -581,0 +583,0 @@ |
@@ -57,2 +57,3 @@ 'use strict' | ||
| TYPE: WEB, | ||
| /** @type {TracingPlugin | null} */ | ||
| plugin: null, | ||
@@ -59,0 +60,0 @@ |
| 'use strict' | ||
| const os = require('os') | ||
| const path = require('path') | ||
| const { pathToFileURL } = require('url') | ||
| const satisfies = require('../../../../vendor/dist/semifies') | ||
@@ -13,2 +13,4 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') | ||
| const { getAgentUrl } = require('../agent/url') | ||
| const { isACFActive } = require('../../../datadog-core/src/storage') | ||
| const { AgentExporter } = require('./exporters/agent') | ||
@@ -22,2 +24,3 @@ const { FileExporter } = require('./exporters/file') | ||
| const { tagger } = require('./tagger') | ||
| class Config { | ||
@@ -28,3 +31,2 @@ constructor (options = {}) { | ||
| const AWS_LAMBDA_FUNCTION_NAME = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') | ||
| const NODE_OPTIONS = getEnvironmentVariable('NODE_OPTIONS') | ||
@@ -56,3 +58,2 @@ // TODO: Move initialization of these values to packages/dd-trace/src/config/index.js, and just read from config | ||
| const host = os.hostname() | ||
| // Must be longer than one minute so pad with five seconds | ||
@@ -66,3 +67,2 @@ const flushInterval = options.interval ?? (Number(DD_PROFILING_UPLOAD_PERIOD) * 1000 || 65 * 1000) | ||
| this.env = options.env | ||
| this.host = host | ||
| this.functionname = AWS_LAMBDA_FUNCTION_NAME | ||
@@ -76,3 +76,3 @@ | ||
| env: options.env, | ||
| host, | ||
| host: options.reportHostname ? require('os').hostname() : undefined, | ||
| service: this.service, | ||
@@ -228,12 +228,4 @@ version: this.version, | ||
| const hasExecArg = (arg) => process.execArgv.includes(arg) || String(NODE_OPTIONS).includes(arg) | ||
| const canUseAsyncContextFrame = samplingContextsAvailable && isACFActive | ||
| let canUseAsyncContextFrame = false | ||
| if (samplingContextsAvailable) { | ||
| if (isAtLeast24) { | ||
| canUseAsyncContextFrame = !hasExecArg('--no-async-context-frame') | ||
| } else if (satisfies(process.versions.node, '>=22.9.0')) { | ||
| canUseAsyncContextFrame = hasExecArg('--experimental-async-context-frame') | ||
| } | ||
| } | ||
| this.asyncContextFrameEnabled = isTrue(DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? canUseAsyncContextFrame) | ||
@@ -240,0 +232,0 @@ if (this.asyncContextFrameEnabled && !canUseAsyncContextFrame) { |
@@ -41,28 +41,27 @@ 'use strict' | ||
| const store = storage('legacy').getStore() | ||
| storage('legacy').enterWith({ noop: true }) | ||
| requestCounter.inc() | ||
| const start = perf.now() | ||
| const req = request(options, res => { | ||
| durationDistribution.track(perf.now() - start) | ||
| countStatusCode(res.statusCode) | ||
| if (res.statusCode >= 400) { | ||
| statusCodeErrorCounter.inc() | ||
| const error = new Error(`HTTP Error ${res.statusCode}`) | ||
| error.status = res.statusCode | ||
| callback(error) | ||
| } else { | ||
| callback(null, res) | ||
| storage('legacy').run({ noop: true }, () => { | ||
| requestCounter.inc() | ||
| const start = perf.now() | ||
| const req = request(options, res => { | ||
| durationDistribution.track(perf.now() - start) | ||
| countStatusCode(res.statusCode) | ||
| if (res.statusCode >= 400) { | ||
| statusCodeErrorCounter.inc() | ||
| const error = new Error(`HTTP Error ${res.statusCode}`) | ||
| error.status = res.statusCode | ||
| callback(error) | ||
| } else { | ||
| callback(null, res) | ||
| } | ||
| }) | ||
| req.on('error', (err) => { | ||
| networkErrorCounter.inc() | ||
| callback(err) | ||
| }) | ||
| if (form) { | ||
| sizeDistribution.track(form.size()) | ||
| form.pipe(req) | ||
| } | ||
| }) | ||
| req.on('error', (err) => { | ||
| networkErrorCounter.inc() | ||
| callback(err) | ||
| }) | ||
| if (form) { | ||
| sizeDistribution.track(form.size()) | ||
| form.pipe(req) | ||
| } | ||
| storage('legacy').enterWith(store) | ||
| } | ||
@@ -69,0 +68,0 @@ |
@@ -79,2 +79,3 @@ 'use strict' | ||
| injectionEnabled, | ||
| reportHostname, | ||
| } = config | ||
@@ -116,2 +117,3 @@ const { enabled, sourceMap, exporters } = config.profiling | ||
| heartbeatInterval, | ||
| reportHostname, | ||
| } | ||
@@ -118,0 +120,0 @@ |
@@ -78,2 +78,3 @@ 'use strict' | ||
| appsec_enabled: !!config.appsec.enabled, | ||
| data_streams_enabled: !!config.dsmEnabled, | ||
| } | ||
@@ -80,0 +81,0 @@ |
Sorry, the diff of this file is too big to display
Network access
Supply chain riskThis module accesses the network.
Found 6 instances in 1 package
Shell access
Supply chain riskThis module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 2 instances in 1 package
Debug access
Supply chain riskUses debug, reflection and dynamic code execution features.
Found 6 instances in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 15 instances in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 2 instances in 1 package
AI-detected potential code anomaly
Supply chain riskAI has identified unusual behaviors that may pose a security risk.
Found 1 instance in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
Network access
Supply chain riskThis module accesses the network.
Found 6 instances in 1 package
Shell access
Supply chain riskThis module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
Uses eval
Supply chain riskPackage uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 2 instances in 1 package
Debug access
Supply chain riskUses debug, reflection and dynamic code execution features.
Found 6 instances in 1 package
Dynamic require
Supply chain riskDynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
Environment variable access
Supply chain riskPackage accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 15 instances in 1 package
Filesystem access
Supply chain riskAccesses the file system, and could potentially read sensitive data.
Found 2 instances in 1 package
Long strings
Supply chain riskContains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
URL strings
Supply chain riskPackage contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package
4173917
0.42%918
0.11%93612
0.52%219
0.46%