'use strict'

const shimmer = require('../../datadog-shimmer')
const { channel, addHook } = require('./helpers/instrument')

const checkoutSessionCreateFinishCh = channel('datadog:stripe:checkoutSession:create:finish')
const paymentIntentCreateFinishCh = channel('datadog:stripe:paymentIntent:create:finish')
const constructEventFinishCh = channel('datadog:stripe:constructEvent:finish')

function wrapSessionCreate (create) {
  return function wrappedSessionCreate () {
    const promise = create.apply(this, arguments)

    if (!checkoutSessionCreateFinishCh.hasSubscribers) return promise

    return promise.then((result) => {
      checkoutSessionCreateFinishCh.publish(result)
      return result
    })
  }
}

function wrapPaymentIntentCreate (create) {
  return function wrappedPaymentIntentCreate () {
    const promise = create.apply(this, arguments)

    if (!paymentIntentCreateFinishCh.hasSubscribers) return promise

    return promise.then((result) => {
      paymentIntentCreateFinishCh.publish(result)
      return result
    })
  }
}

function wrapConstructEvent (constructEvent) {
  return function wrappedConstructEvent () {
    const result = constructEvent.apply(this, arguments)
    // No need to check hasSubscribers here:
    // publish() is a no-op when there are no subscribers.
    constructEventFinishCh.publish(result)
    return result
  }
}

function wrapConstructEventAsync (constructEventAsync) {
  return function wrappedConstructEventAsync () {
    const promise = constructEventAsync.apply(this, arguments)

    if (!constructEventFinishCh.hasSubscribers) return promise

    return promise.then((result) => {
      constructEventFinishCh.publish(result)
      return result
    })
  }
}

function wrapStripe (Stripe) {
  return function wrappedStripe () {
    let stripe = Stripe.apply(this, arguments)
    // Support calling Stripe() both with and without the `new` operator.
    if (this instanceof Stripe) {
      stripe = this
    }

    if (typeof stripe.checkout?.sessions?.create === 'function') {
      shimmer.wrap(stripe.checkout.sessions, 'create', wrapSessionCreate)
    }

    if (typeof stripe.paymentIntents?.create === 'function') {
      shimmer.wrap(stripe.paymentIntents, 'create', wrapPaymentIntentCreate)
    }

    if (typeof stripe.webhooks?.constructEvent === 'function') {
      shimmer.wrap(stripe.webhooks, 'constructEvent', wrapConstructEvent)
    }

    if (typeof stripe.webhooks?.constructEventAsync === 'function') {
      shimmer.wrap(stripe.webhooks, 'constructEventAsync', wrapConstructEventAsync)
    }

    return stripe
  }
}

addHook({
  name: 'stripe',
  versions: ['9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '>=20.0.0'],
}, Stripe => {
  return shimmer.wrapFunction(Stripe, wrapStripe)
})
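
// Illustrative sketch (not part of this module): a consumer could observe the
// channels published above with Node's diagnostics_channel API. The payload is
// the resolved Stripe object (e.g. the PaymentIntent) as published by the
// wrappers above; the handler below is hypothetical.
//
//   const dc = require('diagnostics_channel')
//   dc.channel('datadog:stripe:paymentIntent:create:finish').subscribe((paymentIntent) => {
//     // e.g. forward selected fields to a security or audit sink
//     console.log(paymentIntent.id, paymentIntent.amount, paymentIntent.currency)
//   })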
'use strict'

const web = require('../plugins/util/web')
const log = require('../log')
const {
  HTTP_OUTGOING_METHOD,
  HTTP_OUTGOING_HEADERS,
  HTTP_OUTGOING_RESPONSE_STATUS,
  HTTP_OUTGOING_RESPONSE_HEADERS,
  HTTP_OUTGOING_RESPONSE_BODY,
} = require('./addresses')

const KNUTH_FACTOR = 11400714819323199488n // eslint-disable-line unicorn/numeric-separators-style
const UINT64_MAX = (1n << 64n) - 1n

let config
let samplingRate
let globalRequestCounter
let bodyAnalysisCount
let downstreamAnalysisCount
let redirectBodyCollectionDecisions

function enable (_config) {
  config = _config
  globalRequestCounter = 0n
  bodyAnalysisCount = new WeakMap()
  downstreamAnalysisCount = new WeakMap()
  redirectBodyCollectionDecisions = new WeakMap()

  const bodyAnalysisSampleRate = config.appsec.apiSecurity?.downstreamBodyAnalysisSampleRate
  samplingRate = Math.min(Math.max(bodyAnalysisSampleRate, 0), 1)
  if (samplingRate !== bodyAnalysisSampleRate) {
    log.warn(
      'DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE value is %s and it\'s out of range',
      bodyAnalysisSampleRate)
  }
}

function disable () {
  config = null
  globalRequestCounter = null
  bodyAnalysisCount = null
  downstreamAnalysisCount = null
  redirectBodyCollectionDecisions = null
}
/**
 * Consumes (retrieves and removes) a stored redirect body collection decision for a given URL.
 * @param {import('http').IncomingMessage} req incoming server request.
 * @param {string} outgoingUrl the URL being requested.
 * @returns {boolean} whether a decision had been stored for that URL.
 */
function consumeRedirectBodyCollectionDecision (req, outgoingUrl) {
  const decisions = redirectBodyCollectionDecisions.get(req)
  if (!decisions) return false

  return decisions.delete(outgoingUrl)
}
/**
 * Stores a redirect body collection decision for a follow-up request.
 * @param {import('http').IncomingMessage} req incoming server request.
 * @param {string} redirectUrl the URL to redirect to.
 */
function storeRedirectBodyCollectionDecision (req, redirectUrl) {
  let decisions = redirectBodyCollectionDecisions.get(req)
  if (!decisions) {
    decisions = new Set()
    redirectBodyCollectionDecisions.set(req, decisions)
  }
  decisions.add(redirectUrl)
}
/**
 * Determines whether the current downstream request/response bodies should be sampled for analysis.
 * @param {import('http').IncomingMessage} req incoming server request.
 * @param {string} outgoingUrl the URL being requested (to check for redirect decisions).
 * @returns {boolean} true when the downstream response body should be captured.
 */
function shouldSampleBody (req, outgoingUrl) {
  // Check if there's a stored decision from a previous redirect
  const storedDecision = consumeRedirectBodyCollectionDecision(req, outgoingUrl)
  if (storedDecision) return true

  globalRequestCounter = (globalRequestCounter + 1n) & UINT64_MAX

  const currentCount = bodyAnalysisCount.get(req) || 0
  if (currentCount >= config.appsec.apiSecurity?.maxDownstreamRequestBodyAnalysis) {
    return false
  }

  const hashed = (globalRequestCounter * KNUTH_FACTOR) % UINT64_MAX
  // The 1000n factor sets the precision of the sampling rate (three decimal places)
  const threshold = (UINT64_MAX * BigInt(Math.round(samplingRate * 1000))) / 1000n
  const shouldCollectBody = hashed <= threshold

  // Track body analysis count if we're sampling the response body
  if (shouldCollectBody) {
    incrementBodyAnalysisCount(req)
  }

  return shouldCollectBody
}
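
// Worked example (illustrative, not from the original source): with
// samplingRate = 0.5 the threshold is roughly UINT64_MAX / 2. KNUTH_FACTOR is
// approximately 0.618 * 2^64 (the golden-ratio multiplier from Knuth's
// multiplicative hashing), so successive counter values map to pseudo-random
// points in [0, UINT64_MAX]:
//   counter 1n -> hashed ~ 0.618 * UINT64_MAX -> above threshold, not sampled
//   counter 2n -> hashed ~ 0.236 * UINT64_MAX -> below threshold, sampled
// Over many requests roughly samplingRate of them fall under the threshold,
// deterministically rather than via Math.random().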
/**
 * Increments the number of downstream body analyses performed for the given request.
 * @param {import('http').IncomingMessage} req incoming server request.
 */
function incrementBodyAnalysisCount (req) {
  const currentCount = bodyAnalysisCount.get(req) || 0
  bodyAnalysisCount.set(req, currentCount + 1)
}
/**
 * Lowercases all header names.
 * @param {object} headers
 * @returns {object} the headers with all keys converted to lowercase
 */
function lowercaseHeaderKeys (headers) {
  return Object.fromEntries(Object.entries(headers).map(([key, value]) => [key.toLowerCase(), value]))
}
/**
 * Extracts request data from the context for WAF analysis.
 * @param {object} ctx context for the outgoing downstream request.
 * @returns {object} a map of addresses and request data.
 */
function extractRequestData (ctx) {
  const addresses = {}
  const options = ctx?.args?.options || {}

  addresses[HTTP_OUTGOING_METHOD] = getMethod(options.method)

  const headers = options?.headers
  if (headers && Object.keys(headers).length > 0) {
    addresses[HTTP_OUTGOING_HEADERS] = lowercaseHeaderKeys(headers)
  }

  return addresses
}
/**
 * Checks whether a downstream response is a redirect and, if so, stores a body
 * collection decision for the redirect target.
 * @param {import('http').IncomingMessage} req incoming server request.
 * @param {import('http').IncomingMessage} res downstream response object.
 * @returns {boolean} true when the response is a redirect with a Location header.
 */
function handleRedirectResponse (req, res) {
  const isRedirect = res.statusCode >= 300 && res.statusCode < 400
  const redirectLocation = res.headers?.location || ''

  if (isRedirect && redirectLocation) {
    // Store the body collection decision for the redirect target
    storeRedirectBodyCollectionDecision(req, redirectLocation)
    return true
  }

  return false
}

/**
 * Extracts response data for WAF analysis.
 * @param {import('http').IncomingMessage} res downstream response object.
 * @param {Buffer|string|object|null} responseBody response body.
 * @returns {object} a map of addresses and response data.
 */
function extractResponseData (res, responseBody) {
  const addresses = {}

  if (res.statusCode) {
    addresses[HTTP_OUTGOING_RESPONSE_STATUS] = String(res.statusCode)
  }

  const headers = res.headers
  if (headers && Object.keys(headers).length > 0) {
    addresses[HTTP_OUTGOING_RESPONSE_HEADERS] = headers
  }

  if (responseBody) {
    // Parse the body based on content-type
    const contentType = res.headers?.['content-type']
    const body = parseBody(responseBody, contentType)
    if (body) {
      addresses[HTTP_OUTGOING_RESPONSE_BODY] = body
    }
  }

  return addresses
}
/**
 * Tracks how many downstream analyses were executed for a given request and updates tracing tags.
 * @param {import('http').IncomingMessage} req incoming server request.
 */
function incrementDownstreamAnalysisCount (req) {
  const currentCount = downstreamAnalysisCount.get(req) || 0
  downstreamAnalysisCount.set(req, currentCount + 1)

  const span = web.root(req)
  if (span) {
    span.setTag('_dd.appsec.downstream_request', currentCount + 1)
  }
}
/**
 * Returns the HTTP method to use for a downstream request, defaulting to GET.
 * @param {string} method method supplied in the outgoing request options.
 * @returns {string} validated HTTP method.
 */
function getMethod (method) {
  return typeof method === 'string' && method ? method : 'GET'
}

/**
 * Parses a downstream response body.
 * @param {Buffer|string|object|null} body raw response body.
 * @param {string|null} contentType response content-type used to select the parser.
 * @returns {object|null} parsed body object or null when not supported.
 */
function parseBody (body, contentType) {
  if (!body || !contentType) {
    return null
  }

  const mime = extractMimeType(contentType)

  try {
    if (mime === 'application/json' || mime === 'text/json') {
      if (typeof body === 'string') {
        return JSON.parse(body)
      }
      if (Buffer.isBuffer(body)) {
        return JSON.parse(body.toString('utf8'))
      }
      return null
    }

    if (mime === 'application/x-www-form-urlencoded') {
      const formBody = Buffer.isBuffer(body) ? body.toString('utf8') : String(body)
      const params = new URLSearchParams(formBody)
      const result = {}

      for (const [key, value] of params.entries()) {
        if (key in result) {
          const existing = result[key]
          if (Array.isArray(existing)) {
            existing.push(value)
          } else {
            result[key] = [existing, value]
          }
        } else {
          result[key] = value
        }
      }

      return result
    }
    // multipart/form-data is mentioned in the RFC but parsing it is complex.
    // Other content-types are also discarded per the RFC.
    return null
  } catch {
    // Parsing failed: return null to avoid sending malformed body to WAF
    return null
  }
}
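
// Illustrative examples (assumed inputs, not from the original source):
//   parseBody('{"a":1}', 'application/json; charset=utf-8')
//     -> { a: 1 }
//   parseBody('a=1&a=2&b=3', 'application/x-www-form-urlencoded')
//     -> { a: ['1', '2'], b: '3' }   // repeated keys are collected into arrays
//   parseBody('<html/>', 'text/html')
//     -> null                        // unsupported content-type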
/**
 * Extracts the MIME type portion of a content-type header value.
 * @param {string|null} contentType raw content-type header value.
 * @returns {string|null} lowercase MIME type.
 */
function extractMimeType (contentType) {
  if (typeof contentType !== 'string') {
    return null
  }
  return contentType.split(';', 1)[0].trim().toLowerCase()
}
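
// Example (illustrative): extractMimeType('Application/JSON; charset=utf-8') -> 'application/json'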
module.exports = {
  enable,
  disable,
  shouldSampleBody,
  handleRedirectResponse,
  incrementDownstreamAnalysisCount,
  extractRequestData,
  extractResponseData,
  // exports for tests
  parseBody,
  getMethod,
  storeRedirectBodyCollectionDecision,
}
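
// Illustrative flow (assumed caller, not part of this module): when a sampled
// downstream request answers with a redirect, the sampling decision is carried
// over to the follow-up request so the final response body is still analyzed.
//
//   shouldSampleBody(req, 'https://api.example.com/a')  // true (sampled)
//   // downstream responds 302 with Location: https://api.example.com/b
//   handleRedirectResponse(req, res)                    // stores a decision for /b
//   shouldSampleBody(req, 'https://api.example.com/b')  // true (consumes the stored decision)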
{
  "name": "dd-trace",
-  "version": "5.86.0",
+  "version": "5.87.0",
  "description": "Datadog APM tracing client for JavaScript",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -356,2 +356,16 @@ 'use strict'
      // Check if all EFD retries failed
      if (isEfdRetry && (isNew || isModified)) {
        const statuses = lastStatusByPickleId.get(this.pickle.id)
        if (statuses.length === earlyFlakeDetectionNumRetries + 1) {
          const { fail } = statuses.reduce((acc, status) => {
            acc[status]++
            return acc
          }, { pass: 0, fail: 0 })
          if (fail === earlyFlakeDetectionNumRetries + 1) {
            hasFailedAllRetries = true
          }
        }
      }
      const attemptCtx = numAttemptToCtx.get(numAttempt)
@@ -358,0 +372,0 @@
@@ -139,2 +139,3 @@ 'use strict'
  sharedb: () => require('../sharedb'),
  stripe: () => require('../stripe'),
  tedious: () => require('../tedious'),
@@ -141,0 +142,0 @@ tinypool: { esmFirst: true, fn: () => require('../vitest') },
@@ -17,2 +17,3 @@ 'use strict'
const errorChannel = channel('apm:http:client:request:error')
const responseFinishChannel = channel('apm:http:client:response:finish')
@@ -43,2 +44,108 @@ const names = ['http', 'https', 'node:http', 'node:https']
/**
 * Wires up the downstream response so we can observe when the caller consumes
 * the body and when the stream finishes.
 *
 * @param {object} ctx - Instrumentation context
 * @param {import('http').IncomingMessage} res - The downstream response object.
 * @returns {{ finalizeIfNeeded: () => void }|null} Cleanup helper used to finalize responses whose body is never consumed.
 */
function setupResponseInstrumentation (ctx, res) {
  const shouldInstrumentFinish = responseFinishChannel.hasSubscribers
  if (!shouldInstrumentFinish) {
    return null
  }

  let bodyConsumed = false
  let finishCalled = false
  let originalRead = null
  let dataListenerAdded = false
  let dataReadStarted = false

  const { shouldCollectBody } = ctx
  const bodyChunks = shouldCollectBody ? [] : null

  const collectChunk = chunk => {
    if (!shouldCollectBody || !chunk) return
    if (typeof chunk === 'string') {
      bodyChunks.push(chunk)
    } else if (Buffer.isBuffer(chunk)) {
      bodyChunks.push(chunk)
    } else {
      // Handle Uint8Array or other array-like types
      bodyChunks.push(Buffer.from(chunk))
    }
  }

  // Listen for body consumption
  const onNewListener = (eventName) => {
    if (eventName === 'data' || eventName === 'readable') {
      bodyConsumed = true

      // For 'data' events, add our own listener to collect chunks
      if (eventName === 'data' && !dataListenerAdded && !dataReadStarted) {
        dataListenerAdded = true
        res.on('data', collectChunk)
      }

      // For 'readable' events, wrap the read() method
      if (eventName === 'readable' && !originalRead && !dataListenerAdded && typeof res.read === 'function') {
        originalRead = res.read
        res.read = function () {
          const chunk = originalRead.apply(this, arguments)
          if (!dataListenerAdded) {
            dataReadStarted = true
            collectChunk(chunk)
          }
          return chunk
        }
      }
    }
  }

  res.on('newListener', onNewListener)

  // Cleanup function to restore original behavior
  const cleanup = () => {
    res.off('newListener', onNewListener)
    res.off('data', collectChunk)
    if (originalRead) {
      res.read = originalRead
      originalRead = null
    }
  }

  const notifyFinish = () => {
    if (finishCalled) return
    finishCalled = true

    // Combine collected chunks into a single body
    let body = null
    if (bodyChunks?.length) {
      const firstChunk = bodyChunks[0]
      body = typeof firstChunk === 'string'
        ? bodyChunks.join('')
        : Buffer.concat(bodyChunks)
    }

    responseFinishChannel.publish({ ctx, res, body })
    cleanup()
  }

  res.once('end', notifyFinish)
  res.once('close', notifyFinish)
  return {
    finalizeIfNeeded () {
      if (!bodyConsumed) {
        // The body was never consumed; publish the finish event (body will be null)
        notifyFinish()
      }
    },
  }
}
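
// Illustrative consumer-side behavior (assumptions, not part of the patch):
// whichever way the application drains the response, the instrumentation sees
// the same chunks without disturbing stream semantics.
//
//   res.on('data', chunk => { /* app logic */ })  // 'newListener' fires first,
//                                                 // so collectChunk is attached too
//   res.on('readable', () => res.read())          // read() is wrapped to mirror chunks
//   // if the body is never consumed, finalizeIfNeeded() publishes the finish
//   // event with body = null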
function patch (http, methodName) {
@@ -108,3 +215,14 @@ shimmer.wrap(http, methodName, instrumentRequest)
          res.once(errorMonitor, finish)
          break
          const instrumentation = setupResponseInstrumentation(ctx, res)
          if (!instrumentation) {
            break
          }
          const result = emit.apply(this, arguments)
          instrumentation.finalizeIfNeeded()
          return result
        }
@@ -111,0 +229,0 @@ case 'connect':
@@ -274,2 +274,3 @@ 'use strict'
    const isLastAttempt = testStatuses.length === config.testManagementAttemptToFixRetries + 1
    const isLastEfdRetry = testStatuses.length === config.earlyFlakeDetectionNumRetries + 1
@@ -287,2 +288,7 @@ if (test._ddIsAttemptToFix && isLastAttempt) {
    if (test._ddIsEfdRetry && isLastEfdRetry &&
      testStatuses.every(status => status === 'fail')) {
      hasFailedAllRetries = true
    }
    const isAttemptToFixRetry = test._ddIsAttemptToFix && testStatuses.length > 1
@@ -289,0 +295,0 @@ const isAtrRetry = config.isFlakyTestRetriesEnabled &&
@@ -9,2 +9,18 @@ 'use strict'
/** @type {WeakMap<object, Function>} */
const wrappedOnResult = new WeakMap()

/**
 * @param {unknown} sql
 * @returns {string|undefined}
 */
function resolveSqlString (sql) {
  return typeof sql === 'string' ? sql : /** @type {{ sql?: string }} */ (sql)?.sql
}

/**
 * @param {Function} Connection
 * @param {string} version
 * @returns {Function}
 */
function wrapConnection (Connection, version) {
@@ -23,12 +39,24 @@ const startCh = channel('apm:mysql2:query:start')
      const name = cmd && cmd.constructor && cmd.constructor.name
      const isCommand = typeof cmd.execute === 'function'
      const isQuery = isCommand && (name === 'Execute' || name === 'Query')
      const command = /** @type {{ execute?: Function, constructor?: { name?: string } }} */ (cmd)
      if (typeof command.execute !== 'function') return addCommand.apply(this, arguments)
      const name = command.constructor?.name
      const isQuery = name === 'Execute' || name === 'Query'
      const ctx = {}
      // TODO: consider supporting all commands and not just queries
      cmd.execute = isQuery
        ? wrapExecute(cmd, cmd.execute, ctx, this.config)
        : bindExecute(cmd.execute, ctx)
      if (isQuery) {
        command.execute = wrapExecute(command, command.execute, ctx, this.config)
        return commandAddCh.runStores(ctx, addCommand, this, ...arguments)
      }
      wrapCommandOnResult(command, ctx)
      command.execute = shimmer.wrapFunction(
        command.execute,
        execute => function executeWithTrace (_packet_, _connection_) {
          return commandStartCh.runStores(ctx, execute, this, ...arguments)
        }
      )
      return commandAddCh.runStores(ctx, addCommand, this, ...arguments)
@@ -40,8 +68,7 @@ })
      if (sql !== null && typeof sql === 'object') sql = sql.sql
      const resolvedSql = resolveSqlString(sql)
      if (resolvedSql === undefined) return query.apply(this, arguments)
      if (!sql) return query.apply(this, arguments)
      const abortController = new AbortController()
      startOuterQueryCh.publish({ sql, abortController })
      startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -62,3 +89,3 @@ if (abortController.signal.aborted) {
        process.nextTick(() => {
          if (cb) {
          if (typeof cb === 'function') {
            cb(abortController.signal.reason)
@@ -83,8 +110,7 @@ } else {
      if (sql !== null && typeof sql === 'object') sql = sql.sql
      const resolvedSql = resolveSqlString(sql)
      if (resolvedSql === undefined) return execute.apply(this, arguments)
      if (!sql) return execute.apply(this, arguments)
      const abortController = new AbortController()
      startOuterQueryCh.publish({ sql, abortController })
      startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -102,3 +128,5 @@ if (abortController.signal.aborted) {
        result?.onResult(abortController.signal.reason)
        if (typeof result?.onResult === 'function') {
          result.onResult(abortController.signal.reason)
        }
@@ -113,29 +141,44 @@ return result
  function bindExecute (execute, ctx) {
    return shimmer.wrapFunction(execute, execute => function executeWithTrace (packet, connection) {
      const onResult = this.onResult
  /**
   * @param {object} cmd
   * @param {object} ctx
   * @returns {void}
   */
  function wrapCommandOnResult (cmd, ctx) {
    const onResult = cmd?.onResult
    if (typeof onResult !== 'function') return
      if (onResult) {
        this.onResult = function () {
          return commandFinishCh.runStores(ctx, onResult, this, ...arguments)
        }
      }
    const cached = wrappedOnResult.get(cmd)
      return commandStartCh.runStores(ctx, execute, this, ...arguments)
    })
    if (cached === onResult) return
    const wrapped = function () {
      return commandFinishCh.runStores(ctx, onResult, this, ...arguments)
    }
    wrappedOnResult.set(cmd, wrapped)
    cmd.onResult = wrapped
  }

  /**
   * @param {object} cmd
   * @param {Function} execute
   * @param {object} ctx
   * @param {object} config
   * @returns {Function}
   */
  function wrapExecute (cmd, execute, ctx, config) {
    return shimmer.wrapFunction(execute, execute => function executeWithTrace (packet, connection) {
      ctx.sql = cmd.statement ? cmd.statement.query : cmd.sql
      const command = /** @type {{ statement?: { query?: unknown }, sql?: unknown }} */ (cmd)
      ctx.sql = command.statement ? command.statement.query : command.sql
      ctx.conf = config
      return startCh.runStores(ctx, () => {
        if (cmd.statement) {
          cmd.statement.query = ctx.sql
        if (command.statement) {
          command.statement.query = ctx.sql
        } else {
          cmd.sql = ctx.sql
          command.sql = ctx.sql
        }
        if (this.onResult) {
        if (typeof this.onResult === 'function') {
          const onResult = this.onResult
@@ -151,7 +194,10 @@
        } else {
          this.once(errorMonitor, error => {
            ctx.error = error
            errorCh.publish(ctx)
          })
          this.once('end', () => finishCh.publish(ctx))
          const command = /** @type {{ once?: Function }} */ (this)
          if (typeof command.once === 'function') {
            command.once(errorMonitor, error => {
              ctx.error = error
              errorCh.publish(ctx)
            })
            command.once('end', () => finishCh.publish(ctx))
          }
        }
@@ -168,5 +214,10 @@
      })
    }, cmd)
  })
  }
}

/**
 * @param {Function} Pool
 * @param {string} version
 * @returns {Function}
 */
function wrapPool (Pool, version) {
@@ -179,8 +230,7 @@ const startOuterQueryCh = channel('datadog:mysql2:outerquery:start')
    if (sql !== null && typeof sql === 'object') sql = sql.sql
    const resolvedSql = resolveSqlString(sql)
    if (resolvedSql === undefined) return query.apply(this, arguments)
    if (!sql) return query.apply(this, arguments)
    const abortController = new AbortController()
    startOuterQueryCh.publish({ sql, abortController })
    startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -219,8 +269,7 @@ if (abortController.signal.aborted) {
    if (sql !== null && typeof sql === 'object') sql = sql.sql
    const resolvedSql = resolveSqlString(sql)
    if (resolvedSql === undefined) return execute.apply(this, arguments)
    if (!sql) return execute.apply(this, arguments)
    const abortController = new AbortController()
    startOuterQueryCh.publish({ sql, abortController })
    startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -232,5 +281,7 @@ if (abortController.signal.aborted) {
      process.nextTick(() => {
        cb(abortController.signal.reason)
      })
      if (typeof cb === 'function') {
        process.nextTick(() => {
          /** @type {Function} */ (cb)(abortController.signal.reason)
        })
      }
      return
@@ -245,2 +296,6 @@ }

/**
 * @param {Function} PoolCluster
 * @returns {Function}
 */
function wrapPoolCluster (PoolCluster) {
@@ -255,8 +310,7 @@ const startOuterQueryCh = channel('datadog:mysql2:outerquery:start')
    shimmer.wrap(poolNamespace, 'query', query => function (sql, values, cb) {
      if (sql !== null && typeof sql === 'object') sql = sql.sql
      const resolvedSql = resolveSqlString(sql)
      if (resolvedSql === undefined) return query.apply(this, arguments)
      if (!sql) return query.apply(this, arguments)
      const abortController = new AbortController()
      startOuterQueryCh.publish({ sql, abortController })
      startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -291,8 +345,7 @@ if (abortController.signal.aborted) {
    shimmer.wrap(poolNamespace, 'execute', execute => function (sql, values, cb) {
      if (sql !== null && typeof sql === 'object') sql = sql.sql
      const resolvedSql = resolveSqlString(sql)
      if (resolvedSql === undefined) return execute.apply(this, arguments)
      if (!sql) return execute.apply(this, arguments)
      const abortController = new AbortController()
      startOuterQueryCh.publish({ sql, abortController })
      startOuterQueryCh.publish({ sql: resolvedSql, abortController })
@@ -304,5 +357,7 @@ if (abortController.signal.aborted) {
      process.nextTick(() => {
        cb(abortController.signal.reason)
      })
      if (typeof cb === 'function') {
        process.nextTick(() => {
          /** @type {Function} */ (cb)(abortController.signal.reason)
        })
      }
@@ -324,7 +379,19 @@ return
addHook({ name: 'mysql2', file: 'lib/base/connection.js', versions: ['>=3.11.5'] }, wrapConnection)
addHook({ name: 'mysql2', file: 'lib/connection.js', versions: ['1 - 3.11.4'] }, wrapConnection)
addHook({ name: 'mysql2', file: 'lib/pool.js', versions: ['1 - 3.11.4'] }, wrapPool)
addHook(
  { name: 'mysql2', file: 'lib/base/connection.js', versions: ['>=3.11.5'] },
  /** @type {(moduleExports: unknown, version: string) => unknown} */ (wrapConnection)
)
addHook(
  { name: 'mysql2', file: 'lib/connection.js', versions: ['1 - 3.11.4'] },
  /** @type {(moduleExports: unknown, version: string) => unknown} */ (wrapConnection)
)
addHook(
  { name: 'mysql2', file: 'lib/pool.js', versions: ['1 - 3.11.4'] },
  /** @type {(moduleExports: unknown, version: string) => unknown} */ (wrapPool)
)
// PoolNamespace.prototype.query does not exist in mysql2<2.3.0
addHook({ name: 'mysql2', file: 'lib/pool_cluster.js', versions: ['2.3.0 - 3.11.4'] }, wrapPoolCluster)
addHook(
  { name: 'mysql2', file: 'lib/pool_cluster.js', versions: ['2.3.0 - 3.11.4'] },
  /** @type {(moduleExports: unknown, version: string) => unknown} */ (wrapPoolCluster)
)
@@ -399,2 +399,10 @@ 'use strict'
    // Check if all EFD retries failed
    if (testStatuses.length === earlyFlakeDetectionNumRetries + 1 &&
      (test._ddIsNew || test._ddIsModified) &&
      test._ddIsEfdRetry &&
      testStatuses.every(status => status === 'fail')) {
      test._ddHasFailedAllRetries = true
    }
    // this handles tests that do not go through the worker process (because they're skipped)
@@ -401,0 +409,0 @@ if (shouldCreateTestSpan) {
@@ -1039,2 +1039,13 @@ 'use strict'
    // Check if all EFD retries failed
    const providedContext = getProvidedContext()
    if (providedContext.isEarlyFlakeDetectionEnabled && (newTasks.has(task) || modifiedTasks.has(task))) {
      const statuses = taskToStatuses.get(task)
      // statuses only includes repetitions (not the initial run), so we check against numRepeats (not +1)
      if (statuses && statuses.length === providedContext.numRepeats &&
        statuses.every(status => status === 'fail')) {
        hasFailedAllRetries = true
      }
    }
    if (testCtx) {
@@ -1041,0 +1052,0 @@ const isRetry = task.result?.retryCount > 0
'use strict'

const TracingPlugin = require('../../dd-trace/src/plugins/tracing')
const serverless = require('../../dd-trace/src/plugins/util/serverless')
const web = require('../../dd-trace/src/plugins/util/web')
@@ -29,15 +28,47 @@
    const triggerType = triggerMap[ctx.methodName]
    const isHttpTrigger = triggerType === 'Http'
    const isMessagingService = (triggerType === 'ServiceBus' || triggerType === 'EventHubs')
    const childOf = isMessagingService ? null : extractTraceContext(this._tracer, ctx)
    const span = this.startSpan(this.operationName(), {
      childOf,
      service: this.serviceName(),
      type: 'serverless',
      meta,
    }, ctx)
    if (isMessagingService) {
      setSpanLinks(triggerType, this.tracer, span, ctx)
    let span
    if (isHttpTrigger) {
      const { httpRequest } = ctx
      const path = (new URL(httpRequest.url)).pathname
      const req = {
        method: httpRequest.method,
        headers: Object.fromEntries(httpRequest.headers),
        url: path,
      }
      // Patch the request to create web context
      const webContext = web.patch(req)
      webContext.config = this.config
      webContext.tracer = this.tracer
      webContext.paths = [path]
      // Creates a standard span and an inferred proxy span if headers are present
      span = web.startServerlessSpanWithInferredProxy(
        this.tracer,
        this.config,
        this.operationName(),
        req,
        ctx
      )
      span._integrationName = 'azure-functions'
      span.context()._tags.component = 'azure-functions'
      span.addTags(meta)
      webContext.span = span
      webContext.azureFunctionCtx = ctx
      ctx.webContext = webContext
    } else {
      // For non-HTTP triggers, use standard flow
      span = this.startSpan(this.operationName(), {
        service: this.serviceName(),
        type: 'serverless',
        meta,
      }, ctx)
      if (isMessagingService) {
        setSpanLinks(triggerType, this.tracer, span, ctx)
      }
    }
    ctx.span = span
@@ -52,20 +83,12 @@ return ctx.currentStore
  asyncEnd (ctx) {
    const { httpRequest, methodName, result = {} } = ctx
    if (triggerMap[methodName] === 'Http') {
      // If the method is an HTTP trigger, we need to patch the request and finish the span
      const path = (new URL(httpRequest.url)).pathname
      const req = {
        method: httpRequest.method,
        headers: Object.fromEntries(httpRequest.headers),
        url: path,
  asyncStart (ctx) {
    const { methodName, result = {}, webContext } = ctx
    const triggerType = triggerMap[methodName]
    // For HTTP triggers, use web utilities to finish all spans (including inferred proxy)
    if (triggerType === 'Http') {
      if (webContext) {
        webContext.res = { statusCode: result.status }
        web.finishAll(webContext, 'serverless')
      }
      const context = web.patch(req)
      context.config = this.config
      context.paths = [path]
      context.res = { statusCode: result.status }
      context.span = ctx.currentStore.span
      serverless.finishSpan(context)
    // Fallback for other trigger types
    } else {
@@ -85,2 +108,3 @@ super.finish()
      'aas.function.trigger': mapTriggerTag(methodName),
      'span.type': 'serverless',
    }
@@ -118,10 +142,2 @@
function extractTraceContext (tracer, ctx) {
  if (triggerMap[ctx.methodName] === 'Http') {
    return tracer.extract('http_headers', Object.fromEntries(ctx.httpRequest.headers))
  }
  // Returning null indicates that the span is a root span
  return null
}
// message & messages & batch with cardinality of 1 == applicationProperties
@@ -128,0 +144,0 @@ // messages with cardinality of many == applicationPropertiesArray
@@ -940,2 +940,9 @@ 'use strict'
    }
    // Check if all EFD retries failed
    if ((isNew || isModified) && this.earlyFlakeDetectionNumRetries > 0) {
      const isLastEfdAttempt = testStatuses.length === this.earlyFlakeDetectionNumRetries + 1
      if (isLastEfdAttempt && testStatuses.every(status => status === 'fail')) {
        this.activeTestSpan.setTag(TEST_HAS_FAILED_ALL_RETRIES, 'true')
      }
    }
    if (isAttemptToFix) {
@@ -942,0 +949,0 @@ this.activeTestSpan.setTag(TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX, 'true')
@@ -30,2 +30,8 @@ 'use strict'
  HTTP_OUTGOING_URL: 'server.io.net.url',
  HTTP_OUTGOING_METHOD: 'server.io.net.request.method',
  HTTP_OUTGOING_HEADERS: 'server.io.net.request.headers',
  HTTP_OUTGOING_RESPONSE_STATUS: 'server.io.net.response.status',
  HTTP_OUTGOING_RESPONSE_HEADERS: 'server.io.net.response.headers',
  HTTP_OUTGOING_RESPONSE_BODY: 'server.io.net.response.body',
  FS_OPERATION_PATH: 'server.io.fs.file',
@@ -41,2 +47,7 @@
  LOGIN_FAILURE: 'server.business_logic.users.login.failure',
  PAYMENT_CREATION: 'server.business_logic.payment.creation',
  PAYMENT_SUCCESS: 'server.business_logic.payment.success',
  PAYMENT_FAILURE: 'server.business_logic.payment.failure',
  PAYMENT_CANCELLATION: 'server.business_logic.payment.cancellation',
}
@@ -26,2 +26,3 @@ 'use strict'
  httpClientRequestStart: dc.channel('apm:http:client:request:start'),
  httpClientResponseFinish: dc.channel('apm:http:client:response:finish'),
  incomingHttpRequestEnd: dc.channel('dd-trace:incomingHttpRequestEnd'),
@@ -41,8 +42,11 @@ incomingHttpRequestStart: dc.channel('dd-trace:incomingHttpRequestStart'),
  responseWriteHead: dc.channel('apm:http:server:response:writeHead:start'),
  routerMiddlewareError: dc.channel('apm:router:middleware:error'),
  routerParam: dc.channel('datadog:router:param:start'),
  routerMiddlewareError: dc.channel('apm:router:middleware:error'),
  setCookieChannel: dc.channel('datadog:iast:set-cookie'),
  setUncaughtExceptionCaptureCallbackStart: dc.channel('datadog:process:setUncaughtExceptionCaptureCallback:start'),
  startGraphqlResolve: dc.channel('datadog:graphql:resolver:start'),
  stripeCheckoutSessionCreate: dc.channel('datadog:stripe:checkoutSession:create:finish'),
  stripeConstructEvent: dc.channel('datadog:stripe:constructEvent:finish'),
  stripePaymentIntentCreate: dc.channel('datadog:stripe:paymentIntent:create:finish'),
  wafRunFinished: dc.channel('datadog:waf:run:finish'),
}
@@ -33,2 +33,5 @@ 'use strict'
  fastifyPathParams,
  stripeCheckoutSessionCreate,
  stripePaymentIntentCreate,
  stripeConstructEvent,
} = require('./channels')
@@ -96,2 +99,5 @@ const waf = require('./waf')
    responseSetHeader.subscribe(onResponseSetHeader)
    stripeCheckoutSessionCreate.subscribe(onStripeCheckoutSessionCreate)
    stripePaymentIntentCreate.subscribe(onStripePaymentIntentCreate)
    stripeConstructEvent.subscribe(onStripeConstructEvent)
@@ -387,2 +393,96 @@ isEnabled = true
function onStripeCheckoutSessionCreate (payload) {
  if (payload?.mode !== 'payment') return
  waf.run({
    persistent: {
      [addresses.PAYMENT_CREATION]: {
        integration: 'stripe',
        id: payload.id,
        amount_total: payload.amount_total,
        client_reference_id: payload.client_reference_id,
        currency: payload.currency,
        'discounts.coupon': payload.discounts?.[0]?.coupon,
        'discounts.promotion_code': payload.discounts?.[0]?.promotion_code,
        livemode: payload.livemode,
        'total_details.amount_discount': payload.total_details?.amount_discount,
        'total_details.amount_shipping': payload.total_details?.amount_shipping,
      },
    },
  })
}

function onStripePaymentIntentCreate (payload) {
  if (payload === null || typeof payload !== 'object') return
  waf.run({
    persistent: {
      [addresses.PAYMENT_CREATION]: {
        integration: 'stripe',
        id: payload.id,
        amount: payload.amount,
        currency: payload.currency,
        livemode: payload.livemode,
        payment_method: payload.payment_method,
      },
    },
  })
}

function onStripeConstructEvent (payload) {
  const object = payload?.data?.object
  if (object === null || typeof object !== 'object') return
  let persistent
  switch (payload.type) {
    case 'payment_intent.succeeded':
      persistent = {
        [addresses.PAYMENT_SUCCESS]: {
          integration: 'stripe',
          id: object.id,
          amount: object.amount,
          currency: object.currency,
          livemode: object.livemode,
          payment_method: object.payment_method,
        },
      }
      break
    case 'payment_intent.payment_failed':
      persistent = {
        [addresses.PAYMENT_FAILURE]: {
          integration: 'stripe',
          id: object.id,
          amount: object.amount,
          currency: object.currency,
          'last_payment_error.code': object.last_payment_error?.code,
          'last_payment_error.decline_code': object.last_payment_error?.decline_code,
          'last_payment_error.payment_method.id': object.last_payment_error?.payment_method?.id,
          'last_payment_error.payment_method.type': object.last_payment_error?.payment_method?.type,
          livemode: object.livemode,
        },
      }
      break
    case 'payment_intent.canceled':
      persistent = {
        [addresses.PAYMENT_CANCELLATION]: {
          integration: 'stripe',
          id: object.id,
          amount: object.amount,
          cancellation_reason: object.cancellation_reason,
          currency: object.currency,
          livemode: object.livemode,
        },
      }
      break
    default:
      return
  }
  waf.run({ persistent })
}
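
// Illustrative webhook payload (assumed shape, abridged) and the resulting WAF
// address for a successful payment, where PAYMENT_SUCCESS maps to
// 'server.business_logic.payment.success':
//
//   constructEvent(...) -> {
//     type: 'payment_intent.succeeded',
//     data: { object: { id: 'pi_123', amount: 1000, currency: 'usd',
//             livemode: false, payment_method: 'pm_456' } }
//   }
//   => waf.run({ persistent: { [PAYMENT_SUCCESS]: { integration: 'stripe',
//        id: 'pi_123', amount: 1000, currency: 'usd', livemode: false,
//        payment_method: 'pm_456' } } })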
function handleResults (actions, req, res, rootSpan, abortController) {
@@ -433,2 +533,5 @@ if (!actions || !req || !res || !rootSpan || !abortController) return
    if (responseSetHeader.hasSubscribers) responseSetHeader.unsubscribe(onResponseSetHeader)
    if (stripeCheckoutSessionCreate.hasSubscribers) stripeCheckoutSessionCreate.unsubscribe(onStripeCheckoutSessionCreate)
    if (stripePaymentIntentCreate.hasSubscribers) stripePaymentIntentCreate.unsubscribe(onStripePaymentIntentCreate)
    if (stripeConstructEvent.hasSubscribers) stripeConstructEvent.unsubscribe(onStripeConstructEvent)
}
@@ -435,0 +538,0 @@
'use strict'

const { format } = require('url')
const { httpClientRequestStart } = require('../channels')
const {
  httpClientRequestStart,
  httpClientResponseFinish,
} = require('../channels')
const { storage } = require('../../../../datadog-core')
const addresses = require('../addresses')
const waf = require('../waf')
const downstream = require('../downstream_requests')
const { updateRaspRuleMatchMetricTags } = require('../telemetry')
const { RULE_TYPES, handleResult } = require('./utils')
@@ -14,7 +19,13 @@
  config = _config
  downstream.enable(_config)
  httpClientRequestStart.subscribe(analyzeSsrf)
  httpClientResponseFinish.subscribe(handleResponseFinish)
}

function disable () {
  downstream.disable()
  if (httpClientRequestStart.hasSubscribers) httpClientRequestStart.unsubscribe(analyzeSsrf)
  if (httpClientResponseFinish.hasSubscribers) httpClientResponseFinish.unsubscribe(handleResponseFinish)
}
@@ -29,14 +40,65 @@
  // Determine if we should collect the response body based on sampling rate and redirect URL
  ctx.shouldCollectBody = downstream.shouldSampleBody(req, outgoingUrl)

  const requestAddresses = downstream.extractRequestData(ctx)
  const ephemeral = {
    [addresses.HTTP_OUTGOING_URL]: outgoingUrl,
    ...requestAddresses,
  }
  const raspRule = { type: RULE_TYPES.SSRF }
  const raspRule = { type: RULE_TYPES.SSRF, variant: 'request' }
  const result = waf.run({ ephemeral }, req, raspRule)
  const res = store?.res
  handleResult(result, req, res, ctx.abortController, config, raspRule)
  handleResult(result, req, store?.res, ctx.abortController, config, raspRule)
  downstream.incrementDownstreamAnalysisCount(req)
}

/**
 * Finalizes body collection for the response and triggers RASP analysis.
 * @param {{
 *   ctx: object,
 *   res: import('http').IncomingMessage,
 *   body: string|Buffer|null
 * }} payload event payload from the channel.
 */
function handleResponseFinish ({ ctx, res, body }) {
  // downstream response object
  if (!res) return

  const store = storage('legacy').getStore()
  const originatingRequest = store?.req
  if (!originatingRequest) return

  // Skip body analysis for redirect responses
  const evaluateBody = ctx.shouldCollectBody && !downstream.handleRedirectResponse(originatingRequest, res)
  const responseBody = evaluateBody ? body : null
  runResponseEvaluation(res, originatingRequest, responseBody)
}

/**
 * Evaluates the downstream response and records telemetry.
 * @param {import('http').IncomingMessage} res incoming response from downstream service.
 * @param {import('http').IncomingMessage} req originating request.
 * @param {string|Buffer|null} responseBody collected downstream response body.
 */
function runResponseEvaluation (res, req, responseBody) {
  const responseAddresses = downstream.extractResponseData(res, responseBody)
  if (!Object.keys(responseAddresses).length) return

  const raspRule = { type: RULE_TYPES.SSRF, variant: 'response' }
  const result = waf.run({ ephemeral: responseAddresses }, req, raspRule)

  // TODO: this should be done in the waf functions directly instead of calling it everywhere
  const ruleTriggered = !!result?.events?.length
  if (ruleTriggered) {
    updateRaspRuleMatchMetricTags(req, raspRule, false, false)
  }
}

module.exports = { enable, disable }
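
// Illustrative end-to-end sequence (a summary, not part of this module):
//   1. apm:http:client:request:start  -> analyzeSsrf runs the WAF with the
//      server.io.net.url/method/headers addresses and decides ctx.shouldCollectBody.
//   2. The http client instrumentation mirrors the downstream body when sampled.
//   3. apm:http:client:response:finish -> handleResponseFinish evaluates the
//      server.io.net.response.status/headers/body addresses (variant: 'response').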
@@ -19,2 +19,8 @@ 'use strict'
const { getLocalStateForCallFrame } = require('../../../debugger/devtools_client/snapshot')
const {
  DEFAULT_MAX_REFERENCE_DEPTH,
  DEFAULT_MAX_COLLECTION_SIZE,
  DEFAULT_MAX_FIELD_COUNT,
  DEFAULT_MAX_LENGTH,
} = require('../../../debugger/devtools_client/snapshot/constants')
// TODO: move debugger/devtools_client/state to common place
@@ -33,2 +39,9 @@ const {
const limits = {
  maxReferenceDepth: DEFAULT_MAX_REFERENCE_DEPTH,
  maxCollectionSize: DEFAULT_MAX_COLLECTION_SIZE,
  maxFieldCount: DEFAULT_MAX_FIELD_COUNT,
  maxLength: DEFAULT_MAX_LENGTH,
}
session.on('Debugger.paused', async ({ params: { hitBreakpoints: [hitBreakpoint], callFrames } }) => {
@@ -43,3 +56,3 @@ const probe = breakpointIdToProbe.get(hitBreakpoint)
  const { processLocalState } = await getLocalStateForCallFrame(callFrames[0])
  const { processLocalState } = await getLocalStateForCallFrame(callFrames[0], limits)
@@ -46,0 +59,0 @@ await session.post('Debugger.resume')
@@ -29,2 +29,4 @@ 'use strict'
  'appsec.apiSecurity.endpointCollectionMessageLimit': 300,
  'appsec.apiSecurity.downstreamBodyAnalysisSampleRate': 0.5,
  'appsec.apiSecurity.maxDownstreamRequestBodyAnalysis': 1,
  'appsec.blockedTemplateGraphql': undefined,
@@ -31,0 +33,0 @@ 'appsec.blockedTemplateHtml': undefined,
@@ -18,2 +18,4 @@ {
  "DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT": ["A"],
  "DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE": ["A"],
  "DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS": ["A"],
  "DD_APM_FLUSH_DEADLINE_MILLISECONDS": ["A"],
@@ -20,0 +22,0 @@ "DD_APM_TRACING_ENABLED": ["A"],
@@ -6,5 +6,11 @@ 'use strict'
const session = require('./session')
const { compile: compileCondition, compileSegments, templateRequiresEvaluation } = require('./condition')
const { compile, compileSegments, templateRequiresEvaluation } = require('./condition')
const { MAX_SNAPSHOTS_PER_SECOND_PER_PROBE, MAX_NON_SNAPSHOTS_PER_SECOND_PER_PROBE } = require('./defaults')
const {
  DEFAULT_MAX_REFERENCE_DEPTH,
  DEFAULT_MAX_COLLECTION_SIZE,
  DEFAULT_MAX_FIELD_COUNT,
  DEFAULT_MAX_LENGTH,
} = require('./snapshot/constants')
const {
  findScriptFromPartialPath,
@@ -103,3 +109,3 @@ clearState,
  try {
    probe.condition = probe.when?.json && compileCondition(probe.when.json)
    probe.condition = probe.when?.json && compile(probe.when.json)
  } catch (err) {
@@ -112,2 +118,41 @@ throw new Error(
  if (probe.captureSnapshot) {
    probe.capture = {
      maxReferenceDepth: probe.capture?.maxReferenceDepth ?? DEFAULT_MAX_REFERENCE_DEPTH,
      maxCollectionSize: probe.capture?.maxCollectionSize ?? DEFAULT_MAX_COLLECTION_SIZE,
      maxFieldCount: probe.capture?.maxFieldCount ?? DEFAULT_MAX_FIELD_COUNT,
      maxLength: probe.capture?.maxLength ?? DEFAULT_MAX_LENGTH,
    }
  }
  if (probe.captureExpressions?.length > 0) {
    probe.compiledCaptureExpressions = []
    for (const captureExpr of probe.captureExpressions) {
      let expression
      try {
        expression = compile(captureExpr.expr.json)
      } catch (err) {
        throw new Error(
          `Cannot compile capture expression: ${captureExpr.name} (probe: ${probe.id}, version: ${probe.version})`,
          { cause: err }
        )
      }
      probe.compiledCaptureExpressions.push({
        name: captureExpr.name,
        expression,
        limits: {
          maxReferenceDepth: captureExpr.capture?.maxReferenceDepth ??
            probe.capture?.maxReferenceDepth ?? DEFAULT_MAX_REFERENCE_DEPTH,
          maxCollectionSize: captureExpr.capture?.maxCollectionSize ??
            probe.capture?.maxCollectionSize ?? DEFAULT_MAX_COLLECTION_SIZE,
          maxFieldCount: captureExpr.capture?.maxFieldCount ??
            probe.capture?.maxFieldCount ?? DEFAULT_MAX_FIELD_COUNT,
          maxLength: captureExpr.capture?.maxLength ??
            probe.capture?.maxLength ?? DEFAULT_MAX_LENGTH,
        },
      })
    }
  }
  const locationKey = generateLocationKey(scriptId, lineNumber, columnNumber)
@@ -114,0 +159,0 @@ const breakpoint = locationToBreakpoint.get(locationKey)
@@ -9,3 +9,3 @@ 'use strict'
const session = require('./session')
const { getLocalStateForCallFrame } = require('./snapshot')
const { getLocalStateForCallFrame, evaluateCaptureExpressions } = require('./snapshot')
const send = require('./send')
@@ -71,5 +71,9 @@ const { getStackFromCallFrames } = require('./state')
  let maxReferenceDepth, maxCollectionSize, maxFieldCount, maxLength
  let maxReferenceDepth = 0
  let maxCollectionSize = 0
  let maxFieldCount = 0
  let maxLength = 0
  let sampled = false
  let numberOfProbesWithSnapshots = 0
  let probesWithCaptureExpressions = false
  const probes = []
@@ -109,3 +113,3 @@ let templateExpressions = ''
    if (probe.captureSnapshot === true) {
    if (probe.captureSnapshot === true || probe.compiledCaptureExpressions !== undefined) {
      // This algorithm to calculate number of sampled snapshots within the last second is not perfect, as it's not a
@@ -122,7 +126,11 @@ // sliding window. But it's quick and easy :)
      snapshotProbeIndex[numberOfProbesWithSnapshots++] = probes.length
      maxReferenceDepth = highestOrUndefined(probe.capture?.maxReferenceDepth, maxReferenceDepth)
      maxCollectionSize = highestOrUndefined(probe.capture?.maxCollectionSize, maxCollectionSize)
      maxFieldCount = highestOrUndefined(probe.capture?.maxFieldCount, maxFieldCount)
      maxLength = highestOrUndefined(probe.capture?.maxLength, maxLength)
      if (probe.captureSnapshot === true) {
        snapshotProbeIndex[numberOfProbesWithSnapshots++] = probes.length
        maxReferenceDepth = Math.max(probe.capture.maxReferenceDepth, maxReferenceDepth)
        maxCollectionSize = Math.max(probe.capture.maxCollectionSize, maxCollectionSize)
        maxFieldCount = Math.max(probe.capture.maxFieldCount, maxFieldCount)
        maxLength = Math.max(probe.capture.maxLength, maxLength)
      } else {
        probesWithCaptureExpressions = true
      }
    }
@@ -180,12 +188,28 @@
  // TODO: Create unique states for each affected probe based on that probes unique `capture` settings (DEBUG-2863)
  let processLocalState, captureErrors
  let processLocalState
  /** @type {Error[] | undefined} */
  let fatalSnapshotErrors
  if (numberOfProbesWithSnapshots !== 0) {
    const opts = {
      maxReferenceDepth,
      maxCollectionSize,
      maxFieldCount,
      maxLength,
      deadlineNs: start + config.dynamicInstrumentation.captureTimeoutNs,
    const result = await getLocalStateForCallFrame(
      params.callFrames[0],
      { maxReferenceDepth, maxCollectionSize, maxFieldCount, maxLength },
      start + config.dynamicInstrumentation.captureTimeoutNs
    )
    processLocalState = result.processLocalState
    fatalSnapshotErrors = result.fatalErrors
  }
  // Evaluate capture expressions for probes that have them
  let captureExpressionResults = null
  if (probesWithCaptureExpressions === true) {
    captureExpressionResults = new Map()
    for (const probe of probes) {
      if (probe.compiledCaptureExpressions === undefined) continue
      // eslint-disable-next-line no-await-in-loop
      captureExpressionResults.set(probe.id, await evaluateCaptureExpressions(
        params.callFrames[0],
        probe.compiledCaptureExpressions,
        start + config.dynamicInstrumentation.captureTimeoutNs
      ))
    }
    ;({ processLocalState, captureErrors } = await getLocalStateForCallFrame(params.callFrames[0], opts))
  }
@@ -236,6 +260,6 @@
    if (probe.captureSnapshot) {
      if (captureErrors?.length > 0) {
      if (fatalSnapshotErrors && fatalSnapshotErrors.length > 0) {
        // There was an error collecting the snapshot for this probe, let's not try again
        probe.captureSnapshot = false
        probe.permanentEvaluationErrors = captureErrors.map(error => ({
        probe.permanentEvaluationErrors = fatalSnapshotErrors.map(error => ({
          expr: '',
@@ -246,4 +270,36 @@ message: error.message,
      snapshot.captures = {
        lines: { [probe.location.lines[0]]: { locals: processLocalState() } },
        lines: { [probe.location.lines[0]]: { locals: /** @type {Function} */ (processLocalState)() } },
      }
    } else if (probe.compiledCaptureExpressions !== undefined) {
      const expressionResult = /** @type {Map} */ (captureExpressionResults).get(probe.id)
      if (expressionResult) {
        // Handle fatal capture errors - disable capture expressions for this probe permanently
        if (expressionResult.fatalErrors?.length > 0) {
          probe.compiledCaptureExpressions = undefined
          probe.permanentEvaluationErrors = expressionResult.fatalErrors.map(error => ({
            expr: '',
            message: error.message,
          }))
        }
        snapshot.captures = {
          lines: { [probe.location.lines[0]]: { captureExpressions: expressionResult.processCaptureExpressions() } },
        }
        // Handle transient evaluation errors - include in snapshot for this capture
        if (expressionResult.evaluationErrors?.length > 0) {
          if (snapshot.evaluationErrors === undefined) {
            snapshot.evaluationErrors = expressionResult.evaluationErrors
          } else {
            snapshot.evaluationErrors.push(...expressionResult.evaluationErrors)
          }
        }
      } else {
        log.error('[debugger:devtools_client] Missing capture expression results for probe %s (version: %s)',
          probe.id, probe.version)
        snapshot.evaluationErrors = [{
          expr: '',
          message: 'Internal error: capture expression results not found',
        }]
      }
    }
@@ -285,8 +341,4 @@
function highestOrUndefined (num, max) {
  return num === undefined ? max : Math.max(num, max ?? 0)
}
function processDD (result) {
  return result?.trace_id === undefined ? undefined : result
}
@@ -8,3 +8,3 @@ 'use strict'
// Example log line probe (simplified):
// Example log line probe with captureSnapshot (simplified):
// {
@@ -23,2 +23,19 @@ // id: '100c9a5c-45ad-49dc-818b-c570d31e11d1',
//
// Example log line probe with captureExpressions (simplified):
// Note: captureSnapshot and captureExpressions are mutually exclusive
// {
//   id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890',
//   version: 0,
//   type: 'LOG_PROBE',
//   where: { sourceFile: 'index.js', lines: ['25'] },
//   template: 'Captured expressions',
//   segments: [{ str: 'Captured expressions' }],
//   captureExpressions: [
//     { name: 'myVar', expr: { dsl: 'myVar', json: { ref: 'myVar' } }, capture: { maxReferenceDepth: 2 } },
//     { name: 'obj.foo', expr: { dsl: 'obj.foo', json: { getmember: [{ ref: 'obj' }, 'foo'] } } }
//   ],
//   capture: { maxReferenceDepth: 3 }, // default limits for expressions without explicit capture
//   sampling: { snapshotsPerSecond: 1 }
// }
//
// Example log method probe (simplified):
@@ -68,2 +85,7 @@ // {
  }
  if (probe.captureSnapshot && probe.captureExpressions?.length > 0) {
    throw new Error(
      `Cannot set both captureSnapshot and captureExpressions (probe: ${probe.id}, version: ${probe.version})`
    )
  }
@@ -70,0 +92,0 @@ switch (action) {
@@ -59,3 +59,3 @@ 'use strict'
 * @typedef {object} GetObjectOptions
 * @property {object} maxReferenceDepth - The maximum depth of the object to traverse
 * @property {number} maxReferenceDepth - The maximum depth of the object to traverse
 * @property {number} maxCollectionSize - The maximum size of a collection to include in the snapshot
@@ -66,3 +66,3 @@ * @property {number} maxFieldCount - The maximum number of properties on an object to include in the snapshot
 * @property {boolean} ctx.deadlineReached - Will be set to `true` if the deadline has been reached.
 * @property {Error[]} ctx.captureErrors - An array on which errors can be pushed if an issue is detected while
 * @property {Error[]} ctx.fatalErrors - An array on which errors can be pushed if an issue is detected while
 *   collecting the snapshot.
@@ -104,3 +104,3 @@ */
      if (size > LARGE_OBJECT_SKIP_THRESHOLD) {
        opts.ctx.captureErrors.push(new Error(
        opts.ctx.fatalErrors.push(new Error(
          `An object with ${size} properties was detected while collecting a snapshot. ` +
@@ -107,0 +107,0 @@ `This exceeds the maximum number of allowed properties of ${LARGE_OBJECT_SKIP_THRESHOLD}. ` +
| 'use strict' | ||
| const { | ||
| DEFAULT_MAX_REFERENCE_DEPTH, | ||
| DEFAULT_MAX_COLLECTION_SIZE, | ||
| DEFAULT_MAX_FIELD_COUNT, | ||
| DEFAULT_MAX_LENGTH, | ||
| } = require('./constants') | ||
| const session = require('../session') | ||
| const { collectObjectProperties } = require('./collector') | ||
| const { processRawState } = require('./processor') | ||
| const { processRawState, processRemoteObject } = require('./processor') | ||
@@ -16,16 +11,11 @@ const BIGINT_MAX = (1n << 256n) - 1n | ||
| getLocalStateForCallFrame, | ||
| evaluateCaptureExpressions, | ||
| } | ||
| /** | ||
| * @typedef {object} GetLocalStateForCallFrameOptions | ||
| * @property {number} [maxReferenceDepth] - The maximum depth of the object to traverse. Defaults to | ||
| * {@link DEFAULT_MAX_REFERENCE_DEPTH}. | ||
| * @property {number} [maxCollectionSize] - The maximum size of a collection to include in the snapshot. Defaults to | ||
| * {@link DEFAULT_MAX_COLLECTION_SIZE}. | ||
| * @property {number} [maxFieldCount] - The maximum number of properties on an object to include in the snapshot. | ||
| * Defaults to {@link DEFAULT_MAX_FIELD_COUNT}. | ||
| * @property {number} [maxLength] - The maximum length of a string to include in the snapshot. Defaults to | ||
| * {@link DEFAULT_MAX_LENGTH}. | ||
| * @property {bigint} [deadlineNs] - The deadline in nanoseconds compared to `process.hrtime.bigint()`. Defaults to | ||
| * {@link BIGINT_MAX}. If the deadline is reached, the snapshot will be truncated. | ||
| * @typedef {object} CaptureLimits - Fully resolved capture limits (all fallbacks already applied) | ||
| * @property {number} maxReferenceDepth - The maximum depth of the object to traverse | ||
| * @property {number} maxCollectionSize - The maximum size of a collection to include in the snapshot | ||
| * @property {number} maxFieldCount - The maximum number of properties on an object to include in the snapshot | ||
| * @property {number} maxLength - The maximum length of a string to include in the snapshot | ||
| */ | ||
@@ -37,19 +27,15 @@ | ||
| * @param {import('inspector').Debugger.CallFrame} callFrame - The call frame to get the local state for | ||
| * @param {GetLocalStateForCallFrameOptions} [opts] - The options for the snapshot | ||
| * @returns {Promise<object>} The local state for the call frame | ||
| * @param {CaptureLimits} limits - The capture limits | ||
| * @param {bigint} [deadlineNs] - The deadline in nanoseconds compared to `process.hrtime.bigint()`. Defaults to | ||
| * {@link BIGINT_MAX}. If the deadline is reached, the snapshot will be truncated. | ||
| * @returns {Promise<{ processLocalState: () => ReturnType<typeof processRawState>, fatalErrors: Error[] }>} The local | ||
| * state for the call frame | ||
| */ | ||
| async function getLocalStateForCallFrame ( | ||
| callFrame, | ||
| { | ||
| maxReferenceDepth = DEFAULT_MAX_REFERENCE_DEPTH, | ||
| maxCollectionSize = DEFAULT_MAX_COLLECTION_SIZE, | ||
| maxFieldCount = DEFAULT_MAX_FIELD_COUNT, | ||
| maxLength = DEFAULT_MAX_LENGTH, | ||
| deadlineNs = BIGINT_MAX, | ||
| } = {} | ||
| ) { | ||
| /** @type {{ deadlineReached: boolean, captureErrors: Error[] }} */ | ||
| const ctx = { deadlineReached: false, captureErrors: [] } | ||
| async function getLocalStateForCallFrame (callFrame, limits, deadlineNs = BIGINT_MAX) { | ||
| const { maxReferenceDepth, maxCollectionSize, maxFieldCount, maxLength } = limits | ||
| /** @type {{ deadlineReached: boolean, fatalErrors: Error[] }} */ | ||
| const ctx = { deadlineReached: false, fatalErrors: [] } | ||
| const opts = { maxReferenceDepth, maxCollectionSize, maxFieldCount, deadlineNs, ctx } | ||
| const rawState = [] | ||
| /** @type {ReturnType<typeof processRawState> | null} */ | ||
| let processedState = null | ||
@@ -68,5 +54,5 @@ | ||
| } catch (err) { | ||
| ctx.captureErrors.push(new Error( | ||
| ctx.fatalErrors.push(new Error( | ||
| `Error getting local state for closure scope (type: ${scope.type}). ` + | ||
| 'Future snapshots for existing probes in this location will be skipped until the Node.js process is restarted', | ||
| 'Future snapshots for existing probes in this location will be skipped until the probes are re-applied', | ||
| { cause: err } // TODO: The cause is not used by the backend | ||
@@ -78,3 +64,3 @@ )) | ||
| // Delay calling `processRawState` so the caller gets a chance to resume the main thread before processing `rawState` | ||
| // Delay calling `processRawState` so caller can resume the main thread before processing `rawState` | ||
| return { | ||
@@ -85,4 +71,150 @@ processLocalState () { | ||
| }, | ||
| captureErrors: ctx.captureErrors, | ||
| fatalErrors: ctx.fatalErrors, | ||
| } | ||
| } | ||
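A sketch of the intended call pattern for the new signature; the resume step is an assumption based on the "delay processing" comment above and is not shown in this diff:

    // Sketch: collect raw state while the debugger is paused, resume the main
    // thread, then do the expensive processing. Assumes `session`, `callFrame`
    // and `limits` are in scope as in the code above.
    const { processLocalState, fatalErrors } = await getLocalStateForCallFrame(callFrame, limits)
    await session.post('Debugger.resume') // assumed: caller resumes before processing
    if (fatalErrors.length === 0) {
      const locals = processLocalState()
      // ... attach `locals` to the snapshot
    }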
| /** | ||
| * @typedef {object} CompiledCaptureExpression | ||
| * @property {string} name - The name of the expression (used as key in snapshot) | ||
| * @property {string} expression - The compiled expression string to evaluate | ||
| * @property {CaptureLimits} limits - Fully resolved capture limits (precomputed at probe setup) | ||
| */ | ||
| /** | ||
| * @typedef {object} CaptureExpressionResult | ||
| * @property {() => Record<string, ReturnType<typeof processRemoteObject>>} processCaptureExpressions - Callback to | ||
| * process raw data into snapshot format | ||
| * @property {{ expr: string, message: string }[]} evaluationErrors - Transient errors from expression evaluation | ||
| * (safe to retry) | ||
| * @property {Error[]} fatalErrors - Fatal errors that should disable capture expressions for this probe permanently | ||
| */ | ||
| /** | ||
| * @typedef {object} EvaluateOnCallFrameResult | ||
| * @property {import('./processor').RemoteObjectWithProperties} result - The result of the evaluation | ||
| * @property {import('inspector').Runtime.ExceptionDetails} [exceptionDetails] - Exception details if evaluation failed | ||
| */ | ||
| /** | ||
| * Evaluate capture expressions for a call frame. | ||
| * | ||
| * Collects raw data while the target is paused and returns a callback to process it after resume. | ||
| * | ||
| * @param {import('inspector').Debugger.CallFrame} callFrame - The call frame to evaluate expressions on | ||
| * @param {CompiledCaptureExpression[]} expressions - The compiled expressions with precomputed capture limits | ||
| * @param {bigint} [deadlineNs] - The deadline in nanoseconds. Defaults to {@link BIGINT_MAX}. If the deadline is | ||
| * reached, the snapshot will be truncated. | ||
| * @returns {Promise<CaptureExpressionResult>} Raw results with deferred processing callback | ||
| */ | ||
| async function evaluateCaptureExpressions (callFrame, expressions, deadlineNs = BIGINT_MAX) { | ||
| /** @type {{ name: string, remoteObject: object, maxLength: number }[]} */ | ||
| const rawResults = [] | ||
| /** @type {{ expr: string, message: string }[]} */ | ||
| const evaluationErrors = [] | ||
| /** @type {Error[]} */ | ||
| const fatalErrors = [] | ||
| /** @type {Record<string, ReturnType<typeof processRemoteObject>> | null} */ | ||
| let processedResult = null | ||
| for (let i = 0; i < expressions.length; i++) { | ||
| const { name, expression, limits } = expressions[i] | ||
| const { maxReferenceDepth, maxCollectionSize, maxFieldCount, maxLength } = limits | ||
| try { | ||
| const { result, exceptionDetails } = /** @type {EvaluateOnCallFrameResult} */ ( | ||
| // eslint-disable-next-line no-await-in-loop | ||
| await session.post('Debugger.evaluateOnCallFrame', { | ||
| callFrameId: callFrame.callFrameId, | ||
| expression, | ||
| }) | ||
| ) | ||
| // Handle evaluation exceptions (maybe transient - bad expression, undefined var, etc.) | ||
| if (exceptionDetails) { | ||
| evaluationErrors.push({ expr: name, message: extractErrorMessage(exceptionDetails) }) | ||
| continue | ||
| } | ||
| // Collect raw properties for objects/functions while still paused | ||
| if ((result.type === 'object' || result.type === 'function') && result.objectId && maxReferenceDepth > 0) { | ||
| const ctx = { deadlineReached: false, fatalErrors: [] } | ||
| const isCollection = result.subtype === 'array' || result.subtype === 'typedarray' | ||
| // eslint-disable-next-line no-await-in-loop | ||
| result.properties = await collectObjectProperties( | ||
| result.objectId, | ||
| { | ||
| // The expression result itself is depth 0, so we subtract 1 when collecting its properties (depth 1+) | ||
| maxReferenceDepth: maxReferenceDepth - 1, | ||
| maxCollectionSize, | ||
| maxFieldCount, | ||
| deadlineNs, | ||
| ctx, | ||
| }, | ||
| 0, | ||
| isCollection | ||
| ) | ||
| // Propagate fatal errors from nested collection | ||
| if (ctx.fatalErrors.length > 0) { | ||
| fatalErrors.push(...ctx.fatalErrors) | ||
| } | ||
| if (ctx.deadlineReached === true) { | ||
| // Add the current expression (properties may be incomplete due to timeout) | ||
| rawResults.push({ name, remoteObject: result, maxLength }) | ||
| // Add stub entries for remaining uncaptured expressions | ||
| for (let j = i + 1; j < expressions.length; j++) { | ||
| rawResults.push({ | ||
| name: expressions[j].name, | ||
| remoteObject: { notCapturedReason: 'timeout' }, | ||
| maxLength: 0, | ||
| }) | ||
| } | ||
| break | ||
| } | ||
| } | ||
| rawResults.push({ name, remoteObject: result, maxLength }) | ||
| } catch (err) { | ||
| fatalErrors.push(new Error( | ||
| `Error capturing expression "${name}". ` + | ||
| 'Capture expressions for this probe will be skipped until the probe is re-applied', | ||
| { cause: err } // TODO: The cause is not used by the backend | ||
| )) | ||
| } | ||
| } | ||
| // Delay calling `processRemoteObject` so caller can resume the main thread before processing `remoteObject` | ||
| return { | ||
| processCaptureExpressions () { | ||
| if (processedResult !== null) return processedResult | ||
| processedResult = {} | ||
| for (const { name, remoteObject, maxLength } of rawResults) { | ||
| // If the remote object has notCapturedReason (e.g., timeout), use it as-is without processing | ||
| processedResult[name] = remoteObject.notCapturedReason === undefined | ||
| ? processRemoteObject(remoteObject, maxLength) | ||
| : remoteObject | ||
| } | ||
| return processedResult | ||
| }, | ||
| evaluationErrors, | ||
| fatalErrors, | ||
| } | ||
| } | ||
| /** | ||
| * Extract the error message from the exception details. | ||
| * | ||
| * @param {import('inspector').Runtime.ExceptionDetails} exceptionDetails - The exception details | ||
| * @returns {string} The error message | ||
| */ | ||
| function extractErrorMessage (exceptionDetails) { | ||
| const description = exceptionDetails.exception?.description | ||
| if (!description) return 'Unknown evaluation error' | ||
| const startOfStackTraceIndex = description.indexOf('\n    at ') | ||
| if (startOfStackTraceIndex === -1) return description | ||
| return description.slice(0, startOfStackTraceIndex) | ||
| } |
@@ -9,4 +9,22 @@ 'use strict' | ||
| processRawState: processProperties, | ||
| processRemoteObject, | ||
| } | ||
| /** | ||
| * A RemoteObject with collected properties attached. | ||
| * | ||
| * @typedef {import('inspector').Runtime.RemoteObject & { properties?: object[] }} RemoteObjectWithProperties | ||
| */ | ||
| /** | ||
| * Process a RemoteObject into the snapshot format. | ||
| * | ||
| * @param {RemoteObjectWithProperties} remoteObject | ||
| * @param {number} maxLength - Maximum string length | ||
| * @returns {object} The processed value in snapshot format | ||
| */ | ||
| function processRemoteObject (remoteObject, maxLength) { | ||
| return getPropertyValueRaw({ value: remoteObject }, maxLength) | ||
| } | ||
| // Matches classes in source code, no matter how they're written: | ||
@@ -13,0 +31,0 @@ // - Named: class MyClass {} |
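A sketch of the input processRemoteObject expects: a Chrome DevTools Protocol RemoteObject, optionally with the properties collected while paused (objectId and values invented):

    // Illustrative RemoteObject as returned by Debugger.evaluateOnCallFrame,
    // with `properties` attached by collectObjectProperties while paused.
    // The objectId and values are invented.
    const remoteObject = {
      type: 'object',
      className: 'Object',
      description: 'Object',
      objectId: '{"injectedScriptId":1,"id":42}',
      properties: [{ name: 'foo', value: { type: 'string', value: 'bar' } }],
    }
    // processRemoteObject(remoteObject, 255) would feed this through
    // getPropertyValueRaw to produce the snapshot-format value.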
@@ -41,3 +41,3 @@ 'use strict' | ||
| httpAgent: new HttpAgent(), | ||
| HttpsAgent: new HttpsAgent(), | ||
| httpsAgent: new HttpsAgent(), | ||
| } |
@@ -13,3 +13,3 @@ 'use strict' | ||
| EVALUATIONS_EVENT_TYPE: 'evaluation_metric', | ||
| EVALUATIONS_ENDPOINT: '/api/intake/llm-obs/v1/eval-metric', | ||
| EVALUATIONS_ENDPOINT: '/api/intake/llm-obs/v2/eval-metric', | ||
@@ -16,0 +16,0 @@ EVP_PAYLOAD_SIZE_LIMIT: 5 << 20, // 5MB (actual limit is 5.1MB) |
@@ -362,3 +362,3 @@ 'use strict' | ||
| const { label, value, tags } = options | ||
| const { label, value, tags, reasoning, assessment, metadata } = options | ||
| const metricType = options.metricType?.toLowerCase() | ||
@@ -369,5 +369,5 @@ if (!label) { | ||
| } | ||
| if (!metricType || !['categorical', 'score', 'boolean'].includes(metricType)) { | ||
| if (!metricType || !['categorical', 'score', 'boolean', 'json'].includes(metricType)) { | ||
| err = 'invalid_metric_type' | ||
| throw new Error('metricType must be one of "categorical" or "score"') | ||
| throw new Error('metricType must be one of "categorical", "score", "boolean" or "json"') | ||
| } | ||
@@ -386,2 +386,18 @@ if (metricType === 'categorical' && typeof value !== 'string') { | ||
| } | ||
| if (metricType === 'json' && !(typeof value === 'object' && value != null && !Array.isArray(value))) { | ||
| err = 'invalid_metric_value' | ||
| throw new Error('value must be a JSON object for a json metric') | ||
| } | ||
| if (assessment != null && assessment !== 'pass' && assessment !== 'fail') { | ||
| err = 'invalid_assessment' | ||
| throw new Error('assessment must be "pass" or "fail"') | ||
| } | ||
| if (reasoning != null && typeof reasoning !== 'string') { | ||
| err = 'invalid_reasoning' | ||
| throw new Error('reasoning must be a string') | ||
| } | ||
| if (metadata != null && (typeof metadata !== 'object' || Array.isArray(metadata))) { | ||
| err = 'invalid_metadata' | ||
| throw new Error('metadata must be a JSON object') | ||
| } | ||
@@ -418,4 +434,8 @@ const evaluationTags = { | ||
| const payload = { | ||
| span_id: spanId, | ||
| trace_id: traceId, | ||
| join_on: { | ||
| span: { | ||
| span_id: spanId, | ||
| trace_id: traceId, | ||
| }, | ||
| }, | ||
| label, | ||
@@ -428,2 +448,11 @@ metric_type: metricType, | ||
| } | ||
| if (reasoning != null) { | ||
| payload.reasoning = reasoning | ||
| } | ||
| if (metadata != null) { | ||
| payload.metadata = metadata | ||
| } | ||
| if (assessment != null) { | ||
| payload.assessment = assessment | ||
| } | ||
| const currentStore = storage.getStore() | ||
@@ -430,0 +459,0 @@ const routing = currentStore?.routingContext |
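Putting the changes together, a sketch of a v2 evaluation payload for the new json metric type; ids and values are invented, and the field carrying the json value is an assumption since it is not shown in this hunk:

    // Illustrative only: field names taken from the diff above where shown.
    const payload = {
      join_on: {
        span: {
          span_id: '1234567890',
          trace_id: 'abcdef1234567890',
        },
      },
      label: 'answer-quality',
      metric_type: 'json',
      json_value: { coherence: 0.9 }, // assumed field name, not shown in this hunk
      assessment: 'pass',
      reasoning: 'Response is coherent and on-topic',
      metadata: { evaluator: 'llm-judge' },
    }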
@@ -10,46 +10,22 @@ 'use strict' | ||
| constructor (...args) { | ||
| super(...args) | ||
| this.serviceTags = { | ||
| dddbs: '', | ||
| encodedDddbs: '', | ||
| dde: '', | ||
| encodedDde: '', | ||
| ddps: '', | ||
| encodedDdps: '', | ||
| ddpv: '', | ||
| encodedDdpv: '', | ||
| } | ||
| } | ||
| /** | ||
| * @param {string} serviceName | ||
| * @param {import('../../../..').Span} span | ||
| * @param {object} peerData | ||
| * @returns {string} | ||
| */ | ||
| #createDBMPropagationCommentService (serviceName, span, peerData) { | ||
| const spanTags = span.context()._tags | ||
| const encodedDddb = encode(spanTags['db.name']) | ||
| const encodedDddbs = encode(serviceName) | ||
| const encodedDde = encode(this.tracer._env) | ||
| const encodedDdh = encode(spanTags['out.host']) | ||
| const encodedDdps = this.tracer._service ?? '' | ||
| const encodedDdpv = this.tracer._version | ||
| encodingServiceTags (serviceTag, encodeATag, spanConfig) { | ||
| if (serviceTag !== spanConfig) { | ||
| this.serviceTags[serviceTag] = spanConfig | ||
| this.serviceTags[encodeATag] = encodeURIComponent(spanConfig) | ||
| } | ||
| } | ||
| createDBMPropagationCommentService (serviceName, span) { | ||
| this.encodingServiceTags('dddbs', 'encodedDddbs', serviceName) | ||
| this.encodingServiceTags('dde', 'encodedDde', this.tracer._env) | ||
| this.encodingServiceTags('ddps', 'encodedDdps', this.tracer._service) | ||
| this.encodingServiceTags('ddpv', 'encodedDdpv', this.tracer._version) | ||
| if (span.context()._tags['out.host']) { | ||
| this.encodingServiceTags('ddh', 'encodedDdh', span._spanContext._tags['out.host']) | ||
| } | ||
| if (span.context()._tags['db.name']) { | ||
| this.encodingServiceTags('dddb', 'encodedDddb', span._spanContext._tags['db.name']) | ||
| } | ||
| const { encodedDddb, encodedDddbs, encodedDde, encodedDdh, encodedDdps, encodedDdpv } = this.serviceTags | ||
| let dbmComment = `dddb='${encodedDddb}',dddbs='${encodedDddbs}',dde='${encodedDde}',ddh='${encodedDdh}',` + | ||
| `ddps='${encodedDdps}',ddpv='${encodedDdpv}'` | ||
| const peerData = this.getPeerService(span.context()._tags) | ||
| if (peerData !== undefined && peerData[PEER_SERVICE_SOURCE_KEY] === PEER_SERVICE_KEY) { | ||
| this.encodingServiceTags('ddprs', 'encodedDdprs', peerData[PEER_SERVICE_KEY]) | ||
| const { encodedDdprs } = this.serviceTags | ||
| dbmComment += `,ddprs='${encodedDdprs}'` | ||
| dbmComment += `,ddprs='${encode(peerData[PEER_SERVICE_KEY])}'` | ||
| } | ||
@@ -59,5 +35,9 @@ return dbmComment | ||
| getDbmServiceName (span, tracerService) { | ||
| /** | ||
| * @param {string} tracerService | ||
| * @param {object} peerData | ||
| * @returns {string} | ||
| */ | ||
| #getDbmServiceName (tracerService, peerData) { | ||
| if (this._tracerConfig.spanComputePeerService) { | ||
| const peerData = this.getPeerService(span.context()._tags) | ||
| return this.getPeerServiceRemap(peerData)[PEER_SERVICE_KEY] || tracerService | ||
@@ -68,5 +48,9 @@ } | ||
| /** | ||
| * @param {import('../../../..').Span} span | ||
| * @param {string} serviceName | ||
| * @param {boolean} disableFullMode | ||
| */ | ||
| createDbmComment (span, serviceName, disableFullMode = false) { | ||
| const mode = this.config.dbmPropagationMode | ||
| const dbmService = this.getDbmServiceName(span, serviceName) | ||
@@ -77,3 +61,5 @@ if (mode === 'disabled') { | ||
| const servicePropagation = this.createDBMPropagationCommentService(dbmService, span) | ||
| const peerData = this.getPeerService(span.context()._tags) | ||
| const dbmService = this.#getDbmServiceName(serviceName, peerData) | ||
| const servicePropagation = this.#createDBMPropagationCommentService(dbmService, span, peerData) | ||
@@ -90,2 +76,9 @@ if (disableFullMode || mode === 'service') { | ||
| /** | ||
| * @param {import('../../../..').Span} span | ||
| * @param {string} query | ||
| * @param {string} serviceName | ||
| * @param {boolean} disableFullMode | ||
| * @returns {string} | ||
| */ | ||
| injectDbmQuery (span, query, serviceName, disableFullMode = false) { | ||
@@ -103,2 +96,6 @@ const dbmTraceComment = this.createDbmComment(span, serviceName, disableFullMode) | ||
| /** | ||
| * @param {string} query | ||
| * @returns {string} | ||
| */ | ||
| maybeTruncate (query) { | ||
@@ -117,2 +114,4 @@ const maxLength = typeof this.config.truncate === 'number' | ||
| const encode = value => value ? encodeURIComponent(value) : '' | ||
| module.exports = DatabasePlugin |
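What the service-mode propagation comment looks like once assembled; the tag values are invented, and the SQL comment placement is an assumption based on injectDbmQuery/maybeTruncate above:

    // Illustrative only: key order follows the template string above.
    const dbmComment =
      "dddb='orders_db',dddbs='orders-service',dde='prod',ddh='db.example.com'," +
      "ddps='my-app',ddpv='1.2.3'"
    // injectDbmQuery would typically prepend it as a SQL comment (assumption):
    const query = `/*${dbmComment}*/ SELECT * FROM orders`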
@@ -21,2 +21,6 @@ 'use strict' | ||
| class OutboundPlugin extends TracingPlugin { | ||
| /** | ||
| * @type {string[]} | ||
| */ | ||
| static peerServicePrecursors = [] | ||
@@ -32,2 +36,5 @@ | ||
| /** | ||
| * @param {{ parentStore?: { span: import('../../../..').Span } }} ctx | ||
| */ | ||
| bindFinish (ctx) { | ||
@@ -37,4 +44,4 @@ return ctx.parentStore | ||
| startSpan (...args) { | ||
| const span = super.startSpan(...args) | ||
| startSpan (name, options, enterOrCtx) { | ||
| const span = super.startSpan(name, options, enterOrCtx) | ||
| if ( | ||
@@ -49,2 +56,5 @@ this._tracerConfig.codeOriginForSpans.enabled && | ||
| /** | ||
| * @param {Record<string, string>} tags | ||
| */ | ||
| getPeerService (tags) { | ||
@@ -82,2 +92,5 @@ /** | ||
| /** | ||
| * @param {Record<string, string>} peerData | ||
| */ | ||
| getPeerServiceRemap (peerData) { | ||
@@ -100,2 +113,5 @@ /** | ||
| /** | ||
| * @param {{ currentStore?: { span: import('../../../..').Span } }} ctx | ||
| */ | ||
| finish (ctx) { | ||
@@ -113,2 +129,5 @@ const span = ctx?.currentStore?.span || this.activeSpan | ||
| /** | ||
| * @param {import('../../../..').Span} span | ||
| */ | ||
| tagPeerService (span) { | ||
@@ -123,2 +142,5 @@ if (this._tracerConfig.spanComputePeerService) { | ||
| /** | ||
| * @param {object} ctx | ||
| */ | ||
| connect (ctx) { | ||
@@ -128,2 +150,5 @@ this.addHost(ctx) | ||
| /** | ||
| * @param {{ hostname: string, port: number, currentStore?: { span: import('../../../..').Span } }} ctx | ||
| */ | ||
| addHost (ctx) { | ||
@@ -130,0 +155,0 @@ const { hostname, port } = ctx |
@@ -19,7 +19,14 @@ 'use strict' | ||
| get activeSpan () { | ||
| const store = storage('legacy').getStore() | ||
| const store = /** @type {{ span?: import('../../../..').Span }} */ (storage('legacy').getStore()) | ||
| return store && store.span | ||
| return store?.span | ||
| } | ||
| /** | ||
| * @param {object} opts | ||
| * @param {string} [opts.type] | ||
| * @param {string} [opts.id] | ||
| * @param {string} [opts.kind] | ||
| * @returns {string} | ||
| */ | ||
| serviceName (opts = {}) { | ||
@@ -35,2 +42,9 @@ const { | ||
| /** | ||
| * @param {object} opts | ||
| * @param {string} [opts.type] | ||
| * @param {string} [opts.id] | ||
| * @param {string} [opts.kind] | ||
| * @returns {string} | ||
| */ | ||
| operationName (opts = {}) { | ||
@@ -46,2 +60,6 @@ const { | ||
| /** | ||
| * @param {object} config | ||
| * @returns {object} | ||
| */ | ||
| configure (config) { | ||
@@ -59,2 +77,5 @@ return super.configure({ | ||
| /** | ||
| * @param {{ currentStore?: { span: import('../../../..').Span } }} ctx | ||
| */ | ||
| finish (ctx) { | ||
@@ -65,2 +86,5 @@ const span = ctx?.currentStore?.span || this.activeSpan | ||
| /** | ||
| * @param {{ currentStore?: { span: import('../../../..').Span }, error?: unknown }} ctxOrError | ||
| */ | ||
| error (ctxOrError) { | ||
@@ -92,2 +116,6 @@ if (ctxOrError?.currentStore) { | ||
| /** | ||
| * @param {string} eventName | ||
| * @param {Function} handler | ||
| */ | ||
| addTraceSub (eventName, handler) { | ||
@@ -98,2 +126,6 @@ const prefix = this.constructor.prefix || `apm:${this.component}:${this.operation}` | ||
| /** | ||
| * @param {string} eventName | ||
| * @param {Function} transform | ||
| */ | ||
| addTraceBind (eventName, transform) { | ||
@@ -104,7 +136,10 @@ const prefix = this.constructor.prefix || `apm:${this.component}:${this.operation}` | ||
| /** | ||
| * @param {unknown} error | ||
| * @param {import('../../../..').Span} [span] | ||
| */ | ||
| addError (error, span = this.activeSpan) { | ||
| if (span && !span._spanContext._tags.error) { | ||
| // Errors may be wrapped in a context. | ||
| error = (error && error.error) || error | ||
| span.setTag('error', error || 1) | ||
| span.setTag('error', error?.error || error || 1) | ||
| } | ||
@@ -111,0 +146,0 @@ } |
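The error-tag change in addError unwraps errors delivered inside a context object; a small demonstration of the new expression (values invented):

    // Before this change, a wrapper ctx like { error } could be tagged as-is;
    // `error?.error || error || 1` unwraps it, falling back to the raw value
    // or `1` when no error object is available.
    const ctxOrError = { error: new Error('boom') }
    console.log(ctxOrError?.error || ctxOrError || 1) // => Error: boom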
@@ -17,2 +17,3 @@ 'use strict' | ||
| const PROXY_HEADER_STAGE = 'x-dd-proxy-stage' | ||
| const PROXY_HEADER_REGION = 'x-dd-proxy-region' | ||
@@ -24,2 +25,6 @@ const supportedProxies = { | ||
| }, | ||
| 'azure-apim': { | ||
| spanName: 'azure.apim', | ||
| component: 'azure-apim', | ||
| }, | ||
| } | ||
@@ -58,2 +63,3 @@ | ||
| stage: proxyContext.stage, | ||
| region: proxyContext.region, | ||
| }, | ||
@@ -97,2 +103,3 @@ }, traceCtx, config) | ||
| proxySystemName: headers[PROXY_HEADER_SYSTEM], | ||
| region: headers[PROXY_HEADER_REGION], | ||
| } | ||
@@ -99,0 +106,0 @@ } |
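A sketch of request headers that would yield an azure-apim inferred proxy span; the stage and region header names come from the constants above, while the header behind PROXY_HEADER_SYSTEM is not shown in this diff, so 'x-dd-proxy' is an assumption:

    // Illustrative only: values invented.
    const headers = {
      'x-dd-proxy': 'azure-apim', // assumed name of PROXY_HEADER_SYSTEM
      'x-dd-proxy-stage': 'prod',
      'x-dd-proxy-region': 'eastus',
    }
    // The extraction above would then produce something like:
    // { proxySystemName: 'azure-apim', stage: 'prod', region: 'eastus' }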
@@ -122,3 +122,3 @@ 'use strict' | ||
| } else { | ||
| span = web.startChildSpan(tracer, config, name, req, traceCtx) | ||
| span = web.startServerlessSpanWithInferredProxy(tracer, config, name, req, traceCtx) | ||
| } | ||
@@ -279,3 +279,3 @@ | ||
| startChildSpan (tracer, config, name, req, traceCtx) { | ||
| startServerlessSpanWithInferredProxy (tracer, config, name, req, traceCtx) { | ||
| const headers = req.headers | ||
@@ -343,3 +343,3 @@ const reqCtx = contexts.get(req) | ||
| finishSpan (context) { | ||
| finishSpan (context, spanType) { | ||
| const { req, res } = context | ||
@@ -349,3 +349,3 @@ | ||
| addRequestTags(context, this.TYPE) | ||
| addRequestTags(context, spanType) | ||
| addResponseTags(context) | ||
@@ -360,3 +360,3 @@ | ||
| finishAll (context) { | ||
| finishAll (context, spanType) { | ||
| for (const beforeEnd of context.beforeEnd) { | ||
@@ -368,3 +368,3 @@ beforeEnd() | ||
| web.finishSpan(context) | ||
| web.finishSpan(context, spanType) | ||
@@ -466,2 +466,3 @@ finishInferredProxySpan(context) | ||
| const url = extractURL(req) | ||
| const type = spanType ?? WEB | ||
@@ -472,3 +473,3 @@ span.addTags({ | ||
| [SPAN_KIND]: SERVER, | ||
| [SPAN_TYPE]: spanType, | ||
| [SPAN_TYPE]: type, | ||
| [HTTP_USERAGENT]: req.headers['user-agent'], | ||
@@ -475,0 +476,0 @@ }) |
@@ -7,3 +7,3 @@ 'use strict' | ||
| const { getAgentUrl } = require('./agent/url') | ||
| const { info, warn } = require('./log/writer') | ||
| const { warn } = require('./log/writer') | ||
@@ -33,3 +33,3 @@ const errors = {} | ||
| info('DATADOG TRACER CONFIGURATION - ' + out) | ||
| warn('DATADOG TRACER CONFIGURATION - ' + out) | ||
| if (agentError) { | ||
@@ -36,0 +36,0 @@ warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message) |
| 'use strict' | ||
| const types = require('../../../../../ext/types') | ||
| const web = require('./web') | ||
| const serverless = { ...web, TYPE: types.SERVERLESS } | ||
| module.exports = serverless |
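This small module is the consumer of the spanType parameter added to web.finishSpan/finishAll above: it reuses all of the web utilities and only swaps the span type. A sketch of the effect (the call shape is assumed):

    // serverless shares web's implementation; only the span type differs.
    // serverless.finishSpan(context, serverless.TYPE) // assumed call shape;
    // tags the span with the serverless type instead of the WEB default.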
(Diffs of three additional files were too large for the viewer to display.)
Supply-chain scan findings (all in 1 package):

- Network access: the module accesses the network (5 instances).
- Shell access: the module accesses the system shell, which increases the risk of executing arbitrary code (1 instance).
- Uses eval: the package uses dynamic code execution (e.g., eval()), which can prevent the code from running in some environments and increases the risk of exploits or malicious behavior (2 instances).
- Debug access: uses debug, reflection, and dynamic code execution features (6 instances).
- Dynamic require: dynamic require can indicate dangerous or unsafe dynamic code execution (1 instance).
- Environment variable access: the package accesses environment variables, which may be a sign of credential stuffing or data theft (15 instances).
- Filesystem access: accesses the file system and could potentially read sensitive data (2 instances).
- AI-detected potential code anomaly: AI has identified unusual behaviors that may pose a security risk (1 instance).
- Long strings: contains long string literals, which may be a sign of obfuscated or packed code (1 instance).
- URL strings: the package contains fragments of external URLs or IP addresses, which it may access at runtime (1 instance).