@newrelic/aws-sdk
Comparing version 7.1.0 to 7.2.0
@@ -0,1 +1,19 @@
### v7.2.0 (2024-02-29)
#### Features
* Added ability to disable instrumenting streaming via `ai_monitoring.streaming.enabled` = `false` ([#257](https://github.com/newrelic/node-newrelic-aws-sdk/pull/257)) ([9b5abd6](https://github.com/newrelic/node-newrelic-aws-sdk/commit/9b5abd6a454924da065e7dbf29839f338ec8fa6b))
#### Code refactoring
* moved mock server to v3 so deps can be installed during versioned test run ([#250](https://github.com/newrelic/node-newrelic-aws-sdk/pull/250)) ([33bf934](https://github.com/newrelic/node-newrelic-aws-sdk/commit/33bf934bf123f43839cb7d3e0c3d6583ef61bdc5))
* Removed storing message, request and completion id to be used to post LlmFeedbackEvents ([#261](https://github.com/newrelic/node-newrelic-aws-sdk/pull/261)) ([0ab4890](https://github.com/newrelic/node-newrelic-aws-sdk/commit/0ab4890082e7492429cce22642d22e687c63b105))
* Updated aws sdk instrumentation to construct specs at instrumentation ([#259](https://github.com/newrelic/node-newrelic-aws-sdk/pull/259)) ([94de420](https://github.com/newrelic/node-newrelic-aws-sdk/commit/94de4208ebbf1161c2c2aaf66aced97a45c610b8))
#### Miscellaneous chores
* Apply LLM updates based on specification changes ([#253](https://github.com/newrelic/node-newrelic-aws-sdk/pull/253)) ([a32f0bd](https://github.com/newrelic/node-newrelic-aws-sdk/commit/a32f0bd4ff5e0d0fc6313e17b40f23d817608383))
* Refactor LLM user added metadata ([#256](https://github.com/newrelic/node-newrelic-aws-sdk/pull/256)) ([c2a0314](https://github.com/newrelic/node-newrelic-aws-sdk/commit/c2a0314f0c0090763fb6be77808a75e72df3a0a4))
* Replace inlined bedrock server with test-utils version ([#251](https://github.com/newrelic/node-newrelic-aws-sdk/pull/251)) ([94a2562](https://github.com/newrelic/node-newrelic-aws-sdk/commit/94a2562a641e5b66b0a198a9c205bf1d3d97c0f4))
### v7.1.0 (2024-01-18)
@@ -2,0 +20,0 @@
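The first Features entry above adds a configuration switch for streaming instrumentation. As a rough sketch (not taken from this diff), an application using the `newrelic` agent would toggle it in its `newrelic.js` config file; only the `ai_monitoring.streaming.enabled` path comes from the changelog entry and the check in the instrumentation later in this diff, while the remaining keys and values are illustrative placeholders.

```js
// newrelic.js — minimal sketch of disabling streaming instrumentation.
// Only ai_monitoring.streaming.enabled comes from the changelog entry above;
// the other keys and values are illustrative placeholders.
'use strict'

exports.config = {
  app_name: ['my-bedrock-app'],    // placeholder application name
  license_key: 'YOUR_LICENSE_KEY', // placeholder license key
  ai_monitoring: {
    enabled: true,                 // AI monitoring stays on
    streaming: {
      enabled: false               // skip instrumenting streamed Bedrock responses
    }
  }
}
```

With this setting, the instrumentation logs a warning and increments a `Supportability/Nodejs/ML/Streaming/Disabled` metric instead of wrapping the stream, as shown in the `isStreamingEnabled` branch later in this diff.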
@@ -42,6 +42,5 @@ /*
const { agent, content, isResponse, index, completionId } = params
const { content, isResponse, index, completionId } = params
this.is_response = isResponse
this.conversation_id = this.conversationId(agent)
this.completion_id = completionId
@@ -48,0 +47,0 @@ this.sequence = index
@@ -26,5 +26,4 @@ /*
const { agent, segment, isError } = params
const { segment, isError } = params
this.error = isError
this.conversation_id = this.conversationId(agent)
this.duration = segment.getDurationInMillis()
@@ -31,0 +30,0 @@ this['request.max_tokens'] = this.bedrockCommand.maxTokens
@@ -9,2 +9,3 @@ /*
const { randomUUID } = require('crypto')
const { DESTINATIONS } = require('../util')
@@ -16,5 +17,2 @@ /**
* @property {BedrockResponse} bedrockResponse A parsed response from the API.
* @property {object} credentials An object representing the credentials that
* will be used by the AWS client. This should match the result of
* `await client.credentials()`.
* @property {object} segment The current segment for the trace.
@@ -29,5 +27,2 @@ */
bedrockResponse: {},
credentials: {
accessKeyId: ''
},
segment: {
@@ -58,3 +53,3 @@ transaction: {}
const { agent, bedrockCommand, bedrockResponse, credentials, segment } = params
const { agent, bedrockCommand, bedrockResponse, segment } = params
this.bedrockCommand = bedrockCommand
@@ -67,3 +62,2 @@ this.bedrockResponse = bedrockResponse
this.appName = agent.config.applications()[0]
this.api_key_last_four_digits = credentials?.accessKeyId.slice(-4)
this.span_id = segment.id
@@ -73,2 +67,3 @@ this.transaction_id = segment.transaction.id
this.request_id = this.bedrockResponse.requestId
this.metadata = agent
@@ -81,19 +76,17 @@ this['response.model'] = this.bedrockCommand.modelId
/**
* Retrieve the conversation identifier from the custom attributes
* stored in the current transaction.
* Pull user set `llm.*` attributes from the current transaction and
* add them to the event.
*
* @param {object} agent The New Relic agent that provides access to the
* transaction.
*
* @returns {string}
*/
conversationId(agent) {
set metadata(agent) {
const tx = agent.tracer.getTransaction()
// This magic number is brought to you by:
// https://github.com/newrelic/node-newrelic/blob/10762a7/lib/config/attribute-filter.js#L10-L23
// We hard code it here because we'd have a cyclic dependency if we tried
// to import it from `newrelic` (`newrelic` uses this module to provide
// the AWS instrumentation).
const attrs = tx?.trace?.custom.get(0x01 | 0x02 | 0x04 | 0x08)
return attrs?.['llm.conversation_id']
const attrs = tx?.trace?.custom.get(DESTINATIONS.TRANS_SCOPE) || {}
for (const [k, v] of Object.entries(attrs)) {
if (k.startsWith('llm.') === false) {
continue
}
this[k] = v
}
}
@@ -100,0 +93,0 @@
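The `set metadata(agent)` accessor above replaces the old `conversationId(agent)` lookup: any custom transaction attribute whose key starts with `llm.` is copied onto the event. Below is a hedged sketch of how an application might supply such attributes through the `newrelic` API; the attribute names and values are made up, only the `llm.` prefix matters.

```js
'use strict'

const newrelic = require('newrelic')

// Inside an active transaction, user-added custom attributes prefixed with
// `llm.` are picked up by the event's metadata setter shown above.
newrelic.startBackgroundTransaction('invoke-bedrock', async () => {
  newrelic.addCustomAttribute('llm.conversation_id', 'conv-1234') // illustrative value
  newrelic.addCustomAttribute('llm.user_tier', 'beta')            // illustrative key/value
  newrelic.addCustomAttribute('request_source', 'cron')           // no `llm.` prefix, ignored by the setter
  // ... invoke the Bedrock client here; each LLM event gets the llm.* attributes copied onto it
})
```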
@@ -15,5 +15,4 @@ /*
LlmEvent: require('./event'),
LlmTrackedIds: require('./tracked-ids'),
LlmError: require('./error'),
StreamHandler: require('./stream-handler')
}
@@ -9,3 +9,10 @@ /*
const DESTINATIONS = {
TRANS_EVENT: 0x01
TRANS_EVENT: 0x01,
// This magic number is brought to you by:
// https://github.com/newrelic/node-newrelic/blob/10762a7/lib/config/attribute-filter.js#L10-L23
// We hard code it here because we'd have a cyclic dependency if we tried
// to import it from `newrelic` (`newrelic` uses this module to provide
// the AWS instrumentation).
TRANS_SCOPE: 0x01 | 0x02 | 0x04 | 0x08
}
@@ -24,2 +31,3 @@
*
* @param {function} DatastoreParameters constructor of `shim.spec.DatastoreParameters`
* @param {Object} endpoint instance of ddb endpoint
@@ -29,8 +37,8 @@ * @param {Object} params parameters passed to a ddb command
*/
function setDynamoParameters(endpoint, params) {
return {
function setDynamoParameters(DatastoreParameters, endpoint, params) {
return new DatastoreParameters({
host: endpoint && (endpoint.host || endpoint.hostname),
port_path_or_id: (endpoint && endpoint.port) || 443,
collection: (params && params.TableName) || UNKNOWN
}
})
}
@@ -37,0 +45,0 @@
@@ -46,8 +46,12 @@ /*
return {
return new shim.specs.OperationSpec({
name: operationName,
parameters: setDynamoParameters(this.endpoint, params),
parameters: setDynamoParameters(
shim.specs.params.DatastoreParameters,
this.endpoint,
params
),
callback: shim.LAST,
opaque: true
}
})
}
@@ -74,12 +78,12 @@ )
return {
return new shim.specs.OperationSpec({
name: dynamoOperation,
parameters: {
host: endpoint && endpoint.host,
port_path_or_id: endpoint && endpoint.port,
collection: (params && params.TableName) || 'Unknown'
},
parameters: new shim.specs.params.DatastoreParameters({
host: endpoint?.host,
port_path_or_id: endpoint?.port,
collection: params?.TableName || 'Unknown'
}),
callback: shim.LAST,
opaque: true
}
})
}
@@ -86,0 +90,0 @@ )
@@ -30,3 +30,3 @@ /*
function wrapPublish(shim, original, name, args) {
return {
return new shim.specs.MessageSpec({
callback: shim.LAST,
@@ -36,3 +36,3 @@ destinationName: getDestinationName(args[0]),
opaque: true
}
})
}
@@ -39,0 +39,0 @@
@@ -41,3 +41,3 @@ /*
return {
return new shim.specs.MessageSpec({
callback: shim.LAST,
@@ -47,3 +47,3 @@ destinationName: queueName,
opaque: true
}
})
}
@@ -12,3 +12,2 @@ /*
LlmError,
LlmTrackedIds,
BedrockCommand,
@@ -20,2 +19,3 @@ BedrockResponse,
const { DESTINATIONS } = require('../util')
const AI_PREFIX = 'Supportability/Nodejs/ML'
@@ -35,2 +35,17 @@ let TRACKING_METRIC
/**
* Helper to determine if streaming is enabled
*
* @param {object} params to function
* @param {string} params.commandName name of command
* @param {object} params.config agent configuration
* @returns {boolean} if streaming command and `ai_monitoring.streaming.enabled` is truthy
*/
function isStreamingEnabled({ commandName, config }) {
return (
commandName === 'InvokeModelWithResponseStreamCommand' &&
config.ai_monitoring?.streaming?.enabled
)
}
/**
* Enqueues a LLM event to the custom event aggregator
@@ -43,3 +58,2 @@ * @param {object} params input params
function recordEvent({ agent, type, msg }) {
agent.metrics.getOrCreateMetric(TRACKING_METRIC).incrementCallCount()
msg.serialize()
@@ -50,29 +64,24 @@ agent.customEventAggregator.add([{ type, timestamp: Date.now() }, msg])
/**
* Assigns requestId, conversationId and messageIds for a given
* chat completion response on the active transaction.
* This is used for generating LlmFeedbackEvent via `api.recordLlmFeedbackEvent`
* Increments the tracking metric and sets the llm attribute on transactions
*
* @param {object} params input params
* @param {Transaction} params.tx active transaction
* @param {LlmChatCompletionMessage} params.msg chat completion message
* @param {string} params.responseId id of response
* @param {Agent} params.agent NR agent instance
* @param {TraceSegment} params.segment active segment
*/
function assignIdsToTx({ tx, msg, responseId }) {
const tracker = tx.llm.responses
const trackedIds =
tracker.get(responseId) ??
new LlmTrackedIds({
requestId: msg.request_id,
conversationId: msg.conversation_id
})
trackedIds.message_ids.push(msg.id)
tracker.set(responseId, trackedIds)
function addLlmMeta({ agent, segment }) {
agent.metrics.getOrCreateMetric(TRACKING_METRIC).incrementCallCount()
segment.transaction.trace.attributes.addAttribute(DESTINATIONS.TRANS_EVENT, 'llm', true)
// end segment to get a consistent segment duration
// for both the LLM events and the segment
segment.end()
}
/**
* Creates and enqueues the LlmChatCompletionSummary and n LlmChatCompletionMessage events and adds an error to transaction if it exists. It will also assign the request, conversation and messages ids by the response id
* Creates and enqueues the LlmChatCompletionSummary and
* LlmChatCompletionMessage events and adds an error to transaction if it
* exists. It will also assign the request, conversation and messages ids by
* the response id.
*
* @param {object} params function params
* @param {object} params.agent instance of agent
* @param {object} params.credentials aws resolved credentials
* @param {object} params.segment active segment
@@ -82,13 +91,4 @@ * @param {BedrockCommand} params.bedrockCommand parsed input
*/
function recordChatCompletionMessages({
agent,
credentials,
segment,
bedrockCommand,
bedrockResponse,
err
}) {
const tx = segment.transaction
function recordChatCompletionMessages({ agent, segment, bedrockCommand, bedrockResponse, err }) {
const summary = new LlmChatCompletionSummary({
credentials,
agent,
@@ -109,3 +109,2 @@ bedrockResponse,
})
assignIdsToTx({ tx, responseId: bedrockResponse.requestId, msg })
recordEvent({ agent, type: 'LlmChatCompletionMessage', msg })
@@ -124,3 +123,2 @@
})
assignIdsToTx({ tx, responseId: bedrockResponse.requestId, msg: chatCompletionMessage })
recordEvent({ agent, type: 'LlmChatCompletionMessage', msg: chatCompletionMessage })
@@ -138,7 +136,7 @@ })
/**
* Creates and enqueues the LlmEmbedding event and adds an error to transaction if it exists
* Creates and enqueues the LlmEmbedding event and adds an error to transaction
* if it exists.
*
* @param {object} params function params
* @param {object} params.agent instance of agent
* @param {object} params.credentials aws resolved credentials
* @param {object} params.segment active segment
@@ -148,13 +146,5 @@ * @param {BedrockCommand} params.bedrockCommand parsed input
*/
function recordEmbeddingMessage({
agent,
credentials,
segment,
bedrockCommand,
bedrockResponse,
err
}) {
function recordEmbeddingMessage({ agent, segment, bedrockCommand, bedrockResponse, err }) {
const embedding = new LlmEmbedding({
agent,
credentials,
segment,
@@ -204,3 +194,4 @@ bedrockCommand,
*/
function getBedrockSpec({ config, commandName }, _shim, _original, _name, args) {
function getBedrockSpec({ commandName }, shim, _original, _name, args) {
const { agent } = shim
const { input } = args[0]
@@ -210,12 +201,3 @@ const bedrockCommand = new BedrockCommand(input)
/** 🚨 Code Smell 🚨
* spec functions cannot be async, nor can after hooks.
* this works due to the nature of the event loop.
* the promise resolves before the after hook fires.
*/
let credentials = null
config.credentials().then((creds) => {
credentials = creds
})
return {
return new shim.specs.RecorderSpec({
promise: true,
@@ -230,3 +212,2 @@ name: `Llm/${modelType}/Bedrock/${commandName}`,
segment,
credentials,
bedrockCommand,
@@ -241,3 +222,3 @@ modelType
handleResponse(passThroughParams)
} else {
} else if (isStreamingEnabled({ commandName, config: agent.config })) {
// stream response
@@ -250,20 +231,21 @@ const handler = new StreamHandler({
response.output.body = handler.generator(handleResponse)
} else if (!isStreamingEnabled({ commandName, config: agent.config })) {
shim.logger.warn(
'ai_monitoring.streaming.enabled is set to `false`, stream will not be instrumented.'
)
agent.metrics.getOrCreateMetric(`${AI_PREFIX}/Streaming/Disabled`).incrementCallCount()
addLlmMeta({ agent, segment })
}
}
}
})
}
function handleResponse({ shim, err, response, segment, credentials, bedrockCommand, modelType }) {
function handleResponse({ shim, err, response, segment, bedrockCommand, modelType }) {
const { agent } = shim
const bedrockResponse = createBedrockResponse({ bedrockCommand, response, err })
segment.transaction.trace.attributes.addAttribute(DESTINATIONS.TRANS_EVENT, 'llm', true)
// end segment to get a consistent segment duration
// for both the LLM events and the segment
segment.end()
addLlmMeta({ agent, segment })
if (modelType === 'completion') {
recordChatCompletionMessages({
agent,
credentials,
segment,
@@ -277,3 +259,2 @@ bedrockCommand,
agent,
credentials,
segment,
@@ -302,3 +283,3 @@ bedrockCommand,
) {
return shim.record(next, getBedrockSpec.bind(null, { config, commandName }))
return shim.record(next, getBedrockSpec.bind(null, { commandName }))
}
@@ -321,3 +302,3 @@
TRACKING_METRIC = `Nodejs/ML/Bedrock/${shim.pkgVersion}`
TRACKING_METRIC = `${AI_PREFIX}/Bedrock/${shim.pkgVersion}`
return true
@@ -324,0 +305,0 @@ },
@@ -20,9 +20,9 @@ /*
const [{ input }] = args
return {
return new shim.specs.OperationSpec({
name: this.commandName,
parameters: setDynamoParameters(this.endpoint, input),
parameters: setDynamoParameters(shim.specs.params.DatastoreParameters, this.endpoint, input),
callback: shim.LAST,
opaque: true,
promise: true
}
})
}
@@ -29,0 +29,0 @@
@@ -36,3 +36,3 @@ /*
const [command] = args
return {
return new shim.specs.MessageSpec({
promise: true,
@@ -43,3 +43,3 @@ callback: shim.LAST,
opaque: true
}
})
}
@@ -46,0 +46,0 @@
@@ -45,3 +45,3 @@ /*
const { QueueUrl } = command.input
return {
return new shim.specs.MessageSpec({
callback: shim.LAST,
@@ -51,3 +51,3 @@ destinationName: grabLastUrlSegment(QueueUrl),
opaque: true
}
})
}
@@ -54,0 +54,0 @@
| { | ||
| "name": "@newrelic/aws-sdk", | ||
| "version": "7.1.0", | ||
| "version": "7.2.0", | ||
| "description": "New Relic instrumentation of the aws-sdk package.", | ||
@@ -31,3 +31,3 @@ "scripts": { | ||
| "@newrelic/newrelic-oss-cli": "^0.1.2", | ||
| "@newrelic/test-utilities": "^8.1.0", | ||
| "@newrelic/test-utilities": "^8.2.0", | ||
| "aws-sdk": "^2.1372.0", | ||
@@ -39,3 +39,3 @@ "c8": "^7.12.0", | ||
| "lockfile-lint": "^4.9.6", | ||
| "newrelic": "^11.9.0", | ||
| "newrelic": "^11.11.0", | ||
| "sinon": "^7.2.3", | ||
@@ -42,0 +42,0 @@ "tap": "^16.0.1" |