
Commit 3c50606

svetlanabrennan and bizob2828 authored Mar 5, 2024
feat: Added instrumentation for VectorStore.similaritySearch for langchain.js (#2049)
Co-authored-by: Bob Evans <robert.evans25@gmail.com>
1 parent 47a925e commit 3c50606

File tree: 16 files changed, +551 −38 lines

‎docker-compose.yml

+1 −1

@@ -2,7 +2,7 @@ version: "3"
 services:
   elasticsearch:
     container_name: nr_node_elastic
-    image: docker.elastic.co/elasticsearch/elasticsearch:8.7.1
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.8.0
     environment:
       - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
       # Set cluster to single node

‎lib/instrumentation/langchain/nr-hooks.js

+6

@@ -7,6 +7,7 @@
 const toolsInstrumentation = require('./tools')
 const cbManagerInstrumentation = require('./callback-manager')
 const runnableInstrumentation = require('./runnable')
+const vectorstoreInstrumentation = require('./vectorstore')

 module.exports = [
   {
@@ -23,5 +24,10 @@ module.exports = [
     type: 'generic',
     moduleName: '@langchain/core/dist/runnables/base',
     onRequire: runnableInstrumentation
+  },
+  {
+    type: 'generic',
+    moduleName: '@langchain/core/vectorstores',
+    onRequire: vectorstoreInstrumentation
   }
 ]
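For orientation, each entry in this hooks list asks the agent to run the named instrumentation function when the listed module is required, handing it a shim plus the module's exports. A conceptual sketch of that contract follows; the `loadAndInstrument` helper is hypothetical and only illustrates what an `onRequire` hook is written against, not the agent's real loader.

// Hypothetical sketch only: shows the contract hook.onRequire(shim, moduleExports).
const hooks = require('./lib/instrumentation/langchain/nr-hooks')

function loadAndInstrument(moduleName, shim) {
  const moduleExports = require(moduleName)
  const hook = hooks.find((h) => h.moduleName === moduleName)
  if (hook) {
    // For '@langchain/core/vectorstores' this ends up calling
    // initialize(shim, vectorstores) from lib/instrumentation/langchain/vectorstore.js below.
    hook.onRequire(shim, moduleExports)
  }
  return moduleExports
}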
‎lib/instrumentation/langchain/vectorstore.js

+108

@@ -0,0 +1,108 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+const {
+  AI: { LANGCHAIN }
+} = require('../../metrics/names')
+const { LangChainVectorSearch, LangChainVectorSearchResult } = require('../../llm-events/langchain')
+const { recordEvent, shouldSkipInstrumentation } = require('./common')
+const { DESTINATIONS } = require('../../config/attribute-filter')
+const { RecorderSpec } = require('../../shim/specs')
+const LlmErrorMessage = require('../../llm-events/error-message')
+
+/**
+ * Generates a LangChainVectorSearch for the entire search request.
+ * Also iterates over the documents in the output and generates a
+ * LangChainVectorSearchResult for each document.
+ *
+ * @param {object} params input params
+ * @param {string} params.request vector search query
+ * @param {number} params.k vector search top k
+ * @param {object} params.output vector search documents
+ * @param {Agent} params.agent NR agent instance
+ * @param {TraceSegment} params.segment active segment from vector search
+ * @param {string} params.pkgVersion langchain version
+ * @param {Error} params.err error, if one occurred
+ */
+function recordVectorSearch({ request, k, output, agent, segment, pkgVersion, err }) {
+  const vectorSearch = new LangChainVectorSearch({
+    agent,
+    segment,
+    query: request,
+    k,
+    documents: output,
+    error: err !== null
+  })
+
+  recordEvent({ agent, type: 'LlmVectorSearch', pkgVersion, msg: vectorSearch })
+
+  output.forEach((document, sequence) => {
+    const vectorSearchResult = new LangChainVectorSearchResult({
+      agent,
+      segment,
+      metadata: document.metadata,
+      pageContent: document.pageContent,
+      sequence,
+      search_id: vectorSearch.id
+    })
+
+    recordEvent({
+      agent,
+      type: 'LlmVectorSearchResult',
+      pkgVersion,
+      msg: vectorSearchResult
+    })
+  })
+
+  if (err) {
+    agent.errors.add(
+      segment.transaction,
+      err,
+      new LlmErrorMessage({
+        response: output,
+        cause: err,
+        vectorsearch: vectorSearch
+      })
+    )
+  }
+}
+
+module.exports = function initialize(shim, vectorstores) {
+  const { agent, pkgVersion } = shim
+
+  if (shouldSkipInstrumentation(agent.config)) {
+    shim.logger.debug(
+      'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
+    )
+    return
+  }
+
+  shim.record(
+    vectorstores.VectorStore.prototype,
+    'similaritySearch',
+    function wrapCall(shim, similaritySearch, fnName, args) {
+      const [request, k] = args
+
+      return new RecorderSpec({
+        name: `${LANGCHAIN.VECTORSTORE}/${fnName}`,
+        promise: true,
+        // eslint-disable-next-line max-params
+        after(_shim, _fn, _name, err, output, segment) {
+          if (!output) {
+            // If we get an error, it is possible that `output = null`.
+            // In that case, we define it to be an empty array.
+            output = []
+          }
+
+          segment.end()
+          recordVectorSearch({ request, k, output, agent, segment, pkgVersion, err })
+
+          segment.transaction.trace.attributes.addAttribute(DESTINATIONS.TRANS_EVENT, 'llm', true)
+        }
+      })
+    }
+  )
+}
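To see the instrumentation end to end, here is a rough usage sketch modeled on the versioned test further down: an application that loads the agent and calls `similaritySearch` inside a transaction should produce one LlmVectorSearch event plus one LlmVectorSearchResult per returned document. The Elasticsearch endpoint, API key, and transaction name below are placeholder assumptions, not values from this commit.

// Rough sketch mirroring the versioned test setup; endpoint, key, and name are placeholders.
const newrelic = require('newrelic') // agent must load first so the require hook can wrap VectorStore
const { Client } = require('@elastic/elasticsearch')
const { ElasticVectorSearch } = require('@langchain/community/vectorstores/elasticsearch')
const { OpenAIEmbeddings } = require('@langchain/openai')

newrelic.startBackgroundTransaction('vectorstore-demo', async () => {
  const embeddings = new OpenAIEmbeddings({ openAIApiKey: process.env.OPENAI_API_KEY })
  const store = new ElasticVectorSearch(embeddings, {
    client: new Client({ node: 'http://localhost:9200' })
  })

  // Instrumented call: ends a 'Llm/vectorstore/Langchain/similaritySearch' segment,
  // records an LlmVectorSearch event, and one LlmVectorSearchResult per document.
  const docs = await store.similaritySearch('This is an embedding test.', 1)
  console.log(docs.map((d) => d.pageContent))
})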

‎lib/llm-events/error-message.js

+4 −1

@@ -18,14 +18,17 @@ module.exports = class LlmErrorMessage {
    * conversation if it was a chat completion conversation.
    * @param {LlmEmbedding} [params.embedding] Details about the conversation
    * if it was an embedding conversation.
+   * @param {LlmVectorStoreSearch} [params.vectorsearch] Details about the vector
+   * search if it was a vector search event.
    */
-  constructor({ response, cause, summary, embedding } = {}) {
+  constructor({ response, cause, summary, embedding, vectorsearch } = {}) {
     this['http.statusCode'] = response?.status ?? cause?.status
     this['error.message'] = cause?.message
     this['error.code'] = response?.code ?? cause?.error?.code
     this['error.param'] = response?.param ?? cause?.error?.param
     this.completion_id = summary?.id
     this.embedding_id = embedding?.id
+    this.vector_store_id = vectorsearch?.id
   }

   get [Symbol.toStringTag]() {
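As a quick illustration of the new field, an error captured during a vector search now carries the id of the originating search event as `vector_store_id`. The values below are made up:

// Illustrative only; the id and error are placeholder values.
const LlmErrorMessage = require('./lib/llm-events/error-message')

const msg = new LlmErrorMessage({
  cause: new Error('index_not_found_exception'),
  response: [], // the instrumentation passes the (possibly empty) similaritySearch output
  vectorsearch: { id: 'example-vector-search-id' }
})

console.log(msg.vector_store_id) // 'example-vector-search-id'
console.log(msg['error.message']) // 'index_not_found_exception'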

‎lib/llm-events/langchain/vector-search-result.js

+2 −3

@@ -6,13 +6,13 @@
 'use strict'

 const LangChainEvent = require('./event')
-const crypto = require('crypto')

 /**
  * @typedef {object} LangChainVectorSearchResultParams
  * @augments LangChainEventParams
  * @property {string} pageContent The stringified contents of the pageContent attribute on each returned search result document.
  * @property {number} [sequence=0] The index of the document in the search result documents list.
+ * @property {string} search_id The identifier from the LangChainVectorSearch event.
  */
 /**
  * @type {LangChainVectorSearchResultParams}
@@ -23,13 +23,12 @@ const defaultParams = {
 }

 class LangChainVectorSearchResult extends LangChainEvent {
-  search_id = crypto.randomUUID()
-
   constructor(params) {
     params = Object.assign({}, defaultParams, params)
     super(params)
     const { agent } = params

+    this.search_id = params.search_id
     this.sequence = params.sequence

     if (agent.config.ai_monitoring.record_content.enabled === true) {
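Dropping the `crypto.randomUUID()` default means a search result no longer gets an unrelated random id; the caller now supplies the parent search's id, so result events can be correlated with their LlmVectorSearch event. A minimal sketch of the linkage (`agent` and `segment` are assumed to come from test helpers and are omitted here; the unit test further down shows the full setup):

// Minimal sketch of the new linkage; `agent` and `segment` come from elsewhere.
const LangChainVectorSearch = require('./lib/llm-events/langchain/vector-search')
const LangChainVectorSearchResult = require('./lib/llm-events/langchain/vector-search-result')

const search = new LangChainVectorSearch({ agent, segment, query: 'hello world', k: 1, documents: [] })
const result = new LangChainVectorSearchResult({
  agent,
  segment,
  sequence: 0,
  pageContent: 'hello world',
  search_id: search.id // ties this result back to its parent LlmVectorSearch event
})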

‎lib/metrics/names.js

+4 −2

@@ -170,7 +170,8 @@ const AI = {
   EMBEDDING: 'Llm/embedding',
   COMPLETION: 'Llm/completion',
   TOOL: 'Llm/tool',
-  CHAIN: 'Llm/chain'
+  CHAIN: 'Llm/chain',
+  VECTORSTORE: 'Llm/vectorstore'
 }

 AI.OPENAI = {
@@ -184,7 +185,8 @@ AI.LANGCHAIN = {
   EMBEDDING: `${AI.EMBEDDING}/Langchain`,
   COMPLETION: `${AI.COMPLETION}/Langchain`,
   TOOL: `${AI.TOOL}/Langchain`,
-  CHAIN: `${AI.CHAIN}/Langchain`
+  CHAIN: `${AI.CHAIN}/Langchain`,
+  VECTORSTORE: `${AI.VECTORSTORE}/Langchain`
 }

 const RESTIFY = {
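Putting the two new constants together shows where the segment name asserted in the versioned test comes from (the `AI` map is exported from names.js, as the destructured require in vectorstore.js above relies on):

// Run from the repository root; values follow directly from the constants above.
const { AI } = require('./lib/metrics/names')

console.log(AI.VECTORSTORE) // 'Llm/vectorstore'
console.log(AI.LANGCHAIN.VECTORSTORE) // 'Llm/vectorstore/Langchain'
// vectorstore.js names its RecorderSpec `${LANGCHAIN.VECTORSTORE}/${fnName}`,
// which yields 'Llm/vectorstore/Langchain/similaritySearch' for the wrapped method.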
‎test/unit/instrumentation/langchain/vectorstore.test.js

+68

@@ -0,0 +1,68 @@
+/*
+ * Copyright 2023 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const { test } = require('tap')
+const helper = require('../../../lib/agent_helper')
+const GenericShim = require('../../../../lib/shim/shim')
+const sinon = require('sinon')
+
+test('langchain/core/vectorstore unit tests', (t) => {
+  t.beforeEach(function (t) {
+    const sandbox = sinon.createSandbox()
+    const agent = helper.loadMockedAgent()
+    agent.config.ai_monitoring = { enabled: true }
+    agent.config.feature_flag = { langchain_instrumentation: true }
+    const shim = new GenericShim(agent, 'langchain')
+    shim.pkgVersion = '0.1.26'
+    sandbox.stub(shim.logger, 'debug')
+    sandbox.stub(shim.logger, 'warn')
+
+    t.context.agent = agent
+    t.context.shim = shim
+    t.context.sandbox = sandbox
+    t.context.initialize = require('../../../../lib/instrumentation/langchain/vectorstore')
+  })
+
+  t.afterEach(function (t) {
+    helper.unloadAgent(t.context.agent)
+    t.context.sandbox.restore()
+  })
+
+  function getMockModule() {
+    function VectorStore() {}
+    VectorStore.prototype.similaritySearch = async function call() {}
+    return { VectorStore }
+  }
+
+  ;[
+    { aiMonitoring: false, langChain: true },
+    { aiMonitoring: true, langChain: false },
+    { aiMonitoring: false, langChain: false }
+  ].forEach(({ aiMonitoring, langChain }) => {
+    t.test(
+      `should not register instrumentation if ai_monitoring is ${aiMonitoring} and langchain_instrumentation is ${langChain}`,
+      (t) => {
+        const { shim, agent, initialize } = t.context
+        const MockVectorstore = getMockModule()
+        agent.config.ai_monitoring.enabled = aiMonitoring
+        agent.config.feature_flag.langchain_instrumentation = langChain
+
+        initialize(shim, MockVectorstore)
+        t.equal(shim.logger.debug.callCount, 1, 'should log 1 debug message')
+        t.equal(
+          shim.logger.debug.args[0][0],
+          'langchain instrumentation is disabled. To enable set `config.ai_monitoring.enabled` to true'
+        )
+        const isWrapped = shim.isWrapped(MockVectorstore.VectorStore.prototype.similaritySearch)
+        t.equal(isWrapped, false, 'should not wrap vectorstore similaritySearch')
+        t.end()
+      }
+    )
+  })
+
+  t.end()
+})

‎test/unit/llm-events/langchain/vector-search-result.test.js

+15 −4

@@ -7,6 +7,7 @@

 const tap = require('tap')
 const LangChainVectorSearchResult = require('../../../../lib/llm-events/langchain/vector-search-result')
+const LangChainVectorSearch = require('../../../../lib/llm-events/langchain/vector-search')

 tap.beforeEach((t) => {
   t.context._tx = {
@@ -44,6 +45,9 @@ tap.beforeEach((t) => {
     transaction: {
       id: 'tx-1',
       traceId: 'trace-1'
+    },
+    getDurationInMillis() {
+      return 42
     }
   }

@@ -52,12 +56,19 @@ tap.beforeEach((t) => {
 })

 tap.test('create entity', async (t) => {
-  const search = new LangChainVectorSearchResult({
+  const search = new LangChainVectorSearch({
+    ...t.context,
+    query: 'hello world',
+    k: 1
+  })
+
+  const searchResult = new LangChainVectorSearchResult({
     ...t.context,
     sequence: 1,
-    pageContent: 'hello world'
+    pageContent: 'hello world',
+    search_id: search.id
   })
-  t.match(search, {
+  t.match(searchResult, {
     id: /[a-z0-9-]{36}/,
     appName: 'test-app',
     ['llm.conversation_id']: 'test-conversation',
@@ -71,7 +82,7 @@ tap.test('create entity', async (t) => {
     virtual_llm: true,
     sequence: 1,
     page_content: 'hello world',
-    search_id: /[a-z0-9-]{36}/
+    search_id: search.id
   })
 })
‎test/unit/llm-events/openai/error.test.js

+2 −1

@@ -18,7 +18,8 @@ tap.test('LlmErrorMessage', (t) => {
     'error.code': 'insufficient_quota',
     'error.param': 'test-param',
     'completion_id': undefined,
-    'embedding_id': undefined
+    'embedding_id': undefined,
+    'vector_store_id': undefined
   }
   t.same(errorMsg, expected)
   t.end()

‎test/versioned/langchain/common.js

+53 −2

@@ -14,13 +14,62 @@ function filterLangchainEvents(events) {
   })
 }

-function filterLangchainMessages(events, msgType) {
+function filterLangchainEventsByType(events, msgType) {
   return events.filter((event) => {
     const [{ type }] = event
     return type === msgType
   })
 }

+function assertLangChainVectorSearch({ tx, vectorSearch, responseDocumentSize }) {
+  const expectedSearch = {
+    'id': /[a-f0-9]{36}/,
+    'appName': 'New Relic for Node.js tests',
+    'span_id': tx.trace.root.children[0].id,
+    'trace_id': tx.traceId,
+    'transaction_id': tx.id,
+    'request.k': 1,
+    'request.query': 'This is an embedding test.',
+    'ingest_source': 'Node',
+    'vendor': 'langchain',
+    'virtual_llm': true,
+    ['response.number_of_documents']: responseDocumentSize,
+    'duration': tx.trace.root.children[0].getDurationInMillis()
+  }
+
+  this.equal(vectorSearch[0].type, 'LlmVectorSearch')
+  this.match(vectorSearch[1], expectedSearch, 'should match vector search')
+}
+
+function assertLangChainVectorSearchResult({ tx, vectorSearchResult, vectorSearchId }) {
+  const baseSearchResult = {
+    'id': /[a-f0-9]{36}/,
+    'search_id': vectorSearchId,
+    'appName': 'New Relic for Node.js tests',
+    'span_id': tx.trace.root.children[0].id,
+    'trace_id': tx.traceId,
+    'transaction_id': tx.id,
+    'ingest_source': 'Node',
+    'vendor': 'langchain',
+    'metadata.id': '2',
+    'virtual_llm': true
+  }
+
+  vectorSearchResult.forEach((search) => {
+    const expectedChatMsg = { ...baseSearchResult }
+    if (search[1].sequence === 0) {
+      expectedChatMsg.sequence = 0
+      expectedChatMsg.page_content = 'This is an embedding test.'
+    } else if (search[1].sequence === 1) {
+      expectedChatMsg.sequence = 1
+      expectedChatMsg.page_content = '212 degrees Fahrenheit is equal to 100 degrees Celsius.'
+    }
+
+    this.equal(search[0].type, 'LlmVectorSearchResult')
+    this.match(search[1], expectedChatMsg, 'should match vector search result')
+  })
+}
+
 function assertLangChainChatCompletionSummary({ tx, chatSummary, withCallback }) {
   const expectedSummary = {
     'id': /[a-f0-9]{36}/,
@@ -93,8 +142,10 @@ function assertLangChainChatCompletionMessages({

 tap.Test.prototype.addAssert('langchainMessages', 1, assertLangChainChatCompletionMessages)
 tap.Test.prototype.addAssert('langchainSummary', 1, assertLangChainChatCompletionSummary)
+tap.Test.prototype.addAssert('langchainVectorSearch', 1, assertLangChainVectorSearch)
+tap.Test.prototype.addAssert('langchainVectorSearchResult', 1, assertLangChainVectorSearchResult)

 module.exports = {
   filterLangchainEvents,
-  filterLangchainMessages
+  filterLangchainEventsByType
 }

‎test/versioned/langchain/package.json

+5 −2

@@ -12,12 +12,15 @@
     },
     "dependencies": {
       "@langchain/core": ">=0.1.17",
-      "@langchain/openai": "latest"
+      "@langchain/openai": "latest",
+      "@langchain/community": "latest",
+      "@elastic/elasticsearch": "latest"
     },
     "files": [
       "tools.tap.js",
       "runnables.tap.js",
-      "runnables-streaming.tap.js"
+      "runnables-streaming.tap.js",
+      "vectorstore.tap.js"
     ]
   }
 ]

‎test/versioned/langchain/runnables-streaming.tap.js

+10 −10

@@ -9,7 +9,7 @@ const tap = require('tap')
 const helper = require('../../lib/agent_helper')
 // load the assertSegments assertion
 require('../../lib/metrics_helper')
-const { filterLangchainEvents, filterLangchainMessages } = require('./common')
+const { filterLangchainEvents, filterLangchainEventsByType } = require('./common')
 const { version: pkgVersion } = require('@langchain/core/package.json')
 const createOpenAIMockServer = require('../openai/mock-server')
 const mockResponses = require('../openai/mock-responses')
@@ -135,11 +135,11 @@ tap.test('Langchain instrumentation - chain streaming', (t) => {
       const events = agent.customEventAggregator.events.toArray()

       const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionMessage'
       )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionSummary'
       )
@@ -180,11 +180,11 @@ tap.test('Langchain instrumentation - chain streaming', (t) => {
       const events = agent.customEventAggregator.events.toArray()

       const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionMessage'
       )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionSummary'
       )
@@ -229,11 +229,11 @@ tap.test('Langchain instrumentation - chain streaming', (t) => {
       const events = agent.customEventAggregator.events.toArray()

       const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionMessage'
       )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionSummary'
       )
@@ -322,11 +322,11 @@ tap.test('Langchain instrumentation - chain streaming', (t) => {
       const events = agent.customEventAggregator.events.toArray()

       const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionMessage'
       )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionSummary'
       )
@@ -429,7 +429,7 @@ tap.test('Langchain instrumentation - chain streaming', (t) => {
       const events = agent.customEventAggregator.events.toArray()

       const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
         langchainEvents,
         'LlmChatCompletionMessage'
       )

‎test/versioned/langchain/runnables.tap.js

+10 −10

@@ -9,7 +9,7 @@ const tap = require('tap')
 const helper = require('../../lib/agent_helper')
 // load the assertSegments assertion
 require('../../lib/metrics_helper')
-const { filterLangchainEvents, filterLangchainMessages } = require('./common')
+const { filterLangchainEvents, filterLangchainEventsByType } = require('./common')
 const { version: pkgVersion } = require('@langchain/core/package.json')
 const createOpenAIMockServer = require('../openai/mock-server')
 const config = {
@@ -116,11 +116,11 @@ tap.test('Langchain instrumentation - runnable sequence', (t) => {
      const events = agent.customEventAggregator.events.toArray()

      const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionMessage'
      )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionSummary'
      )
@@ -155,11 +155,11 @@ tap.test('Langchain instrumentation - runnable sequence', (t) => {
      const events = agent.customEventAggregator.events.toArray()

      const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionMessage'
      )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionSummary'
      )
@@ -198,11 +198,11 @@ tap.test('Langchain instrumentation - runnable sequence', (t) => {
      const events = agent.customEventAggregator.events.toArray()

      const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionMessage'
      )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionSummary'
      )
@@ -280,11 +280,11 @@ tap.test('Langchain instrumentation - runnable sequence', (t) => {
      const events = agent.customEventAggregator.events.toArray()

      const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionMessage'
      )
-      const langChainSummaryEvents = filterLangchainMessages(
+      const langChainSummaryEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionSummary'
      )
@@ -374,7 +374,7 @@ tap.test('Langchain instrumentation - runnable sequence', (t) => {
      const events = agent.customEventAggregator.events.toArray()

      const langchainEvents = filterLangchainEvents(events)
-      const langChainMessageEvents = filterLangchainMessages(
+      const langChainMessageEvents = filterLangchainEventsByType(
        langchainEvents,
        'LlmChatCompletionMessage'
      )
)
‎test/versioned/langchain/vectorstore.tap.js

+257

@@ -0,0 +1,257 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+'use strict'
+
+const tap = require('tap')
+const helper = require('../../lib/agent_helper')
+// load the assertSegments assertion
+require('../../lib/metrics_helper')
+const { version: pkgVersion } = require('@langchain/core/package.json')
+const createOpenAIMockServer = require('../openai/mock-server')
+const { filterLangchainEvents, filterLangchainEventsByType } = require('./common')
+const { DESTINATIONS } = require('../../../lib/config/attribute-filter')
+const params = require('../../lib/params')
+const { Document } = require('@langchain/core/documents')
+
+const config = {
+  ai_monitoring: {
+    enabled: true
+  },
+  feature_flag: {
+    langchain_instrumentation: true
+  }
+}
+
+tap.test('Langchain instrumentation - vectorstore', (t) => {
+  t.autoend()
+
+  t.beforeEach(async (t) => {
+    const { host, port, server } = await createOpenAIMockServer()
+    t.context.server = server
+    t.context.agent = helper.instrumentMockedAgent(config)
+    const { OpenAIEmbeddings } = require('@langchain/openai')
+
+    const { Client } = require('@elastic/elasticsearch')
+    const clientArgs = {
+      client: new Client({
+        node: `http://${params.elastic_host}:${params.elastic_port}`
+      })
+    }
+    const { ElasticVectorSearch } = require('@langchain/community/vectorstores/elasticsearch')
+
+    t.context.embedding = new OpenAIEmbeddings({
+      openAIApiKey: 'fake-key',
+      configuration: {
+        baseURL: `http://${host}:${port}`
+      }
+    })
+    const docs = [
+      new Document({
+        metadata: { id: '2' },
+        pageContent: 'This is an embedding test.'
+      })
+    ]
+    const vectorStore = new ElasticVectorSearch(t.context.embedding, clientArgs)
+    await vectorStore.deleteIfExists()
+    await vectorStore.addDocuments(docs)
+    t.context.vs = vectorStore
+  })
+
+  t.afterEach(async (t) => {
+    t.context?.server?.close()
+    helper.unloadAgent(t.context.agent)
+    // bust the require-cache so it can re-instrument
+    Object.keys(require.cache).forEach((key) => {
+      if (
+        key.includes('@langchain/core') ||
+        key.includes('openai') ||
+        key.includes('@elastic') ||
+        key.includes('@langchain/community')
+      ) {
+        delete require.cache[key]
+      }
+    })
+  })
+
+  t.test('should create vectorstore events for every similarity search call', (t) => {
+    const { agent, vs } = t.context
+
+    helper.runInNamedTransaction(agent, async (tx) => {
+      await vs.similaritySearch('This is an embedding test.', 1)
+
+      const events = agent.customEventAggregator.events.toArray()
+      t.equal(events.length, 3, 'should create 3 events')
+
+      const langchainEvents = events.filter((event) => {
+        const [, chainEvent] = event
+        return chainEvent.vendor === 'langchain'
+      })
+
+      t.equal(langchainEvents.length, 2, 'should create 2 langchain events')
+
+      tx.end()
+      t.end()
+    })
+  })
+
+  t.test('should create span on successful vectorstore create', (t) => {
+    const { agent, vs } = t.context
+    helper.runInTransaction(agent, async (tx) => {
+      const result = await vs.similaritySearch('This is an embedding test.', 1)
+      t.ok(result)
+      t.assertSegments(tx.trace.root, ['Llm/vectorstore/Langchain/similaritySearch'], {
+        exact: false
+      })
+      tx.end()
+      t.end()
+    })
+  })
+
+  t.test('should increment tracking metric for each langchain vectorstore event', (t) => {
+    const { agent, vs } = t.context
+
+    helper.runInTransaction(agent, async (tx) => {
+      await vs.similaritySearch('This is an embedding test.', 1)
+
+      const metrics = agent.metrics.getOrCreateMetric(
+        `Supportability/Nodejs/ML/Langchain/${pkgVersion}`
+      )
+      t.equal(metrics.callCount > 0, true)
+
+      tx.end()
+      t.end()
+    })
+  })
+
+  t.test(
+    'should create vectorstore events for every similarity search call with embeddings',
+    (t) => {
+      const { agent, vs } = t.context
+
+      helper.runInNamedTransaction(agent, async (tx) => {
+        await vs.similaritySearch('This is an embedding test.', 1)
+
+        const events = agent.customEventAggregator.events.toArray()
+        const langchainEvents = filterLangchainEvents(events)
+
+        const vectorSearchResultEvents = filterLangchainEventsByType(
+          langchainEvents,
+          'LlmVectorSearchResult'
+        )
+
+        const vectorSearchEvents = filterLangchainEventsByType(langchainEvents, 'LlmVectorSearch')
+
+        t.langchainVectorSearch({
+          tx,
+          vectorSearch: vectorSearchEvents[0],
+          responseDocumentSize: 1
+        })
+        t.langchainVectorSearchResult({
+          tx,
+          vectorSearchResult: vectorSearchResultEvents,
+          vectorSearchId: vectorSearchEvents[0][1].id
+        })
+
+        tx.end()
+        t.end()
+      })
+    }
+  )
+
+  t.test(
+    'should create only vectorstore search event for similarity search call with embeddings and invalid metadata filter',
+    (t) => {
+      const { agent, vs } = t.context
+
+      helper.runInNamedTransaction(agent, async (tx) => {
+        // search for documents with invalid filter
+        await vs.similaritySearch('This is an embedding test.', 1, {
+          a: 'some filter'
+        })
+
+        const events = agent.customEventAggregator.events.toArray()
+        const langchainEvents = filterLangchainEvents(events)
+
+        const vectorSearchResultEvents = filterLangchainEventsByType(
+          langchainEvents,
+          'LlmVectorSearchResult'
+        )
+
+        const vectorSearchEvents = filterLangchainEventsByType(langchainEvents, 'LlmVectorSearch')
+
+        // there are no documents in vector store with that filter
+        t.equal(vectorSearchResultEvents.length, 0, 'should have 0 events')
+        t.langchainVectorSearch({
+          tx,
+          vectorSearch: vectorSearchEvents[0],
+          responseDocumentSize: 0
+        })
+
+        tx.end()
+        t.end()
+      })
+    }
+  )
+
+  t.test('should not create vectorstore events when not in a transaction', async (t) => {
+    const { agent, vs } = t.context
+
+    await vs.similaritySearch('This is an embedding test.', 1)
+
+    const events = agent.customEventAggregator.events.toArray()
+    t.equal(events.length, 0, 'should not create vectorstore events')
+    t.end()
+  })
+
+  t.test('should add llm attribute to transaction', (t) => {
+    const { agent, vs } = t.context
+
+    helper.runInTransaction(agent, async (tx) => {
+      await vs.similaritySearch('This is an embedding test.', 1)
+
+      const attributes = tx.trace.attributes.get(DESTINATIONS.TRANS_EVENT)
+      t.equal(attributes.llm, true)
+
+      tx.end()
+      t.end()
+    })
+  })
+
+  t.test('should create error events', (t) => {
+    const { agent, vs } = t.context
+
+    helper.runInNamedTransaction(agent, async (tx) => {
+      try {
+        await vs.similaritySearch('Embedding not allowed.', 1)
+      } catch (error) {
+        t.ok(error)
+      }
+
+      const events = agent.customEventAggregator.events.toArray()
+      // Only LlmEmbedding and LlmVectorSearch events will be created
+      // LangChainVectorSearchResult event won't be created since there was an error
+      t.equal(events.length, 2, 'should create 2 events')
+
+      const langchainEvents = events.filter((event) => {
+        const [, chainEvent] = event
+        return chainEvent.vendor === 'langchain'
+      })
+
+      t.equal(langchainEvents.length, 1, 'should create 1 langchain vectorsearch event')
+      t.equal(langchainEvents[0][1].error, true)
+
+      // But, we should also get two error events: 1xLLM and 1xLangChain
+      const exceptions = tx.exceptions
+      for (const e of exceptions) {
+        const str = Object.prototype.toString.call(e.customAttributes)
+        t.equal(str, '[object LlmErrorMessage]')
+      }
+
+      tx.end()
+      t.end()
+    })
+  })
+})

‎test/versioned/openai/mock-responses.js

+2 −2

@@ -76,8 +76,8 @@ responses.set('This is an embedding test.', {
   body: {
     data: [
       {
-        embedding:
-          'SLewvFF6iztXKj07UOCQO41IorspWOk79KHuu12FrbwjqLe8 […multi-kilobyte hard-coded embedding string omitted]'
+        // a small sample of a real embedding response
+        embedding: [-0.021616805, 0.004173375, 0.002796262, 0.004489489, -0.004940119],
         index: 0,
         object: 'embedding'
       }

‎test/versioned/openai/mock-server.js

+4

@@ -153,5 +153,9 @@ function getShortenedPrompt(reqBody) {
   const prompt =
     reqBody.prompt || reqBody.input || reqBody.messages.map((m) => m.content).join('\n')

+  if (Array.isArray(prompt)) {
+    return prompt[0]
+  }
+
   return prompt.split('\n')[0]
 }
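The new guard matters because an embeddings request can carry `input` as an array of strings rather than a single string (presumably how the langchain embeddings client sends batched documents), and calling `split` on an array would throw. A standalone illustration of the two shapes, with made-up request bodies:

// Standalone copy of getShortenedPrompt for illustration; request bodies are made up.
function getShortenedPrompt(reqBody) {
  const prompt =
    reqBody.prompt || reqBody.input || reqBody.messages.map((m) => m.content).join('\n')

  if (Array.isArray(prompt)) {
    return prompt[0]
  }

  return prompt.split('\n')[0]
}

console.log(getShortenedPrompt({ prompt: 'line one\nline two' })) // 'line one'
console.log(getShortenedPrompt({ input: ['This is an embedding test.'] })) // 'This is an embedding test.'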
