
Commit 5ee44ca
Authored Jun 4, 2024

feat (provider): langchain StringOutputParser support (#1826)

1 parent 0237ad9 · commit 5ee44ca

File tree

11 files changed: +187 -97 lines
 

‎.changeset/eight-wolves-visit.md

(+5)

@@ -0,0 +1,5 @@
+---
+'ai': patch
+---
+
+feat (provider): langchain StringOutputParser support
New file (+75): LangChainAdapter reference page

@@ -0,0 +1,75 @@
+---
+title: LangChainAdapter
+description: API Reference for LangChainAdapter.
+---
+
+# `LangChainAdapter`
+
+The `LangChainAdapter` module provides a way to transform LangChain output streams into AI streams.
+See the [LangChain Adapter documentation](/providers/adapters/langchain) for more information.
+
+## Import
+
+<Snippet text={`import { LangChainAdapter } from "ai"`} prompt={false} />
+
+## API Signature
+
+### Methods
+
+<PropertiesTable
+  content={[
+    {
+      name: 'toAIStream',
+      type: '(stream: ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>, AIStreamCallbacksAndOptions) => AIStream',
+      description: 'Converts LangChain output streams to AIStream.',
+    },
+  ]}
+/>
+
+## Examples
+
+### Convert LangChain Expression Language Stream
+
+```tsx filename="app/api/completion/route.ts" highlight={"14"}
+import { ChatOpenAI } from '@langchain/openai';
+import { LangChainAdapter, StreamingTextResponse } from 'ai';
+
+export async function POST(req: Request) {
+  const { prompt } = await req.json();
+
+  const model = new ChatOpenAI({
+    model: 'gpt-3.5-turbo-0125',
+    temperature: 0,
+  });
+
+  const stream = await model.stream(prompt);
+
+  const aiStream = LangChainAdapter.toAIStream(stream);
+
+  return new StreamingTextResponse(aiStream);
+}
+```
+
+### Convert StringOutputParser Stream
+
+```tsx filename="app/api/completion/route.ts" highlight={"16"}
+import { ChatOpenAI } from '@langchain/openai';
+import { LangChainAdapter, StreamingTextResponse } from 'ai';
+import { StringOutputParser } from '@langchain/core/output_parsers';
+
+export async function POST(req: Request) {
+  const { prompt } = await req.json();
+
+  const model = new ChatOpenAI({
+    model: 'gpt-3.5-turbo-0125',
+    temperature: 0,
+  });
+
+  const parser = new StringOutputParser();
+  const stream = await model.pipe(parser).stream(prompt);
+
+  const aiStream = LangChainAdapter.toAIStream(stream);
+
+  return new StreamingTextResponse(aiStream);
+}
+```

‎content/docs/07-reference/stream-helpers/16-langchain-stream.mdx

(+1 -3)

@@ -1,6 +1,6 @@
 ---
 title: LangChainStream
-description: Learn to use LangChainStream helper function in your application.
+description: API Reference for LangChainStream.
 ---
 
 # `LangChainStream`
@@ -15,8 +15,6 @@ Helps with the integration of LangChain. It is compatible with useChat and useCo
 
 ## Import
 
-### React
-
 <Snippet text={`import { LangChainStream } from "ai"`} prompt={false} />
 
 <ReferenceTable packageName="streams" functionName="LangChainStream" />

‎content/docs/07-reference/stream-helpers/index.mdx

(+6)

@@ -91,6 +91,12 @@ description: Learn to use help functions that help stream generations from diffe
         "Transforms the response from LangChain's language models into a readable stream.",
       href: '/docs/reference/stream-helpers/langchain-stream',
     },
+    {
+      title: 'LangChainAdapter',
+      description:
+        "Transforms the response from LangChain's stream into AI streams.",
+      href: '/docs/reference/stream-helpers/langchain-adapter',
+    },
     {
       title: 'MistralStream',
       description:
New file (+21): example API route streaming a StringOutputParser result

@@ -0,0 +1,21 @@
+import { ChatOpenAI } from '@langchain/openai';
+import { LangChainAdapter, StreamingTextResponse } from 'ai';
+import { StringOutputParser } from '@langchain/core/output_parsers';
+
+// Allow streaming responses up to 30 seconds
+export const maxDuration = 30;
+
+export async function POST(req: Request) {
+  const { prompt } = await req.json();
+
+  const model = new ChatOpenAI({
+    model: 'gpt-3.5-turbo-0125',
+    temperature: 0,
+  });
+
+  const parser = new StringOutputParser();
+
+  const stream = await model.pipe(parser).stream(prompt);
+
+  return new StreamingTextResponse(LangChainAdapter.toAIStream(stream));
+}
New file (+30): useCompletion client page example

@@ -0,0 +1,30 @@
+'use client';
+
+import { useCompletion } from 'ai/react';
+
+export default function Chat() {
+  const { completion, input, handleInputChange, handleSubmit, error } =
+    useCompletion();
+
+  return (
+    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
+      <h4 className="pb-4 text-xl font-bold text-gray-900 md:text-xl">
+        useCompletion Example
+      </h4>
+      {error && (
+        <div className="fixed top-0 left-0 w-full p-4 text-center text-white bg-red-500">
+          {error.message}
+        </div>
+      )}
+      {completion}
+      <form onSubmit={handleSubmit}>
+        <input
+          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
+          value={input}
+          placeholder="Say something..."
+          onChange={handleInputChange}
+        />
+      </form>
+    </div>
+  );
+}

‎examples/next-langchain/app/completion/page.tsx

(+3 -1)

@@ -4,7 +4,9 @@ import { useCompletion } from 'ai/react';
 
 export default function Chat() {
   const { completion, input, handleInputChange, handleSubmit, error } =
-    useCompletion();
+    useCompletion({
+      api: '/api/completion-string-output-parser',
+    });
 
   return (
     <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">

‎examples/next-langchain/package.json

(+1)

@@ -10,6 +10,7 @@
   },
   "dependencies": {
     "@langchain/openai": "0.0.28",
+    "@langchain/core": "0.1.63",
     "ai": "latest",
     "langchain": "0.1.36",
     "next": "latest",
New file (+30): tests for toAIStream

@@ -0,0 +1,30 @@
+import { convertArrayToReadableStream } from '../core/test/convert-array-to-readable-stream';
+import { convertReadableStreamToArray } from '../core/test/convert-readable-stream-to-array';
+import { toAIStream } from './langchain-adapter';
+
+describe('toAIStream', () => {
+  it('should convert ReadableStream<LangChainAIMessageChunk>', async () => {
+    const inputStream = convertArrayToReadableStream([
+      { content: 'Hello' },
+      { content: [{ type: 'text', text: 'World' }] },
+    ]);
+
+    assert.deepStrictEqual(
+      await convertReadableStreamToArray(
+        toAIStream(inputStream).pipeThrough(new TextDecoderStream()),
+      ),
+      ['0:"Hello"\n', '0:"World"\n'],
+    );
+  });
+
+  it('should convert ReadableStream<string> (LangChain StringOutputParser)', async () => {
+    const inputStream = convertArrayToReadableStream(['Hello', 'World']);
+
+    assert.deepStrictEqual(
+      await convertReadableStreamToArray(
+        toAIStream(inputStream).pipeThrough(new TextDecoderStream()),
+      ),
+      ['0:"Hello"\n', '0:"World"\n'],
+    );
+  });
+});
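The `convertArrayToReadableStream` and `convertReadableStreamToArray` helpers used above are internal test utilities of the repository and are not part of this diff. As a rough sketch of what such helpers plausibly do (an assumption for readability, not the repo's actual code), they only bridge between arrays and Web ReadableStreams:

```ts
// Assumed sketch of the internal test helpers referenced above.

// Wrap an array of values in a ReadableStream that emits each item in order.
export function convertArrayToReadableStream<T>(values: T[]): ReadableStream<T> {
  return new ReadableStream<T>({
    start(controller) {
      for (const value of values) {
        controller.enqueue(value);
      }
      controller.close();
    },
  });
}

// Drain a ReadableStream and collect every chunk into an array.
export async function convertReadableStreamToArray<T>(
  stream: ReadableStream<T>,
): Promise<T[]> {
  const result: T[] = [];
  const reader = stream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    result.push(value);
  }
  return result;
}
```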

‎packages/core/streams/langchain-adapter.ts

(+10 -4)

@@ -38,17 +38,23 @@ type LangChainAIMessageChunk = {
 };
 
 /**
-Converts the result of a LangChain Expression Language stream invocation to an AIStream.
+Converts LangChain output streams to AIStream.
+
+The following streams are supported:
+- `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
+- `string` streams (LangChain `StringOutputParser` output)
 */
 export function toAIStream(
-  stream: ReadableStream<LangChainAIMessageChunk>,
+  stream: ReadableStream<LangChainAIMessageChunk> | ReadableStream<string>,
   callbacks?: AIStreamCallbacksAndOptions,
 ) {
   return stream
     .pipeThrough(
-      new TransformStream({
+      new TransformStream<LangChainAIMessageChunk | string>({
         transform: async (chunk, controller) => {
-          if (typeof chunk.content === 'string') {
+          if (typeof chunk === 'string') {
+            controller.enqueue(chunk);
+          } else if (typeof chunk.content === 'string') {
             controller.enqueue(chunk.content);
           } else {
             const content: LangChainMessageContentComplex[] = chunk.content;
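As a quick illustration of what the new `typeof chunk === 'string'` branch enables, any plain `ReadableStream<string>` can now be passed to `toAIStream`, with or without LangChain in the loop. The following is a minimal sketch (not part of the commit; the stream contents and logging are illustrative only), and its expected decoded output matches the assertions in the new test file:

```ts
// Illustrative sketch: exercising the new ReadableStream<string> branch
// of toAIStream without involving LangChain at all.
import { LangChainAdapter } from 'ai';

async function demo() {
  // Any plain string stream is now accepted, not only LangChainAIMessageChunk.
  const stringStream = new ReadableStream<string>({
    start(controller) {
      controller.enqueue('Hello');
      controller.enqueue('World');
      controller.close();
    },
  });

  // Each string chunk is forwarded into the AI stream protocol; decoded,
  // the output is '0:"Hello"\n' followed by '0:"World"\n', as the tests assert.
  const reader = LangChainAdapter.toAIStream(stringStream)
    .pipeThrough(new TextDecoderStream())
    .getReader();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value);
  }
}

demo();
```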

‎pnpm-lock.yaml

(+5 -89)

Generated lockfile; diff not rendered.

0 commit comments