
Commit 69ca8f5

lgrammel, snettah, and MaxLeiter authored Nov 16, 2023
Introduce useAssistant (experimental). (#728)
Co-authored-by: Safi Nettah <nettah.safi@protonmail.com>
Co-authored-by: Max Leiter <max.leiter@vercel.com>
1 parent fd7d445 commit 69ca8f5

File tree: 13 files changed, +628 −57 lines

 

‎.changeset/afraid-spies-remain.md

+5

```md
---
'ai': patch
---

ai/react: add experimental_useAssistant hook and experimental_AssistantResponse
```
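At a glance, the two new exports pair up as a server route that returns `experimental_AssistantResponse` and a client component that reads its stream with `experimental_useAssistant`. The stripped-down client sketch below assumes such a route exists at `/api/assistant`; the full example page and route handler appear later in this commit.

```tsx
'use client';

import { experimental_useAssistant as useAssistant } from 'ai/react';

// Minimal sketch: render the assistant conversation and submit new messages.
export default function MinimalAssistant() {
  const { messages, input, submitMessage, handleInputChange } = useAssistant({
    api: '/api/assistant', // assumed route returning experimental_AssistantResponse
  });

  return (
    <div>
      {messages.map(m => (
        <p key={m.id}>
          {m.role}: {m.content}
        </p>
      ))}
      <form onSubmit={submitMessage}>
        <input value={input} onChange={handleInputChange} />
      </form>
    </div>
  );
}
```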
New file (+63): example README

# Home Automation Assistant Example

## Setup

### Create OpenAI Assistant

[OpenAI Assistant Website](https://platform.openai.com/assistants)

Create a new assistant. Enable Code Interpreter. Add the following functions and instructions to the assistant.

Then add the assistant id to the `.env.local` file as `ASSISTANT_ID=your-assistant-id`.
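For reference, the resulting `.env.local` (in `examples/next-openai`, per the Run section below) might look like this sketch with placeholder values; the example API route also reads `OPENAI_API_KEY`:

```
OPENAI_API_KEY=sk-...
ASSISTANT_ID=asst_...
```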
### Instructions

```
You are an assistant with access to a home automation system. You can get and set the temperature in the bedroom, home office, living room, kitchen and bathroom.

The system uses temperature in Celsius. If the user requests Fahrenheit, you should convert the temperature to Fahrenheit.
```

### getRoomTemperature function

```json
{
  "name": "getRoomTemperature",
  "description": "Get the temperature in a room",
  "parameters": {
    "type": "object",
    "properties": {
      "room": {
        "type": "string",
        "enum": ["bedroom", "home office", "living room", "kitchen", "bathroom"]
      }
    },
    "required": ["room"]
  }
}
```

### setRoomTemperature function

```json
{
  "name": "setRoomTemperature",
  "description": "Set the temperature in a room",
  "parameters": {
    "type": "object",
    "properties": {
      "room": {
        "type": "string",
        "enum": ["bedroom", "home office", "living room", "kitchen", "bathroom"]
      },
      "temperature": { "type": "number" }
    },
    "required": ["room", "temperature"]
  }
}
```
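During a run, the assistant supplies arguments matching these schemas, which the route handler below parses with `JSON.parse(toolCall.function.arguments)`. An illustrative `setRoomTemperature` call (values made up) might carry:

```json
{ "room": "kitchen", "temperature": 21 }
```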
## Run

1. Run `pnpm run dev` in `examples/next-openai`
2. Go to http://localhost:3000/assistant
New file (+140): example assistant API route (`POST` handler used by the page below via `/api/assistant`)

```ts
import { experimental_AssistantResponse } from 'ai';
import OpenAI from 'openai';
import { MessageContentText } from 'openai/resources/beta/threads/messages/messages';

// Create an OpenAI API client (that's edge friendly!)
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY || '',
});

// IMPORTANT! Set the runtime to edge
export const runtime = 'edge';

const homeTemperatures = {
  bedroom: 20,
  'home office': 21,
  'living room': 21,
  kitchen: 22,
  bathroom: 23,
};

export async function POST(req: Request) {
  // Parse the request body
  const input: {
    threadId: string | null;
    message: string;
  } = await req.json();

  // Create a thread if needed
  const threadId = input.threadId ?? (await openai.beta.threads.create({})).id;

  // Add a message to the thread
  const createdMessage = await openai.beta.threads.messages.create(threadId, {
    role: 'user',
    content: input.message,
  });

  return experimental_AssistantResponse(
    { threadId, messageId: createdMessage.id },
    async ({ threadId, sendMessage }) => {
      // Run the assistant on the thread
      const run = await openai.beta.threads.runs.create(threadId, {
        assistant_id:
          process.env.ASSISTANT_ID ??
          (() => {
            throw new Error('ASSISTANT_ID is not set');
          })(),
      });

      async function waitForRun(run: OpenAI.Beta.Threads.Runs.Run) {
        // Poll for status change
        while (run.status === 'queued' || run.status === 'in_progress') {
          // delay for 500ms:
          await new Promise(resolve => setTimeout(resolve, 500));

          run = await openai.beta.threads.runs.retrieve(threadId!, run.id);
        }

        // Check the run status
        if (
          run.status === 'cancelled' ||
          run.status === 'cancelling' ||
          run.status === 'failed' ||
          run.status === 'expired'
        ) {
          throw new Error(run.status);
        }

        if (run.status === 'requires_action') {
          if (run.required_action?.type === 'submit_tool_outputs') {
            const tool_outputs =
              run.required_action.submit_tool_outputs.tool_calls.map(
                toolCall => {
                  const parameters = JSON.parse(toolCall.function.arguments);

                  switch (toolCall.function.name) {
                    case 'getRoomTemperature': {
                      const temperature =
                        homeTemperatures[
                          parameters.room as keyof typeof homeTemperatures
                        ];

                      return {
                        tool_call_id: toolCall.id,
                        output: temperature.toString(),
                      };
                    }

                    case 'setRoomTemperature': {
                      homeTemperatures[
                        parameters.room as keyof typeof homeTemperatures
                      ] = parameters.temperature;

                      return {
                        tool_call_id: toolCall.id,
                        output: `temperature set successfully`,
                      };
                    }

                    default:
                      throw new Error(
                        `Unknown tool call function: ${toolCall.function.name}`,
                      );
                  }
                },
              );

            run = await openai.beta.threads.runs.submitToolOutputs(
              threadId!,
              run.id,
              { tool_outputs },
            );

            await waitForRun(run);
          }
        }
      }

      await waitForRun(run);

      // Get new thread messages (after our message)
      const responseMessages = (
        await openai.beta.threads.messages.list(threadId, {
          after: createdMessage.id,
          order: 'asc',
        })
      ).data;

      // Send the messages
      for (const message of responseMessages) {
        sendMessage({
          id: message.id,
          role: 'assistant',
          content: message.content.filter(
            content => content.type === 'text',
          ) as Array<MessageContentText>,
        });
      }
    },
  );
}
```
New file (+66): example assistant chat page (`'use client'` component)

```tsx
'use client';

import { Message, experimental_useAssistant as useAssistant } from 'ai/react';
import { useEffect, useRef } from 'react';

const roleToColorMap: Record<Message['role'], string> = {
  system: 'red',
  user: 'black',
  function: 'blue',
  assistant: 'green',
};

export default function Chat() {
  const { status, messages, input, submitMessage, handleInputChange, error } =
    useAssistant({
      api: '/api/assistant',
    });

  // When status changes to accepting messages, focus the input:
  const inputRef = useRef<HTMLInputElement>(null);
  useEffect(() => {
    if (status === 'awaiting_message') {
      inputRef.current?.focus();
    }
  }, [status]);

  return (
    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
      {error != null && (
        <div className="relative bg-red-500 text-white px-6 py-4 rounded-md">
          <span className="block sm:inline">
            Error: {(error as any).toString()}
          </span>
        </div>
      )}

      {messages.map((m: Message) => (
        <div
          key={m.id}
          className="whitespace-pre-wrap"
          style={{ color: roleToColorMap[m.role] }}
        >
          <strong>{`${m.role}: `}</strong>
          {m.content}
          <br />
          <br />
        </div>
      ))}

      {status === 'in_progress' && (
        <div className="h-8 w-full max-w-md p-2 mb-8 bg-gray-300 dark:bg-gray-600 rounded-lg animate-pulse" />
      )}

      <form onSubmit={submitMessage}>
        <input
          ref={inputRef}
          disabled={status !== 'awaiting_message'}
          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
          value={input}
          placeholder="What is the temperature in the living room?"
          onChange={handleInputChange}
        />
      </form>
    </div>
  );
}
```

‎packages/core/react/index.ts

+1

```diff
@@ -1,2 +1,3 @@
 export * from './use-chat';
 export * from './use-completion';
+export * from './use-assistant';
```

‎packages/core/react/use-assistant.ts

+109 (new file)

```ts
/* eslint-disable react-hooks/rules-of-hooks */

import { useState } from 'react';
import { processMessageStream } from '../shared/process-message-stream';
import { Message } from '../shared/types';
import { parseStreamPart } from '../shared/stream-parts';

export type AssistantStatus = 'in_progress' | 'awaiting_message';

export function experimental_useAssistant({
  api,
  threadId: threadIdParam,
}: {
  api: string;
  threadId?: string | undefined;
}) {
  const [messages, setMessages] = useState<Message[]>([]);
  const [input, setInput] = useState('');
  const [threadId, setThreadId] = useState<string | undefined>(undefined);
  const [status, setStatus] = useState<AssistantStatus>('awaiting_message');
  const [error, setError] = useState<unknown | undefined>(undefined);

  const handleInputChange = (e: any) => {
    setInput(e.target.value);
  };

  const submitMessage = async (e: any) => {
    e.preventDefault();

    if (input === '') {
      return;
    }

    setStatus('in_progress');

    setMessages(messages => [
      ...messages,
      { id: '', role: 'user', content: input },
    ]);

    setInput('');

    const result = await fetch(api, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        // always use user-provided threadId when available:
        threadId: threadIdParam ?? threadId ?? null,
        message: input,
      }),
    });

    if (result.body == null) {
      throw new Error('The response body is empty.');
    }

    await processMessageStream(result.body.getReader(), (message: string) => {
      try {
        const { type, value } = parseStreamPart(message);

        switch (type) {
          case 'assistant_message': {
            // append message:
            setMessages(messages => [
              ...messages,
              {
                id: value.id,
                role: value.role,
                content: value.content[0].text.value,
              },
            ]);
            break;
          }

          case 'assistant_control_data': {
            setThreadId(value.threadId);

            // set id of last message:
            setMessages(messages => {
              const lastMessage = messages[messages.length - 1];
              lastMessage.id = value.messageId;
              return [...messages.slice(0, messages.length - 1), lastMessage];
            });

            break;
          }

          case 'error': {
            setError(value);
            break;
          }
        }
      } catch (error) {
        setError(error);
      }
    });

    setStatus('awaiting_message');
  };

  return {
    messages,
    input,
    handleInputChange,
    submitMessage,
    status,
    error,
  };
}
```
New file (+25): `processMessageStream` (imported above from `../shared/process-message-stream`)

```ts
export async function processMessageStream(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  processMessage: (message: string) => void | Promise<void>,
) {
  const decoder = new TextDecoder();
  let buffer = '';

  while (true) {
    const { done, value } = await reader.read();

    if (done) {
      if (buffer.length > 0) {
        processMessage(buffer);
      }
      break;
    }

    buffer += decoder.decode(value, { stream: true });

    let endIndex: number;
    while ((endIndex = buffer.indexOf('\n')) !== -1) {
      processMessage(buffer.substring(0, endIndex).trim());
      buffer = buffer.substring(endIndex + 1); // Remove the processed instruction + delimiter
    }
  }
}
```

‎packages/core/shared/stream-parts.ts

+144 −25

```diff
@@ -1,4 +1,4 @@
-import { FunctionCall, JSONValue } from './types';
+import { AssistantMessage, FunctionCall, JSONValue } from './types';
 import { StreamString } from './utils';
 
 export interface StreamPart<CODE extends string, NAME extends string, TYPE> {
@@ -7,7 +7,7 @@ export interface StreamPart<CODE extends string, NAME extends string, TYPE> {
   parse: (value: JSONValue) => { type: NAME; value: TYPE };
 }
 
-export const textStreamPart: StreamPart<'0', 'text', string> = {
+const textStreamPart: StreamPart<'0', 'text', string> = {
   code: '0',
   name: 'text',
   parse: (value: JSONValue) => {
@@ -18,7 +18,7 @@ export const textStreamPart: StreamPart<'0', 'text', string> = {
   },
 };
 
-export const functionCallStreamPart: StreamPart<
+const functionCallStreamPart: StreamPart<
   '1',
   'function_call',
   { function_call: FunctionCall }
@@ -29,34 +29,27 @@ export const functionCallStreamPart: StreamPart<
     if (
       value == null ||
       typeof value !== 'object' ||
-      !('function_call' in value)
+      !('function_call' in value) ||
+      typeof value.function_call !== 'object' ||
+      value.function_call == null ||
+      !('name' in value.function_call) ||
+      !('arguments' in value.function_call) ||
+      typeof value.function_call.name !== 'string' ||
+      typeof value.function_call.arguments !== 'string'
     ) {
       throw new Error(
        '"function_call" parts expect an object with a "function_call" property.',
      );
    }
 
-    const functionCall = value.function_call;
-
-    if (
-      functionCall == null ||
-      typeof functionCall !== 'object' ||
-      !('name' in functionCall) ||
-      !('arguments' in functionCall)
-    ) {
-      throw new Error(
-        '"function_call" parts expect an object with a "name" and "arguments" property.',
-      );
-    }
-
     return {
       type: 'function_call',
       value: value as unknown as { function_call: FunctionCall },
     };
   },
 };
 
-export const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {
+const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {
   code: '2',
   name: 'data',
   parse: (value: JSONValue) => {
@@ -68,17 +61,108 @@ export const dataStreamPart: StreamPart<'2', 'data', Array<JSONValue>> = {
   },
 };
 
+const errorStreamPart: StreamPart<'3', 'error', string> = {
+  code: '3',
+  name: 'error',
+  parse: (value: JSONValue) => {
+    if (typeof value !== 'string') {
+      throw new Error('"error" parts expect a string value.');
+    }
+    return { type: 'error', value };
+  },
+};
+
+const assistantMessage: StreamPart<'4', 'assistant_message', AssistantMessage> =
+  {
+    code: '4',
+    name: 'assistant_message',
+    parse: (value: JSONValue) => {
+      if (
+        value == null ||
+        typeof value !== 'object' ||
+        !('id' in value) ||
+        !('role' in value) ||
+        !('content' in value) ||
+        typeof value.id !== 'string' ||
+        typeof value.role !== 'string' ||
+        value.role !== 'assistant' ||
+        !Array.isArray(value.content) ||
+        !value.content.every(
+          item =>
+            item != null &&
+            typeof item === 'object' &&
+            'type' in item &&
+            item.type === 'text' &&
+            'text' in item &&
+            item.text != null &&
+            typeof item.text === 'object' &&
+            'value' in item.text &&
+            typeof item.text.value === 'string',
+        )
+      ) {
+        throw new Error(
+          '"assistant_message" parts expect an object with an "id", "role", and "content" property.',
+        );
+      }
+
+      return {
+        type: 'assistant_message',
+        value: value as AssistantMessage,
+      };
+    },
+  };
+
+const assistantControlData: StreamPart<
+  '5',
+  'assistant_control_data',
+  {
+    threadId: string;
+    messageId: string;
+  }
+> = {
+  code: '5',
+  name: 'assistant_control_data',
+  parse: (value: JSONValue) => {
+    if (
+      value == null ||
+      typeof value !== 'object' ||
+      !('threadId' in value) ||
+      !('messageId' in value) ||
+      typeof value.threadId !== 'string' ||
+      typeof value.messageId !== 'string'
+    ) {
+      throw new Error(
+        '"assistant_control_data" parts expect an object with a "threadId" and "messageId" property.',
+      );
+    }
+
+    return {
+      type: 'assistant_control_data',
+      value: {
+        threadId: value.threadId,
+        messageId: value.messageId,
+      },
+    };
+  },
+};
+
 const streamParts = [
   textStreamPart,
   functionCallStreamPart,
   dataStreamPart,
+  errorStreamPart,
+  assistantMessage,
+  assistantControlData,
 ] as const;
 
 // union type of all stream parts
 type StreamParts =
   | typeof textStreamPart
   | typeof functionCallStreamPart
-  | typeof dataStreamPart;
+  | typeof dataStreamPart
+  | typeof errorStreamPart
+  | typeof assistantMessage
+  | typeof assistantControlData;
 
 /**
  * Maps the type of a stream part to its value type.
@@ -90,12 +174,47 @@ type StreamPartValueType = {
 export type StreamPartType =
   | ReturnType<typeof textStreamPart.parse>
   | ReturnType<typeof functionCallStreamPart.parse>
-  | ReturnType<typeof dataStreamPart.parse>;
+  | ReturnType<typeof dataStreamPart.parse>
+  | ReturnType<typeof errorStreamPart.parse>
+  | ReturnType<typeof assistantMessage.parse>
+  | ReturnType<typeof assistantControlData.parse>;
 
 export const streamPartsByCode = {
   [textStreamPart.code]: textStreamPart,
   [functionCallStreamPart.code]: functionCallStreamPart,
   [dataStreamPart.code]: dataStreamPart,
+  [errorStreamPart.code]: errorStreamPart,
+  [assistantMessage.code]: assistantMessage,
+  [assistantControlData.code]: assistantControlData,
+} as const;
+
+/**
+ * The map of prefixes for data in the stream
+ *
+ * - 0: Text from the LLM response
+ * - 1: (OpenAI) function_call responses
+ * - 2: custom JSON added by the user using `Data`
+ *
+ * Example:
+ * ```
+ * 0:Vercel
+ * 0:'s
+ * 0: AI
+ * 0: AI
+ * 0: SDK
+ * 0: is great
+ * 0:!
+ * 2: { "someJson": "value" }
+ * 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
+ *```
+ */
+export const StreamStringPrefixes = {
+  [textStreamPart.name]: textStreamPart.code,
+  [functionCallStreamPart.name]: functionCallStreamPart.code,
+  [dataStreamPart.name]: dataStreamPart.code,
+  [errorStreamPart.name]: errorStreamPart.code,
+  [assistantMessage.name]: assistantMessage.code,
+  [assistantControlData.name]: assistantControlData.code,
 } as const;
 
 export const validCodes = streamParts.map(part => part.code);
@@ -108,21 +227,21 @@ export const validCodes = streamParts.map(part => part.code);
  * @throws An error if the string cannot be parsed.
  */
 export const parseStreamPart = (line: string): StreamPartType => {
-  const firstSeperatorIndex = line.indexOf(':');
+  const firstSeparatorIndex = line.indexOf(':');
 
-  if (firstSeperatorIndex === -1) {
-    throw new Error('Failed to parse stream string. No seperator found.');
+  if (firstSeparatorIndex === -1) {
+    throw new Error('Failed to parse stream string. No separator found.');
   }
 
-  const prefix = line.slice(0, firstSeperatorIndex);
+  const prefix = line.slice(0, firstSeparatorIndex);
 
   if (!validCodes.includes(prefix as keyof typeof streamPartsByCode)) {
     throw new Error(`Failed to parse stream string. Invalid code ${prefix}.`);
  }
 
   const code = prefix as keyof typeof streamPartsByCode;
 
-  const textValue = line.slice(firstSeperatorIndex + 1);
+  const textValue = line.slice(firstSeparatorIndex + 1);
   const jsonValue: JSONValue = JSON.parse(textValue);
 
   return streamPartsByCode[code].parse(jsonValue);
```
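To illustrate the new codes, here is a hypothetical `assistant_control_data` line in the `code:json` shape that `parseStreamPart` expects (the ids are made up, and the relative import assumes a module co-located with `stream-parts`):

```ts
import { parseStreamPart } from './stream-parts';

// Code '5' dispatches to assistantControlData, which validates that the JSON
// payload after the first ':' has string threadId and messageId fields.
const part = parseStreamPart(
  '5:{"threadId":"thread_abc123","messageId":"msg_abc123"}',
);
// part = {
//   type: 'assistant_control_data',
//   value: { threadId: 'thread_abc123', messageId: 'msg_abc123' },
// }
```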

‎packages/core/shared/types.ts

+11

```diff
@@ -243,3 +243,14 @@ export type JSONValue =
   | boolean
   | { [x: string]: JSONValue }
   | Array<JSONValue>;
+
+export type AssistantMessage = {
+  id: string;
+  role: 'assistant';
+  content: Array<{
+    type: 'text';
+    text: {
+      value: string;
+    };
+  }>;
+};
```

‎packages/core/shared/utils.ts

+1 −31

```diff
@@ -1,12 +1,8 @@
 import { customAlphabet } from 'nanoid/non-secure';
-import { JSONValue } from './types';
 import {
   StreamPartType,
-  dataStreamPart,
-  functionCallStreamPart,
+  StreamStringPrefixes,
   parseStreamPart,
-  streamPartsByCode,
-  textStreamPart,
 } from './stream-parts';
 
 // 7-character random string
@@ -50,32 +46,6 @@ function createChunkDecoder(complex?: boolean) {
 
 export { createChunkDecoder };
 
-/**
- * The map of prefixes for data in the stream
- *
- * - 0: Text from the LLM response
- * - 1: (OpenAI) function_call responses
- * - 2: custom JSON added by the user using `Data`
- *
- * Example:
- * ```
- * 0:Vercel
- * 0:'s
- * 0: AI
- * 0: AI
- * 0: SDK
- * 0: is great
- * 0:!
- * 2: { "someJson": "value" }
- * 1: {"function_call": {"name": "get_current_weather", "arguments": "{\\n\\"location\\": \\"Charlottesville, Virginia\\",\\n\\"format\\": \\"celsius\\"\\n}"}}
- *```
- */
-export const StreamStringPrefixes = {
-  [textStreamPart.name]: textStreamPart.code,
-  [functionCallStreamPart.name]: functionCallStreamPart.code,
-  [dataStreamPart.name]: dataStreamPart.code,
-} as const;
-
 export const isStreamStringEqualToType = (
   type: keyof typeof StreamStringPrefixes,
   value: string,
```
New file (+60): `experimental_AssistantResponse` (re-exported below from `./assistant-response`)

```ts
import { formatStreamPart } from '../shared/stream-parts';
import { AssistantMessage } from '../shared/types';

export function experimental_AssistantResponse(
  { threadId, messageId }: { threadId: string; messageId: string },
  process: (stream: {
    threadId: string;
    messageId: string;
    sendMessage: (message: AssistantMessage) => void;
  }) => Promise<void>,
): Response {
  const stream = new ReadableStream({
    async start(controller) {
      const textEncoder = new TextEncoder();

      const sendMessage = (message: AssistantMessage) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart('assistant_message', message)),
        );
      };

      const sendError = (errorMessage: string) => {
        controller.enqueue(
          textEncoder.encode(formatStreamPart('error', errorMessage)),
        );
      };

      // send the threadId and messageId as the first message:
      controller.enqueue(
        textEncoder.encode(
          formatStreamPart('assistant_control_data', {
            threadId,
            messageId,
          }),
        ),
      );

      try {
        await process({
          threadId,
          messageId,
          sendMessage,
        });
      } catch (error) {
        sendError((error as any).message ?? `${error}`);
      } finally {
        controller.close();
      }
    },
    pull(controller) {},
    cancel() {},
  });

  return new Response(stream, {
    status: 200,
    headers: {
      'Content-Type': 'text/plain; charset=utf-8',
    },
  });
}
```

‎packages/core/streams/index.ts

+1

```diff
@@ -10,3 +10,4 @@ export * from '../shared/types';
 export * from '../shared/utils';
 export * from './stream-data';
 export * from './streaming-react-response';
+export * from './assistant-response';
```

‎turbo.json

+2 −1

```diff
@@ -9,7 +9,8 @@
       "OPENAI_API_BASE",
       "HUGGINGFACE_API_KEY",
       "REPLICATE_API_KEY",
-      "NODE_ENV"
+      "NODE_ENV",
+      "ASSISTANT_ID"
     ],
     "outputs": ["dist/**", ".next/**", "!.next/cache/**"]
   },
```
