Skip to content

Commit 2eb4b55

Browse files
authored Apr 9, 2024
Rename experimental_StreamData to StreamData. (#1309)
1 parent 149fe26 commit 2eb4b55

File tree

37 files changed

+113
-185
lines changed

37 files changed

+113
-185
lines changed
 

‎.changeset/real-spies-sort.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'ai': patch
3+
---
4+
5+
Remove experimental_ prefix from StreamData.

‎docs/pages/docs/api-reference.mdx

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ title: API Reference
3030
- [`StreamingTextResponse`](./api-reference/streaming-text-response)
3131
- [`AIStream`](./api-reference/ai-stream)
3232
- [`streamToResponse`](./api-reference/stream-to-response)
33-
- [`experimental_StreamData`](./api-reference/stream-data)
33+
- [`StreamData`](./api-reference/stream-data)
3434

3535
## Prompt Construction Helpers
3636

‎docs/pages/docs/api-reference/_meta.json

+1-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
"use-chat": "useChat",
66
"use-completion": "useCompletion",
77
"ai-stream": "AIStream",
8-
"stream-data": "experimental_StreamData",
8+
"stream-data": "StreamData",
99
"streaming-text-response": "StreamingTextResponse",
1010
"stream-to-response": "streamToResponse",
1111
"tokens": "<Tokens />"

‎docs/pages/docs/api-reference/providers/inkeep-stream.mdx

+3-3
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ import {
7676
InkeepStream,
7777
InkeepOnFinalMetadata,
7878
StreamingTextResponse,
79-
experimental_StreamData,
79+
StreamData,
8080
} from 'ai';
8181
import { InkeepAI } from '@inkeep/ai-api';
8282
import type { RecordsCited$ } from '@inkeep/ai-api/models/components';
@@ -122,7 +122,7 @@ export async function POST(req: Request) {
122122
}
123123

124124
// used to pass custom metadata to the client
125-
const data = new experimental_StreamData();
125+
const data = new StreamData();
126126

127127
if (!response?.body) {
128128
throw new Error('Response body is null');
@@ -148,7 +148,7 @@ export async function POST(req: Request) {
148148
}
149149
```
150150

151-
This example uses the [experimental_StreamData](/docs/api-reference/stream-data) and the callback methods of `InkeepStream` to attach metadata to the response.
151+
This example uses the [StreamData](/docs/api-reference/stream-data) and the callback methods of `InkeepStream` to attach metadata to the response.
152152

153153
### Client
154154

‎docs/pages/docs/api-reference/providers/mistral-stream.mdx

+2-6
Original file line numberDiff line numberDiff line change
@@ -53,11 +53,7 @@ export async function POST(req: Request) {
5353

5454
```tsx filename="app/api/completion/route.ts" showLineNumbers
5555
import MistralClient from '@mistralai/mistralai';
56-
import {
57-
MistralStream,
58-
StreamingTextResponse,
59-
experimental_StreamData,
60-
} from 'ai';
56+
import { MistralStream, StreamingTextResponse, StreamData } from 'ai';
6157

6258
const mistral = new MistralClient(process.env.MISTRAL_API_KEY || '');
6359

@@ -75,7 +71,7 @@ export async function POST(req: Request) {
7571
});
7672

7773
// optional: use stream data
78-
const data = new experimental_StreamData();
74+
const data = new StreamData();
7975

8076
data.append({ test: 'value' });
8177

‎docs/pages/docs/api-reference/stream-data.mdx

+5-15
Original file line numberDiff line numberDiff line change
@@ -1,32 +1,22 @@
11
---
2-
title: experimental_StreamData
2+
title: StreamData
33
layout:
44
toc: false
55
---
66

77
import { Callout } from 'nextra-theme-docs';
88

9-
# `experimental_StreamData`
9+
# `StreamData`
1010

11-
The `experimental_StreamData` class allows you to stream arbitrary data to the client alongside your LLM response.
11+
The `StreamData` class allows you to stream arbitrary data to the client alongside your LLM response.
1212
For information on the implementation, see the associated [pull request](https://github.com/vercel/ai/pull/425).
1313

14-
<Callout>
15-
The `experimental_` prefix indicates that the API is not yet stable and may
16-
change in the future without a major version bump.
17-
18-
</Callout>
19-
2014
## Usage
2115

2216
### On the Server
2317

2418
```jsx filename="app/api/chat/route.ts" {24-25,39-40,58-59,62-63,66-67}
25-
import {
26-
OpenAIStream,
27-
StreamingTextResponse,
28-
experimental_StreamData,
29-
} from 'ai';
19+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
3020
import OpenAI from 'openai';
3121
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
3222

@@ -46,7 +36,7 @@ export async function POST(req: Request) {
4636
});
4737

4838
// Instantiate the StreamData. It works with all API providers.
49-
const data = new experimental_StreamData();
39+
const data = new StreamData();
5040

5141
const stream = OpenAIStream(response, {
5242
experimental_onFunctionCall: async (

‎docs/pages/docs/api-reference/streaming-react-response.mdx

+2-2
Original file line numberDiff line numberDiff line change
@@ -28,14 +28,14 @@ The `experimental_StreamingReactResponse` class is designed to facilitate stream
2828

2929
This parameter should be a `ReadableStream`, which encapsulates the HTTP response's content. It represents the stream from which the response is read and processed.
3030

31-
### `options?: {ui?: Function, data?: experimental_StreamData}`
31+
### `options?: {ui?: Function, data?: StreamData}`
3232

3333
This optional parameter allows additional configurations for rendering React components and handling streamed data.
3434

3535
The options object can include:
3636

3737
- `ui?: (message: {content: string, data?: JSONValue[] | undefined}) => UINode | Promise<UINode>`: A function that receives a message object with `content` and optional `data` fields. This function should return a React component (as `UINode`) for each chunk in the stream. The `data` attribute in the message is available when the `data` option is configured to include stream data.
38-
- `data?: experimental_StreamData`: An instance of `experimental_StreamData` used to process and stream data along with the response.
38+
- `data?: StreamData`: An instance of `StreamData` used to process and stream data along with the response.
3939

4040
## Returns
4141

‎docs/pages/docs/guides/providers/inkeep.mdx

+3-3
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ import {
5757
InkeepStream,
5858
InkeepOnFinalMetadata,
5959
StreamingTextResponse,
60-
experimental_StreamData,
60+
StreamData,
6161
} from 'ai';
6262
import { InkeepAI } from '@inkeep/ai-api';
6363
import type { RecordsCited$ } from '@inkeep/ai-api/models/components';
@@ -103,7 +103,7 @@ export async function POST(req: Request) {
103103
}
104104

105105
// used to pass custom metadata to the client
106-
const data = new experimental_StreamData();
106+
const data = new StreamData();
107107

108108
if (!response?.body) {
109109
throw new Error('Response body is null');
@@ -140,7 +140,7 @@ This example leverages a few utilities provided by the Vercel AI SDK:
140140
class with the default headers you probably want (hint: `'Content-Type':
141141
'text/plain; charset=utf-8'` is already set for you). This will provide the streamed content to the client.
142142

143-
3. Lastly, we use the [experimental_StreamData](/docs/api-reference/stream-data) and callback methods of the `InkeepStream` to attach metadata to the response like `onFinalMetadata.chat_session_id` and `records_cited.citations` for use by the client.
143+
3. Lastly, we use the [StreamData](/docs/api-reference/stream-data) and callback methods of the `InkeepStream` to attach metadata to the response like `onFinalMetadata.chat_session_id` and `records_cited.citations` for use by the client.
144144

145145
<Callout>
146146
It's common to save a chat to a database. To do so, you can leverage the

‎examples/next-inkeep/app/api/chat/route.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import {
22
InkeepStream,
33
InkeepOnFinalMetadata,
44
StreamingTextResponse,
5-
experimental_StreamData,
5+
StreamData,
66
} from 'ai';
77
import { InkeepAI } from '@inkeep/ai-api';
88
import type { RecordsCited$ } from '@inkeep/ai-api/models/components';
@@ -48,7 +48,7 @@ export async function POST(req: Request) {
4848
}
4949

5050
// used to pass custom metadata to the client
51-
const data = new experimental_StreamData();
51+
const data = new StreamData();
5252

5353
if (!response?.body) {
5454
throw new Error('Response body is null');

‎examples/next-langchain/app/api/stream-data-basic/route.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import {
22
StreamingTextResponse,
33
LangChainStream,
44
Message,
5-
experimental_StreamData,
5+
StreamData,
66
} from 'ai';
77
import { ChatOpenAI } from 'langchain/chat_models/openai';
88
import { AIMessage, HumanMessage } from 'langchain/schema';
@@ -12,7 +12,7 @@ export const runtime = 'edge';
1212
export async function POST(req: Request) {
1313
const { messages } = await req.json();
1414

15-
const data = new experimental_StreamData();
15+
const data = new StreamData();
1616

1717
// important: use LangChainStream from the AI SDK:
1818
const { stream, handlers } = LangChainStream({

‎examples/next-langchain/app/api/stream-data-chain/route.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
LangChainStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { LangChainStream, StreamingTextResponse, StreamData } from 'ai';
62
import { LLMChain } from 'langchain/chains';
73
import { OpenAI } from 'langchain/llms/openai';
84
import { PromptTemplate } from 'langchain/prompts';
@@ -18,7 +14,7 @@ export async function POST(req: Request) {
1814
);
1915
const chain = new LLMChain({ llm: model, prompt });
2016

21-
const data = new experimental_StreamData();
17+
const data = new StreamData();
2218

2319
// important: use LangChainStream from the AI SDK:
2420
const { stream, handlers } = LangChainStream({

‎examples/next-mistral/app/api/completion/route.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import MistralClient from '@mistralai/mistralai';
2-
import {
3-
MistralStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from 'ai';
2+
import { MistralStream, StreamingTextResponse, StreamData } from 'ai';
73

84
const mistral = new MistralClient(process.env.MISTRAL_API_KEY || '');
95

@@ -21,7 +17,7 @@ export async function POST(req: Request) {
2117
});
2218

2319
// optional: use stream data
24-
const data = new experimental_StreamData();
20+
const data = new StreamData();
2521

2622
data.append({ test: 'value' });
2723

‎examples/next-openai/app/api/chat-with-functions/route.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
OpenAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
62
import OpenAI from 'openai';
73
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
84

@@ -58,7 +54,7 @@ export async function POST(req: Request) {
5854
functions,
5955
});
6056

61-
const data = new experimental_StreamData();
57+
const data = new StreamData();
6258
const stream = OpenAIStream(response, {
6359
experimental_onFunctionCall: async (
6460
{ name, arguments: args },

‎examples/next-openai/app/api/chat-with-tools/route.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ import {
33
StreamingTextResponse,
44
Tool,
55
ToolCallPayload,
6-
experimental_StreamData,
6+
StreamData,
77
} from 'ai';
88
import OpenAI from 'openai';
99

@@ -73,7 +73,7 @@ export async function POST(req: Request) {
7373
tool_choice: 'auto',
7474
});
7575

76-
const data = new experimental_StreamData();
76+
const data = new StreamData();
7777
const stream = OpenAIStream(response, {
7878
experimental_onToolCall: async (
7979
call: ToolCallPayload,

‎examples/next-openai/app/api/completion/route.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
StreamingTextResponse,
3-
experimental_StreamData,
4-
experimental_streamText,
5-
} from 'ai';
1+
import { StreamingTextResponse, StreamData, experimental_streamText } from 'ai';
62
import { openai } from 'ai/openai';
73

84
export const runtime = 'edge';
@@ -18,7 +14,7 @@ export async function POST(req: Request) {
1814
});
1915

2016
// optional: use stream data
21-
const data = new experimental_StreamData();
17+
const data = new StreamData();
2218

2319
data.append({ test: 'value' });
2420

‎examples/next-openai/app/stream-react-response/action.tsx

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
import {
44
Message,
55
OpenAIStream,
6-
experimental_StreamData,
6+
StreamData,
77
experimental_StreamingReactResponse,
88
} from 'ai';
99
import { experimental_buildOpenAIMessages } from 'ai/prompts';
@@ -48,7 +48,7 @@ const functions: ChatCompletionCreateParams.Function[] = [
4848
];
4949

5050
export async function handler({ messages }: { messages: Message[] }) {
51-
const data = new experimental_StreamData();
51+
const data = new StreamData();
5252

5353
const openai = new OpenAI({
5454
apiKey: process.env.OPENAI_API_KEY!,

‎examples/nuxt-openai/server/api/chat-with-functions.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
OpenAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
62
import OpenAI from 'openai';
73
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
84

@@ -57,7 +53,7 @@ export default defineLazyEventHandler(async () => {
5753
functions,
5854
});
5955

60-
const data = new experimental_StreamData();
56+
const data = new StreamData();
6157
const stream = OpenAIStream(response, {
6258
experimental_onFunctionCall: async (
6359
{ name, arguments: args },

‎examples/nuxt-openai/server/api/completion.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
OpenAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
62
import OpenAI from 'openai';
73

84
export default defineLazyEventHandler(async () => {
@@ -24,7 +20,7 @@ export default defineLazyEventHandler(async () => {
2420
});
2521

2622
// optional: use stream data
27-
const data = new experimental_StreamData();
23+
const data = new StreamData();
2824

2925
data.append({ test: 'value' });
3026

‎examples/solidstart-openai/src/routes/api/chat-with-functions/index.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
OpenAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
62
import OpenAI from 'openai';
73
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
84

@@ -57,7 +53,7 @@ export const POST = async (event: APIEvent) => {
5753
functions,
5854
});
5955

60-
const data = new experimental_StreamData();
56+
const data = new StreamData();
6157
const stream = OpenAIStream(response, {
6258
experimental_onFunctionCall: async (
6359
{ name, arguments: args },

‎examples/solidstart-openai/src/routes/api/completion/index.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
OpenAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from 'ai';
1+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
62
import OpenAI from 'openai';
73

84
import { APIEvent } from 'solid-start/api';
@@ -24,7 +20,7 @@ export const POST = async (event: APIEvent) => {
2420
});
2521

2622
// optional: use stream data
27-
const data = new experimental_StreamData();
23+
const data = new StreamData();
2824

2925
data.append({ test: 'value' });
3026

‎examples/sveltekit-openai/src/routes/api/chat-with-functions/+server.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import OpenAI from 'openai';
2-
import {
3-
OpenAIStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from 'ai';
2+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
73
import { env } from '$env/dynamic/private';
84
import type { ChatCompletionCreateParams } from 'openai/resources/chat';
95

@@ -60,7 +56,7 @@ export async function POST({ request }) {
6056
functions,
6157
});
6258

63-
const data = new experimental_StreamData();
59+
const data = new StreamData();
6460
const stream = OpenAIStream(response, {
6561
experimental_onFunctionCall: async (
6662
{ name, arguments: args },

‎examples/sveltekit-openai/src/routes/api/chat-with-tools/+server.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import OpenAI from 'openai';
22
import {
33
OpenAIStream,
44
StreamingTextResponse,
5-
experimental_StreamData,
5+
StreamData,
66
type Tool,
77
type ToolCallPayload,
88
} from 'ai';
@@ -70,7 +70,7 @@ export async function POST({ request }) {
7070
tool_choice: 'auto',
7171
});
7272

73-
const data = new experimental_StreamData();
73+
const data = new StreamData();
7474

7575
const stream = OpenAIStream(response, {
7676
experimental_onToolCall: async (

‎examples/sveltekit-openai/src/routes/api/completion/+server.ts

+2-6
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import OpenAI from 'openai';
2-
import {
3-
OpenAIStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from 'ai';
2+
import { OpenAIStream, StreamingTextResponse, StreamData } from 'ai';
73

84
import { env } from '$env/dynamic/private';
95
// You may want to replace the above with a static private env variable
@@ -29,7 +25,7 @@ export const POST = (async ({ request }) => {
2925
});
3026

3127
// optional: use stream data
32-
const data = new experimental_StreamData();
28+
const data = new StreamData();
3329

3430
data.append({ test: 'value' });
3531

‎packages/core/streams/anthropic-stream.test.ts

+5-9
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import Anthropic from '@anthropic-ai/sdk';
2-
import {
3-
AnthropicStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from '.';
2+
import { AnthropicStream, StreamingTextResponse, StreamData } from '.';
73
import {
84
anthropicMessageChunks,
95
anthropicCompletionChunks,
@@ -46,7 +42,7 @@ describe('Anthropic completion', () => {
4642
apiKey: 'sk-doesnt-matter',
4743
});
4844

49-
const data = new experimental_StreamData();
45+
const data = new StreamData();
5046

5147
const anthropicResponse = await anthropic.completions.create({
5248
prompt: '',
@@ -77,7 +73,7 @@ describe('Anthropic completion', () => {
7773
apiKey: 'sk-doesnt-matter',
7874
});
7975

80-
const data = new experimental_StreamData();
76+
const data = new StreamData();
8177

8278
data.append({ t1: 'v1' });
8379

@@ -113,7 +109,7 @@ describe('Anthropic message', () => {
113109
apiKey: 'sk-doesnt-matter',
114110
});
115111

116-
const data = new experimental_StreamData();
112+
const data = new StreamData();
117113

118114
const anthropicResponse = await anthropic.messages.create({
119115
messages: [{ role: 'user', content: 'Hello' }],
@@ -144,7 +140,7 @@ describe('Anthropic message', () => {
144140
apiKey: 'sk-doesnt-matter',
145141
});
146142

147-
const data = new experimental_StreamData();
143+
const data = new StreamData();
148144

149145
data.append({ t1: 'v1' });
150146

‎packages/core/streams/aws-bedrock-stream.test.ts

+7-7
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { StreamingTextResponse, experimental_StreamData } from '.';
1+
import { StreamingTextResponse, StreamData } from '.';
22
import {
33
bedrockAnthropicChunks,
44
bedrockAnthropicV3Chunks,
@@ -40,7 +40,7 @@ function simulateBedrockResponse(chunks: any[]) {
4040
describe('AWS Bedrock', () => {
4141
describe('Anthropic', () => {
4242
it('should send text', async () => {
43-
const data = new experimental_StreamData();
43+
const data = new StreamData();
4444

4545
const bedrockResponse = simulateBedrockResponse(bedrockAnthropicChunks);
4646
const stream = AWSBedrockAnthropicStream(bedrockResponse, {
@@ -60,7 +60,7 @@ describe('AWS Bedrock', () => {
6060
});
6161

6262
it('should send text and data', async () => {
63-
const data = new experimental_StreamData();
63+
const data = new StreamData();
6464

6565
data.append({ t1: 'v1' });
6666

@@ -100,7 +100,7 @@ describe('AWS Bedrock', () => {
100100

101101
describe('Cohere', () => {
102102
it('should send text', async () => {
103-
const data = new experimental_StreamData();
103+
const data = new StreamData();
104104

105105
const bedrockResponse = simulateBedrockResponse(bedrockCohereChunks);
106106
const stream = AWSBedrockCohereStream(bedrockResponse, {
@@ -125,7 +125,7 @@ describe('AWS Bedrock', () => {
125125
});
126126

127127
it('should send text and data', async () => {
128-
const data = new experimental_StreamData();
128+
const data = new StreamData();
129129

130130
data.append({ t1: 'v1' });
131131

@@ -155,7 +155,7 @@ describe('AWS Bedrock', () => {
155155

156156
describe('Llama2', () => {
157157
it('should send text', async () => {
158-
const data = new experimental_StreamData();
158+
const data = new StreamData();
159159

160160
const bedrockResponse = simulateBedrockResponse(bedrockLlama2Chunks);
161161
const stream = AWSBedrockLlama2Stream(bedrockResponse, {
@@ -177,7 +177,7 @@ describe('AWS Bedrock', () => {
177177
});
178178

179179
it('should send text and data', async () => {
180-
const data = new experimental_StreamData();
180+
const data = new StreamData();
181181

182182
data.append({ t1: 'v1' });
183183

‎packages/core/streams/cohere-stream.test.ts

+3-7
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
CohereStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from '.';
1+
import { CohereStream, StreamingTextResponse, StreamData } from '.';
62
import { cohereChatChunks, cohereChunks } from '../tests/snapshots/cohere';
73
import { readAllChunks } from '../tests/utils/mock-client';
84
import { DEFAULT_TEST_URL, createMockServer } from '../tests/utils/mock-server';
@@ -34,7 +30,7 @@ describe('CohereStream', () => {
3430
});
3531

3632
it('should send text', async () => {
37-
const data = new experimental_StreamData();
33+
const data = new StreamData();
3834

3935
const stream = CohereStream(await fetch(DEFAULT_TEST_URL), {
4036
onFinal() {
@@ -54,7 +50,7 @@ describe('CohereStream', () => {
5450
});
5551

5652
it('should send text and data', async () => {
57-
const data = new experimental_StreamData();
53+
const data = new StreamData();
5854

5955
data.append({ t1: 'v1' });
6056

‎packages/core/streams/google-generative-ai-stream.test.ts

+3-7
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import {
2-
GoogleGenerativeAIStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from '.';
1+
import { GoogleGenerativeAIStream, StreamingTextResponse, StreamData } from '.';
62
import { readAllChunks } from '../tests/utils/mock-client';
73

84
function simulateGenerativeAIResponse(chunks: any[]) {
@@ -68,7 +64,7 @@ export const googleGenerativeAIChunks = [
6864
];
6965

7066
it('should send text', async () => {
71-
const data = new experimental_StreamData();
67+
const data = new StreamData();
7268

7369
const aiResponse = simulateGenerativeAIResponse(googleGenerativeAIChunks);
7470
const stream = GoogleGenerativeAIStream(aiResponse, {
@@ -88,7 +84,7 @@ it('should send text', async () => {
8884
});
8985

9086
it('should send text and data', async () => {
91-
const data = new experimental_StreamData();
87+
const data = new StreamData();
9288

9389
data.append({ t1: 'v1' });
9490

‎packages/core/streams/huggingface-stream.test.ts

+3-7
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import { HfInference } from '@huggingface/inference';
2-
import {
3-
HuggingFaceStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from '.';
2+
import { HuggingFaceStream, StreamingTextResponse, StreamData } from '.';
73
import { huggingfaceChunks } from '../tests/snapshots/huggingface';
84
import { createClient } from '../tests/utils/mock-client';
95
import { DEFAULT_TEST_URL, createMockServer } from '../tests/utils/mock-server';
@@ -36,7 +32,7 @@ describe('HuggingFace stream', () => {
3632
}
3733

3834
it('should send text', async () => {
39-
const data = new experimental_StreamData();
35+
const data = new StreamData();
4036

4137
const stream = HuggingFaceStream(
4238
Hf.textGenerationStream(
@@ -61,7 +57,7 @@ describe('HuggingFace stream', () => {
6157
});
6258

6359
it('should send text and data', async () => {
64-
const data = new experimental_StreamData();
60+
const data = new StreamData();
6561

6662
data.append({ t1: 'v1' });
6763

‎packages/core/streams/inkeep-stream.test.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import {
22
InkeepOnFinalMetadata,
33
InkeepStream,
44
StreamingTextResponse,
5-
experimental_StreamData,
5+
StreamData,
66
} from '.';
77
import { InkeepEventStream } from '../tests/snapshots/inkeep';
88
import { readAllChunks } from '../tests/utils/mock-client';
@@ -34,7 +34,7 @@ describe('InkeepStream', () => {
3434
'"records_cited":{"citations":[{"number":1,"record":{"url":"https://inkeep.com","title":"Inkeep","breadcrumbs":["Home","About"]}}]}';
3535

3636
it('should receive and send Inkeep onFinal metadata with chat_session_id', async () => {
37-
const data = new experimental_StreamData();
37+
const data = new StreamData();
3838

3939
const response = await fetch(DEFAULT_TEST_URL);
4040

@@ -60,7 +60,7 @@ describe('InkeepStream', () => {
6060
});
6161

6262
it('should receive and send Inkeep records_cited data as message annotation', async () => {
63-
const data = new experimental_StreamData();
63+
const data = new StreamData();
6464

6565
const response = await fetch(DEFAULT_TEST_URL);
6666

‎packages/core/streams/langchain-stream.test.ts

+5-5
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import {
22
LangChainStream,
33
StreamingTextResponse,
44
createStreamDataTransformer,
5-
experimental_StreamData,
5+
StreamData,
66
} from '.';
77
import { openaiChatCompletionChunks } from '../tests/snapshots/openai-chat';
88
import { DEFAULT_TEST_URL, createMockServer } from '../tests/utils/mock-server';
@@ -39,7 +39,7 @@ describe('LangchainStream', () => {
3939
});
4040

4141
it('should send text', async () => {
42-
const data = new experimental_StreamData();
42+
const data = new StreamData();
4343

4444
const model = new ChatOpenAI({
4545
streaming: true,
@@ -83,7 +83,7 @@ describe('LangchainStream', () => {
8383
});
8484

8585
it('should send text and data', async () => {
86-
const data = new experimental_StreamData();
86+
const data = new StreamData();
8787

8888
data.append({ t1: 'v1' });
8989

@@ -131,7 +131,7 @@ describe('LangchainStream', () => {
131131

132132
describe('LangChain LLM call', () => {
133133
it('should send text', async () => {
134-
const data = new experimental_StreamData();
134+
const data = new StreamData();
135135

136136
const { stream, handlers } = LangChainStream({
137137
onFinal() {
@@ -162,7 +162,7 @@ describe('LangchainStream', () => {
162162
});
163163

164164
it('should send text and data', async () => {
165-
const data = new experimental_StreamData();
165+
const data = new StreamData();
166166

167167
data.append({ t1: 'v1' });
168168

‎packages/core/streams/mistral-stream.test.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import MistralClient from '@mistralai/mistralai';
2-
import { StreamingTextResponse, experimental_StreamData } from '.';
2+
import { StreamingTextResponse, StreamData } from '.';
33
import { mistralChunks } from '../tests/snapshots/mistral';
44
import { readAllChunks } from '../tests/utils/mock-client';
55
import { createMockServer } from '../tests/utils/mock-server';
@@ -28,7 +28,7 @@ describe('MistralStream', () => {
2828
});
2929

3030
it('should send text', async () => {
31-
const data = new experimental_StreamData();
31+
const data = new StreamData();
3232

3333
const client = new MistralClient('api-key', 'http://localhost:3030');
3434

@@ -56,7 +56,7 @@ describe('MistralStream', () => {
5656
});
5757

5858
it('should send text and data', async () => {
59-
const data = new experimental_StreamData();
59+
const data = new StreamData();
6060

6161
data.append({ t1: 'v1' });
6262

‎packages/core/streams/openai-stream.test.ts

+10-14
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import OpenAI from 'openai';
2-
import {
3-
OpenAIStream,
4-
StreamingTextResponse,
5-
experimental_StreamData,
6-
} from '.';
2+
import { OpenAIStream, StreamingTextResponse, StreamData } from '.';
73
import {
84
chatCompletionChunksWithToolCall,
95
openaiChatCompletionChunks,
@@ -70,7 +66,7 @@ describe('OpenAIStream', () => {
7066
});
7167

7268
it('should send text', async () => {
73-
const data = new experimental_StreamData();
69+
const data = new StreamData();
7470

7571
const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
7672
onFinal() {
@@ -92,7 +88,7 @@ describe('OpenAIStream', () => {
9288
});
9389

9490
it('should send function response as text stream when onFunctionCall is not defined', async () => {
95-
const data = new experimental_StreamData();
91+
const data = new StreamData();
9692

9793
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
9894
onFinal() {
@@ -131,7 +127,7 @@ describe('OpenAIStream', () => {
131127
});
132128

133129
it('should send function response when onFunctionCall is defined and returns undefined', async () => {
134-
const data = new experimental_StreamData();
130+
const data = new StreamData();
135131

136132
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
137133
onFinal() {
@@ -153,7 +149,7 @@ describe('OpenAIStream', () => {
153149
});
154150

155151
it('should not call onText for function calls', async () => {
156-
const data = new experimental_StreamData();
152+
const data = new StreamData();
157153

158154
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
159155
onFinal() {
@@ -175,7 +171,7 @@ describe('OpenAIStream', () => {
175171
});
176172

177173
it('should send function response and data when onFunctionCall is defined, returns undefined, and data is added', async () => {
178-
const data = new experimental_StreamData();
174+
const data = new StreamData();
179175

180176
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
181177
onFinal() {
@@ -200,7 +196,7 @@ describe('OpenAIStream', () => {
200196
});
201197

202198
it('should send return value when onFunctionCall is defined and returns value', async () => {
203-
const data = new experimental_StreamData();
199+
const data = new StreamData();
204200

205201
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
206202
onFinal() {
@@ -220,7 +216,7 @@ describe('OpenAIStream', () => {
220216
});
221217

222218
it('should send return value and data when onFunctionCall is defined, returns value and data is added', async () => {
223-
const data = new experimental_StreamData();
219+
const data = new StreamData();
224220

225221
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
226222
onFinal() {
@@ -244,7 +240,7 @@ describe('OpenAIStream', () => {
244240
});
245241

246242
it('should send text and data', async () => {
247-
const data = new experimental_StreamData();
243+
const data = new StreamData();
248244

249245
data.append({ t1: 'v1' });
250246

@@ -335,7 +331,7 @@ describe('OpenAIStream', () => {
335331
}
336332

337333
it('should send text', async () => {
338-
const data = new experimental_StreamData();
334+
const data = new StreamData();
339335

340336
const stream = OpenAIStream(
341337
asyncIterableFromArray(azureOpenaiChatCompletionChunks),

‎packages/core/streams/replicate-stream.test.ts

+3-7
Original file line number · Diff line number · Diff line change
@@ -1,8 +1,4 @@
1-
import {
2-
ReplicateStream,
3-
StreamingTextResponse,
4-
experimental_StreamData,
5-
} from '.';
1+
import { ReplicateStream, StreamingTextResponse, StreamData } from '.';
62
import { replicateTextChunks } from '../tests/snapshots/replicate';
73
import { readAllChunks } from '../tests/utils/mock-client';
84
import { DEFAULT_TEST_URL, createMockServer } from '../tests/utils/mock-server';
@@ -29,7 +25,7 @@ describe('ReplicateStream', () => {
2925
});
3026

3127
it('should send text', async () => {
32-
const data = new experimental_StreamData();
28+
const data = new StreamData();
3329

3430
const stream = await ReplicateStream(
3531
{
@@ -58,7 +54,7 @@ describe('ReplicateStream', () => {
5854
});
5955

6056
it('should send text and data', async () => {
61-
const data = new experimental_StreamData();
57+
const data = new StreamData();
6258

6359
data.append({ t1: 'v1' });
6460

‎packages/core/streams/stream-data.ts

+6-1
Original file line number · Diff line number · Diff line change
@@ -4,7 +4,7 @@ import { JSONValue } from '../shared/types';
44
/**
55
* A stream wrapper to send custom JSON-encoded data back to the client.
66
*/
7-
export class experimental_StreamData {
7+
export class StreamData {
88
private encoder = new TextEncoder();
99

1010
private controller: TransformStreamDefaultController<Uint8Array> | null =
@@ -129,3 +129,8 @@ export function createStreamDataTransformer() {
129129
},
130130
});
131131
}
132+
133+
/**
134+
@deprecated Use `StreamData` instead.
135+
*/
136+
export class experimental_StreamData extends StreamData {}

‎packages/core/streams/streaming-react-response.test.tsx

+6-6
Original file line number · Diff line number · Diff line change
@@ -2,7 +2,7 @@ import ReactDOMServer from 'react-dom/server';
22
import {
33
OpenAIStream,
44
ReactResponseRow,
5-
experimental_StreamData,
5+
StreamData,
66
experimental_StreamingReactResponse,
77
} from '.';
88
import {
@@ -85,7 +85,7 @@ describe('without ui', () => {
8585
});
8686

8787
it('should stream text response as React rows from data stream', async () => {
88-
const data = new experimental_StreamData();
88+
const data = new StreamData();
8989

9090
const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
9191
onFinal() {
@@ -128,7 +128,7 @@ describe('with ui: sync jsx for content', () => {
128128
});
129129

130130
it('should stream React response as React rows from data stream', async () => {
131-
const data = new experimental_StreamData();
131+
const data = new StreamData();
132132

133133
const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
134134
onFinal() {
@@ -172,7 +172,7 @@ describe('with ui: async sync jsx for content', () => {
172172
});
173173

174174
it('should stream React response as React rows from data stream', async () => {
175-
const data = new experimental_StreamData();
175+
const data = new StreamData();
176176

177177
const stream = OpenAIStream(await fetch(DEFAULT_TEST_URL), {
178178
onFinal() {
@@ -199,7 +199,7 @@ describe('with ui: async sync jsx for content', () => {
199199

200200
describe('with ui: sync jsx for content and data', () => {
201201
it('should stream React response as React rows from data stream when data is appended', async () => {
202-
const data = new experimental_StreamData();
202+
const data = new StreamData();
203203

204204
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
205205
onFinal() {
@@ -243,7 +243,7 @@ describe('with ui: sync jsx for content and data', () => {
243243

244244
describe('with ui: async jsx for content and data', () => {
245245
it('should stream React response as React rows from data stream when data is appended', async () => {
246-
const data = new experimental_StreamData();
246+
const data = new StreamData();
247247

248248
const stream = OpenAIStream(await fetch(FUNCTION_CALL_TEST_URL), {
249249
onFinal() {

‎packages/core/streams/streaming-react-response.ts

+2-2
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,7 @@
1111
import { parseComplexResponse } from '../shared/parse-complex-response';
1212
import { IdGenerator, JSONValue } from '../shared/types';
1313
import { nanoid } from '../shared/utils';
14-
import { experimental_StreamData } from './stream-data';
14+
import { StreamData } from './stream-data';
1515

1616
type UINode = string | JSX.Element | JSX.Element[] | null | undefined;
1717

@@ -35,7 +35,7 @@ export class experimental_StreamingReactResponse {
3535
content: string;
3636
data?: JSONValue[];
3737
}) => UINode | Promise<UINode>;
38-
data?: experimental_StreamData;
38+
data?: StreamData;
3939
generateId?: IdGenerator;
4040
},
4141
) {

‎packages/core/streams/streaming-text-response.ts

+2-6
Original file line number · Diff line number · Diff line change
@@ -1,15 +1,11 @@
11
import type { ServerResponse } from 'node:http';
2-
import { experimental_StreamData } from './stream-data';
2+
import { StreamData } from './stream-data';
33

44
/**
55
* A utility class for streaming text responses.
66
*/
77
export class StreamingTextResponse extends Response {
8-
constructor(
9-
res: ReadableStream,
10-
init?: ResponseInit,
11-
data?: experimental_StreamData,
12-
) {
8+
constructor(res: ReadableStream, init?: ResponseInit, data?: StreamData) {
139
let processedStream = res;
1410

1511
if (data) {

0 commit comments

Comments (0)
Please sign in to comment.