import type { ReactNode } from 'react';
import type OpenAI from 'openai';
import { z } from 'zod';
import zodToJsonSchema from 'zod-to-json-schema';

// TODO: This needs to be externalized.
import { OpenAIStream } from '../streams';

import { STREAMABLE_VALUE_TYPE } from './constants';
import {
  createResolvablePromise,
  createSuspensedChunk,
  consumeStream,
} from './utils';

/**
 * Create a piece of changeable UI that can be streamed to the client.
 * On the client side, it can be rendered as a normal React node.
 */
export function createStreamableUI(initialValue?: React.ReactNode) {
  let currentValue = initialValue;
  let closed = false;
  let { row, resolve, reject } = createSuspensedChunk(initialValue);

  function assertStream() {
    if (closed) {
      throw new Error('UI stream is already closed.');
    }
  }

  return {
    value: row,
    update(value: React.ReactNode) {
      assertStream();

      const resolvable = createResolvablePromise();
      resolve({ value, done: false, next: resolvable.promise });
      resolve = resolvable.resolve;
      reject = resolvable.reject;
      currentValue = value;
    },
    append(value: React.ReactNode) {
      assertStream();

      const resolvable = createResolvablePromise();
      resolve({ value, done: false, next: resolvable.promise });
      resolve = resolvable.resolve;
      reject = resolvable.reject;
      if (typeof currentValue === 'string' && typeof value === 'string') {
        currentValue += value;
      } else {
        currentValue = (
          <>
            {currentValue}
            {value}
          </>
        );
      }
    },
    error(error: any) {
      assertStream();

      closed = true;
      reject(error);
    },
    done(...args: any) {
      assertStream();

      closed = true;
      if (args.length) {
        resolve({ value: args[0], done: true });
        return;
      }
      resolve({ value: currentValue, done: true });
    },
  };
}
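
// Usage sketch (illustrative only, not part of the original module): on the
// server, stream a placeholder first and replace it once data is ready.
// `Spinner`, `Weather`, and `getWeather` are hypothetical.
//
//   const ui = createStreamableUI(<Spinner />);
//   (async () => {
//     try {
//       const weather = await getWeather('San Francisco');
//       ui.done(<Weather data={weather} />);
//     } catch (e) {
//       ui.error(e);
//     }
//   })();
//   return ui.value;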

/**
 * Create a wrapped, changeable value that can be streamed to the client.
 * On the client side, the value can be accessed via the useStreamableValue() hook.
 */
export function createStreamableValue<T = any>(initialValue?: T) {
  // let currentValue = initialValue
  let closed = false;
  let { promise, resolve, reject } = createResolvablePromise();

  function assertStream() {
    if (closed) {
      throw new Error('Value stream is already closed.');
    }
  }

  function createWrapped(val: T | undefined, initial?: boolean) {
    if (initial) {
      return {
        type: STREAMABLE_VALUE_TYPE,
        curr: val,
        next: promise,
      };
    }

    return {
      curr: val,
      next: promise,
    };
  }

  return {
    value: createWrapped(initialValue, true),
    update(value: T) {
      assertStream();

      const resolvePrevious = resolve;
      const resolvable = createResolvablePromise();
      promise = resolvable.promise;
      resolve = resolvable.resolve;
      reject = resolvable.reject;

      resolvePrevious(createWrapped(value));

      // currentValue = value
    },
    error(error: any) {
      assertStream();

      closed = true;
      reject(error);
    },
    done(...args: any) {
      assertStream();

      closed = true;

      if (args.length) {
        resolve({ curr: args[0] });
        return;
      }

      resolve({});
    },
  };
}
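
// Usage sketch (illustrative only): stream a changing primitive value that the
// client reads via the useStreamableValue() hook mentioned above. The work
// loop and `doSomeWork` are hypothetical.
//
//   const progress = createStreamableValue(0);
//   (async () => {
//     for (let i = 1; i <= 100; i++) {
//       await doSomeWork();
//       progress.update(i);
//     }
//     progress.done(100);
//   })();
//   return progress.value;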

type Streamable = ReactNode | Promise<ReactNode>;
type Renderer<T> = (
  props: T,
) =>
  | Streamable
  | Generator<Streamable, Streamable, void>
  | AsyncGenerator<Streamable, Streamable, void>;
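
// A Renderer may return UI in any of the shapes that `render`'s internal
// `handleRender` helper below accepts: a plain node, a promise of a node, or a
// (sync or async) generator that yields intermediate nodes before returning
// the final one. Sketch, with a hypothetical `getTemperature` helper:
//
//   async function* weatherRenderer({ city }: { city: string }) {
//     yield <p>Loading weather for {city}…</p>;
//     const temperature = await getTemperature(city);
//     return <p>{city}: {temperature}°C</p>;
//   }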

/**
 * `render` is a helper function to create a streamable UI from an LLM response.
 * Currently, it only supports OpenAI's GPT models with Function Calling and Assistants Tools.
 */
export function render<
  TS extends {
    [name: string]: z.Schema;
  } = {},
  FS extends {
    [name: string]: z.Schema;
  } = {},
>(options: {
  /**
   * The model name to use. Currently the only models available are OpenAI's
   * GPT models (3.5/4) with Function Calling and Assistants Tools.
   *
   * @example "gpt-3.5-turbo"
   */
  model: `gpt-${string}`;
  /**
   * The provider instance to use. Currently the only provider available is OpenAI.
   * This needs to match the model name.
   */
  provider: OpenAI;
  messages: Parameters<
    typeof OpenAI.prototype.chat.completions.create
  >[0]['messages'];
  text?: Renderer<{ content: string; done: boolean }>;
  tools?: {
    [name in keyof TS]: {
      description?: string;
      parameters: TS[name];
      render: Renderer<z.infer<TS[name]>>;
    };
  };
  functions?: {
    [name in keyof FS]: {
      description?: string;
      parameters: FS[name];
      render: Renderer<z.infer<FS[name]>>;
    };
  };
  initial?: ReactNode;
  temperature?: number;
}): ReactNode {
  const ui = createStreamableUI(options.initial);

  const functions = options.functions
    ? Object.entries(options.functions).map(
        ([name, { description, parameters }]) => {
          return {
            name,
            description,
            parameters: zodToJsonSchema(parameters) as Record<string, unknown>,
          };
        },
      )
    : undefined;

  const tools = options.tools
    ? Object.entries(options.tools).map(
        ([name, { description, parameters }]) => {
          return {
            type: 'function' as const,
            function: {
              name,
              description,
              parameters: zodToJsonSchema(parameters) as Record<
                string,
                unknown
              >,
            },
          };
        },
      )
    : undefined;

  let finished: ReturnType<typeof createResolvablePromise> | undefined;

  async function handleRender(
    args: any,
    renderer: undefined | Renderer<any>,
    res: ReturnType<typeof createStreamableUI>,
  ) {
    if (!renderer) return;

    if (finished) await finished.promise;
    finished = createResolvablePromise();
    const value = renderer(args);
    if (
      value instanceof Promise ||
      (value &&
        typeof value === 'object' &&
        'then' in value &&
        typeof value.then === 'function')
    ) {
      const node = await (value as Promise<React.ReactNode>);
      res.update(node);
      finished?.resolve(void 0);
    } else if (
      value &&
      typeof value === 'object' &&
      Symbol.asyncIterator in value
    ) {
      for await (const node of value as AsyncGenerator<
        React.ReactNode,
        React.ReactNode,
        void
      >) {
        res.update(node);
      }
      finished?.resolve(void 0);
    } else if (value && typeof value === 'object' && Symbol.iterator in value) {
      const it = value as Generator<React.ReactNode, React.ReactNode, void>;
      while (true) {
        const { done, value } = it.next();
        if (done) break;
        res.update(value);
      }
      finished?.resolve(void 0);
    } else {
      res.update(value);
      finished?.resolve(void 0);
    }
  }
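
  // Kick off the completion request and consume the resulting stream without
  // blocking `render`'s synchronous return of `ui.value`. Renderer invocations
  // are chained through the `finished` promise above, so UI updates resolve in
  // order and `onFinal` can wait for the last render before closing the stream.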
  (async () => {
    let hasFunction = false;
    let text = '';

    consumeStream(
      OpenAIStream(
        (await options.provider.chat.completions.create({
          model: options.model,
          messages: options.messages,
          temperature: options.temperature,
          stream: true,
          ...(functions
            ? {
                functions,
              }
            : {}),
          ...(tools
            ? {
                tools,
              }
            : {}),
        })) as any,
        {
          async experimental_onFunctionCall(functionCallPayload) {
            hasFunction = true;
            handleRender(
              functionCallPayload.arguments,
              options.functions?.[functionCallPayload.name as any]?.render,
              ui,
            );
          },
          ...(tools
            ? {
                async experimental_onToolCall(toolCallPayload: any) {
                  hasFunction = true;

                  // TODO: We might need Promise.all here?
                  for (const tool of toolCallPayload.tools) {
                    handleRender(
                      tool.func.arguments,
                      options.tools?.[tool.func.name as any]?.render,
                      ui,
                    );
                  }
                },
              }
            : {}),
          onToken(token) {
            text += token;
            if (hasFunction) return;
            handleRender({ content: text, done: false }, options.text, ui);
          },
          async onFinal() {
            if (hasFunction) return;
            handleRender({ content: text, done: true }, options.text, ui);

            await finished?.promise;
            ui.done();
          },
        },
      ),
    );
  })();

  return ui.value;
}
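
// Usage sketch (illustrative only): render a streamed completion with a text
// renderer and one tool. `openai` is an OpenAI client instance; `FlightCard`
// and `lookupFlight` are hypothetical.
//
//   const node = render({
//     model: 'gpt-3.5-turbo',
//     provider: openai,
//     messages: [{ role: 'user', content: 'Find flight BA142' }],
//     text: ({ content }) => <p>{content}</p>,
//     tools: {
//       get_flight_info: {
//         description: 'Get information for a flight',
//         parameters: z.object({ flightNumber: z.string() }),
//         render: async function* ({ flightNumber }) {
//           yield <p>Looking up {flightNumber}…</p>;
//           const info = await lookupFlight(flightNumber);
//           return <FlightCard info={info} />;
//         },
//       },
//     },
//   });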