#477 Estimate tokens usage with streaming completions
alxmiron committed Mar 29, 2023
1 parent 4613b95 commit dd6b457
Showing 2 changed files with 26 additions and 4 deletions.
src/chatgpt-api.ts: 13 additions & 3 deletions
@@ -211,16 +211,17 @@ export class ChatGPTAPI {
             result.id = response.id
           }

-          if (response?.choices?.length) {
+          if (response.choices?.length) {
             const delta = response.choices[0].delta
             result.delta = delta.content
             if (delta?.content) result.text += delta.content
-            result.detail = response

             if (delta.role) {
               result.role = delta.role
             }

+            result.detail = response
+
             onProgress?.(result)
           }
} catch (err) {
@@ -286,7 +287,16 @@
             }
           }
         }
-      ).then((message) => {
+      ).then(async (message) => {
+        if (message.detail && !message.detail.usage) {
+          const promptTokens = numTokens
+          const completionTokens = await this._getTokenCount(message.text)
+          message.detail.usage = {
+            prompt_tokens: promptTokens,
+            completion_tokens: completionTokens,
+            total_tokens: promptTokens + completionTokens
+          }
+        }
         return this._upsertMessage(message).then(() => message)
       })

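The net effect in src/chatgpt-api.ts: when the completion was streamed, the OpenAI API returns no usage block, so the library now fills one in itself. The prompt token count computed before the request (numTokens) is reused, the completion token count is derived from the final accumulated text via this._getTokenCount, and total_tokens is their sum. A minimal sketch of how this surfaces to a caller, assuming the package's documented ChatGPTAPI and sendMessage interface (the prompt text and environment variable are illustrative only):

// Sketch, not part of this commit: observing the estimated usage after a
// streamed completion. Assumes the chatgpt package's documented API.
import { ChatGPTAPI } from 'chatgpt'

const api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY! })

const res = await api.sendMessage('Write a haiku about token counting.', {
  // Supplying onProgress selects the streaming code path, where the API
  // response itself carries no usage information.
  onProgress: (partial) => process.stdout.write(partial.delta ?? '')
})

// After this commit, detail.usage is populated client-side for streamed
// responses: prompt_tokens, completion_tokens and total_tokens.
console.log(res.detail?.usage)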
src/types.ts: 13 additions & 1 deletion
@@ -59,13 +59,25 @@ export type SendMessageBrowserOptions = {
   abortSignal?: AbortSignal
 }

+interface CreateChatCompletionStreamResponse
+  extends openai.CreateChatCompletionDeltaResponse {
+  usage: CreateCompletionStreamResponseUsage
+}
+
+interface CreateCompletionStreamResponseUsage
+  extends openai.CreateCompletionResponseUsage {
+  estimated: true
+}
+
 export interface ChatMessage {
   id: string
   text: string
   role: Role
   name?: string
   delta?: string
-  detail?: any
+  detail?:
+    | openai.CreateChatCompletionResponse
+    | CreateChatCompletionStreamResponse

   // relevant for both ChatGPTAPI and ChatGPTUnofficialProxyAPI
   parentMessageId?: string
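On the types side, ChatMessage.detail is no longer any: it is either a regular chat completion response or the streaming variant, whose usage extends the standard usage shape with an estimated: true marker. A consumer can use that flag to distinguish API-reported counts from client-side estimates. A small sketch, under the assumption that ChatMessage is exported from the package as before and that the non-streaming response type also exposes an optional usage field (the helper name is hypothetical):

// Hypothetical helper, not part of this commit: summarize token usage and
// flag counts that were estimated client-side rather than reported by the API.
import type { ChatMessage } from 'chatgpt'

function describeUsage(message: ChatMessage): string {
  const usage = message.detail?.usage
  if (!usage) return 'no usage reported'
  // Only the streaming variant's usage carries the `estimated` marker.
  const estimated = 'estimated' in usage && usage.estimated
  return (
    `${usage.prompt_tokens} prompt + ${usage.completion_tokens} completion ` +
    `= ${usage.total_tokens} tokens${estimated ? ' (estimated)' : ''}`
  )
}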
