---
title: Inkeep
---

import { Steps, Callout } from 'nextra-theme-docs';

# Inkeep

The Vercel AI SDK provides a set of utilities that make it easy to use [Inkeep](https://inkeep.com/)'s AI chat APIs to create chat experiences **powered by your own content**.

In this guide, we'll walk through how to create a Q&A support bot powered by Inkeep.

<Callout>
  You can also use Inkeep as a retrieval-augmented generation (RAG) component or
  neural search component of a complex LLM application, agent, or workflow.
</Callout>

## Guide: Inkeep Chatbot

<Steps>

### Create a Next.js app

Create a Next.js application and install `ai`, the Vercel AI SDK, as well as [`@inkeep/ai-api`](https://github.com/inkeep/ai-api-ts), the Inkeep API SDK.

```sh
pnpm dlx create-next-app my-rag-app
cd my-rag-app
```

```sh
pnpm add ai @inkeep/ai-api
```

### Add your Inkeep API Key to `.env`

Create a `.env` file in your project root and add your Inkeep API Key and Integration ID:

```env filename=".env"
INKEEP_API_KEY=xxxxxx
INKEEP_INTEGRATION_ID=xxxxxx
```
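
Optionally, you can fail fast when these variables are missing instead of hitting an opaque error at request time. Here's a minimal sketch (the `lib/inkeep-env.ts` path and helper name are just illustrative):

```ts filename="lib/inkeep-env.ts"
// Read and validate the Inkeep environment variables before using them.
export function getInkeepEnv() {
  const apiKey = process.env.INKEEP_API_KEY;
  const integrationId = process.env.INKEEP_INTEGRATION_ID;

  if (!apiKey || !integrationId) {
    throw new Error(
      'Missing INKEEP_API_KEY or INKEEP_INTEGRATION_ID in your environment',
    );
  }

  return { apiKey, integrationId };
}
```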

### Create a Route Handler

In order to provide analytics and correlate multiple message exchanges into a single "chat session", the Inkeep API provides two endpoints:

1. `POST chat_sessions/chat_results` - To **create** a chat session
2. `POST chat_sessions/${chat_session_id}/chat_results` - To **continue** a chat session

In this example, we'll use the [@inkeep/ai-api](https://github.com/inkeep/chat-api-ts) package to call these endpoints, the `ai` package from the Vercel AI SDK to create a streamed text response, and `useChat` to render the messages in the UI.

First, let's create a Next.js route handler at `app/api/chat/route.ts` that accepts a `POST` request with a `messages` array of chat messages and an optional `chat_session_id`. We'll use `chat_session_id` to decide whether to create a new chat session or continue an existing one.

```tsx filename="app/api/chat/route.ts" showLineNumbers
import {
  InkeepStream,
  InkeepOnFinalMetadata,
  StreamingTextResponse,
  experimental_StreamData,
} from 'ai';
import { InkeepAI } from '@inkeep/ai-api';
import type { RecordsCited$ } from '@inkeep/ai-api/models/components';

interface ChatRequestBody {
  messages: Array<{
    role: 'user' | 'assistant';
    content: string;
  }>;
  chat_session_id?: string;
}

const inkeepIntegrationId = process.env.INKEEP_INTEGRATION_ID;

export async function POST(req: Request) {
  if (!inkeepIntegrationId) {
    throw new Error('INKEEP_INTEGRATION_ID is not set');
  }

  const chatRequestBody: ChatRequestBody = await req.json();
  const chat_session_id = chatRequestBody.chat_session_id;

  const ikpClient = new InkeepAI({
    apiKey: process.env.INKEEP_API_KEY,
  });

  let response;

  if (!chat_session_id) {
    const createRes = await ikpClient.chatSession.create({
      integrationId: inkeepIntegrationId,
      chatSession: {
        messages: chatRequestBody.messages,
      },
      stream: true,
    });

    response = createRes.rawResponse;
  } else {
    const continueRes = await ikpClient.chatSession.continue(chat_session_id, {
      integrationId: inkeepIntegrationId,
      message: chatRequestBody.messages[chatRequestBody.messages.length - 1],
      stream: true,
    });

    response = continueRes.rawResponse;
  }

  // used to pass custom metadata to the client
  const data = new experimental_StreamData();

  if (!response?.body) {
    throw new Error('Response body is null');
  }

  const stream = InkeepStream(response, {
    onRecordsCited: async (records_cited: RecordsCited$.Inbound) => {
      // append the citations to the message annotations
      data.appendMessageAnnotation({
        records_cited,
      });
    },
    onFinal: async (complete: string, metadata?: InkeepOnFinalMetadata) => {
      // return the chat_session_id to the client
      if (metadata) {
        data.append({ onFinalMetadata: metadata });
      }
      data.close();
    },
    experimental_streamData: true,
  });

  return new StreamingTextResponse(stream, {}, data);
}
```

This example leverages a few utilities provided by the Vercel AI SDK:

1. First, we pass the streaming `response` we receive from the Inkeep API to the
   [`InkeepStream`](/docs/api-reference/inkeep-stream). This
   method decodes/extracts the content of the messages from Inkeep's server-sent events response and then re-encodes them into a standard [ReadableStream](https://developer.mozilla.org/docs/Web/API/ReadableStream).

2. We then pass that stream directly to the Vercel AI SDK's [`StreamingTextResponse`](/docs/api-reference/streaming-text-response).
   This is another utility class that extends the normal Node/Edge Runtime `Response`
   class with the default headers you probably want (hint: `'Content-Type': 'text/plain; charset=utf-8'` is already set for you). This will provide the streamed content to the client.

3. Lastly, we use the [experimental_StreamData](/docs/api-reference/stream-data) and the callback methods of the `InkeepStream` to attach metadata to the response, such as `onFinalMetadata.chat_session_id` and `records_cited.citations`, for use by the client. A sketch of reading the raw stream follows this list.
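
To see what the client actually receives, here's a minimal sketch that reads the streamed response directly with `fetch` and a `ReadableStream` reader. Because `experimental_StreamData` is enabled, the raw chunks carry the SDK's stream-data framing alongside the text, which is why the `useChat` hook (used below) is the most convenient consumer:

```ts
// A rough sketch: read the streamed response from the route handler above
// without useChat. Assumes the handler is mounted at /api/chat.
async function readChatStream() {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      messages: [{ role: 'user', content: 'How do I get started?' }],
    }),
  });

  if (!res.body) {
    throw new Error('Response has no body');
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let raw = '';

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // With experimental_StreamData enabled, chunks include the SDK's
    // stream-data framing, not just plain text.
    raw += decoder.decode(value, { stream: true });
  }

  return raw;
}
```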

<Callout>
  It's common to save a chat to a database. To do so, you can leverage the
  `onFinal` callback to add your own saving logic. For example, add `await
  saveCompletionToDatabase(complete, metadata);` prior to `data.close();`.
</Callout>
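
For instance, the `onFinal` callback from the route handler above might then look like this (the `saveCompletionToDatabase` helper is hypothetical; substitute your own persistence logic):

```tsx
onFinal: async (complete: string, metadata?: InkeepOnFinalMetadata) => {
  // hypothetical helper -- persist the completed answer and its metadata
  await saveCompletionToDatabase(complete, metadata);
  // return the chat_session_id to the client
  if (metadata) {
    data.append({ onFinalMetadata: metadata });
  }
  data.close();
},
```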

### Wire up the UI

Next, let's create a client component with a form that we'll use to gather the prompt from the user and then stream back the chat response.

By default, the [`useChat`](/docs/api-reference/use-chat) hook will use the `POST` Route Handler we created above (it defaults to `/api/chat`).
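
If your handler lives at a different path, you can point the hook at it explicitly (`/api/inkeep-chat` here is just an illustrative path):

```tsx
// Only needed when your Route Handler is not at the default /api/chat
const { messages, input, handleInputChange, handleSubmit } = useChat({
  api: '/api/inkeep-chat',
});
```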

We will use the `data` field to get the Inkeep `chat_session_id`, which we will include in the request body of any subsequent messages.

```tsx filename="app/page.tsx" showLineNumbers
'use client';

import { useChat } from 'ai/react';
import { useEffect, useState } from 'react';
import { type InkeepOnFinalMetadata } from 'ai/streams';
import { Citations } from './Citations';

export default function Chat() {
  /**
   * You can alternatively put the chat_session_id in search params, e.g. ?chat_session_id=123,
   * or path params like /chat/123, depending on your use case.
   */
  const [chatSessionId, setChatSessionId] = useState<string | undefined>(
    undefined,
  );

  const { messages, input, handleInputChange, handleSubmit, data } = useChat({
    body: {
      chat_session_id: chatSessionId,
    },
  });

  // set the Inkeep chat session id from the chat data
  useEffect(() => {
    // get the onFinalMetadata item from the global data
    const onFinalMetadataItem = data?.find(
      item =>
        typeof item === 'object' && item !== null && 'onFinalMetadata' in item,
    ) as { onFinalMetadata: InkeepOnFinalMetadata } | undefined;

    // get the chat_session_id from the onFinalMetadata item
    const chatSessionId = onFinalMetadataItem?.onFinalMetadata?.chat_session_id;

    setChatSessionId(chatSessionId);
  }, [data]);

  return (
    <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
      {messages.map(m => {
        return (
          <div key={m.id} className="whitespace-pre-wrap">
            <br />
            <strong>{m.role === 'user' ? 'User: ' : 'AI: '}</strong>
            {m.content}
            <Citations annotations={m.annotations} />
          </div>
        );
      })}

      <form onSubmit={handleSubmit}>
        <input
          className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
          value={input}
          placeholder="Say something..."
          onChange={handleInputChange}
        />
      </form>
    </div>
  );
}
```

#### Show Citations (optional)

The Inkeep API provides information about the sources (documentation, web pages, forums, etc.) used to answer a question, delivered in a `records_cited` message annotation.

We can use this to display a list of "Citations" at the end of the main chat message content.
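
For reference, each annotation appended by the route handler above looks roughly like this (the exact record fields depend on what Inkeep returns; `url` and `title` are the ones we use below):

```tsx
// Illustrative shape of a records_cited message annotation
const exampleAnnotation = {
  records_cited: {
    citations: [
      {
        record: {
          url: 'https://docs.example.com/getting-started',
          title: 'Getting Started',
        },
      },
    ],
  },
};
```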

```tsx filename="app/Citations.tsx" showLineNumbers
import { Message } from 'ai';
import { InkeepRecordsCitedData } from 'ai/streams';

interface CitationsProps {
  annotations: Message['annotations'];
}

export const Citations = ({ annotations }: CitationsProps) => {
  // get the records_cited annotation of the message
  const recordsCitedAnnotation = annotations?.find(
    item =>
      typeof item === 'object' && item !== null && 'records_cited' in item,
  ) as { records_cited: InkeepRecordsCitedData } | undefined;

  // get the citations from the records_cited annotation
  const citations = recordsCitedAnnotation?.records_cited?.citations;

  return (
    citations && (
      <>
        {annotations && annotations.length > 0 && (
          <div>
            <br />
            {'---SOURCES USED---'}
            <br />
            <div>
              {citations.map((citation, citationIndex) => (
                <p key={citationIndex}>
                  {citationIndex + 1}.{' '}
                  <a
                    target="_blank"
                    rel="noopener noreferrer"
                    href={citation.record.url || ''}
                  >
                    {citation.record.title}
                  </a>
                </p>
              ))}
            </div>
          </div>
        )}
      </>
    )
  );
};
```

</Steps>