import { Shadcn } from "@/components/examples/shadcn";
## Overview
This example demonstrates integrating assistant-ui with the Vercel AI SDK for building production-ready chat interfaces. It showcases a complete chat application with thread management, message persistence, and a collapsible sidebar for conversation history.
## Features
- AI SDK Integration: Seamless connection with Vercel's `ai` package
- Thread Management: Create, switch, and delete conversation threads
- Collapsible Sidebar: Toggle sidebar visibility for focused chat
- Model Picker: Switch between different AI models
- Responsive Design: Mobile-friendly with sheet-based navigation
- Real-time Streaming: Live message streaming with loading states
## Quick Start
npm install @assistant-ui/react @assistant-ui/react-ai-sdk ai@^6 @ai-sdk/react@^3 @ai-sdk/openai
## Code

### Client Component
"use client";
import { AssistantRuntimeProvider } from "@assistant-ui/react";
import { Thread } from "@/components/assistant-ui/thread";
import { useChatRuntime } from "@assistant-ui/react-ai-sdk";
export default function Chat() {
const runtime = useChatRuntime();
return (
<AssistantRuntimeProvider runtime={runtime}>
<div className="flex h-full">
<Sidebar />
<main className="flex-1">
<Thread />
</main>
</div>
</AssistantRuntimeProvider>
);
}
### API Route
// app/api/chat/route.ts
import { openai } from "@ai-sdk/openai";
import { streamText, convertToModelMessages } from "ai";
import type { UIMessage } from "ai";

/**
 * POST /api/chat — accepts the client's UI messages, converts them to
 * model messages, and streams a model response back as a UI message stream.
 */
export async function POST(req: Request) {
  const body: { messages: UIMessage[] } = await req.json();

  // UI-level messages must be converted before being sent to the model.
  const modelMessages = await convertToModelMessages(body.messages);

  const result = streamText({
    model: openai("gpt-4o"),
    messages: modelMessages,
    // Optional: Add system prompt
    system: "You are a helpful assistant.",
  });

  return result.toUIMessageStreamResponse();
}
## Key Integration Points
| Hook/Function | Purpose |
|---|---|
| `useChatRuntime` | Connects AI SDK to assistant-ui |
| `streamText` | Server-side streaming response generation |
| `toUIMessageStreamResponse` | Converts stream to Response object |
## Adding Persistence
To persist conversations, add a database and modify the API route:
// Save messages to database
// NOTE(review): `db`, `threadId`, and `message` are illustrative placeholders —
// wire them to your own client (e.g. Prisma/Drizzle) and handler variables.
// Presumably this runs once per message after streaming completes — confirm
// against your API route's lifecycle.
await db.messages.create({
threadId,
role: message.role,
content: message.content,
});
// Load messages on page load
// Fetches every stored message for the thread so the UI can rehydrate history.
const savedMessages = await db.messages.findMany({ threadId });