The Vercel AI SDK makes it easy to add streaming AI chat to your Next.js application with OpenAI, Anthropic, or other providers.
Step 1: Install Dependencies
pnpm add ai @ai-sdk/openai @ai-sdk/react zod
Step 2: Create the API Route
// app/api/chat/route.ts
import { openai } from "@ai-sdk/openai";
import { streamText } from "ai";

// Allow the streaming response to run for up to 30 seconds.
export const maxDuration = 30;

// Persona and instructions sent with every request.
const SYSTEM_PROMPT =
"You are a helpful customer support assistant for RCB Software, a web design and development agency. Answer questions about our services, pricing, and process. Be concise and professional.";

/**
 * Chat endpoint: takes the conversation history from the request body and
 * streams the model's reply back to the client as a data stream.
 */
export async function POST(req: Request) {
const { messages } = await req.json();

const result = streamText({
model: openai("gpt-4o-mini"),
system: SYSTEM_PROMPT,
messages,
});

return result.toDataStreamResponse();
}
Step 3: Build the Chat Interface
"use client";
import { useChat } from "@ai-sdk/react";
export function ChatWidget() {
const { messages, input, handleInputChange, handleSubmit, isLoading } =
useChat();
return (
<div className="flex h-[500px] flex-col rounded-2xl border bg-white shadow-xl dark:border-gray-700 dark:bg-gray-900">
{/* Header */}
<div className="border-b px-4 py-3 dark:border-gray-700">
<h3 className="font-semibold">Chat with Us</h3>
<p className="text-xs text-gray-500">AI-powered support assistant</p>
</div>
{/* Messages */}
<div className="flex-1 overflow-y-auto p-4 space-y-4">
{messages.length === 0 && (
<div className="flex h-full items-center justify-center">
<p className="text-sm text-gray-400">
Ask us anything about our services!
</p>
</div>
)}
{messages.map((message) => (
<div
key={message.id}
className={`flex ${
message.role === "user" ? "justify-end" : "justify-start"
}`}
>
<div
className={`max-w-[80%] rounded-2xl px-4 py-2 text-sm ${
message.role === "user"
? "bg-blue-600 text-white"
: "bg-gray-100 text-gray-900 dark:bg-gray-800 dark:text-white"
}`}
>
{message.content}
</div>
</div>
))}
{isLoading && (
<div className="flex justify-start">
<div className="rounded-2xl bg-gray-100 px-4 py-2 dark:bg-gray-800">
<div className="flex gap-1">
<span className="h-2 w-2 animate-bounce rounded-full bg-gray-400 [animation-delay:0ms]" />
<span className="h-2 w-2 animate-bounce rounded-full bg-gray-400 [animation-delay:150ms]" />
<span className="h-2 w-2 animate-bounce rounded-full bg-gray-400 [animation-delay:300ms]" />
</div>
</div>
</div>
)}
</div>
{/* Input */}
<form
onSubmit={handleSubmit}
className="flex gap-2 border-t p-4 dark:border-gray-700"
>
<input
value={input}
onChange={handleInputChange}
placeholder="Type a message..."
className="flex-1 rounded-lg border bg-transparent px-4 py-2 text-sm outline-none focus:ring-2 focus:ring-blue-500 dark:border-gray-600"
/>
<button
type="submit"
disabled={isLoading || !input.trim()}
className="rounded-lg bg-blue-600 px-4 py-2 text-sm font-medium text-white hover:bg-blue-700 disabled:opacity-50"
>
Send
</button>
</form>
</div>
);
}
Step 4: Floating Chat Button
"use client";
import { useState } from "react";
import { ChatWidget } from "./ChatWidget";
export function FloatingChat() {
const [open, setOpen] = useState(false);
return (
<div className="fixed bottom-6 right-6 z-50">
{open && (
<div className="mb-4 w-[380px]">
<ChatWidget />
</div>
)}
<button
onClick={() => setOpen(!open)}
className="flex h-14 w-14 items-center justify-center rounded-full bg-blue-600 text-white shadow-lg transition hover:bg-blue-700"
aria-label={open ? "Close chat" : "Open chat"}
>
{open ? (
<XIcon className="h-5 w-5" />
) : (
<MessageIcon className="h-5 w-5" />
)}
</button>
</div>
);
}
Step 5: Suggested Questions
// Canned prompts shown before the user sends their first message.
const SUGGESTIONS = [
"What services do you offer?",
"How much does a website cost?",
"What is your development process?",
"Can you redesign my existing site?",
];

// Inside ChatWidget, also destructure `append` from useChat():
//   const { messages, append, ... } = useChat();
// `append` submits a message directly. The previous pattern — faking a
// change event with `as any` and calling handleSubmit in the same tick —
// is unreliable: React state updates are asynchronous, so handleSubmit
// would read the stale (empty) input.
{messages.length === 0 && (
<div className="space-y-2 p-4">
<p className="text-xs font-medium text-gray-500">Suggested questions</p>
{SUGGESTIONS.map((question) => (
<button
key={question}
onClick={() => append({ role: "user", content: question })}
className="block w-full rounded-lg border px-3 py-2 text-left text-sm hover:bg-gray-50 dark:border-gray-700 dark:hover:bg-gray-800"
>
{question}
</button>
))}
</div>
)}
Step 6: Stream with Tool Calls
// app/api/chat/route.ts
import { openai } from "@ai-sdk/openai";
import { streamText, tool } from "ai";
import { z } from "zod";

/**
 * Chat endpoint with tool calling: the model may invoke getWeather or
 * searchBlog, then continue generating text using the tool results.
 */
export async function POST(req: Request) {
const { messages } = await req.json();
const result = streamText({
model: openai("gpt-4o-mini"),
system: "You are a helpful assistant. Use tools when appropriate.",
messages,
// Without maxSteps the stream ends immediately after the tool call and
// the model never turns the tool result into a text answer. Allowing a
// few steps lets it call a tool, receive the result, and then respond.
maxSteps: 5,
tools: {
getWeather: tool({
description: "Get the current weather for a location",
parameters: z.object({
location: z.string().describe("The city name"),
}),
// Stubbed result — replace with a real weather API call.
execute: async ({ location }) => {
return { temperature: 72, condition: "Sunny", location };
},
}),
searchBlog: tool({
description: "Search our blog for articles",
parameters: z.object({
query: z.string().describe("Search query"),
}),
// Stubbed result — replace with a real search over your blog content.
execute: async ({ query }) => {
return [
{ title: "Web Design Trends 2026", slug: "/blog/web-design-trends-2026" },
];
},
}),
},
});
return result.toDataStreamResponse();
}
Step 7: RAG (Retrieval-Augmented Generation)
import { openai } from "@ai-sdk/openai";
import { streamText, embed } from "ai";

/**
 * RAG chat endpoint: embeds the latest user message, retrieves relevant
 * documents, and grounds the model's answer in that context.
 */
export async function POST(req: Request) {
const { messages } = await req.json();

// Guard against an empty or malformed conversation instead of crashing on
// `messages[messages.length - 1]` being undefined.
const lastMessage = messages?.at(-1)?.content;
if (typeof lastMessage !== "string" || lastMessage.length === 0) {
return new Response("No user message provided", { status: 400 });
}

// 1. Embed the user's question
const { embedding } = await embed({
model: openai.embedding("text-embedding-3-small"),
value: lastMessage,
});

// 2. Search your knowledge base. `searchDocuments` is your own
// vector-store lookup (pgvector, Pinecone, etc.) — implement it to return
// objects with a `content` field.
const relevantDocs = await searchDocuments(embedding);

// 3. Include context in system prompt
const context = relevantDocs.map((d) => d.content).join("\n\n");
const result = streamText({
model: openai("gpt-4o-mini"),
system: `Answer based on this context:\n\n${context}\n\nIf the answer isn't in the context, say so.`,
messages,
});
return result.toDataStreamResponse();
}
Step 8: Conversation Persistence
"use client";
import { useChat } from "@ai-sdk/react";
export function PersistentChat({ chatId }: { chatId: string }) {
const { messages, input, handleInputChange, handleSubmit } = useChat({
id: chatId,
initialMessages: [], // Load from database
onFinish: async (message) => {
// Save to database
await fetch("/api/conversations", {
method: "POST",
body: JSON.stringify({ chatId, message }),
});
},
});
return (
// ... chat UI
);
}
Need AI-Powered Features?
We build web applications with AI chat, content generation, and intelligent search. Contact us to discuss your project.