Skip to main content

Streaming an AI response

  • Stream the AI response incrementally so the UI stays responsive (non-blocking).
  • Requires the "ai" package from Vercel: npm install ai

Code

  • Frontend


import { useChat } from "ai/react";

export default function Chat() {
const { messages, input, handleInputChange, handleSubmit } = useChat({
api: "/api/chat",
});
return (
<div className="flex flex-col w-full max-w-md py-24 mx-auto stretch bg-red-100 h-full">
{messages.map((m) => (
<div key={m.id} className="whitespace-pre-wrap">
{m.role === "user" ? "User: " : "AI: "}
{m.content}
</div>
))}
<form onSubmit={handleSubmit}>
<input
className="fixed bottom-0 w-full max-w-md p-2 mb-8 border border-gray-300 rounded shadow-xl"
value={input}
placeholder="Say something..."
onChange={handleInputChange}
/>
</form>
</div>
);
}

  • Backend
import { OpenAIStream, StreamingTextResponse } from "ai";
import OpenAI from "openai";
import { getAPIKey } from "./ai/openai";

// Create an OpenAI API client (that's edge friendly!)
// NOTE(review): `getAPIKey()` runs at module load; presumably it reads a
// server-side secret — confirm it is never bundled into client code.
const openai = new OpenAI({
apiKey: getAPIKey(),
});

// Run this route on the Edge runtime (Web-standard Request/Response,
// required for the streaming Response returned below).
export const runtime = "edge";

/**
 * Edge API route that proxies a chat request to OpenAI and streams the
 * completion back to the client token-by-token.
 *
 * @param {Request} req - Web-standard Request with a JSON body of shape
 *   `{ messages: Array<{ role, content }> }` (what `useChat` sends).
 * @returns {Promise<Response>} A streaming text response the `useChat`
 *   hook consumes, or a 400 Response for malformed input.
 */
export default async function handler(req) {
  // Note: the edge runtime hands this function only a Request — the old
  // unused `res` parameter was dropped.
  let messages;
  try {
    ({ messages } = await req.json());
  } catch {
    // Malformed/absent JSON body — fail fast instead of an opaque 500.
    return new Response("Invalid JSON body", { status: 400 });
  }

  if (!Array.isArray(messages)) {
    return new Response("`messages` must be an array", { status: 400 });
  }

  // NOTE(review): NEXT_PUBLIC_* vars are inlined into the client bundle.
  // The model name is not a secret, but use a server-only variable if this
  // ever needs to stay private.
  const response = await openai.chat.completions.create({
    model: process.env.NEXT_PUBLIC_AI_MODEL || "gpt-3.5-turbo-16k",
    stream: true,
    messages,
  });

  // Adapt the OpenAI SDK stream into a web ReadableStream and return it
  // as a streaming HTTP response.
  const stream = OpenAIStream(response);
  return new StreamingTextResponse(stream);
}