Vercel AI SDK Integration
Built for Next.js apps: add caching, security, and logging to your AI features with minimal code changes — only the provider configuration changes, and the rest of your Vercel AI SDK code stays the same.
Install
npm install ai @ai-sdk/openai
# or for Claude
npm install ai @ai-sdk/anthropic
Setup Provider
// lib/ai.ts
import { createOpenAI } from '@ai-sdk/openai';
// Shared OpenAI provider routed through the Raptor proxy.
// Swapping baseURL for Raptor's endpoint is what enables caching,
// security, and logging — no other SDK code changes.
export const openai = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  // Raptor's OpenAI-compatible endpoint (note the /v1 suffix).
  baseURL: 'https://proxy.raptordata.dev/v1',
  headers: {
    // NOTE: the `!` assertions assume both env vars are set at startup;
    // a missing value surfaces as a failed request, not a build error.
    'X-Raptor-Api-Key': process.env.RAPTOR_API_KEY!,
    'X-Raptor-Workspace-Id': process.env.RAPTOR_WORKSPACE_ID!
  }
});
API Route (Streaming)
// app/api/chat/route.ts
import { openai } from '@/lib/ai';
import { streamText } from 'ai';
/**
 * POST /api/chat — streams a chat completion back to the useChat() client.
 *
 * Parses the request body defensively: a malformed JSON payload or a
 * missing/non-array `messages` field returns a 400 instead of crashing
 * the route with an unhandled 500.
 */
export async function POST(req: Request) {
  let messages: unknown;
  try {
    ({ messages } = await req.json());
  } catch {
    return new Response('Invalid JSON body', { status: 400 });
  }
  if (!Array.isArray(messages)) {
    return new Response('Request body must include a `messages` array', { status: 400 });
  }
  const result = streamText({
    model: openai('gpt-4'),
    messages
  });
  // Stream tokens to the client as they are generated.
  return result.toDataStreamResponse();
}
React Component
'use client';
import { useChat } from 'ai/react';
// Minimal chat UI: useChat() manages the message list and input state
// and posts to /api/chat on submit.
export default function Chat() {
  const chat = useChat();

  const history = chat.messages.map((message) => (
    <div key={message.id}>
      <b>{message.role}:</b> {message.content}
    </div>
  ));

  return (
    <div>
      {history}
      <form onSubmit={chat.handleSubmit}>
        <input value={chat.input} onChange={chat.handleInputChange} />
      </form>
    </div>
  );
}
Non-Streaming
import { openai } from '@/lib/ai';
import { generateText } from 'ai';
// One-shot (non-streaming) completion: resolves once the full answer
// is available. Uses the same Raptor-proxied provider as the routes above.
const { text } = await generateText({
  model: openai('gpt-4'),
  prompt: 'What is 2+2?'
});
Structured Output
import { openai } from '@/lib/ai';
import { generateObject } from 'ai';
import { z } from 'zod';
// Typed output: the model's response is validated against the zod schema,
// so `object` is typed as { name: string; ingredients: string[] }.
const { object } = await generateObject({
  model: openai('gpt-4'),
  schema: z.object({
    name: z.string(),
    ingredients: z.array(z.string())
  }),
  prompt: 'Generate a recipe for cookies'
});
Using Anthropic
import { createAnthropic } from '@ai-sdk/anthropic';
// Anthropic provider routed through the same Raptor proxy; only the
// baseURL differs from the OpenAI setup.
const anthropic = createAnthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  // Anthropic's API has no /v1 path segment, so use the bare proxy host.
  baseURL: 'https://proxy.raptordata.dev', // No /v1 for Anthropic
  headers: {
    // `!` assumes both env vars are set — missing values fail at request time.
    'X-Raptor-Api-Key': process.env.RAPTOR_API_KEY!,
    'X-Raptor-Workspace-Id': process.env.RAPTOR_WORKSPACE_ID!
  }
});
// Use like: anthropic('claude-sonnet-4-20250514')
Environment Variables
Add the following to `.env.local` (keep this file out of version control — it contains secrets):
OPENAI_API_KEY=sk-...
RAPTOR_API_KEY=rpt_...
RAPTOR_WORKSPACE_ID=your-workspace-uuid
Full Example
// app/api/chat/route.ts
import { createOpenAI } from '@ai-sdk/openai';
import { streamText } from 'ai';
// Raptor authentication headers attached to every proxied request.
const raptorHeaders = {
  'X-Raptor-Api-Key': process.env.RAPTOR_API_KEY!,
  'X-Raptor-Workspace-Id': process.env.RAPTOR_WORKSPACE_ID!
};

// OpenAI provider pointed at Raptor's OpenAI-compatible endpoint.
const openai = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: 'https://proxy.raptordata.dev/v1',
  headers: raptorHeaders
});
// Handles chat requests from the client and streams the model's reply,
// with a fixed system prompt prepended to every conversation.
export async function POST(req: Request) {
  const body = await req.json();

  const stream = streamText({
    model: openai('gpt-4'),
    system: 'You are a helpful assistant.',
    messages: body.messages
  });

  return stream.toDataStreamResponse();
}
All Vercel AI SDK features work: streaming, structured output, tools, and multi-provider support. Every request goes through Raptor for caching, security, and logging.