Installation
npm
npm install openai
yarn
yarn add openai
Basic Setup
// Point the official OpenAI SDK at the SamuraiAPI-compatible endpoint.
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'https://api.samuraiapi.in/v1',
  apiKey: process.env.SAMURAI_API_KEY,
});
Environment Variables
.env
SAMURAI_API_KEY=sk-samurai-YOUR_KEY
// Load variables from .env (via dotenv) before constructing the client,
// so SAMURAI_API_KEY is populated on process.env.
import 'dotenv/config';
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'https://api.samuraiapi.in/v1',
  apiKey: process.env.SAMURAI_API_KEY,
});
Chat Completions
// Send a system + user message pair and print the model's first reply.
const response = await client.chat.completions.create({
  model: 'gpt-4o',
  temperature: 0.7,
  max_tokens: 500,
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'What is quantum computing?' },
  ],
});

const answer = response.choices[0].message.content;
console.log(answer);
Streaming
// Stream the completion and echo each incremental delta to stdout.
const stream = await client.chat.completions.create({
  model: 'gpt-4o',
  stream: true,
  messages: [{ role: 'user', content: 'Tell me a story.' }],
});

for await (const chunk of stream) {
  // A chunk may omit delta content (e.g. the final chunk) — treat it as ''.
  process.stdout.write(chunk.choices[0]?.delta?.content ?? '');
}
TypeScript Types
// Annotate the message list with the SDK's exported param type so the
// compiler validates roles and content shapes up front.
import OpenAI from 'openai';
import type { ChatCompletionMessageParam } from 'openai/resources';

const chatMessages: ChatCompletionMessageParam[] = [
  { role: 'user', content: 'Hello!' },
];

const response = await client.chat.completions.create({
  model: 'gpt-4o',
  messages: chatMessages,
});
Vercel AI SDK
// Use SamuraiAPI through the Vercel AI SDK's OpenAI-compatible provider.
import { createOpenAI } from '@ai-sdk/openai';
import { generateText, streamText } from 'ai';

const samurai = createOpenAI({
  baseURL: 'https://api.samuraiapi.in/v1',
  apiKey: process.env.SAMURAI_API_KEY!,
});

// One-shot generation: resolves once the full text is available.
const { text } = await generateText({
  model: samurai('gpt-4o'),
  prompt: 'Explain machine learning in one paragraph.',
});

// Incremental generation: consume the text as it arrives.
const { textStream } = streamText({
  model: samurai('claude-3-5-sonnet-20241022'),
  prompt: 'Write a short poem.',
});

for await (const part of textStream) {
  process.stdout.write(part);
}
Next.js API Route
// app/api/chat/route.ts
//
// Next.js App Router endpoint: forwards the request's chat messages to
// SamuraiAPI and relays the model output to the browser as a stream.
import OpenAI from 'openai';

const client = new OpenAI({
  apiKey: process.env.SAMURAI_API_KEY,
  baseURL: 'https://api.samuraiapi.in/v1'
});

export async function POST(req: Request) {
  const { messages } = await req.json();

  const stream = await client.chat.completions.create({
    model: 'gpt-4o',
    messages,
    stream: true
  });

  // With stream:true the SDK returns a Stream<ChatCompletionChunk>, not a
  // fetch Response — it has no `.body` property, so the original
  // `new Response(response.body, …)` sent an empty body. Convert the SDK
  // stream to a web ReadableStream before handing it to Response.
  return new Response(stream.toReadableStream(), {
    headers: { 'Content-Type': 'text/event-stream' }
  });
}