Claude Integration

Integrate Anthropic's Claude into your application.

Dependencies#

npm install @anthropic-ai/sdk

Environment Variables#

# .env.local
ANTHROPIC_API_KEY=sk-ant-...

Client Setup#

// lib/anthropic.ts
import Anthropic from '@anthropic-ai/sdk';

// Shared singleton client for the whole app; the key comes from the
// ANTHROPIC_API_KEY environment variable (see .env.local above).
export const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
});

Basic Message#

1// lib/ai/claude.ts 2import { anthropic } from '@/lib/anthropic'; 3 4export async function chat( 5 messages: Array<{ role: 'user' | 'assistant'; content: string }>, 6 options?: { 7 model?: string; 8 maxTokens?: number; 9 system?: string; 10 } 11) { 12 const response = await anthropic.messages.create({ 13 model: options?.model || 'claude-3-5-sonnet-20241022', 14 max_tokens: options?.maxTokens || 1024, 15 system: options?.system, 16 messages, 17 }); 18 19 const textBlock = response.content.find((block) => block.type === 'text'); 20 return textBlock?.text || ''; 21}

Streaming Response#

1// app/api/chat/claude/route.ts 2import { anthropic } from '@/lib/anthropic'; 3 4export async function POST(request: Request) { 5 const { messages, system } = await request.json(); 6 7 const stream = await anthropic.messages.stream({ 8 model: 'claude-3-5-sonnet-20241022', 9 max_tokens: 1024, 10 system, 11 messages, 12 }); 13 14 // Create a readable stream 15 const encoder = new TextEncoder(); 16 17 const readableStream = new ReadableStream({ 18 async start(controller) { 19 for await (const event of stream) { 20 if ( 21 event.type === 'content_block_delta' && 22 event.delta.type === 'text_delta' 23 ) { 24 controller.enqueue(encoder.encode(event.delta.text)); 25 } 26 } 27 controller.close(); 28 }, 29 }); 30 31 return new Response(readableStream, { 32 headers: { 33 'Content-Type': 'text/plain; charset=utf-8', 34 'Transfer-Encoding': 'chunked', 35 }, 36 }); 37}

Chat Component with Claude#

1// components/ClaudeChat.tsx 2'use client'; 3 4import { useState, useRef, useEffect } from 'react'; 5import { Send, Loader2 } from 'lucide-react'; 6 7interface Message { 8 role: 'user' | 'assistant'; 9 content: string; 10} 11 12export function ClaudeChat() { 13 const [messages, setMessages] = useState<Message[]>([]); 14 const [input, setInput] = useState(''); 15 const [isLoading, setIsLoading] = useState(false); 16 const messagesEndRef = useRef<HTMLDivElement>(null); 17 18 useEffect(() => { 19 messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); 20 }, [messages]); 21 22 const handleSubmit = async (e: React.FormEvent) => { 23 e.preventDefault(); 24 if (!input.trim() || isLoading) return; 25 26 const userMessage = { role: 'user' as const, content: input }; 27 setMessages((prev) => [...prev, userMessage]); 28 setInput(''); 29 setIsLoading(true); 30 31 try { 32 const response = await fetch('/api/chat/claude', { 33 method: 'POST', 34 headers: { 'Content-Type': 'application/json' }, 35 body: JSON.stringify({ 36 messages: [...messages, userMessage], 37 system: 'You are a helpful assistant.', 38 }), 39 }); 40 41 const reader = response.body?.getReader(); 42 const decoder = new TextDecoder(); 43 let assistantMessage = ''; 44 45 setMessages((prev) => [...prev, { role: 'assistant', content: '' }]); 46 47 while (reader) { 48 const { done, value } = await reader.read(); 49 if (done) break; 50 51 const chunk = decoder.decode(value); 52 assistantMessage += chunk; 53 54 setMessages((prev) => [ 55 ...prev.slice(0, -1), 56 { role: 'assistant', content: assistantMessage }, 57 ]); 58 } 59 } catch (error) { 60 console.error('Chat error:', error); 61 } finally { 62 setIsLoading(false); 63 } 64 }; 65 66 return ( 67 <div className="flex flex-col h-[600px] border rounded-xl"> 68 <div className="flex-1 overflow-y-auto p-4 space-y-4"> 69 {messages.map((message, index) => ( 70 <div 71 key={index} 72 className={`flex ${ 73 message.role === 'user' ? 
'justify-end' : 'justify-start' 74 }`} 75 > 76 <div 77 className={`max-w-[80%] rounded-lg px-4 py-2 ${ 78 message.role === 'user' 79 ? 'bg-brand-600 text-white' 80 : 'bg-gray-100 dark:bg-gray-800' 81 }`} 82 > 83 {message.content} 84 </div> 85 </div> 86 ))} 87 <div ref={messagesEndRef} /> 88 </div> 89 90 <form 91 onSubmit={handleSubmit} 92 className="border-t p-4 flex items-center gap-2" 93 > 94 <input 95 value={input} 96 onChange={(e) => setInput(e.target.value)} 97 placeholder="Type a message..." 98 className="flex-1 px-4 py-2 border rounded-lg" 99 /> 100 <button 101 type="submit" 102 disabled={isLoading || !input.trim()} 103 className="p-2 bg-brand-600 text-white rounded-lg disabled:opacity-50" 104 > 105 {isLoading ? ( 106 <Loader2 className="w-5 h-5 animate-spin" /> 107 ) : ( 108 <Send className="w-5 h-5" /> 109 )} 110 </button> 111 </form> 112 </div> 113 ); 114}

Tool Use#

1// lib/ai/claude-tools.ts 2import { anthropic } from '@/lib/anthropic'; 3import type { Tool } from '@anthropic-ai/sdk/resources/messages'; 4 5const tools: Tool[] = [ 6 { 7 name: 'get_weather', 8 description: 'Get the current weather for a location', 9 input_schema: { 10 type: 'object', 11 properties: { 12 location: { 13 type: 'string', 14 description: 'The city and state, e.g. San Francisco, CA', 15 }, 16 }, 17 required: ['location'], 18 }, 19 }, 20 { 21 name: 'search_database', 22 description: 'Search the product database', 23 input_schema: { 24 type: 'object', 25 properties: { 26 query: { type: 'string' }, 27 limit: { type: 'number', default: 10 }, 28 }, 29 required: ['query'], 30 }, 31 }, 32]; 33 34async function executeTool(name: string, input: any) { 35 switch (name) { 36 case 'get_weather': 37 // Call weather API 38 return { temperature: 72, condition: 'sunny' }; 39 case 'search_database': 40 // Search database 41 return [{ id: '1', name: 'Product 1' }]; 42 default: 43 throw new Error(`Unknown tool: ${name}`); 44 } 45} 46 47export async function chatWithTools(userMessage: string) { 48 const messages: any[] = [{ role: 'user', content: userMessage }]; 49 50 const response = await anthropic.messages.create({ 51 model: 'claude-3-5-sonnet-20241022', 52 max_tokens: 1024, 53 tools, 54 messages, 55 }); 56 57 // Check for tool use 58 const toolUseBlock = response.content.find( 59 (block) => block.type === 'tool_use' 60 ); 61 62 if (toolUseBlock && toolUseBlock.type === 'tool_use') { 63 const toolResult = await executeTool(toolUseBlock.name, toolUseBlock.input); 64 65 // Continue with tool result 66 messages.push({ role: 'assistant', content: response.content }); 67 messages.push({ 68 role: 'user', 69 content: [ 70 { 71 type: 'tool_result', 72 tool_use_id: toolUseBlock.id, 73 content: JSON.stringify(toolResult), 74 }, 75 ], 76 }); 77 78 const finalResponse = await anthropic.messages.create({ 79 model: 'claude-3-5-sonnet-20241022', 80 max_tokens: 1024, 81 tools, 82 
messages, 83 }); 84 85 const textBlock = finalResponse.content.find((b) => b.type === 'text'); 86 return textBlock?.text || ''; 87 } 88 89 const textBlock = response.content.find((b) => b.type === 'text'); 90 return textBlock?.text || ''; 91}

Vision (Image Analysis)#

1// lib/ai/claude-vision.ts 2import { anthropic } from '@/lib/anthropic'; 3 4export async function analyzeImage( 5 imageUrl: string, 6 prompt: string 7): Promise<string> { 8 // Fetch image and convert to base64 9 const response = await fetch(imageUrl); 10 const arrayBuffer = await response.arrayBuffer(); 11 const base64 = Buffer.from(arrayBuffer).toString('base64'); 12 const mediaType = response.headers.get('content-type') || 'image/jpeg'; 13 14 const result = await anthropic.messages.create({ 15 model: 'claude-3-5-sonnet-20241022', 16 max_tokens: 1024, 17 messages: [ 18 { 19 role: 'user', 20 content: [ 21 { 22 type: 'image', 23 source: { 24 type: 'base64', 25 media_type: mediaType, 26 data: base64, 27 }, 28 }, 29 { 30 type: 'text', 31 text: prompt, 32 }, 33 ], 34 }, 35 ], 36 }); 37 38 const textBlock = result.content.find((b) => b.type === 'text'); 39 return textBlock?.text || ''; 40}

Extended Thinking (Claude 3.7+)#

1// lib/ai/claude-thinking.ts 2import { anthropic } from '@/lib/anthropic'; 3 4export async function thinkAndRespond( 5 prompt: string, 6 options?: { 7 budgetTokens?: number; 8 } 9) { 10 const response = await anthropic.messages.create({ 11 model: 'claude-3-5-sonnet-20241022', 12 max_tokens: 16000, 13 thinking: { 14 type: 'enabled', 15 budget_tokens: options?.budgetTokens || 10000, 16 }, 17 messages: [{ role: 'user', content: prompt }], 18 }); 19 20 const thinkingBlock = response.content.find((b) => b.type === 'thinking'); 21 const textBlock = response.content.find((b) => b.type === 'text'); 22 23 return { 24 thinking: thinkingBlock?.thinking || '', 25 response: textBlock?.text || '', 26 }; 27}

Prompt Caching#

1// lib/ai/claude-cache.ts 2import { anthropic } from '@/lib/anthropic'; 3 4const SYSTEM_PROMPT = `You are an expert assistant for our e-commerce platform. 5You have access to product information, order history, and customer data. 6Always be helpful, accurate, and professional. 7 8[Large context with product catalog, FAQs, policies...] 9`.repeat(100); // Large context 10 11export async function chatWithCache(userMessage: string) { 12 const response = await anthropic.messages.create({ 13 model: 'claude-3-5-sonnet-20241022', 14 max_tokens: 1024, 15 system: [ 16 { 17 type: 'text', 18 text: SYSTEM_PROMPT, 19 cache_control: { type: 'ephemeral' }, 20 }, 21 ], 22 messages: [{ role: 'user', content: userMessage }], 23 }); 24 25 // Check cache metrics 26 console.log('Cache created:', response.usage?.cache_creation_input_tokens); 27 console.log('Cache read:', response.usage?.cache_read_input_tokens); 28 29 const textBlock = response.content.find((b) => b.type === 'text'); 30 return textBlock?.text || ''; 31}

PDF Processing#

1// lib/ai/claude-pdf.ts 2import { anthropic } from '@/lib/anthropic'; 3 4export async function analyzePDF( 5 pdfUrl: string, 6 prompt: string 7): Promise<string> { 8 const response = await fetch(pdfUrl); 9 const arrayBuffer = await response.arrayBuffer(); 10 const base64 = Buffer.from(arrayBuffer).toString('base64'); 11 12 const result = await anthropic.messages.create({ 13 model: 'claude-3-5-sonnet-20241022', 14 max_tokens: 4096, 15 messages: [ 16 { 17 role: 'user', 18 content: [ 19 { 20 type: 'document', 21 source: { 22 type: 'base64', 23 media_type: 'application/pdf', 24 data: base64, 25 }, 26 }, 27 { 28 type: 'text', 29 text: prompt, 30 }, 31 ], 32 }, 33 ], 34 }); 35 36 const textBlock = result.content.find((b) => b.type === 'text'); 37 return textBlock?.text || ''; 38}

Error Handling#

1// lib/ai/claude-errors.ts 2import Anthropic from '@anthropic-ai/sdk'; 3 4export async function safeChat( 5 messages: any[], 6 options?: any 7): Promise<string | null> { 8 try { 9 const response = await anthropic.messages.create({ 10 model: options?.model || 'claude-3-5-sonnet-20241022', 11 max_tokens: options?.maxTokens || 1024, 12 messages, 13 }); 14 15 const textBlock = response.content.find((b) => b.type === 'text'); 16 return textBlock?.text || ''; 17 } catch (error) { 18 if (error instanceof Anthropic.APIError) { 19 switch (error.status) { 20 case 429: 21 console.error('Rate limit exceeded'); 22 break; 23 case 529: 24 console.error('API overloaded'); 25 break; 26 default: 27 console.error('Anthropic API error:', error.message); 28 } 29 } 30 throw error; 31 } 32}