Files
professor/app/api/ai/route.ts
Aodhan Collins f644937604 Initial commit — AI-powered coding tutor (Professor)
Next.js 16, React 19, Monaco editor, Anthropic SDK, multi-provider AI,
Wandbox Python execution, iframe HTML preview, SQLite auth + session persistence.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-04 21:48:34 +00:00

185 lines
5.9 KiB
TypeScript

import { NextRequest, NextResponse } from 'next/server';
import Anthropic from '@anthropic-ai/sdk';
import {
buildTaskGenerationPrompt,
buildCodeReviewPrompt,
buildChatPrompt,
buildLessonPrompt,
buildClassroomChatPrompt,
} from '@/lib/prompts';
import { PROVIDER_MAP } from '@/lib/providers';
import type { AIRequestBody, ProviderConfig } from '@/types';
// ─── Anthropic streaming ────────────────────────────────────────────────────
/**
 * Streams a completion from the Anthropic Messages API, enqueueing each text
 * delta as UTF-8 bytes on the given stream controller.
 *
 * @param config - Provider settings. `apiKey` is optional; when omitted the
 *   SDK falls back to the ANTHROPIC_API_KEY environment variable.
 * @param systemPrompt - System prompt forwarded verbatim to the API.
 * @param messages - Conversation history in Anthropic message format.
 * @param controller - Controller of the ReadableStream returned to the client.
 * @throws Propagates SDK/network errors; the route handler reports them
 *   in-band on the stream.
 */
async function streamAnthropic(
  config: ProviderConfig,
  systemPrompt: string,
  messages: Anthropic.MessageParam[],
  controller: ReadableStreamDefaultController
) {
  const client = config.apiKey
    ? new Anthropic({ apiKey: config.apiKey })
    : new Anthropic(); // falls back to ANTHROPIC_API_KEY env var

  const stream = await client.messages.stream({
    model: config.model,
    max_tokens: 2048,
    system: systemPrompt,
    messages,
  });

  // Hoisted out of the loop: the original allocated a fresh TextEncoder for
  // every streamed delta.
  const encoder = new TextEncoder();
  for await (const chunk of stream) {
    if (chunk.type === 'content_block_delta' && chunk.delta.type === 'text_delta') {
      controller.enqueue(encoder.encode(chunk.delta.text));
    }
  }
}
// ─── OpenAI-compatible streaming (OpenRouter, LM Studio, Ollama) ────────────
/**
 * Streams a chat completion from any OpenAI-compatible `/chat/completions`
 * endpoint, parsing the SSE response and enqueueing each content delta as
 * UTF-8 bytes on the given stream controller.
 *
 * Fixes over the previous implementation:
 * - The decoder is flushed and any final newline-less buffered line is
 *   processed when the upstream closes (the last delta used to be dropped).
 * - The reader is cancelled once `[DONE]` arrives so the connection is
 *   released instead of leaked.
 * - Accepts both `data:` and `data: ` line prefixes, per the SSE format.
 *
 * @param config - Provider settings (provider id, model, optional base URL
 *   and API key).
 * @param systemPrompt - Prepended to the message list as the system message.
 * @param messages - Prior conversation turns.
 * @param controller - Controller of the ReadableStream returned to the client.
 * @throws Error with the provider label, HTTP status, and response text when
 *   the endpoint returns a non-OK response or no body.
 */
async function streamOpenAICompatible(
  config: ProviderConfig,
  systemPrompt: string,
  messages: Array<{ role: 'user' | 'assistant'; content: string }>,
  controller: ReadableStreamDefaultController
) {
  const providerDef = PROVIDER_MAP[config.provider];
  const baseUrl = config.baseUrl ?? providerDef.defaultBaseUrl;
  const apiKey = config.apiKey || 'none'; // LM Studio / Ollama accept any value

  const headers: Record<string, string> = {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${apiKey}`,
  };
  // OpenRouter requires attribution headers
  if (config.provider === 'openrouter') {
    headers['HTTP-Referer'] = 'http://localhost:3000';
    headers['X-Title'] = 'Professor';
  }

  const res = await fetch(`${baseUrl}/chat/completions`, {
    method: 'POST',
    headers,
    body: JSON.stringify({
      model: config.model,
      messages: [{ role: 'system', content: systemPrompt }, ...messages],
      stream: true,
    }),
  });
  if (!res.ok || !res.body) {
    const text = await res.text().catch(() => res.statusText);
    throw new Error(`${providerDef.label} error ${res.status}: ${text}`);
  }

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  const encoder = new TextEncoder(); // reused for every enqueued delta
  let buffer = '';

  // Processes one SSE line; returns true when the stream signalled [DONE].
  const handleLine = (line: string): boolean => {
    const trimmed = line.trim();
    if (!trimmed.startsWith('data:')) return false;
    const data = trimmed.slice(5).trimStart(); // "data:" with or without a space
    if (data === '[DONE]') return true;
    try {
      const json = JSON.parse(data);
      const content = json.choices?.[0]?.delta?.content;
      if (content) controller.enqueue(encoder.encode(content));
    } catch {
      // ignore malformed SSE lines
    }
    return false;
  };

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? ''; // keep the trailing partial line for next read
    for (const line of lines) {
      if (handleLine(line)) {
        // Upstream is finished; release the connection instead of leaking it.
        await reader.cancel().catch(() => {});
        return;
      }
    }
  }

  // BUG FIX: flush the decoder and process a final line that arrived without
  // a trailing newline — the original dropped that last delta entirely.
  buffer += decoder.decode();
  if (buffer.trim()) handleLine(buffer);
}
// ─── Route handler ──────────────────────────────────────────────────────────
/**
 * POST /api/ai — streams an AI response as `text/plain`.
 *
 * Request body (AIRequestBody): `mode` selects the system prompt
 * (`generate_task` | `review_code` | `chat` | `generate_lesson` |
 * `classroom_chat`), `topic` is required, and `providerConfig` selects the
 * backend (Anthropic SDK vs. any OpenAI-compatible endpoint).
 *
 * Responses: 400 for invalid JSON, a missing required field, an unknown mode,
 * or a chat mode with no messages; otherwise 200 with a streamed body. Errors
 * raised mid-stream are reported in-band as `[Error: …]` text, because the
 * headers are already sent once streaming begins and the status can no longer
 * change.
 */
export async function POST(req: NextRequest) {
  let body: AIRequestBody;
  try {
    body = await req.json();
  } catch {
    return NextResponse.json({ error: 'Invalid JSON' }, { status: 400 });
  }

  const { mode, topic, code, executionResult, messages, userMessage, providerConfig, responseMode } = body;
  if (!mode || !topic || !providerConfig) {
    return NextResponse.json({ error: 'Missing required fields' }, { status: 400 });
  }

  // Build the system prompt for the requested mode.
  let systemPrompt: string;
  switch (mode) {
    case 'generate_task':
      systemPrompt = buildTaskGenerationPrompt(topic);
      break;
    case 'review_code':
      systemPrompt = buildCodeReviewPrompt(topic, code, executionResult, responseMode);
      break;
    case 'chat':
      systemPrompt = buildChatPrompt(topic, code, responseMode);
      break;
    case 'generate_lesson':
      systemPrompt = buildLessonPrompt(topic);
      break;
    case 'classroom_chat':
      systemPrompt = buildClassroomChatPrompt(topic);
      break;
    default:
      return NextResponse.json({ error: 'Invalid mode' }, { status: 400 });
  }

  // Build the message list. Generation/review modes use a fixed user turn;
  // chat modes replay history plus the new user message.
  const chatMessages: Array<{ role: 'user' | 'assistant'; content: string }> =
    mode === 'generate_task'
      ? [{ role: 'user', content: 'Generate a task for this topic.' }]
      : mode === 'review_code'
      ? [{ role: 'user', content: 'Please review my code and give me feedback.' }]
      : mode === 'generate_lesson'
      ? [{ role: 'user', content: 'Write the lesson.' }]
      : [
          ...(messages ?? []).map((m) => ({
            role: m.role as 'user' | 'assistant',
            content: m.content,
          })),
          ...(userMessage ? [{ role: 'user' as const, content: userMessage }] : []),
        ];

  // Fail fast: every provider rejects an empty message list, so report it as
  // a clean 400 rather than an in-stream provider error.
  if (chatMessages.length === 0) {
    return NextResponse.json({ error: 'No messages provided' }, { status: 400 });
  }

  const stream = new ReadableStream({
    async start(controller) {
      try {
        if (providerConfig.provider === 'anthropic') {
          await streamAnthropic(providerConfig, systemPrompt, chatMessages, controller);
        } else {
          await streamOpenAICompatible(providerConfig, systemPrompt, chatMessages, controller);
        }
        controller.close();
      } catch (err) {
        // In-band error reporting: the 200 + headers are already committed.
        const message = err instanceof Error ? err.message : 'AI error';
        controller.enqueue(new TextEncoder().encode(`\n\n[Error: ${message}]`));
        controller.close();
      }
    },
  });

  return new Response(stream, {
    headers: {
      'Content-Type': 'text/plain; charset=utf-8',
      // BUG FIX: 'Transfer-Encoding: chunked' removed — it is a hop-by-hop
      // header the HTTP runtime manages itself; the Fetch spec forbids setting
      // it manually and it is invalid under HTTP/2. Streaming still works:
      // the runtime chunks the ReadableStream body automatically.
      'Cache-Control': 'no-cache',
    },
  });
}