app/app/api/chat/route.ts
Commit b22931f393 (Albert): feat: Upgrade chat to Gemini Pro with configurable model
- Update Google AI model to gemini-pro-latest via env var
- Add GOOGLE_AI_MODEL environment variable for easy model switching
- Add initial greeting message explaining Ponderants features
- Re-add tool call handling to display node suggestions
- Fix chat authentication and streaming responses

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-09 04:34:48 +00:00

67 lines · 2.2 KiB · TypeScript
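
The commit makes the model configurable through a GOOGLE_AI_MODEL environment variable, read by the route below. A minimal `.env.local` sketch for running this route locally might look like the following; GOOGLE_GENERATIVE_AI_API_KEY is the default key variable read by the @ai-sdk/google provider, and the model value is the one named in the commit message, so adjust both to your setup.

# .env.local (sketch; values are examples)
# Default API key variable read by the @ai-sdk/google provider
GOOGLE_GENERATIVE_AI_API_KEY=your-google-ai-api-key
# Model id passed to google() in the route; the commit uses gemini-pro-latest
GOOGLE_AI_MODEL=gemini-pro-latest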

import { streamText, UIMessage, convertToModelMessages } from 'ai';
import { google } from '@ai-sdk/google';
import { cookies } from 'next/headers';
import { NodeSuggestionSchema } from '@/lib/ai-schemas';
import { z } from 'zod';
// Edge runtime removed - cookies() requires Node.js runtime
export const maxDuration = 30;

export async function POST(req: Request) {
  // Check authentication
  const cookieStore = await cookies();
  const authCookie = cookieStore.get('ponderants-auth');

  console.log('[Chat API] Cookie check:', {
    hasCookie: !!authCookie,
    allCookies: cookieStore.getAll().map(c => c.name),
  });

  if (!authCookie) {
    console.log('[Chat API] No auth cookie found, returning 401');
    return new Response('Unauthorized', { status: 401 });
  }

  const { messages, data }: { messages: UIMessage[]; data?: { persona?: string } } = await req.json();

  // Get the 'persona' from the custom 'data' object
  const { persona } = z
    .object({
      persona: z.string().optional().default('Socratic'),
    })
    .parse(data || {});

  // Dynamically create the system prompt based on persona
  const systemPrompt = `You are a ${persona} thought partner.
Your goal is to interview the user to help them explore and structure their ideas.
When you identify a complete, self-contained idea, you MUST use the 'suggest_node' tool
to propose it as a new "thought node". Do not suggest a node until the
idea is fully formed.
For all other conversation, just respond as a helpful AI.`;

  // Get model from environment variable
  const modelName = process.env.GOOGLE_AI_MODEL;
  if (!modelName) {
    console.error('[Chat API] GOOGLE_AI_MODEL environment variable is not set');
    return new Response('Server configuration error', { status: 500 });
  }

  // Use the Vercel AI SDK's streamText function with tools
  const result = streamText({
    model: google(modelName),
    system: systemPrompt,
    messages: convertToModelMessages(messages),
    // Provide the schema as a 'tool' to the model
    tools: {
      suggest_node: {
        description: 'Suggest a new thought node when an idea is complete.',
        // AI SDK v5 expects the tool's Zod schema under `inputSchema`
        inputSchema: NodeSuggestionSchema,
      },
    },
  });

  // Return the streaming response (v5 API)
  return result.toUIMessageStreamResponse();
}
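
NodeSuggestionSchema is imported from '@/lib/ai-schemas' but not shown on this page. A purely hypothetical sketch, assuming the tool proposes a node with a title and body text, could look like this; the field names are illustrative, not the project's actual schema.

// lib/ai-schemas.ts (hypothetical sketch; the real schema is not shown in this file)
import { z } from 'zod';

export const NodeSuggestionSchema = z.object({
  // Short label for the suggested thought node (assumed field)
  title: z.string().describe('A concise title for the thought node'),
  // Full text of the idea to capture (assumed field)
  content: z.string().describe('The self-contained idea to store in the node'),
});

For a quick smoke test of the route, a request can be sent directly with fetch. This sketch assumes the AI SDK v5 UIMessage wire format (id, role, and a parts array) and a valid ponderants-auth cookie value; both are assumptions, not details confirmed by this file.

// Smoke-test sketch: exercises the auth check, persona parsing, and streaming response.
const res = await fetch('http://localhost:3000/api/chat', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    // Placeholder: replace with a real session cookie issued by the app
    Cookie: 'ponderants-auth=<your-session-cookie>',
  },
  body: JSON.stringify({
    messages: [
      { id: '1', role: 'user', parts: [{ type: 'text', text: 'I think note-taking tools fail because...' }] },
    ],
    data: { persona: 'Socratic' },
  }),
});
console.log(res.status); // 401 without the cookie; otherwise a streamed UI message response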