feat: Upgrade chat to Gemini Pro with configurable model
- Update Google AI model to gemini-pro-latest via env var
- Add GOOGLE_AI_MODEL environment variable for easy model switching
- Add initial greeting message explaining Ponderants features
- Re-add tool call handling to display node suggestions
- Fix chat authentication and streaming responses

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
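In short, the chat route now resolves the Gemini model from the GOOGLE_AI_MODEL environment variable instead of hard-coding gemini-1.5-flash, and fails with a 500 when the variable is unset. A condensed sketch of that flow, assuming the @ai-sdk/google provider and the Vercel AI SDK helpers the route already imports (auth checks, logging, and tool definitions are omitted here):

```ts
import { google } from '@ai-sdk/google';
import { convertToModelMessages, streamText } from 'ai';

export async function POST(req: Request) {
  // The model is now chosen per deployment, e.g. GOOGLE_AI_MODEL=gemini-pro-latest
  const modelName = process.env.GOOGLE_AI_MODEL;
  if (!modelName) {
    return new Response('Server configuration error', { status: 500 });
  }

  const { messages } = await req.json();
  const result = streamText({
    model: google(modelName), // previously hard-coded as google('gemini-1.5-flash')
    system: 'You are a helpful interviewer.', // placeholder; the real system prompt lives in the route
    messages: convertToModelMessages(messages),
  });
  return result.toTextStreamResponse();
}
```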
@@ -9,7 +9,8 @@ SURREALDB_PASS=root
 SURREALDB_JWT_SECRET=your-secret-key-here-change-in-production
 
 # Google AI API Key (for Gemini embeddings and chat)
-GOOGLE_AI_API_KEY=your-google-ai-api-key
+GOOGLE_GENERATIVE_AI_API_KEY=your-google-ai-api-key
+GOOGLE_AI_MODEL=gemini-pro-latest
 
 # Deepgram API Key (for voice-to-text)
 DEEPGRAM_API_KEY=your-deepgram-api-key
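A note on the env hunk above: GOOGLE_GENERATIVE_AI_API_KEY is the variable the Vercel AI SDK's Google provider reads by default, which is presumably why the key was renamed from GOOGLE_AI_API_KEY. If a project needs to keep a differently named variable, the key can be passed explicitly instead; a minimal sketch, where MY_CUSTOM_GOOGLE_KEY is a hypothetical name, not part of this commit:

```ts
import { createGoogleGenerativeAI } from '@ai-sdk/google';

// Only needed when the key is NOT stored in GOOGLE_GENERATIVE_AI_API_KEY.
export const google = createGoogleGenerativeAI({
  apiKey: process.env.MY_CUSTOM_GOOGLE_KEY, // hypothetical variable name
});
```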
@@ -4,14 +4,21 @@ import { cookies } from 'next/headers';
 import { NodeSuggestionSchema } from '@/lib/ai-schemas';
 import { z } from 'zod';
 
-export const runtime = 'edge';
+// Edge runtime removed - cookies() requires Node.js runtime
+export const maxDuration = 30;
 
 export async function POST(req: Request) {
   // Check authentication
   const cookieStore = await cookies();
   const authCookie = cookieStore.get('ponderants-auth');
 
+  console.log('[Chat API] Cookie check:', {
+    hasCookie: !!authCookie,
+    allCookies: cookieStore.getAll().map(c => c.name),
+  });
+
   if (!authCookie) {
+    console.log('[Chat API] No auth cookie found, returning 401');
     return new Response('Unauthorized', { status: 401 });
   }
 
@@ -32,9 +39,16 @@ to propose it as a new "thought node". Do not suggest a node until the
 idea is fully formed.
 For all other conversation, just respond as a helpful AI.`;
 
+  // Get model from environment variable
+  const modelName = process.env.GOOGLE_AI_MODEL;
+  if (!modelName) {
+    console.error('[Chat API] GOOGLE_AI_MODEL environment variable is not set');
+    return new Response('Server configuration error', { status: 500 });
+  }
+
   // Use the Vercel AI SDK's streamText function with tools
   const result = streamText({
-    model: google('gemini-1.5-flash'),
+    model: google(modelName),
     system: systemPrompt,
     messages: convertToModelMessages(messages),
 
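The route imports NodeSuggestionSchema from @/lib/ai-schemas, but that file is not part of this commit. Based on the fields the chat page renders below (part.args.title, part.args.content, part.args.tags), the schema plausibly looks like the following zod sketch; treat the exact shape and descriptions as assumptions, not the actual source:

```ts
import { z } from 'zod';

// Assumed shape of the suggest_node payload, inferred from what the UI reads.
export const NodeSuggestionSchema = z.object({
  title: z.string().describe('Short name for the proposed thought node'),
  content: z.string().describe('The fully formed idea, in the user\'s words'),
  tags: z.array(z.string()).optional().describe('Optional topic tags'),
});

export type NodeSuggestion = z.infer<typeof NodeSuggestionSchema>;
```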
@@ -21,13 +21,32 @@ export default function ChatPage() {
   const viewport = useRef<HTMLDivElement>(null);
   const [input, setInput] = useState('');
 
-  const { messages, sendMessage, isLoading } = useChat({
+  const { messages, sendMessage, isLoading, setMessages } = useChat({
     api: '/api/chat',
     body: {
       persona: 'Socratic',
     },
+    credentials: 'include',
   });
 
+  // Add initial greeting message on first load
+  useEffect(() => {
+    if (messages.length === 0) {
+      setMessages([
+        {
+          id: 'initial-greeting',
+          role: 'assistant',
+          parts: [
+            {
+              type: 'text',
+              text: 'Welcome to Ponderants! I\'m here to help you explore and structure your ideas through conversation.\n\nWhat would you like to talk about today? I can adapt my interview style to best suit your needs (Socratic questioning, collaborative brainstorming, or other approaches).\n\nJust start sharing your thoughts, and we\'ll discover meaningful insights together.',
+            },
+          ],
+        },
+      ]);
+    }
+  }, []);
+
   // Auto-scroll to bottom
   useEffect(() => {
     viewport.current?.scrollTo({
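One note on the greeting effect above: it runs once on mount with an empty dependency list, so the welcome message is only seeded when the chat history starts empty. The message literal could also live outside the component as a constant, keeping the effect body small; a sketch using the same shape the page already builds (greeting text shortened here, and no type annotation since the SDK's message type is not shown in this diff):

```ts
// Mirrors the object seeded in the effect above.
const INITIAL_GREETING = {
  id: 'initial-greeting',
  role: 'assistant' as const,
  parts: [
    {
      type: 'text' as const,
      text: "Welcome to Ponderants! I'm here to help you explore and structure your ideas through conversation.",
    },
  ],
};

// Inside the component:
// useEffect(() => {
//   if (messages.length === 0) setMessages([INITIAL_GREETING]);
// }, []);
```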
@@ -79,6 +98,31 @@ export default function ChatPage() {
                 </Text>
               );
             }
+
+            // Handle tool calls (e.g., suggest_node)
+            if (part.type === 'tool-call') {
+              return (
+                <Paper key={i} withBorder p="xs" mt="xs" bg="dark.6">
+                  <Text size="xs" c="dimmed" mb="xs">
+                    💡 Node Suggestion
+                  </Text>
+                  <Text fw={600}>{part.args.title}</Text>
+                  <Text size="sm" mt="xs">
+                    {part.args.content}
+                  </Text>
+                  {part.args.tags && part.args.tags.length > 0 && (
+                    <Group gap="xs" mt="xs">
+                      {part.args.tags.map((tag: string, tagIdx: number) => (
+                        <Text key={tagIdx} size="xs" c="blue.4">
+                          #{tag}
+                        </Text>
+                      ))}
+                    </Group>
+                  )}
+                </Paper>
+              );
+            }
+
             return null;
           })}
         </Paper>