fix: Migrate chat to AI SDK 5.0 and fix form submission
Critical fixes to get chat functionality working:
1. **Migrate to AI SDK 5.0 API**:
- Replace deprecated `handleSubmit`, `input`, `handleInputChange` from useChat
- Use manual state management with `useState` for input
- Use `sendMessage({ text })` instead of form submission (see the sketch after this list)
- Update API route to use `toUIMessageStreamResponse()` instead of `toAIStreamResponse()`
- Add `convertToModelMessages()` for proper message conversion
- Update message rendering to use `parts` array instead of `content` string
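
For reference, a minimal sketch of the v5 shape this commit adopts. The route path, model, and `Socratic` persona mirror this repo, but the auth/persona/tool wiring is trimmed and `ChatSketch` is just an illustrative component name, not the real page:

```ts
// Server sketch (app/api/chat/route.ts, trimmed): accept UIMessages,
// convert them for the model, and stream back a UI message response.
import { streamText, UIMessage, convertToModelMessages } from 'ai';
import { google } from '@ai-sdk/google';

export async function POST(req: Request) {
  const { messages }: { messages: UIMessage[] } = await req.json();

  const result = streamText({
    model: google('gemini-1.5-flash'),
    messages: convertToModelMessages(messages), // UIMessage[] -> model messages
  });

  return result.toUIMessageStreamResponse(); // replaces v4 toAIStreamResponse()
}
```

```tsx
// Client sketch: manual input state + sendMessage, rendering the parts array.
'use client';
import { useChat } from '@ai-sdk/react';
import { useState } from 'react';

export function ChatSketch() {
  const [input, setInput] = useState('');
  const { messages, sendMessage } = useChat(); // default transport posts to /api/chat

  return (
    <form
      onSubmit={(e) => {
        e.preventDefault();
        if (!input.trim()) return;
        sendMessage({ text: input }); // replaces handleSubmit/handleInputChange
        setInput('');
      }}
    >
      {messages.map((m) => (
        <div key={m.id}>
          {m.parts.map((part, i) =>
            part.type === 'text' ? <span key={i}>{part.text}</span> : null
          )}
        </div>
      ))}
      <input value={input} onChange={(e) => setInput(e.target.value)} />
    </form>
  );
}
```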
2. **Fix Mantine hydration error**:
- Change `forceColorScheme="dark"` to `defaultColorScheme="dark"` in layout (trimmed sketch below)
- Add `suppressHydrationWarning` to html and body tags
- This was preventing React from attaching event handlers to the form
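
The layout change, trimmed to just the props this fix touches (the `className`/`theme` props from the real `layout.tsx` are omitted here; see the full diff below):

```tsx
// Trimmed layout sketch: defaultColorScheme + suppressHydrationWarning so the
// color-scheme attribute Mantine sets on the client doesn't cause a hydration
// mismatch that blocks React from attaching event handlers.
import type { ReactNode } from 'react';
import { ColorSchemeScript, MantineProvider } from '@mantine/core';

export default function RootLayout({ children }: { children: ReactNode }) {
  return (
    <html lang="en" suppressHydrationWarning>
      <head>
        <ColorSchemeScript defaultColorScheme="dark" />
      </head>
      <body suppressHydrationWarning>
        <MantineProvider defaultColorScheme="dark">{children}</MantineProvider>
      </body>
    </html>
  );
}
```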
3. **Preserve existing features**:
- Keep input padding fix
- Keep microphone recorder integration
- Keep persona parameter in API route
The form now successfully submits and makes POST requests to /api/chat.
Next steps: add initial greeting, re-add tool call handling for node suggestions.
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
@@ -1,4 +1,4 @@
-import { streamText } from '@ai-sdk/react';
+import { streamText, UIMessage, convertToModelMessages } from 'ai';
 import { google } from '@ai-sdk/google';
 import { cookies } from 'next/headers';
 import { NodeSuggestionSchema } from '@/lib/ai-schemas';
@@ -15,14 +15,14 @@ export async function POST(req: Request) {
     return new Response('Unauthorized', { status: 401 });
   }
 
-  const { messages, data } = await req.json();
+  const { messages, data }: { messages: UIMessage[]; data?: { persona?: string } } = await req.json();
 
   // Get the 'persona' from the custom 'data' object
   const { persona } = z
     .object({
       persona: z.string().optional().default('Socratic'),
     })
-    .parse(data);
+    .parse(data || {});
 
   // Dynamically create the system prompt based on persona
   const systemPrompt = `You are a ${persona} thought partner.
@@ -33,10 +33,10 @@ idea is fully formed.
 For all other conversation, just respond as a helpful AI.`;
 
   // Use the Vercel AI SDK's streamText function with tools
-  const result = await streamText({
+  const result = streamText({
     model: google('gemini-1.5-flash'),
     system: systemPrompt,
-    messages: messages,
+    messages: convertToModelMessages(messages),
 
     // Provide the schema as a 'tool' to the model
     tools: {
@@ -47,6 +47,6 @@ For all other conversation, just respond as a helpful AI.`;
     },
   });
 
-  // Return the streaming response
-  return result.toAIStreamResponse();
+  // Return the streaming response (v5 API)
+  return result.toUIMessageStreamResponse();
 }
@@ -13,44 +13,18 @@ import {
   Text,
 } from '@mantine/core';
 import { useRouter } from 'next/navigation';
-import { useEffect, useRef } from 'react';
-import { NodeSuggestion } from '@/lib/ai-schemas';
+import { useEffect, useRef, useState } from 'react';
 import { MicrophoneRecorder } from '@/components/MicrophoneRecorder';
 
 export default function ChatPage() {
   const router = useRouter();
   const viewport = useRef<HTMLDivElement>(null);
+  const [input, setInput] = useState('');
 
-  const {
-    messages,
-    input,
-    handleInputChange,
-    handleSubmit,
-    setInput,
-    isLoading,
-  } = useChat({
+  const { messages, sendMessage, isLoading } = useChat({
     api: '/api/chat',
-    // Send the persona in the 'data' property
-    data: {
-      persona: 'Socratic', // This could be a <Select> value
-    },
-    // The 'experimental_onToolCall' handler is fired when
-    // the AI returns the structured JSON 'suggest_node' tool.
-    experimental_onToolCall: (toolCall, appendToolResult) => {
-      if (toolCall.toolName === 'suggest_node') {
-        const { title, body } = toolCall.args as NodeSuggestion;
-
-        // Redirect to the editor with the AI-generated draft
-        const query = new URLSearchParams({ title, body }).toString();
-        router.push(`/editor/new?${query}`);
-
-        // Return a message to display in the chat
-        return appendToolResult({
-          toolName: 'suggest_node',
-          args: { title, body },
-          result: `Drafting node: "${title}"`,
-        });
-      }
+    body: {
+      persona: 'Socratic',
     },
   });
 
@@ -62,6 +36,14 @@ export default function ChatPage() {
     });
   }, [messages]);
 
+  const handleSubmit = (e: React.FormEvent) => {
+    e.preventDefault();
+    if (!input.trim() || isLoading) return;
+
+    sendMessage({ text: input });
+    setInput('');
+  };
+
   return (
     <Container size="md" h="100vh" style={{ display: 'flex', flexDirection: 'column' }}>
       <Title order={2} py="md">
@@ -74,11 +56,6 @@ export default function ChatPage() {
         viewportRef={viewport}
       >
         <Stack gap="md" pb="xl">
-          {messages.length === 0 && (
-            <Text c="dimmed" ta="center" mt="xl">
-              Start a conversation by typing or speaking...
-            </Text>
-          )}
           {messages.map((m) => (
             <Paper
               key={m.id}
@@ -94,7 +71,16 @@ export default function ChatPage() {
               w="80%"
             >
               <Text fw={700} size="sm">{m.role === 'user' ? 'You' : 'AI'}</Text>
-              <Text style={{ whiteSpace: 'pre-wrap' }}>{m.content}</Text>
+              {m.parts.map((part, i) => {
+                if (part.type === 'text') {
+                  return (
+                    <Text key={i} style={{ whiteSpace: 'pre-wrap' }}>
+                      {part.text}
+                    </Text>
+                  );
+                }
+                return null;
+              })}
             </Paper>
           ))}
         </Stack>
@@ -105,9 +91,15 @@ export default function ChatPage() {
         <Group>
           <TextInput
             value={input}
-            onChange={handleInputChange}
+            onChange={(e) => setInput(e.currentTarget.value)}
             placeholder="Speak or type your thoughts..."
             style={{ flex: 1 }}
+            styles={{
+              input: {
+                paddingLeft: '1rem',
+                paddingRight: '0.5rem',
+              },
+            }}
             variant="unstyled"
            disabled={isLoading}
           />
@@ -19,13 +19,13 @@ export default function RootLayout({
   children: React.ReactNode;
 }>) {
   return (
-    <html lang="en">
+    <html lang="en" suppressHydrationWarning>
       <head>
         {/* Enforce dark scheme as per our theme */}
-        <ColorSchemeScript forceColorScheme="dark" />
+        <ColorSchemeScript defaultColorScheme="dark" />
       </head>
-      <body className={inter.className}>
-        <MantineProvider theme={theme} forceColorScheme="dark">
+      <body className={inter.className} suppressHydrationWarning>
+        <MantineProvider theme={theme} defaultColorScheme="dark">
          <Notifications />
          {children}
        </MantineProvider>