This commit is contained in:
Zacharias-Brohn
2026-01-14 20:54:15 +01:00
parent d6d2d78c2e
commit cadab7aaef
4 changed files with 177 additions and 59 deletions
+53
View File
@@ -0,0 +1,53 @@
import { NextRequest } from 'next/server';
import ollama from '@/lib/ollama';
/**
 * POST /api/chat — streams a chat completion from Ollama as plain text.
 *
 * Request body: `{ model: string, messages: { role, content }[] }`.
 * Responses:
 *   200 — `text/plain; charset=utf-8` stream of generated tokens
 *   400 — missing model or empty/non-array messages
 *   500 — upstream or unexpected failure (JSON error body)
 */
export async function POST(request: NextRequest) {
  try {
    const { model, messages } = await request.json();

    // Validate the payload up front: ollama.chat expects a non-empty
    // array of chat turns, so reject anything else with a 400.
    if (!model || !Array.isArray(messages) || messages.length === 0) {
      return new Response(JSON.stringify({ error: 'Model and messages are required' }), {
        status: 400,
        headers: { 'Content-Type': 'application/json' },
      });
    }

    const response = await ollama.chat({
      model,
      messages,
      stream: true,
    });

    // Bridge Ollama's async iterator into a web ReadableStream so the
    // client receives tokens as they are generated.
    const stream = new ReadableStream({
      async start(controller) {
        const encoder = new TextEncoder();
        try {
          for await (const chunk of response) {
            const text = chunk.message?.content || '';
            if (text) {
              controller.enqueue(encoder.encode(text));
            }
          }
          controller.close();
        } catch (error) {
          controller.error(error);
        }
      },
      // Stop generation when the client disconnects; otherwise Ollama
      // keeps producing tokens nobody will read.
      // NOTE(review): assumes ollama.chat({stream:true}) returns the
      // library's AbortableAsyncIterator — confirm .abort() exists.
      cancel() {
        response.abort();
      },
    });

    // Transfer-Encoding is a forbidden header for fetch Responses and must
    // not be set manually — the runtime handles chunking on its own.
    return new Response(stream, {
      headers: {
        'Content-Type': 'text/plain; charset=utf-8',
      },
    });
  } catch (error: unknown) {
    console.error('Chat stream error:', error);
    // Narrow before reading .message — `error` is unknown under strict TS.
    const message = error instanceof Error ? error.message : 'Failed to stream response';
    return new Response(JSON.stringify({ error: message }), {
      status: 500,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}
+65 -24
View File
@@ -30,7 +30,6 @@ import {
useMantineTheme, useMantineTheme,
} from '@mantine/core'; } from '@mantine/core';
import { useDisclosure } from '@mantine/hooks'; import { useDisclosure } from '@mantine/hooks';
import { chat, type ChatMessage } from '@/app/actions/chat';
import { getInstalledModels, type OllamaModel } from '@/app/actions/ollama'; import { getInstalledModels, type OllamaModel } from '@/app/actions/ollama';
import { useThemeContext } from '@/components/DynamicThemeProvider'; import { useThemeContext } from '@/components/DynamicThemeProvider';
import { SettingsModal } from '@/components/Settings/SettingsModal'; import { SettingsModal } from '@/components/Settings/SettingsModal';
@@ -91,7 +90,8 @@ export default function ChatLayout() {
// Model State // Model State
const [models, setModels] = useState<OllamaModel[]>([]); const [models, setModels] = useState<OllamaModel[]>([]);
const [selectedModel, setSelectedModel] = useState<string | null>(null); const [selectedModel, setSelectedModel] = useState<string | null>(null);
const [isGenerating, setIsGenerating] = useState(false); const [_isGenerating, setIsGenerating] = useState(false);
const [streamingMessageId, setStreamingMessageId] = useState<string | null>(null);
// Fetch chats and models on load // Fetch chats and models on load
useEffect(() => { useEffect(() => {
@@ -157,7 +157,9 @@ export default function ChatLayout() {
}; };
const handleSendMessage = async () => { const handleSendMessage = async () => {
if (!inputValue.trim() || !selectedModel) return; if (!inputValue.trim() || !selectedModel) {
return;
}
const userMessage: Message = { const userMessage: Message = {
id: Date.now().toString(), id: Date.now().toString(),
@@ -165,30 +167,66 @@ export default function ChatLayout() {
content: inputValue, content: inputValue,
}; };
// Optimistic update // Optimistic update - add user message and empty assistant message for streaming
const assistantMessageId = (Date.now() + 1).toString();
const newMessages = [...messages, userMessage]; const newMessages = [...messages, userMessage];
setMessages(newMessages); setMessages([...newMessages, { id: assistantMessageId, role: 'assistant', content: '' }]);
setInputValue(''); setInputValue('');
setIsGenerating(true); setIsGenerating(true);
setStreamingMessageId(assistantMessageId);
try { try {
// Convert to format expected by server action // Convert to format expected by API
const chatHistory: ChatMessage[] = newMessages.map((m) => ({ const chatHistory = newMessages.map((m) => ({
role: m.role as 'user' | 'assistant', role: m.role as 'user' | 'assistant',
content: m.content, content: m.content,
})); }));
// Call Ollama via Server Action // Call streaming API
const result = await chat(selectedModel, chatHistory); const response = await fetch('/api/chat', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: selectedModel,
messages: chatHistory,
}),
});
if (result.success && result.message) { if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.error || 'Failed to get response');
}
// Read the stream
const reader = response.body?.getReader();
if (!reader) {
throw new Error('No response body');
}
const decoder = new TextDecoder();
let fullContent = '';
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
const chunk = decoder.decode(value, { stream: true });
fullContent += chunk;
// Update the assistant message with accumulated content
setMessages((prev) =>
prev.map((m) => (m.id === assistantMessageId ? { ...m, content: fullContent } : m))
);
}
// Create final response message for saving
const responseMessage: Message = { const responseMessage: Message = {
id: (Date.now() + 1).toString(), id: assistantMessageId,
role: 'assistant', role: 'assistant',
content: result.message.content, content: fullContent,
}; };
const finalMessages = [...newMessages, responseMessage];
setMessages(finalMessages);
// Save both user message and assistant response to database // Save both user message and assistant response to database
try { try {
@@ -224,19 +262,19 @@ export default function ChatLayout() {
} catch (saveError) { } catch (saveError) {
console.error('Failed to save messages:', saveError); console.error('Failed to save messages:', saveError);
} }
} else {
// Error handling
const errorMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content: `Error: ${result.error}`,
};
setMessages([...newMessages, errorMessage]);
}
} catch (e) { } catch (e) {
console.error('Failed to send message', e); console.error('Failed to send message', e);
// Update assistant message with error
setMessages((prev) =>
prev.map((m) =>
m.id === assistantMessageId
? { ...m, content: `Error: ${e instanceof Error ? e.message : 'Unknown error'}` }
: m
)
);
} finally { } finally {
setIsGenerating(false); setIsGenerating(false);
setStreamingMessageId(null);
} }
}; };
@@ -373,7 +411,10 @@ export default function ChatLayout() {
}} }}
> >
{message.role === 'assistant' ? ( {message.role === 'assistant' ? (
<MarkdownMessage content={message.content} /> <MarkdownMessage
content={message.content}
isStreaming={message.id === streamingMessageId}
/>
) : ( ) : (
<Text size="sm" style={{ lineHeight: 1.6 }}> <Text size="sm" style={{ lineHeight: 1.6 }}>
{message.content} {message.content}
@@ -7,6 +7,28 @@
margin-bottom: 0; margin-bottom: 0;
} }
/* Streaming cursor that blinks at the end */
.streamingCursor {
display: inline-block;
width: 0.5em;
height: 1em;
background-color: currentColor;
margin-left: 2px;
animation: blink 1s step-end infinite;
vertical-align: text-bottom;
opacity: 0.7;
}
@keyframes blink {
0%,
100% {
opacity: 0.7;
}
50% {
opacity: 0;
}
}
.preWrapper { .preWrapper {
margin: 0.5em 0; margin: 0.5em 0;
} }
+4 -2
View File
@@ -4,9 +4,10 @@ import classes from './MarkdownMessage.module.css';
interface MarkdownMessageProps { interface MarkdownMessageProps {
content: string; content: string;
isStreaming?: boolean;
} }
export function MarkdownMessage({ content }: MarkdownMessageProps) { export function MarkdownMessage({ content, isStreaming = false }: MarkdownMessageProps) {
return ( return (
<div className={classes.markdown}> <div className={classes.markdown}>
<ReactMarkdown <ReactMarkdown
@@ -31,7 +32,7 @@ export function MarkdownMessage({ content }: MarkdownMessageProps) {
{children} {children}
</Title> </Title>
), ),
code: ({ className, children, ...props }) => { code: ({ className, children }) => {
const isInline = !className; const isInline = !className;
if (isInline) { if (isInline) {
return <Code>{children}</Code>; return <Code>{children}</Code>;
@@ -74,6 +75,7 @@ export function MarkdownMessage({ content }: MarkdownMessageProps) {
> >
{content} {content}
</ReactMarkdown> </ReactMarkdown>
{isStreaming && <span className={classes.streamingCursor} />}
</div> </div>
); );
} }