This commit is contained in:
Zacharias-Brohn
2026-01-14 20:54:15 +01:00
parent d6d2d78c2e
commit cadab7aaef
4 changed files with 177 additions and 59 deletions
+53
View File
@@ -0,0 +1,53 @@
import { NextRequest } from 'next/server';
import ollama from '@/lib/ollama';
/**
 * POST /api/chat — streams a chat completion from Ollama as plain text.
 *
 * Expects a JSON body of `{ model: string, messages: {role, content}[] }`
 * and returns the assistant's reply as a chunked `text/plain` stream,
 * forwarding each content fragment exactly as the Ollama client yields it.
 *
 * Responses:
 * - 200: streaming body of concatenated message content
 * - 400: missing model or missing/empty messages array
 * - 500: Ollama call failed before streaming started
 */
export async function POST(request: NextRequest) {
  try {
    const { model, messages } = await request.json();

    // Validate up front. Array.isArray also rejects a single message object
    // sent where a list is expected, and an empty history is never useful.
    if (!model || !Array.isArray(messages) || messages.length === 0) {
      return new Response(JSON.stringify({ error: 'Model and messages are required' }), {
        status: 400,
        headers: { 'Content-Type': 'application/json' },
      });
    }

    const response = await ollama.chat({
      model,
      messages,
      stream: true,
    });

    // Bridge Ollama's async iterator to a web ReadableStream so it can be
    // returned directly as the Response body.
    const stream = new ReadableStream({
      async start(controller) {
        const encoder = new TextEncoder();
        try {
          for await (const chunk of response) {
            const text = chunk.message?.content || '';
            if (text) {
              controller.enqueue(encoder.encode(text));
            }
          }
          controller.close();
        } catch (error) {
          // Surface mid-stream generation failures to the consumer.
          controller.error(error);
        }
      },
      cancel() {
        // Client disconnected: stop pulling tokens from Ollama instead of
        // generating into the void. Optional call in case the client
        // library's iterator does not expose abort().
        (response as { abort?: () => void }).abort?.();
      },
    });

    return new Response(stream, {
      headers: {
        'Content-Type': 'text/plain; charset=utf-8',
        'Transfer-Encoding': 'chunked',
      },
    });
  } catch (error: unknown) {
    // `unknown` (not `any`) forces narrowing before we read .message.
    console.error('Chat stream error:', error);
    const message = error instanceof Error ? error.message : 'Failed to stream response';
    return new Response(JSON.stringify({ error: message }), {
      status: 500,
      headers: { 'Content-Type': 'application/json' },
    });
  }
}
+65 -24
View File
@@ -30,7 +30,6 @@ import {
useMantineTheme,
} from '@mantine/core';
import { useDisclosure } from '@mantine/hooks';
import { chat, type ChatMessage } from '@/app/actions/chat';
import { getInstalledModels, type OllamaModel } from '@/app/actions/ollama';
import { useThemeContext } from '@/components/DynamicThemeProvider';
import { SettingsModal } from '@/components/Settings/SettingsModal';
@@ -91,7 +90,8 @@ export default function ChatLayout() {
// Model State
const [models, setModels] = useState<OllamaModel[]>([]);
const [selectedModel, setSelectedModel] = useState<string | null>(null);
const [isGenerating, setIsGenerating] = useState(false);
const [_isGenerating, setIsGenerating] = useState(false);
const [streamingMessageId, setStreamingMessageId] = useState<string | null>(null);
// Fetch chats and models on load
useEffect(() => {
@@ -157,7 +157,9 @@ export default function ChatLayout() {
};
const handleSendMessage = async () => {
if (!inputValue.trim() || !selectedModel) return;
if (!inputValue.trim() || !selectedModel) {
return;
}
const userMessage: Message = {
id: Date.now().toString(),
@@ -165,30 +167,66 @@ export default function ChatLayout() {
content: inputValue,
};
// Optimistic update
// Optimistic update - add user message and empty assistant message for streaming
const assistantMessageId = (Date.now() + 1).toString();
const newMessages = [...messages, userMessage];
setMessages(newMessages);
setMessages([...newMessages, { id: assistantMessageId, role: 'assistant', content: '' }]);
setInputValue('');
setIsGenerating(true);
setStreamingMessageId(assistantMessageId);
try {
// Convert to format expected by server action
const chatHistory: ChatMessage[] = newMessages.map((m) => ({
// Convert to format expected by API
const chatHistory = newMessages.map((m) => ({
role: m.role as 'user' | 'assistant',
content: m.content,
}));
// Call Ollama via Server Action
const result = await chat(selectedModel, chatHistory);
// Call streaming API
const response = await fetch('/api/chat', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: selectedModel,
messages: chatHistory,
}),
});
if (result.success && result.message) {
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.error || 'Failed to get response');
}
// Read the stream
const reader = response.body?.getReader();
if (!reader) {
throw new Error('No response body');
}
const decoder = new TextDecoder();
let fullContent = '';
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
const chunk = decoder.decode(value, { stream: true });
fullContent += chunk;
// Update the assistant message with accumulated content
setMessages((prev) =>
prev.map((m) => (m.id === assistantMessageId ? { ...m, content: fullContent } : m))
);
}
// Create final response message for saving
const responseMessage: Message = {
id: (Date.now() + 1).toString(),
id: assistantMessageId,
role: 'assistant',
content: result.message.content,
content: fullContent,
};
const finalMessages = [...newMessages, responseMessage];
setMessages(finalMessages);
// Save both user message and assistant response to database
try {
@@ -224,19 +262,19 @@ export default function ChatLayout() {
} catch (saveError) {
console.error('Failed to save messages:', saveError);
}
} else {
// Error handling
const errorMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content: `Error: ${result.error}`,
};
setMessages([...newMessages, errorMessage]);
}
} catch (e) {
console.error('Failed to send message', e);
// Update assistant message with error
setMessages((prev) =>
prev.map((m) =>
m.id === assistantMessageId
? { ...m, content: `Error: ${e instanceof Error ? e.message : 'Unknown error'}` }
: m
)
);
} finally {
setIsGenerating(false);
setStreamingMessageId(null);
}
};
@@ -373,7 +411,10 @@ export default function ChatLayout() {
}}
>
{message.role === 'assistant' ? (
<MarkdownMessage content={message.content} />
<MarkdownMessage
content={message.content}
isStreaming={message.id === streamingMessageId}
/>
) : (
<Text size="sm" style={{ lineHeight: 1.6 }}>
{message.content}
@@ -7,6 +7,28 @@
margin-bottom: 0;
}
/* Streaming cursor that blinks at the end */
.streamingCursor {
display: inline-block;
width: 0.5em;
height: 1em;
/* currentColor keeps the cursor the same color as the surrounding text */
background-color: currentColor;
margin-left: 2px;
/* step-end gives a hard on/off blink (terminal style) rather than a fade */
animation: blink 1s step-end infinite;
vertical-align: text-bottom;
opacity: 0.7;
}
/* Blink timing: visible (0.7) at 0%/100%, fully hidden at the midpoint */
@keyframes blink {
0%,
100% {
opacity: 0.7;
}
50% {
opacity: 0;
}
}
/* Vertical spacing around <pre> blocks in rendered markdown */
.preWrapper {
margin: 0.5em 0;
}
+4 -2
View File
@@ -4,9 +4,10 @@ import classes from './MarkdownMessage.module.css';
interface MarkdownMessageProps {
content: string;
isStreaming?: boolean;
}
export function MarkdownMessage({ content }: MarkdownMessageProps) {
export function MarkdownMessage({ content, isStreaming = false }: MarkdownMessageProps) {
return (
<div className={classes.markdown}>
<ReactMarkdown
@@ -31,7 +32,7 @@ export function MarkdownMessage({ content }: MarkdownMessageProps) {
{children}
</Title>
),
code: ({ className, children, ...props }) => {
code: ({ className, children }) => {
const isInline = !className;
if (isInline) {
return <Code>{children}</Code>;
@@ -74,6 +75,7 @@ export function MarkdownMessage({ content }: MarkdownMessageProps) {
>
{content}
</ReactMarkdown>
{isStreaming && <span className={classes.streamingCursor} />}
</div>
);
}