// NOTE(review): this file appears to have been mangled by an HTML/angle-bracket
// stripping pass: all original newlines were collapsed, JSX markup and generic
// type arguments (e.g. useState<Chat[]>) were removed, and two string literals
// now contain raw line breaks. The code tokens below are preserved exactly as
// found; each destroyed span is flagged inline. Restore the stripped markup
// from source control — do not attempt to reconstruct it from this text.

'use client';

import { useEffect, useState } from 'react';
import {
  IconLayoutSidebar,
  IconMessage,
  IconPlus,
  IconRobot,
  IconSend,
  IconSettings,
  IconUser,
} from '@tabler/icons-react';
import {
  ActionIcon,
  AppShell,
  Avatar,
  Burger,
  Container,
  Group,
  Paper,
  rem,
  ScrollArea,
  Select,
  Stack,
  Text,
  TextInput,
  TextInputProps,
  Title,
  Tooltip,
  UnstyledButton,
  useMantineTheme,
} from '@mantine/core';
import { useDisclosure } from '@mantine/hooks';
import { chat, type ChatMessage } from '@/app/actions/chat';
import { getInstalledModels, type OllamaModel } from '@/app/actions/ollama';
import { useThemeContext } from '@/components/DynamicThemeProvider';
import { SettingsModal } from '@/components/Settings/SettingsModal';

// One message in the rendered transcript. `id` is a Date.now()-derived string
// (see handleSendMessage), not a server identifier.
interface Message {
  id: string;
  role: 'user' | 'assistant';
  content: string;
}

// A conversation as returned by GET /api/chats. `messages` is optional —
// presumably only populated when a chat is fetched in full; handleSelectChat
// falls back to an empty transcript when it is absent.
// TODO(review): confirm the shape against the /api/chats route handler.
interface Chat {
  id: string;
  title: string;
  updatedAt: string;
  messages?: Message[];
}

// Text input that forwards TextInputProps to a Mantine TextInput.
// Judging by the imports (IconSend, ActionIcon) it originally rendered a
// send button in the input's right section — the markup is gone (see below).
export function InputWithButton(props: TextInputProps) {
  const theme = useMantineTheme();
  // NOTE(review): the JSX returned here was destroyed by the tag-stripping
  // pass — only a trailing fragment of the original markup survives. This is
  // not valid syntax as written; restore the element from source control.
  return ( } {...props} /> );
}

// Top-level chat page: sidebar of past chats, a message transcript, and an
// input box that sends the history to an Ollama model via a Server Action.
export default function ChatLayout() {
  // Sidebar visibility per breakpoint, plus the settings modal.
  const [mobileOpened, { toggle: toggleMobile }] = useDisclosure();
  const [desktopOpened, { toggle: toggleDesktop }] = useDisclosure(true);
  const [settingsOpened, { open: openSettings, close: closeSettings }] = useDisclosure(false);
  const { primaryColor, setPrimaryColor } = useThemeContext();
  const theme = useMantineTheme();

  // State
  // NOTE(review): generic arguments were stripped here — these were almost
  // certainly useState<Chat[]>([]), useState<string | null>(null), and
  // useState<Message[]>([...]); as written they infer never[] / null under
  // strict mode. Restore from source control.
  const [chats, setChats] = useState([]);
  const [activeChatId, setActiveChatId] = useState(null);
  const [messages, setMessages] = useState([
    {
      id: '1',
      role: 'assistant',
      // NOTE(review): this single-quoted literal contains a raw line break
      // (a collapse artifact — unterminated string as written). The original
      // was presumably one line; preserved byte-for-byte here.
      content: 'Hello! I am an AI assistant. 
How can I help you today?',
    },
  ]);
  const [inputValue, setInputValue] = useState('');
  const [isInputFocused, setIsInputFocused] = useState(false);
  const [isLoadingChats, setIsLoadingChats] = useState(false);

  // Model State
  // NOTE(review): generics stripped — likely useState<OllamaModel[]>([]) and
  // useState<string | null>(null), given the OllamaModel import and the
  // null-check in fetchModels.
  const [models, setModels] = useState([]);
  const [selectedModel, setSelectedModel] = useState(null);
  const [isGenerating, setIsGenerating] = useState(false);

  // Fetch chats and models on load. The [settingsOpened] dependency means
  // both lists are re-fetched every time the settings modal opens OR closes —
  // presumably intentional, so changes made in settings are picked up.
  // TODO(review): confirm the open-transition refetch is wanted; closing-only
  // would halve the requests.
  useEffect(() => {
    fetchChats();
    fetchModels();
  }, [settingsOpened]);

  // Load installed Ollama models via the server action and default the
  // selection to the first model when nothing is selected yet.
  const fetchModels = async () => {
    const list = await getInstalledModels();
    setModels(list);
    // Select first model if none selected and list not empty
    if (!selectedModel && list.length > 0) {
      setSelectedModel(list[0].name);
    }
  };

  // Load the chat list from the REST endpoint. Any failure (non-OK status,
  // non-array payload, or network error) resets the list to [] rather than
  // surfacing an error to the user; the spinner flag is always cleared.
  const fetchChats = async () => {
    setIsLoadingChats(true);
    try {
      const res = await fetch('/api/chats');
      if (res.ok) {
        const data = await res.json();
        if (Array.isArray(data)) {
          setChats(data);
        } else {
          setChats([]);
        }
      } else {
        setChats([]);
      }
    } catch (e) {
      console.error('Failed to fetch chats', e);
      setChats([]);
    } finally {
      setIsLoadingChats(false);
    }
  };

  // Switch the transcript to an existing chat. Chats without an embedded
  // `messages` array show an empty transcript. On mobile, the sidebar is
  // closed after selection.
  const handleSelectChat = (chat: Chat) => {
    setActiveChatId(chat.id);
    if (chat.messages) {
      setMessages(chat.messages);
    } else {
      setMessages([]);
    }
    if (mobileOpened) {
      toggleMobile();
    }
  };

  // Start a fresh chat: clear the active id and seed the transcript with the
  // greeting. No server call is made here — the chat is presumably persisted
  // lazily when the first message is sent (TODO(review): confirm).
  const handleNewChat = () => {
    setActiveChatId(null);
    setMessages([
      {
        id: Date.now().toString(),
        role: 'assistant',
        // NOTE(review): same raw-line-break collapse artifact as the initial
        // greeting above; preserved byte-for-byte.
        content: 'Hello! I am an AI assistant. 
How can I help you today?',
      },
    ]);
    if (mobileOpened) {
      toggleMobile();
    }
  };

  // Send the current input plus full history to the selected model.
  // No-op when the input is blank or no model is selected. On failure the
  // error is shown inline as an assistant message (server-action error) or
  // only logged (thrown exception) — the user message stays in the transcript
  // either way.
  const handleSendMessage = async () => {
    if (!inputValue.trim() || !selectedModel) return;
    const userMessage: Message = {
      id: Date.now().toString(),
      role: 'user',
      content: inputValue,
    };
    // Optimistic update
    const newMessages = [...messages, userMessage];
    setMessages(newMessages);
    setInputValue('');
    setIsGenerating(true);
    try {
      // Convert to format expected by server action
      const chatHistory: ChatMessage[] = newMessages.map((m) => ({
        role: m.role as 'user' | 'assistant',
        content: m.content,
      }));
      // Call Ollama via Server Action
      const result = await chat(selectedModel, chatHistory);
      if (result.success && result.message) {
        const responseMessage: Message = {
          // +1 guarantees a distinct id from the user message created above
          id: (Date.now() + 1).toString(),
          role: 'assistant',
          content: result.message.content,
        };
        setMessages([...newMessages, responseMessage]);
      } else {
        // Error handling
        const errorMessage: Message = {
          id: (Date.now() + 1).toString(),
          role: 'assistant',
          content: `Error: ${result.error}`,
        };
        setMessages([...newMessages, errorMessage]);
      }
    } catch (e) {
      console.error('Failed to send message', e);
    } finally {
      setIsGenerating(false);
    }
  };

  // Submit on Enter. NOTE(review): fires on every Enter press — no Shift+Enter
  // newline escape and no isGenerating guard here; handleSendMessage's own
  // input/model checks are the only gate. Confirm this is intended.
  const handleKeyDown = (event: React.KeyboardEvent) => {
    if (event.key === 'Enter') {
      handleSendMessage();
    }
  };

  // NOTE(review): the component's JSX return was destroyed by the
  // tag-stripping pass — only the opening fragment and the "AI Chat" text
  // survive, and the markup is truncated at the end of this chunk. The imports
  // (AppShell, Burger, ScrollArea, Select, SettingsModal, …) indicate the
  // original shell layout; restore it from source control.
  return ( <> AI Chat