This commit is contained in:
Zacharias-Brohn
2026-01-14 18:48:47 +01:00
parent 8eade83b5d
commit e479c24484
8 changed files with 9323 additions and 13046 deletions
+134
View File
@@ -0,0 +1,134 @@
'use server';
import ollama, { Tool } from 'ollama';
// Shape of a single message exchanged with the Ollama chat API.
export interface ChatMessage {
  // 'tool' messages carry the result of an executed tool call back to the model.
  role: 'user' | 'assistant' | 'system' | 'tool';
  content: string;
  tool_calls?: any[]; // Ollama tool calls emitted by the assistant (shape defined by the ollama package)
  images?: string[]; // images for multimodal models — presumably base64-encoded; TODO confirm against ollama docs
}
// --- Tool Definitions ---
// JSON-schema-style declarations advertised to the model. Each entry's
// function.name must have a matching implementation in `availableTools`.
const tools: Tool[] = [
  {
    type: 'function',
    function: {
      name: 'get_current_time',
      description: 'Get the current time',
      parameters: {
        // No arguments: the model calls this with an empty object.
        type: 'object',
        properties: {},
        required: [],
      },
    },
  },
  {
    type: 'function',
    function: {
      name: 'calculate',
      description: 'Perform a mathematical calculation',
      parameters: {
        type: 'object',
        properties: {
          expression: {
            type: 'string',
            description: 'The mathematical expression to evaluate (e.g., "2 + 2" or "15 * 7")',
          },
        },
        required: ['expression'],
      },
    },
  },
];
// --- Tool Implementations ---
// Maps a tool name (as declared in `tools`) to its executable implementation.
// Each implementation returns a string that is sent back to the model as the
// tool-result message content.
const availableTools: Record<string, (args: any) => string> = {
  get_current_time: () => {
    return new Date().toLocaleTimeString();
  },
  calculate: ({ expression }: { expression: string }) => {
    try {
      // Security: the expression comes from model output and must never reach
      // eval(). Evaluate it with a small arithmetic-only parser instead.
      return String(evaluateArithmetic(expression));
    } catch {
      return 'Error evaluating expression';
    }
  },
};

/**
 * Safely evaluates a basic arithmetic expression supporting +, -, *, /,
 * parentheses, decimal numbers, and unary minus.
 *
 * @param input - The expression to evaluate, e.g. "2 + 2" or "(1 + 2) * -3".
 * @returns The numeric result.
 * @throws Error on any unexpected character or malformed input.
 */
function evaluateArithmetic(input: string): number {
  const src = input.replace(/\s+/g, '');
  if (src.length === 0) {
    throw new Error('empty expression');
  }
  let pos = 0;

  // expr := term (('+' | '-') term)*
  const parseExpr = (): number => {
    let value = parseTerm();
    while (src[pos] === '+' || src[pos] === '-') {
      const op = src[pos++];
      const rhs = parseTerm();
      value = op === '+' ? value + rhs : value - rhs;
    }
    return value;
  };

  // term := factor (('*' | '/') factor)*
  const parseTerm = (): number => {
    let value = parseFactor();
    while (src[pos] === '*' || src[pos] === '/') {
      const op = src[pos++];
      const rhs = parseFactor();
      value = op === '*' ? value * rhs : value / rhs;
    }
    return value;
  };

  // factor := '-' factor | '(' expr ')' | number
  const parseFactor = (): number => {
    if (src[pos] === '-') {
      pos++;
      return -parseFactor();
    }
    if (src[pos] === '(') {
      pos++;
      const value = parseExpr();
      if (src[pos++] !== ')') {
        throw new Error('missing closing parenthesis');
      }
      return value;
    }
    const match = /^\d+(\.\d+)?/.exec(src.slice(pos));
    if (!match) {
      throw new Error(`unexpected character at position ${pos}`);
    }
    pos += match[0].length;
    return parseFloat(match[0]);
  };

  const result = parseExpr();
  if (pos !== src.length) {
    throw new Error(`unexpected trailing input at position ${pos}`);
  }
  return result;
}
export async function chat(model: string, messages: ChatMessage[]) {
try {
// 1. Initial Call
let response;
try {
response = await ollama.chat({
model: model,
messages: messages,
tools: tools,
});
} catch (e: any) {
// Fallback: If model doesn't support tools, retry without them
if (e.message?.includes('does not support tools')) {
console.warn(`Model ${model} does not support tools. Falling back to standard chat.`);
response = await ollama.chat({
model: model,
messages: messages,
});
} else {
throw e;
}
}
// 2. Loop to handle tool calls (Ollama might chain multiple calls)
// We limit recursion to avoid infinite loops
let maxTurns = 5;
while (response.message.tool_calls && response.message.tool_calls.length > 0 && maxTurns > 0) {
maxTurns--;
// Append the assistant's message (which contains the tool calls) to history
messages.push(response.message as ChatMessage);
// Execute each tool call
for (const tool of response.message.tool_calls) {
const functionName = tool.function.name;
const functionToCall = availableTools[functionName];
if (functionToCall) {
console.log(`🤖 Tool Call: ${functionName}`, tool.function.arguments);
const functionArgs = tool.function.arguments;
const functionResponse = functionToCall(functionArgs);
// Append the tool result to history
messages.push({
role: 'tool',
content: functionResponse,
});
}
}
// 3. Send the tool results back to the model to get the final answer
response = await ollama.chat({
model: model,
messages: messages,
tools: tools,
});
}
return {
success: true,
message: response.message,
};
} catch (error: any) {
console.error('Chat error:', error);
return {
success: false,
error: error.message || 'Failed to generate response',
};
}
}
+51
View File
@@ -0,0 +1,51 @@
'use server';
import ollama from 'ollama';
// Metadata for a locally installed Ollama model, as returned by `ollama.list()`.
export interface OllamaModel {
  name: string;
  size: number; // on-disk size in bytes — presumably; confirm against ollama API docs
  digest: string; // content hash uniquely identifying this model build
  details: {
    format: string;
    family: string;
    families: string[];
    parameter_size: string; // e.g. "7B" — human-readable, not numeric
    quantization_level: string; // e.g. "Q4_0"
  };
}
/**
 * Lists the models installed in the local Ollama instance.
 *
 * @returns The installed models, or an empty array when the Ollama daemon
 *          cannot be reached (the error is logged, not thrown).
 */
export async function getInstalledModels(): Promise<OllamaModel[]> {
  try {
    const { models } = await ollama.list();
    return models as OllamaModel[];
  } catch (error) {
    console.error('Error fetching models:', error);
    return [];
  }
}
/**
 * Downloads a model into the local Ollama instance.
 *
 * NOTE: this awaits the entire pull. Large models may exceed the default
 * Next.js Server Action timeout; streaming progress would be more robust.
 *
 * @param modelName - Name of the model to pull (e.g. "llama3").
 * @returns A success flag plus a human-readable status message.
 */
export async function pullModel(modelName: string): Promise<{ success: boolean; message: string }> {
  try {
    await ollama.pull({ model: modelName });
  } catch (error: any) {
    console.error('Error pulling model:', error);
    return { success: false, message: error.message || 'Failed to pull model' };
  }
  return { success: true, message: `Successfully pulled ${modelName}` };
}
/**
 * Removes a model from the local Ollama instance.
 *
 * @param modelName - Name of the installed model to delete.
 * @returns A success flag plus a human-readable status message.
 */
export async function deleteModel(
  modelName: string
): Promise<{ success: boolean; message: string }> {
  try {
    await ollama.delete({ model: modelName });
  } catch (error: any) {
    console.error('Error deleting model:', error);
    return { success: false, message: error.message || 'Failed to delete model' };
  }
  return { success: true, message: `Successfully deleted ${modelName}` };
}
+55 -45
View File
@@ -20,6 +20,7 @@ import {
Paper,
rem,
ScrollArea,
Select,
Stack,
Text,
TextInput,
@@ -29,6 +30,8 @@ import {
useMantineTheme,
} from '@mantine/core';
import { useDisclosure } from '@mantine/hooks';
import { chat, type ChatMessage } from '@/app/actions/chat';
import { getInstalledModels, type OllamaModel } from '@/app/actions/ollama';
import { useThemeContext } from '@/components/DynamicThemeProvider';
import { SettingsModal } from '@/components/Settings/SettingsModal';
@@ -66,10 +69,25 @@ export default function ChatLayout() {
const [isInputFocused, setIsInputFocused] = useState(false);
const [isLoadingChats, setIsLoadingChats] = useState(false);
// Fetch chats on load
// Model State
const [models, setModels] = useState<OllamaModel[]>([]);
const [selectedModel, setSelectedModel] = useState<string | null>(null);
const [isGenerating, setIsGenerating] = useState(false);
// Fetch chats and models on load
useEffect(() => {
fetchChats();
}, [settingsOpened]); // Refresh when settings close (might have logged in/out)
fetchModels();
}, [settingsOpened]);
const fetchModels = async () => {
const list = await getInstalledModels();
setModels(list);
// Select first model if none selected and list not empty
if (!selectedModel && list.length > 0) {
setSelectedModel(list[0].name);
}
};
const fetchChats = async () => {
setIsLoadingChats(true);
@@ -98,7 +116,6 @@ export default function ChatLayout() {
if (chat.messages) {
setMessages(chat.messages);
} else {
// In a real app we might fetch full messages here if not included in list
setMessages([]);
}
if (mobileOpened) {
@@ -121,9 +138,7 @@ export default function ChatLayout() {
};
const handleSendMessage = async () => {
if (!inputValue.trim()) {
return;
}
if (!inputValue.trim() || !selectedModel) return;
const userMessage: Message = {
id: Date.now().toString(),
@@ -135,54 +150,38 @@ export default function ChatLayout() {
const newMessages = [...messages, userMessage];
setMessages(newMessages);
setInputValue('');
setIsGenerating(true);
try {
// Save to backend
const res = await fetch('/api/chats', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
messages: [userMessage],
chatId: activeChatId,
}),
});
// Convert to format expected by server action
const chatHistory: ChatMessage[] = newMessages.map((m) => ({
role: m.role as 'user' | 'assistant',
content: m.content,
}));
if (res.ok) {
const data = await res.json();
if (data.chatId && data.chatId !== activeChatId) {
setActiveChatId(data.chatId);
fetchChats(); // Refresh list to show new chat
}
// Call Ollama via Server Action
const result = await chat(selectedModel, chatHistory);
// Simulate AI response
setTimeout(async () => {
if (result.success && result.message) {
const responseMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content:
'I am a simulated AI response. I do not have a backend yet. I just repeat that I am simulated.',
content: result.message.content,
};
const updatedMessages = [...newMessages, responseMessage];
setMessages(updatedMessages);
// Save AI response to backend
try {
await fetch('/api/chats', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
messages: [responseMessage],
chatId: data.chatId || activeChatId,
}),
});
} catch (e) {
console.error(e);
}
}, 1000);
setMessages([...newMessages, responseMessage]);
} else {
// Error handling
const errorMessage: Message = {
id: (Date.now() + 1).toString(),
role: 'assistant',
content: `Error: ${result.error}`,
};
setMessages([...newMessages, errorMessage]);
}
} catch (e) {
console.error('Failed to save message', e);
console.error('Failed to send message', e);
} finally {
setIsGenerating(false);
}
};
@@ -213,7 +212,18 @@ export default function ChatLayout() {
</ActionIcon>
</Tooltip>
<IconRobot size={28} stroke={1.5} color={theme.colors[primaryColor][6]} />
<Title order={3}>AI Chat</Title>
<Title order={3} mr="md">
AI Chat
</Title>
<Select
placeholder="Select Model"
data={models.map((m) => ({ value: m.name, label: m.name }))}
value={selectedModel}
onChange={setSelectedModel}
searchable
size="xs"
style={{ width: 200 }}
/>
</Group>
<ActionIcon variant="subtle" color="gray" onClick={openSettings}>
<IconSettings size={20} />
+238 -3
View File
@@ -1,22 +1,53 @@
import { useEffect, useState } from 'react';
import { IconAlertCircle, IconPalette, IconUser, IconX } from '@tabler/icons-react';
import {
IconAlertCircle,
IconDownload,
IconPalette,
IconRobot,
IconTrash,
IconUser,
IconX,
} from '@tabler/icons-react';
import {
ActionIcon,
Alert,
Badge,
Button,
Card,
ColorSwatch,
Combobox,
Divider,
Group,
Input,
InputBase,
Loader,
Modal,
NavLink,
PasswordInput,
rem,
ScrollArea,
Stack,
Text,
TextInput,
Title,
useCombobox,
useMantineTheme,
} from '@mantine/core';
import { deleteModel, getInstalledModels, pullModel, type OllamaModel } from '@/app/actions/ollama';
const POPULAR_MODELS = [
'llama3.2',
'llama3.1',
'mistral',
'gemma2',
'qwen2.5',
'phi3.5',
'neural-chat',
'starling-lm',
'codellama',
'deepseek-coder',
'llava',
];
interface User {
id: string;
@@ -37,7 +68,7 @@ export function SettingsModal({
setPrimaryColor,
}: SettingsModalProps) {
const theme = useMantineTheme();
const [activeTab, setActiveTab] = useState<'appearance' | 'account'>('appearance');
const [activeTab, setActiveTab] = useState<'appearance' | 'account' | 'models'>('appearance');
// Account State
const [user, setUser] = useState<User | null>(null);
@@ -47,12 +78,45 @@ export function SettingsModal({
const [error, setError] = useState('');
const [loading, setLoading] = useState(false);
// Models State
const [models, setModels] = useState<OllamaModel[]>([]);
const [loadingModels, setLoadingModels] = useState(false);
const [pullingModel, setPullingModel] = useState<string | null>(null);
const [newModelName, setNewModelName] = useState('');
// Combobox State
const [search, setSearch] = useState('');
const combobox = useCombobox({
onDropdownClose: () => {
combobox.resetSelectedOption();
combobox.focusTarget();
setSearch('');
},
onDropdownOpen: () => {
combobox.focusSearchInput();
},
});
const [value, setValue] = useState<string | null>(null);
// Filter installed models based on search
const options = models
.filter((item) => item.name.toLowerCase().includes(search.toLowerCase().trim()))
.map((item) => (
<Combobox.Option value={item.name} key={item.digest}>
{item.name}
</Combobox.Option>
));
// Check login status on mount
useEffect(() => {
if (opened) {
fetchUser();
if (activeTab === 'models') {
fetchModels();
}
}, [opened]);
}
}, [opened, activeTab]);
const fetchUser = async () => {
try {
@@ -68,6 +132,46 @@ export function SettingsModal({
}
};
const fetchModels = async () => {
setLoadingModels(true);
try {
const list = await getInstalledModels();
setModels(list);
} catch (e) {
console.error(e);
} finally {
setLoadingModels(false);
}
};
const handlePullModel = async () => {
if (!newModelName) return;
setPullingModel(newModelName);
try {
const result = await pullModel(newModelName);
if (result.success) {
setNewModelName('');
await fetchModels();
} else {
setError(result.message);
}
} catch (e) {
console.error(e);
} finally {
setPullingModel(null);
}
};
const handleDeleteModel = async (name: string) => {
if (!confirm(`Are you sure you want to delete ${name}?`)) return;
try {
await deleteModel(name);
await fetchModels();
} catch (e) {
console.error(e);
}
};
const handleAuth = async () => {
setError('');
setLoading(true);
@@ -135,6 +239,15 @@ export function SettingsModal({
onClick={() => setActiveTab('appearance')}
style={{ borderRadius: 'var(--mantine-radius-lg)' }}
/>
<NavLink
active={activeTab === 'models'}
label="Models"
leftSection={<IconRobot size={18} stroke={1.5} />}
variant="light"
color={primaryColor}
onClick={() => setActiveTab('models')}
style={{ borderRadius: 'var(--mantine-radius-lg)' }}
/>
<NavLink
active={activeTab === 'account'}
label="Account"
@@ -189,6 +302,128 @@ export function SettingsModal({
</>
)}
{activeTab === 'models' && (
<>
<Title order={4}>Models</Title>
<Text size="sm" c="dimmed">
Manage your local AI models via Ollama.
</Text>
<Divider my="sm" />
<Group align="flex-end">
<Combobox
store={combobox}
withinPortal={false}
onOptionSubmit={(val) => {
setNewModelName(val);
combobox.closeDropdown();
// Selecting an option only fills the field; the user confirms
// the download explicitly with the Pull button.
}}
>
<Combobox.Target>
<InputBase
component="button"
type="button"
pointer
rightSection={<Combobox.Chevron />}
onClick={() => combobox.toggleDropdown()}
rightSectionPointerEvents="none"
label="Download Model"
description="Select an installed model to update, or type a new model name (e.g. llama3)"
style={{ flex: 1 }}
>
{newModelName || (
<Input.Placeholder>Pick or type model name</Input.Placeholder>
)}
</InputBase>
</Combobox.Target>
<Combobox.Dropdown>
<Combobox.Search
value={search}
onChange={(event) => {
setSearch(event.currentTarget.value);
setNewModelName(event.currentTarget.value); // Allow typing new names
}}
placeholder="Search installed models or type new one"
/>
<Combobox.Options>
{options.length > 0 ? (
options
) : (
<Combobox.Empty>No matching installed models</Combobox.Empty>
)}
</Combobox.Options>
</Combobox.Dropdown>
</Combobox>
<Button
onClick={handlePullModel}
loading={!!pullingModel}
leftSection={<IconDownload size={16} />}
color={primaryColor}
>
Pull
</Button>
</Group>
{pullingModel && (
<Alert icon={<Loader size={16} />} title="Downloading..." color="blue" mt="md">
Pulling {pullingModel}. This may take a while depending on your connection.
</Alert>
)}
<Text size="sm" fw={500} mt="xl" mb="xs">
Installed Models
</Text>
{loadingModels ? (
<Group justify="center" py="xl">
<Loader size="sm" />
</Group>
) : models.length === 0 ? (
<Text c="dimmed" size="sm" ta="center" py="xl">
No models found. Try pulling one!
</Text>
) : (
<ScrollArea h={300} offsetScrollbars>
<Stack gap="xs">
{models.map((model) => (
<Card key={model.digest} withBorder padding="sm" radius="md">
<Group justify="space-between">
<div>
<Text fw={500} size="sm">
{model.name}
</Text>
<Group gap="xs">
<Badge size="xs" variant="light" color="gray">
{(model.size / 1024 / 1024 / 1024).toFixed(2)} GB
</Badge>
<Badge size="xs" variant="light" color="blue">
{model.details.parameter_size}
</Badge>
<Badge size="xs" variant="light" color="orange">
{model.details.quantization_level}
</Badge>
</Group>
</div>
<ActionIcon
color="red"
variant="subtle"
onClick={() => handleDeleteModel(model.name)}
>
<IconTrash size={16} />
</ActionIcon>
</Group>
</Card>
))}
</Stack>
</ScrollArea>
)}
</>
)}
{activeTab === 'account' && (
<>
<Title order={4}>Account</Title>
+16 -34
View File
@@ -16,6 +16,7 @@
"bcryptjs": "^3.0.3",
"jose": "^6.1.3",
"next": "16.1.1",
"ollama": "^0.6.3",
"prisma": "^5.10.2",
"react": "19.2.3",
"react-dom": "19.2.3"
@@ -116,7 +117,6 @@
"integrity": "sha512-H3mcG6ZDLTlYfaSNi0iOKkigqMFvkTKlGUYlD8GW7nNOYRrevuA46iTypPyv+06V3fEmvvazfntkBU34L0azAw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.28.6",
"@babel/generator": "^7.28.6",
@@ -2039,7 +2039,6 @@
"integrity": "sha512-FA5LmZVF1VziNc0bIdCSA1IoSVnDCqE8HJIZZv2/W8YmoAM50+tnUgJR/gQZwEeIMleuIOnRnHA/UaZRNeV4iQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@keyv/serialize": "^1.1.1"
}
@@ -2153,7 +2152,6 @@
}
],
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
},
@@ -2197,7 +2195,6 @@
}
],
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
}
@@ -2857,7 +2854,6 @@
"integrity": "sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -4140,7 +4136,6 @@
"resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-8.3.12.tgz",
"integrity": "sha512-lMMDzDewd3lUNtJCAHDj3g8On9X5aBl4q6EBwgOixKQSby9RG9ASEpK8oYHundHTm9tzo3MDeXWV/z32oSQWuw==",
"license": "MIT",
"peer": true,
"peerDependencies": {
"react": "^18.x || ^19.x"
}
@@ -5102,7 +5097,6 @@
"integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.10.4",
"@babel/runtime": "^7.12.5",
@@ -5442,7 +5436,6 @@
"integrity": "sha512-3MbSL37jEchWZz2p2mjntRZtPt837ij10ApxKfgmXCTuHWagYg7iA5bqPw6C8BMPfwidlvfPI/fxOc42HLhcyg==",
"devOptional": true,
"license": "MIT",
"peer": true,
"dependencies": {
"csstype": "^3.2.2"
}
@@ -5537,7 +5530,6 @@
"integrity": "sha512-npiaib8XzbjtzS2N4HlqPvlpxpmZ14FjSJrteZpPxGUaYPlvhzlzUZ4mZyABo0EFrOWnvyd0Xxroq//hKhtAWg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "8.53.0",
"@typescript-eslint/types": "8.53.0",
@@ -6316,7 +6308,6 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"license": "MIT",
"peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -6404,7 +6395,6 @@
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -7281,7 +7271,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"baseline-browser-mapping": "^2.9.0",
"caniuse-lite": "^1.0.30001759",
@@ -8978,7 +8967,6 @@
"integrity": "sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -9178,7 +9166,6 @@
"integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@rtsao/scc": "^1.1.0",
"array-includes": "^3.1.9",
@@ -9259,7 +9246,6 @@
"integrity": "sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"aria-query": "^5.3.2",
"array-includes": "^3.1.8",
@@ -9300,7 +9286,6 @@
"integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"array-includes": "^3.1.8",
"array.prototype.findlast": "^1.2.5",
@@ -11577,7 +11562,6 @@
"integrity": "sha512-F26gjC0yWN8uAA5m5Ss8ZQf5nDHWGlN/xWZIh8S5SRbsEKBovwZhxGd6LJlbZYxBgCYOtreSUyb8hpXyGC5O4A==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@jest/core": "30.2.0",
"@jest/types": "30.2.0",
@@ -12585,7 +12569,6 @@
"integrity": "sha512-Cvc9WUhxSMEo4McES3P7oK3QaXldCfNWp7pl2NNeiIFlCoLr3kfq9kb1fxftiwk1FLV7CvpvDfonxtzUDeSOPg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"cssstyle": "^4.2.1",
"data-urls": "^5.0.0",
@@ -13600,6 +13583,15 @@
"dev": true,
"license": "ISC"
},
"node_modules/ollama": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/ollama/-/ollama-0.6.3.tgz",
"integrity": "sha512-KEWEhIqE5wtfzEIZbDCLH51VFZ6Z3ZSa6sIOg/E/tBV8S51flyqBOXi+bRxlOYKDf8i327zG9eSTb8IJxvm3Zg==",
"license": "MIT",
"dependencies": {
"whatwg-fetch": "^3.6.20"
}
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -14071,7 +14063,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -14385,7 +14376,6 @@
"integrity": "sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"cssesc": "^3.0.0",
"util-deprecate": "^1.0.2"
@@ -14434,7 +14424,6 @@
"integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==",
"dev": true,
"license": "MIT",
"peer": true,
"bin": {
"prettier": "bin/prettier.cjs"
},
@@ -14490,7 +14479,6 @@
"integrity": "sha512-hqb/JMz9/kymRE25pMWCxkdyhbnIWrq+h7S6WysJpdnCvhstbJSNP/S6mScEcqiB8Qv2F+0R3yG+osRaWqZacQ==",
"hasInstallScript": true,
"license": "Apache-2.0",
"peer": true,
"dependencies": {
"@prisma/engines": "5.10.2"
},
@@ -14681,7 +14669,6 @@
"resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz",
"integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -14736,7 +14723,6 @@
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz",
"integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==",
"license": "MIT",
"peer": true,
"dependencies": {
"scheduler": "^0.27.0"
},
@@ -14767,7 +14753,6 @@
"integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -15562,7 +15547,6 @@
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
@@ -16007,7 +15991,6 @@
"integrity": "sha512-pKP5jXJYM4OjvNklGuHKO53wOCAwfx79KvZyOWHoi9zXUH5WVMFUe/ZfWyxXG/GTcj0maRgHGUjq/0I43r0dDQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@storybook/global": "^5.0.0",
"@storybook/icons": "^2.0.0",
@@ -16455,7 +16438,6 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"@csstools/css-parser-algorithms": "^3.0.5",
"@csstools/css-syntax-patches-for-csstree": "^1.0.19",
@@ -17173,7 +17155,6 @@
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -17553,7 +17534,6 @@
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -17568,7 +17548,6 @@
"integrity": "sha512-xHURCQNxZ1dsWn0sdOaOfCSQG0HKeqSj9OexIxrz6ypU6wHYOdX2I3D2b8s8wFSsSOYJb+6q283cLiLlkEsBYw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.53.0",
"@typescript-eslint/parser": "8.53.0",
@@ -17979,7 +17958,6 @@
"integrity": "sha512-Qphch25abbMNtekmEGJmeRUhLDbe+QfiWTiqpKYkpCOWY64v9eyl+KRRLmqOFA2AvKPpc9DC6+u2n76tQLBoaA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@types/eslint-scope": "^3.7.7",
"@types/estree": "^1.0.8",
@@ -18115,7 +18093,6 @@
"integrity": "sha512-khZGfAeJx6I8K9zKohEWWYN6KDlVw2DHownoe+6Vtwj1LP9WFgegXnVMSkZ/dBEBtXFwrkkydsaPFlB7f8wU2A==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"ansi-html-community": "0.0.8",
"html-entities": "^2.1.0",
@@ -18197,6 +18174,12 @@
"node": ">=18"
}
},
"node_modules/whatwg-fetch": {
"version": "3.6.20",
"resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz",
"integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==",
"license": "MIT"
},
"node_modules/whatwg-mimetype": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
@@ -18614,7 +18597,6 @@
"integrity": "sha512-k7Nwx6vuWx1IJ9Bjuf4Zt1PEllcwe7cls3VNzm4CQ1/hgtFUK2bRNG3rvnpPUhFjmqJKAKtjV576KnUkHocg/g==",
"dev": true,
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}
+1
View File
@@ -28,6 +28,7 @@
"bcryptjs": "^3.0.3",
"jose": "^6.1.3",
"next": "16.1.1",
"ollama": "^0.6.3",
"prisma": "^5.10.2",
"react": "19.2.3",
"react-dom": "19.2.3"
-15
View File
@@ -1,15 +0,0 @@
// This file was generated by Prisma, and assumes you have installed the following:
// npm install --save-dev prisma dotenv
import 'dotenv/config';
import { defineConfig } from 'prisma/config';
export default defineConfig({
schema: 'prisma/schema.prisma',
migrations: {
path: 'prisma/migrations',
},
datasource: {
url: 'file:./dev.db',
},
});
+8824 -12945
View File
File diff suppressed because it is too large Load Diff