This commit is contained in:
Zacharias-Brohn
2026-01-15 14:22:35 +01:00
parent cf230fbd46
commit 56b64c30e8
4 changed files with 7722 additions and 95 deletions
+253 -90
View File
@@ -1,8 +1,9 @@
import { useEffect, useState } from 'react'; import { useCallback, useEffect, useRef, useState } from 'react';
import { import {
IconAlertCircle, IconAlertCircle,
IconDownload, IconDownload,
IconPalette, IconPalette,
IconRefresh,
IconRobot, IconRobot,
IconTrash, IconTrash,
IconUser, IconUser,
@@ -30,27 +31,18 @@ import {
Text, Text,
TextInput, TextInput,
Title, Title,
Tooltip,
useCombobox, useCombobox,
useMantineTheme, useMantineTheme,
} from '@mantine/core'; } from '@mantine/core';
import { deleteModel, getInstalledModels, pullModel, type OllamaModel } from '@/app/actions/ollama'; import { deleteModel, getInstalledModels, pullModel, type OllamaModel } from '@/app/actions/ollama';
/* // Type for the scraped models JSON
* Popular models list - reserved for future autocomplete feature interface OllamaModelsData {
* const POPULAR_MODELS = [ generatedAt: string;
* 'llama3.2', modelCount: number;
* 'llama3.1', models: Record<string, string[]>;
* 'mistral', }
* 'gemma2',
* 'qwen2.5',
* 'phi3.5',
* 'neural-chat',
* 'starling-lm',
* 'codellama',
* 'deepseek-coder',
* 'llava',
* ];
*/
interface User { interface User {
id: string; id: string;
@@ -65,6 +57,10 @@ interface SettingsModalProps {
setPrimaryColor: (color: string) => void; setPrimaryColor: (color: string) => void;
} }
// Session-level cache for available models (survives modal close/open)
let availableModelsCache: OllamaModelsData | null = null;
let installedModelsCache: OllamaModel[] | null = null;
export function SettingsModal({ export function SettingsModal({
opened, opened,
close, close,
@@ -83,42 +79,123 @@ export function SettingsModal({
const [loading, setLoading] = useState(false); const [loading, setLoading] = useState(false);
// Models State // Models State
const [models, setModels] = useState<OllamaModel[]>([]); const [installedModels, setInstalledModels] = useState<OllamaModel[]>(installedModelsCache || []);
const [loadingModels, setLoadingModels] = useState(false); const [availableModels, setAvailableModels] = useState<OllamaModelsData | null>(
availableModelsCache
);
const [loadingInstalled, setLoadingInstalled] = useState(false);
const [loadingAvailable, setLoadingAvailable] = useState(false);
const [pullingModel, setPullingModel] = useState<string | null>(null); const [pullingModel, setPullingModel] = useState<string | null>(null);
const [newModelName, setNewModelName] = useState(''); const [pullError, setPullError] = useState('');
// Combobox State // Selected model and tag for downloading
const [search, setSearch] = useState(''); const [selectedModel, setSelectedModel] = useState<string>('');
const combobox = useCombobox({ const [selectedTag, setSelectedTag] = useState<string>('');
// Combobox states
const [modelSearch, setModelSearch] = useState('');
const [tagSearch, setTagSearch] = useState('');
const modelCombobox = useCombobox({
onDropdownClose: () => { onDropdownClose: () => {
combobox.resetSelectedOption(); modelCombobox.resetSelectedOption();
combobox.focusTarget(); modelCombobox.focusTarget();
setSearch(''); setModelSearch('');
}, },
onDropdownOpen: () => { onDropdownOpen: () => {
combobox.focusSearchInput(); modelCombobox.focusSearchInput();
}, },
}); });
// Filter installed models based on search const tagCombobox = useCombobox({
const options = models onDropdownClose: () => {
.filter((item) => item.name.toLowerCase().includes(search.toLowerCase().trim())) tagCombobox.resetSelectedOption();
.map((item) => ( tagCombobox.focusTarget();
<Combobox.Option value={item.name} key={item.digest}> setTagSearch('');
{item.name} },
</Combobox.Option> onDropdownOpen: () => {
)); tagCombobox.focusSearchInput();
},
});
// Track if we've fetched this session
const hasFetchedInstalled = useRef(false);
const hasFetchedAvailable = useRef(false);
// Get list of model names for the dropdown
const modelNames = availableModels ? Object.keys(availableModels.models).sort() : [];
// Filter models based on search
const filteredModels = modelNames.filter((name) =>
name.toLowerCase().includes(modelSearch.toLowerCase().trim())
);
// Get tags for the selected model
const availableTags =
selectedModel && availableModels ? availableModels.models[selectedModel] || [] : [];
// Filter tags based on search
const filteredTags = availableTags.filter((tag) =>
tag.toLowerCase().includes(tagSearch.toLowerCase().trim())
);
// Fetch available models from the static JSON
const fetchAvailableModels = useCallback(async (force = false) => {
if (!force && availableModelsCache) {
setAvailableModels(availableModelsCache);
return;
}
setLoadingAvailable(true);
try {
const response = await fetch('/ollama-models.json');
if (response.ok) {
const data: OllamaModelsData = await response.json();
availableModelsCache = data;
setAvailableModels(data);
}
} catch (e) {
console.error('Failed to fetch available models:', e);
} finally {
setLoadingAvailable(false);
}
}, []);
// Fetch installed models from Ollama
const fetchInstalledModels = useCallback(async (force = false) => {
if (!force && installedModelsCache) {
setInstalledModels(installedModelsCache);
return;
}
setLoadingInstalled(true);
try {
const list = await getInstalledModels();
installedModelsCache = list;
setInstalledModels(list);
} catch (e) {
console.error('Failed to fetch installed models:', e);
} finally {
setLoadingInstalled(false);
}
}, []);
// Check login status on mount // Check login status on mount
useEffect(() => { useEffect(() => {
if (opened) { if (opened) {
fetchUser(); fetchUser();
if (activeTab === 'models') { if (activeTab === 'models') {
fetchModels(); if (!hasFetchedInstalled.current) {
fetchInstalledModels();
hasFetchedInstalled.current = true;
}
if (!hasFetchedAvailable.current) {
fetchAvailableModels();
hasFetchedAvailable.current = true;
} }
} }
}, [opened, activeTab]); }
}, [opened, activeTab, fetchInstalledModels, fetchAvailableModels]);
const fetchUser = async () => { const fetchUser = async () => {
try { try {
@@ -130,55 +207,48 @@ export function SettingsModal({
setUser(null); setUser(null);
} }
} catch (e) { } catch (e) {
// eslint-disable-next-line no-console
console.error(e); console.error(e);
} }
}; };
const fetchModels = async () => {
setLoadingModels(true);
try {
const list = await getInstalledModels();
setModels(list);
} catch (e) {
// eslint-disable-next-line no-console
console.error(e);
} finally {
setLoadingModels(false);
}
};
const handlePullModel = async () => { const handlePullModel = async () => {
if (!newModelName) { if (!selectedModel) {
return; return;
} }
setPullingModel(newModelName);
// Build the full model name with tag
const fullModelName = selectedTag ? `${selectedModel}:${selectedTag}` : selectedModel;
setPullingModel(fullModelName);
setPullError('');
try { try {
const result = await pullModel(newModelName); const result = await pullModel(fullModelName);
if (result.success) { if (result.success) {
setNewModelName(''); setSelectedModel('');
await fetchModels(); setSelectedTag('');
// Force refresh installed models
await fetchInstalledModels(true);
} else { } else {
setError(result.message); setPullError(result.message);
} }
} catch (e) { } catch (e) {
// eslint-disable-next-line no-console
console.error(e); console.error(e);
setPullError('An error occurred while pulling the model');
} finally { } finally {
setPullingModel(null); setPullingModel(null);
} }
}; };
const handleDeleteModel = async (name: string) => { const handleDeleteModel = async (name: string) => {
// eslint-disable-next-line no-alert
if (!confirm(`Are you sure you want to delete ${name}?`)) { if (!confirm(`Are you sure you want to delete ${name}?`)) {
return; return;
} }
try { try {
await deleteModel(name); await deleteModel(name);
await fetchModels(); // Force refresh installed models
await fetchInstalledModels(true);
} catch (e) { } catch (e) {
// eslint-disable-next-line no-console
console.error(e); console.error(e);
} }
}; };
@@ -230,7 +300,6 @@ export function SettingsModal({
body: JSON.stringify({ accentColor: color }), body: JSON.stringify({ accentColor: color }),
}); });
} catch (e) { } catch (e) {
// eslint-disable-next-line no-console
console.error('Failed to save accent color:', e); console.error('Failed to save accent color:', e);
} }
} }
@@ -240,6 +309,13 @@ export function SettingsModal({
(color) => color !== 'dark' && color !== 'gray' && color !== 'white' && color !== 'black' (color) => color !== 'dark' && color !== 'gray' && color !== 'white' && color !== 'black'
); );
// When model selection changes, reset tag
const handleModelSelect = (model: string) => {
setSelectedModel(model);
setSelectedTag('');
modelCombobox.closeDropdown();
};
return ( return (
<Modal <Modal
opened={opened} opened={opened}
@@ -336,19 +412,66 @@ export function SettingsModal({
<> <>
<Title order={4}>Models</Title> <Title order={4}>Models</Title>
<Text size="sm" c="dimmed"> <Text size="sm" c="dimmed">
Manage your local AI models via Ollama. Download and manage AI models from the Ollama registry.
</Text> </Text>
<Divider my="sm" /> <Divider my="sm" />
<Group align="flex-end"> {/* Model Selection */}
<Text size="sm" fw={500} mb="xs">
Download New Model
</Text>
<Group align="flex-end" gap="sm">
{/* Model Name Dropdown */}
<Combobox <Combobox
store={combobox} store={modelCombobox}
withinPortal={false}
onOptionSubmit={handleModelSelect}
>
<Combobox.Target>
<InputBase
component="button"
type="button"
pointer
rightSection={loadingAvailable ? <Loader size={14} /> : <Combobox.Chevron />}
onClick={() => modelCombobox.toggleDropdown()}
rightSectionPointerEvents="none"
label="Model"
style={{ minWidth: 180 }}
>
{selectedModel || <Input.Placeholder>Select model</Input.Placeholder>}
</InputBase>
</Combobox.Target>
<Combobox.Dropdown>
<Combobox.Search
value={modelSearch}
onChange={(event) => setModelSearch(event.currentTarget.value)}
placeholder="Search models..."
/>
<Combobox.Options>
<ScrollArea.Autosize type="scroll" mah={200}>
{filteredModels.length > 0 ? (
filteredModels.map((name) => (
<Combobox.Option value={name} key={name}>
{name}
</Combobox.Option>
))
) : (
<Combobox.Empty>No models found</Combobox.Empty>
)}
</ScrollArea.Autosize>
</Combobox.Options>
</Combobox.Dropdown>
</Combobox>
{/* Tag/Quantization Dropdown */}
<Combobox
store={tagCombobox}
withinPortal={false} withinPortal={false}
onOptionSubmit={(val) => { onOptionSubmit={(val) => {
setNewModelName(val); setSelectedTag(val);
combobox.closeDropdown(); tagCombobox.closeDropdown();
// Optional: trigger pull immediately or let user click button?
// User code sample sets value. I'll set newModelName.
}} }}
> >
<Combobox.Target> <Combobox.Target>
@@ -357,33 +480,38 @@ export function SettingsModal({
type="button" type="button"
pointer pointer
rightSection={<Combobox.Chevron />} rightSection={<Combobox.Chevron />}
onClick={() => combobox.toggleDropdown()} onClick={() => tagCombobox.toggleDropdown()}
rightSectionPointerEvents="none" rightSectionPointerEvents="none"
label="Download Model" label="Tag"
description="Select an installed model to update, or type a new model name (e.g. llama3)" disabled={!selectedModel}
style={{ flex: 1 }} style={{ minWidth: 180 }}
> >
{newModelName || ( {selectedTag || (
<Input.Placeholder>Pick or type model name</Input.Placeholder> <Input.Placeholder>
{selectedModel ? 'Select tag (optional)' : 'Select model first'}
</Input.Placeholder>
)} )}
</InputBase> </InputBase>
</Combobox.Target> </Combobox.Target>
<Combobox.Dropdown> <Combobox.Dropdown>
<Combobox.Search <Combobox.Search
value={search} value={tagSearch}
onChange={(event) => { onChange={(event) => setTagSearch(event.currentTarget.value)}
setSearch(event.currentTarget.value); placeholder="Search tags..."
setNewModelName(event.currentTarget.value); // Allow typing new names
}}
placeholder="Search installed models or type new one"
/> />
<Combobox.Options> <Combobox.Options>
{options.length > 0 ? ( <ScrollArea.Autosize type="scroll" mah={200}>
options {filteredTags.length > 0 ? (
filteredTags.map((tag) => (
<Combobox.Option value={tag} key={tag}>
{tag}
</Combobox.Option>
))
) : ( ) : (
<Combobox.Empty>No matching installed models</Combobox.Empty> <Combobox.Empty>No tags found</Combobox.Empty>
)} )}
</ScrollArea.Autosize>
</Combobox.Options> </Combobox.Options>
</Combobox.Dropdown> </Combobox.Dropdown>
</Combobox> </Combobox>
@@ -391,6 +519,7 @@ export function SettingsModal({
<Button <Button
onClick={handlePullModel} onClick={handlePullModel}
loading={!!pullingModel} loading={!!pullingModel}
disabled={!selectedModel}
leftSection={<IconDownload size={16} />} leftSection={<IconDownload size={16} />}
color={primaryColor} color={primaryColor}
> >
@@ -404,22 +533,49 @@ export function SettingsModal({
</Alert> </Alert>
)} )}
<Text size="sm" fw={500} mt="xl" mb="xs"> {pullError && (
<Alert
icon={<IconAlertCircle size={16} />}
title="Error"
color="red"
mt="md"
withCloseButton
onClose={() => setPullError('')}
>
{pullError}
</Alert>
)}
{/* Installed Models */}
<Group justify="space-between" mt="xl" mb="xs">
<Text size="sm" fw={500}>
Installed Models Installed Models
</Text> </Text>
<Tooltip label="Refresh">
<ActionIcon
variant="subtle"
color="gray"
size="sm"
onClick={() => fetchInstalledModels(true)}
loading={loadingInstalled}
>
<IconRefresh size={16} />
</ActionIcon>
</Tooltip>
</Group>
{loadingModels ? ( {loadingInstalled && installedModels.length === 0 ? (
<Group justify="center" py="xl"> <Group justify="center" py="xl">
<Loader size="sm" /> <Loader size="sm" />
</Group> </Group>
) : models.length === 0 ? ( ) : installedModels.length === 0 ? (
<Text c="dimmed" size="sm" ta="center" py="xl"> <Text c="dimmed" size="sm" ta="center" py="xl">
No models found. Try pulling one! No models installed. Pull one from above!
</Text> </Text>
) : ( ) : (
<ScrollArea h={300} offsetScrollbars> <ScrollArea h={200} offsetScrollbars>
<Stack gap="xs"> <Stack gap="xs">
{models.map((model) => ( {installedModels.map((model) => (
<Card key={model.digest} withBorder padding="sm" radius="md"> <Card key={model.digest} withBorder padding="sm" radius="md">
<Group justify="space-between"> <Group justify="space-between">
<div> <div>
@@ -451,6 +607,13 @@ export function SettingsModal({
</Stack> </Stack>
</ScrollArea> </ScrollArea>
)} )}
{availableModels && (
<Text size="xs" c="dimmed" mt="md">
Model list last updated:{' '}
{new Date(availableModels.generatedAt).toLocaleDateString()}
</Text>
)}
</> </>
)} )}
+2 -1
View File
@@ -18,7 +18,8 @@
"prettier:write": "prettier --write \"**/*.{ts,tsx}\"", "prettier:write": "prettier --write \"**/*.{ts,tsx}\"",
"test": "npx next typegen && npm run prettier:check && npm run lint && npm run typecheck && npm run jest", "test": "npx next typegen && npm run prettier:check && npm run lint && npm run typecheck && npm run jest",
"storybook": "storybook dev -p 6006", "storybook": "storybook dev -p 6006",
"storybook:build": "storybook build" "storybook:build": "storybook build",
"scrape-models": "node scripts/scrape-ollama-models.mjs"
}, },
"dependencies": { "dependencies": {
"@mantine/core": "^8.3.12", "@mantine/core": "^8.3.12",
File diff suppressed because it is too large Load Diff
+111
View File
@@ -0,0 +1,111 @@
#!/usr/bin/env node
/**
* Scrapes available Ollama models and their tags from ollama.com
* Outputs a JSON file that can be used by the frontend for model selection.
*
* Usage: node scripts/scrape-ollama-models.mjs
*/
import { writeFileSync } from 'fs';
import { dirname, join } from 'path';
import { fileURLToPath } from 'url';
const __dirname = dirname(fileURLToPath(import.meta.url));
const OLLAMA_LIBRARY_URL = 'https://ollama.com/library';
/**
 * Fetches the list of all available model names from Ollama's library page.
 *
 * @returns {Promise<string[]>} Unique model names found on the page.
 * @throws {Error} If the library page cannot be fetched (non-2xx response).
 */
async function fetchModelNames() {
  console.log('Fetching model list from Ollama library...');
  const response = await fetch(OLLAMA_LIBRARY_URL);

  // Fail fast on HTTP errors instead of scraping an error page for links,
  // which would silently produce an empty (or bogus) model list.
  if (!response.ok) {
    throw new Error(
      `Failed to fetch ${OLLAMA_LIBRARY_URL}: ${response.status} ${response.statusText}`
    );
  }

  const html = await response.text();

  // Extract model names using regex (matches href="/library/modelname")
  const modelRegex = /href="\/library\/([^"\/]+)"/g;
  const models = new Set();
  let match;

  while ((match = modelRegex.exec(html)) !== null) {
    // Filter out non-model links (like "tags" subpages)
    const name = match[1];
    if (name && !name.includes('/') && !name.includes(':')) {
      models.add(name);
    }
  }

  const modelList = Array.from(models);
  console.log(`Found ${modelList.length} models`);
  return modelList;
}
/**
 * Fetches available tags for a specific model.
 *
 * @param {string} modelName - Model whose /tags page should be scraped.
 * @returns {Promise<string[]>} Tags found for the model; empty array on any
 *   failure so one bad model does not abort the whole scrape.
 */
async function fetchModelTags(modelName) {
  const url = `${OLLAMA_LIBRARY_URL}/${modelName}/tags`;

  try {
    const response = await fetch(url);

    // Treat HTTP errors like any other fetch failure: thrown here, caught
    // below, reported, and converted to an empty tag list.
    if (!response.ok) {
      throw new Error(`HTTP ${response.status} ${response.statusText}`);
    }

    const html = await response.text();

    // Escape regex metacharacters in the model name: real names such as
    // "phi3.5" or "qwen2.5" contain '.', which would otherwise match any
    // character and could produce false-positive tag matches.
    const escapedName = modelName.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

    // Extract tags using regex (matches /library/modelname:tagname)
    const tagRegex = new RegExp(`/library/${escapedName}:([^"]+)"`, 'g');
    const tags = new Set();
    let match;

    while ((match = tagRegex.exec(html)) !== null) {
      tags.add(match[1]);
    }

    return Array.from(tags);
  } catch (error) {
    console.error(`Error fetching tags for ${modelName}:`, error.message);
    return [];
  }
}
/**
 * Entry point: scrapes every model and its tags from ollama.com, then writes
 * the combined result to public/ollama-models.json for static serving.
 */
async function main() {
  const startedAt = Date.now();

  // Full list of model names from the library index page.
  const names = await fetchModelNames();

  // Scrape tag pages in small batches so we don't hammer the server.
  const BATCH_SIZE = 5;
  const models = {};

  for (let offset = 0; offset < names.length; offset += BATCH_SIZE) {
    const batch = names.slice(offset, offset + BATCH_SIZE);

    const fetched = await Promise.all(
      batch.map(async (modelName) => ({
        name: modelName,
        tags: await fetchModelTags(modelName),
      }))
    );

    fetched.forEach(({ name, tags }) => {
      models[name] = tags;
      console.log(`  ${name}: ${tags.length} tags`);
    });
  }

  // Output shape matches the frontend's OllamaModelsData interface.
  const output = {
    generatedAt: new Date().toISOString(),
    modelCount: Object.keys(models).length,
    models,
  };

  // Write into public/ so the app can serve the file statically.
  const outputPath = join(__dirname, '..', 'public', 'ollama-models.json');
  writeFileSync(outputPath, JSON.stringify(output, null, 2));

  const seconds = ((Date.now() - startedAt) / 1000).toFixed(1);
  console.log(`\nDone! Scraped ${Object.keys(models).length} models in ${seconds}s`);
  console.log(`Output written to: ${outputPath}`);
}
// Run the scraper. Log failures AND set a non-zero exit code so CI/cron
// callers can detect a failed scrape (previously the script exited 0 on error).
main().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});