This commit is contained in:
Zacharias-Brohn
2025-09-08 00:04:54 +02:00
parent f3c9154822
commit 6decc3c2f4
4 changed files with 82 additions and 41 deletions
+38
View File
@@ -0,0 +1,38 @@
-- ChatGPT.nvim configuration: talks to a local OpenAI-compatible server
-- on localhost:5000 instead of api.openai.com, so no real API key is needed.
require("chatgpt").setup({
	-- Plugin shells out to these commands to obtain host/key.
	api_host_cmd = "echo http://localhost:5000",
	api_key_cmd = "echo ''", -- local server: empty key is sufficient

	-- Sampling parameters forwarded to the chat-completions endpoint.
	openai_params = {
		model = "Selene-1-Mini-Llama-3.1-8B-EXL3",
		max_tokens = 1024,
		temperature = 0.1,
		top_p = 1,
		n = 1,
		frequency_penalty = 0,
		presence_penalty = 0,
	},

	keymaps = {
		-- Window / session control
		close = "<C-c>",
		close_n = "<Esc>", -- NOTE(review): confirm `close_n` is still a supported option in current ChatGPT.nvim
		new_session = "<C-l>",
		select_session = "<Space>",
		rename_session = "r",
		delete_session = "d",
		toggle_sessions = "<C-p>",
		-- Navigation
		cycle_windows = "<Tab>",
		cycle_modes = "<C-f>",
		next_message = "<C-j>",
		prev_message = "<C-k>", -- NOTE(review): <C-k> is also yank_last_code — verify these act in different panes
		scroll_up = "<C-u>",
		scroll_down = "<C-d>",
		-- Yanking / editing
		yank_last = "<C-y>",
		yank_last_code = "<C-k>",
		draft_message = "<C-r>", -- NOTE(review): <C-r> is also toggle_message_role — verify no clash
		edit_message = "e",
		delete_message = "d",
		toggle_message_role = "<C-r>",
		toggle_system_role_open = "<C-s>",
		-- Panels
		toggle_settings = "<C-o>",
		toggle_help = "<C-h>",
		stop_generating = "<C-x>",
	},
})
+30 -41
View File
@@ -1,52 +1,41 @@
-- CopilotChat configuration. A custom "tabby" provider points the plugin at a
-- local OpenAI-compatible server on localhost:5000 while reusing Copilot's
-- request/response shaping. (Reconstructed from a side-by-side diff paste in
-- which old and new lines were fused; this is the post-commit content.)
require("CopilotChat").setup {
	prompts = {
		CivitAI = {
			system_prompt = "You are an assistant helping with stable diffusion models and python to implement failsafes for a server regarding image generation.",
		},
		Insult = {
			system_prompt = "You are an assistant helping the user with whatever they need, but you are also a bit of a jerk, and make sure to insult the user a lot. Use vulgar language if necessary.",
		}
	},
	-- Sticky prompt applied to every chat: include all workspace files as context.
	sticky = "#glob:**/*",
	headers = {
		user = ' You: ',
		assistant = ' Copilot: ',
		tool = '󰖷 Tool: ',
	},
	providers = {
		tabby = {
			-- Reuse Copilot's input/output processing for the custom endpoint.
			prepare_input = require('CopilotChat.config.providers').copilot.prepare_input,
			prepare_output = require('CopilotChat.config.providers').copilot.prepare_output,

			-- Fetch the model list from the local server's OpenAI-style /v1/models.
			get_models = function(headers)
				local response, err = require('CopilotChat.utils').curl_get('http://localhost:5000/v1/models', {
					headers = headers,
					json_response = true
				})

				if err then
					error(err)
				end

				-- /v1/models returns { data = { { id = ... }, ... } }; the server
				-- exposes no display name, so reuse the id for both fields.
				return vim.tbl_map(function(model)
					return {
						id = model.id,
						name = model.id,
					}
				end, response.body.data)
			end,

			get_url = function()
				return 'http://localhost:5000/v1/chat/completions'
			end,
		}
	}
}
+2
View File
@@ -57,3 +57,5 @@ map("n", "<leader>mr", "<cmd>CellularAutomaton make_it_rain<CR>");
-- Re-source the current file (handy while editing config).
-- (Reconstructed from a side-by-side diff paste with fused old/new lines.)
map("n", "<leader><leader>", function()
	vim.cmd("so")
end)

-- Open the ChatGPT chat window (added alongside the ChatGPT.nvim plugin).
map("n", "<A-v>", "<cmd>ChatGPT<CR>")
+12
View File
@@ -236,4 +236,16 @@ return {
"mg979/vim-visual-multi",
	branch = "master",
},
-- ChatGPT.nvim: chat UI against the local OpenAI-compatible server;
-- its setup lives in lua/config/chatgpt.lua (see require below).
-- (Tail of the plugin-spec table; the enclosing `return {` and the opening
-- brace of the vim-visual-multi entry are above this hunk. Reconstructed
-- from a side-by-side diff paste with fused old/new lines.)
{
	"jackMort/ChatGPT.nvim",
	dependencies = {
		"MunifTanjim/nui.nvim",
		"nvim-lua/plenary.nvim",
		"folke/trouble.nvim",
		"nvim-telescope/telescope.nvim",
	},
	config = function()
		require("config.chatgpt")
	end,
},
}