refactor: simplify fish prompt and update config

This commit is contained in:
mitchell 2026-04-23 06:02:32 -04:00
parent ac3ae0076b
commit 9b64299c45
7 changed files with 35 additions and 84 deletions

View file

@@ -18,3 +18,11 @@ autocmd("TermOpen", {
command = "setlocal nonumber norelativenumber",
group = "TerminalSettings",
})
-- Yaml settings
augroup("YamlSettings", { clear = true })
autocmd("FileType", {
pattern = "y*ml",
command = "setlocal foldenable foldlevel=1",
group = "YamlSettings",
})

View file

@@ -15,12 +15,16 @@ map("v", "//", function()
end, { noremap = true, silent = true, desc = "Search for Visual Selection" })
-- LSP Diagnostic Mappings
local diag_opts = { silent = true }
local diag_opts = { silent = true, noremap = true }
map("n", "<leader>d", vim.diagnostic.open_float, diag_opts)
map("n", "[d", function()
vim.diagnostic.jump({ count = 1 })
end, diag_opts)
map("n", "]d", function()
vim.diagnostic.jump({ count = -1 })
end, diag_opts)
map("n", "]d", function()
vim.diagnostic.jump({ count = 1 })
end, diag_opts)
map("n", "<space>q", vim.diagnostic.setloclist, diag_opts)
-- Noice
map("n", "<leader>o", "<cmd>NoiceDismiss<cr>", { silent = true, noremap = true })
map("n", "<leader>O", "<cmd>NoiceDisable<cr>", { silent = true, noremap = true })

View file

@@ -25,24 +25,24 @@ return {
},
interactions = {
chat = {
adapter = { name = "opencode", model = "llama.cpp/qwen3.5-35b-a3b-dev" },
opts = {
-- system_prompt = "",
},
tools = {
opts = {
system_prompt = {
enabled = true, -- Enable the tools system prompt?
replace_main_system_prompt = true, -- Replace the main system prompt with the tools system prompt?
},
},
},
adapter = { name = "opencode", model = "llama.cpp/qwen3.6-35b-a3b-dev" },
-- opts = {
-- system_prompt = "",
-- },
-- tools = {
-- opts = {
-- system_prompt = {
-- enabled = true, -- Enable the tools system prompt?
-- replace_main_system_prompt = true, -- Replace the main system prompt with the tools system prompt?
-- },
-- },
-- },
},
inline = {
adapter = { name = "llama.cpp", model = "qwen3.5-35b-a3b-dev" },
adapter = { name = "llama.cpp", model = "qwen3.6-35b-a3b-dev" },
},
cmd = {
adapter = { name = "llama.cpp", model = "qwen3.5-35b-a3b-dev" },
adapter = { name = "llama.cpp", model = "qwen3.6-35b-a3b-dev" },
},
cli = {
agent = "opencode",
@@ -65,7 +65,7 @@ return {
return require("codecompanion.adapters").extend("openai_compatible", {
schema = {
model = {
default = "qwen3.5-35b-a3b-dev",
default = "qwen3.6-35b-a3b-dev",
},
},
env = {