jmarkin 2025-11-05 00:11:58 +03:00
parent 09cb23c371
commit 8bf1f3e330
10 changed files with 192 additions and 34 deletions

View file

@@ -98,11 +98,11 @@
"nixpkgs-lib": "nixpkgs-lib_2"
},
"locked": {
"lastModified": 1760948891,
"narHash": "sha256-TmWcdiUUaWk8J4lpjzu4gCGxWY6/Ok7mOK4fIFfBuU4=",
"lastModified": 1762040540,
"narHash": "sha256-z5PlZ47j50VNF3R+IMS9LmzI5fYRGY/Z5O5tol1c9I4=",
"owner": "hercules-ci",
"repo": "flake-parts",
"rev": "864599284fc7c0ba6357ed89ed5e2cd5040f0c04",
"rev": "0010412d62a25d959151790968765a70c436598b",
"type": "github"
},
"original": {
@@ -199,6 +199,22 @@
"type": "github"
}
},
"gp-nvim": {
"flake": false,
"locked": {
"lastModified": 1754903071,
"narHash": "sha256-+K536d3WF5eHRTSgkhn1NLFHms67iw4A0Ql8OZ9TgTw=",
"owner": "Robitx",
"repo": "gp.nvim",
"rev": "c37f154b97690c4925fef4e35ffdbf2c844b5f4e",
"type": "github"
},
"original": {
"owner": "Robitx",
"repo": "gp.nvim",
"type": "github"
}
},
"hlargs-nvim": {
"flake": false,
"locked": {
@@ -241,11 +257,11 @@
"kulala-nvim": {
"flake": false,
"locked": {
"lastModified": 1761409896,
"narHash": "sha256-fChsMhTgne97vHvJzKAxBbM3OO1AZLE4b2TCrY2xL+4=",
"lastModified": 1762151137,
"narHash": "sha256-NQZGW4RblskDrARb8TwzufJgSClNnuIdv7twTIlYfYs=",
"owner": "mistweaverco",
"repo": "kulala.nvim",
"rev": "9a9308b664f71159f1c150e8cfb18541b143a9e9",
"rev": "c328aeb219c4b77106917dd2698c90ea9657281b",
"type": "github"
},
"original": {
@@ -313,11 +329,11 @@
]
},
"locked": {
"lastModified": 1761955453,
"narHash": "sha256-hQomzSbBiFsDXDMCjHmWXrAMgFlQlCiy7T37Eq7RvT4=",
"lastModified": 1762214689,
"narHash": "sha256-rRIECim04sRqCeBCvuARPLyDezGa7CU4XKAkCx4mmqA=",
"owner": "nix-community",
"repo": "neovim-nightly-overlay",
"rev": "c58076a0d9b24bf77fef4fa2e7c43950914edf71",
"rev": "928ecc3c71ef85227c25cf0ff3bfba1efd9b1930",
"type": "github"
},
"original": {
@@ -329,11 +345,11 @@
"neovim-src": {
"flake": false,
"locked": {
"lastModified": 1761949631,
"narHash": "sha256-YgMQaFD4L9+PEYSkUlBkqaKt+ALPHiVgzgRbjOSW4tE=",
"lastModified": 1762151025,
"narHash": "sha256-5XdkjVsB8LbqTUMmOmK3YscnCVm7yHenKoaKrgFESac=",
"owner": "neovim",
"repo": "neovim",
"rev": "1fddd74da7428e38b79ccb817dbd6952ff1d8ac6",
"rev": "b80d390765b0c987f86ecd257fa8c38cc1225797",
"type": "github"
},
"original": {
@@ -375,11 +391,11 @@
},
"nixpkgs-lib_2": {
"locked": {
"lastModified": 1754788789,
"narHash": "sha256-x2rJ+Ovzq0sCMpgfgGaaqgBSwY+LST+WbZ6TytnT9Rk=",
"lastModified": 1761765539,
"narHash": "sha256-b0yj6kfvO8ApcSE+QmA6mUfu8IYG6/uU28OFn4PaC8M=",
"owner": "nix-community",
"repo": "nixpkgs.lib",
"rev": "a73b9c743612e4244d865a2fdee11865283c04e6",
"rev": "719359f4562934ae99f5443f20aa06c2ffff91fc",
"type": "github"
},
"original": {
@@ -406,11 +422,11 @@
},
"nixpkgs_2": {
"locked": {
"lastModified": 1761907660,
"narHash": "sha256-kJ8lIZsiPOmbkJypG+B5sReDXSD1KGu2VEPNqhRa/ew=",
"lastModified": 1762111121,
"narHash": "sha256-4vhDuZ7OZaZmKKrnDpxLZZpGIJvAeMtK6FKLJYUtAdw=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "2fb006b87f04c4d3bdf08cfdbc7fab9c13d94a15",
"rev": "b3d51a0365f6695e7dd5cdf3e180604530ed33b4",
"type": "github"
},
"original": {
@@ -475,6 +491,7 @@
"flake-parts": "flake-parts_2",
"gen-luarc": "gen-luarc",
"gentags-lua": "gentags-lua",
"gp-nvim": "gp-nvim",
"hlargs-nvim": "hlargs-nvim",
"kulala-fmt": "kulala-fmt",
"kulala-nvim": "kulala-nvim",

View file

@@ -70,6 +70,13 @@
flake = false;
};
gp-nvim = {
url = "github:Robitx/gp.nvim";
flake = false;
};
# my
cmp-diag-codes = {
url = "github:JMarkin/cmp-diag-codes";
flake = false;

View file

@@ -1,4 +1,4 @@
{ inputs, pkgs, ... }:
{ inputs, pkgs, mkNvimPlugin, ... }:
with pkgs.vimPlugins; [
# {
# plugin = codecompanion-nvim.overrideAttrs (oa: {
@@ -32,5 +32,5 @@ with pkgs.vimPlugins; [
}
'';
}
(mkNvimPlugin inputs.gp-nvim "gp.nvim")
]

View file

@@ -13,7 +13,7 @@ in
yaml-language-server
# systemd-language-server
# nginx-language-server
docker-language-server
# docker-language-server
vacuum-go
taplo

View file

@@ -215,7 +215,7 @@ let
''--suffix LUA_PATH ";" "${concatMapStringsSep ";" luaPackages.getLuaPath resolvedExtraLuaPackages}"'';
# wrapNeovimUnstable is the nixpkgs utility function for building a Neovim derivation.
neovim-wrapped = wrapNeovimUnstable neovim-nightly (neovimConfig
neovim-wrapped = wrapNeovimUnstable neovim-unwrapped (neovimConfig
// {
luaRcContent = initLua;
wrapperArgs =

View file

@@ -50,7 +50,7 @@ opt.scrollback = 2000
opt.conceallevel = 0
opt.autowriteall = true
opt.virtualedit = 'block'
opt.mouse = "vh"
opt.mouse = "a"
opt.mousemoveevent = true
opt.mousefocus = false
g.mapleader = "\\"
@@ -211,3 +211,13 @@ vim.g.rainbow_delimiters_highlight = {
"RainbowDelimiterViolet",
"RainbowDelimiterCyan",
}
-- ollama configs
g.ollama_host = vim.env.OLLAMA_HOST or "localhost"
g.ollama_port = vim.env.OLLAMA_PORT or "11434"
g.ollama_url = string.format("http://%s:%s", g.ollama_host, g.ollama_port)
g.ollama_generate_endpoint = string.format("%s/api/generate", g.ollama_url)
g.ollama_chat_endpoint = string.format("%s/api/chat", g.ollama_url)
g.ollama_chat_completions_endpoint = string.format("%s/v1/chat/completions", g.ollama_url)
g.ollama_completions_endpoint = string.format("%s/api/generate", g.ollama_url)
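For reference, a minimal sketch (not part of this commit) of probing the Ollama server behind these globals from Lua, assuming Neovim 0.10+ for vim.system and curl on PATH; /api/tags is Ollama's model-listing route:

local function ollama_ping()
  -- Reuse the base URL assembled above from OLLAMA_HOST/OLLAMA_PORT.
  vim.system({ "curl", "-sf", vim.g.ollama_url .. "/api/tags" }, { text = true }, function(res)
    vim.schedule(function()
      if res.code == 0 then
        vim.notify("Ollama reachable at " .. vim.g.ollama_url)
      else
        vim.notify("Ollama unreachable at " .. vim.g.ollama_url, vim.log.levels.WARN)
      end
    end)
  end)
end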

View file

@@ -162,13 +162,7 @@ local opts = {
},
}
-- ollama setup
g.ollama_host = vim.env.OLLAMA_HOST or "localhost"
g.ollama_port = vim.env.OLLAMA_PORT or "11434"
g.ollama_url = string.format("http://%s:%s", g.ollama_host, g.ollama_port)
g.ollama_generate_endpoint = string.format("%s/api/generate", g.ollama_url)
g.ollama_chat_endpoint = string.format("%s/api/chat", g.ollama_url)
g.ollama_completions_endpoint = string.format("%s/api/generate", g.ollama_url)
g.ollama_model = "hf.co/mradermacher/Qwen2.5-CoderX-14B-v0.5-GGUF:Q8_0"
g.ollama_model = "orieg/gemma3-tools:4b"
local ollama_modify_config = function(cfg)
cfg.provider = "ollama"

nvim/plugin/ai_gp.lua (new file, 130 lines added)
View file

@@ -0,0 +1,130 @@
if vim.g.did_load_gp_plugin or vim.g.did_load_ai_plugin then
return
end
vim.g.did_load_gp_plugin = true
lze.load({
"gp.nvim",
event = "BufEnter",
after = function()
require("gp").setup({
providers = {
openai = {
disable = true,
},
ollama = {
disable = false,
endpoint = vim.g.ollama_chat_endpoint,
},
},
whisper = {
disable = true,
},
agents = {
{
name = "GPT-OSS",
chat = true,
command = true,
provider = "ollama",
model = {
model = "gpt-oss-safeguard:20b",
num_ctx = 1024 * 8,
},
system_prompt = require("gp.defaults").code_system_prompt,
},
{
name = "Gemma",
chat = true,
command = true,
provider = "ollama",
model = { model = "orieg/gemma3-tools:4b" },
system_prompt = require("gp.defaults").code_system_prompt,
},
{
name = "Cogito",
chat = false,
command = true,
provider = "ollama",
model = {
model = "cogito:14b",
num_ctx = 1024 * 8,
},
system_prompt = require("gp.defaults").code_system_prompt,
},
{
name = "Amoral",
chat = true,
command = true,
provider = "ollama",
model = {
model = "hf.co/mradermacher/amoral-gemma3-12B-v2-qat-i1-GGUF:Q4_K_M ",
num_ctx = 1024 * 8,
},
system_prompt = require("gp.defaults").code_system_prompt,
},
{
name = "Qwen3-Coder",
chat = false,
command = true,
provider = "ollama",
model = {
model = "danielsheep/Qwen3-Coder-30B-A3B-Instruct-1M-Unsloth:UD-IQ3_XXS",
num_ctx = 1024 * 8,
},
system_prompt = "Please return ONLY code snippets.\nSTART AND END YOUR ANSWER WITH:\n\n```",
},
},
default_chat_agent = "GPT-OSS",
default_command_agent = "Gemma",
hooks = {
-- GpImplement rewrites the provided selection/range based on comments in it
Implement = function(gp, params)
local template = "Having following from {{filename}}:\n\n"
.. "```{{filetype}}\n{{selection}}\n```\n\n"
.. "Please rewrite this according to the contained instructions."
.. "\n\nRespond exclusively with the snippet that should replace the selection above."
local agent = gp.get_command_agent()
gp.Prompt(params, gp.Target.append, agent, template)
end,
UnitTests = function(gp, params)
local template = "I have the following code from {{filename}}:\n\n"
.. "```{{filetype}}\n{{selection}}\n```\n\n"
.. "Please respond by writing table driven unit tests for the code above."
local agent = gp.get_command_agent()
gp.Prompt(params, gp.Target.vnew, agent, template)
end,
Explain = function(gp, params)
local template = "I have the following code from {{filename}}:\n\n"
.. "```{{filetype}}\n{{selection}}\n```\n\n"
.. "Please respond by explaining the code above."
local agent = gp.get_chat_agent()
gp.Prompt(params, gp.Target.popup, agent, template)
end,
TranslateRu = function(gp, params)
local chat_system_prompt = "You are a Translator, please translate to Russian."
local agent = gp.get_chat_agent("GPT-OSS")
gp.cmd.ChatNew(params, chat_system_prompt, agent)
end,
TranslateEn = function(gp, params)
local chat_system_prompt = "You are a Translator, please translate to English."
local agent = gp.get_chat_agent("GPT-OSS")
gp.cmd.ChatNew(params, chat_system_prompt, agent)
end,
CodeReview = function(gp, params)
local template = "I have the following code from {{filename}}:\n\n"
.. "```{{filetype}}\n{{selection}}\n```\n\n"
.. "Please analyze for code smells and suggest improvements."
local agent = gp.get_chat_agent()
gp.Prompt(params, gp.Target.enew("markdown"), agent, template)
end,
-- example of making :%GpChatNew a dedicated command which
-- opens a new chat with the entire current buffer as context
BufferChatNew = function(gp, _)
-- call GpChatNew command in range mode on whole buffer
vim.api.nvim_command("%" .. gp.config.cmd_prefix .. "ChatNew")
end,
},
})
end,
})
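Usage note: gp.nvim registers each hook above as a user command with its cmd_prefix (default "Gp"), so this setup yields :GpImplement, :GpUnitTests, :GpTranslateRu, :GpCodeReview, :GpBufferChatNew, and so on. A hypothetical pair of keymaps for them, not part of this commit:

-- Run the Implement hook on the current visual selection.
vim.keymap.set("v", "<leader>gi", ":GpImplement<CR>", { desc = "gp.nvim: implement selection" })
-- Toggle the chat window via gp.nvim's built-in :GpChatToggle command.
vim.keymap.set("n", "<leader>gg", "<cmd>GpChatToggle<CR>", { desc = "gp.nvim: toggle chat" })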

View file

@@ -45,18 +45,18 @@ Content-Type: application/json
"messages": [
{
"role": "system",
"content": "You are Fill in the Middle completion. Respond with only the filled-in code. Generate 5 different completion separate <COMPLETION> as separator of completion"
"content": "You AI assistant"
},
{
"role": "user",
"content": "language python indentation 4 spaces for a tab."
"content": "language python indentation 2 spaces for a tab."
},
{
"role": "user",
"content": "<|fim_prefix|>def quicksort(arr):\n if len(arr) <= 1:\n return arr\n pivot = arr[len(arr) // 2]\n <|fim_suffix|>\n middle = [x for x in arr if x == pivot]\n right = [x for x in arr if x > pivot]\n return quicksort(left) + middle + quicksort(right)<|fim_middle|>"
"content": "get fibbonachi fucntion for calcualte first 30 numbers fastest"
}
],
"model":"hf.co/unsloth/gemma-3-12b-it-qat-GGUF:Q4_K_M",
"model":"orieg/gemma3-tools:4b",
"stream": false
}
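For comparison, a minimal Lua sketch (not part of this commit) of sending the same request from inside Neovim via curl and the vim.g.ollama_chat_completions_endpoint global defined in options.lua; the endpoint is Ollama's OpenAI-compatible /v1/chat/completions route:

local body = vim.json.encode({
  model = "orieg/gemma3-tools:4b",
  stream = false,
  messages = {
    { role = "system", content = "You are an AI assistant." },
    { role = "user", content = "Write the fastest Fibonacci function to calculate the first 30 numbers." },
  },
})
-- POST the JSON body with curl; -d implies a POST request.
vim.system(
  { "curl", "-s", "-H", "Content-Type: application/json", "-d", body, vim.g.ollama_chat_completions_endpoint },
  { text = true },
  function(res)
    vim.schedule(function()
      local ok, decoded = pcall(vim.json.decode, res.stdout)
      if ok and decoded.choices and decoded.choices[1] then
        vim.notify(decoded.choices[1].message.content)
      end
    end)
  end
)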