Commit ff2d683

feat: implement auto_unload for ollama backend. (#38)
* feat: implement auto_unload for ollama backend.
* docs(ollama): change the documentation to the default behaviour.
1 parent fd464ec · commit ff2d683

File tree

2 files changed, +10 -1 lines changed

README.md (+2)

@@ -181,6 +181,8 @@ cmp_ai:setup({
   provider = 'Ollama',
   provider_options = {
     model = 'codellama:7b-code',
+    auto_unload = false, -- Set to true to automatically unload the model when
+                         -- exiting nvim.
   },
   notify = true,
   notify_callback = function(msg)
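For reference, a complete setup that opts into the new behaviour might look like the sketch below. It is a minimal example assembled from the README snippet above; the require('cmp_ai.config') entry point follows the plugin's README, and auto_unload is flipped to true here purely for illustration:

local cmp_ai = require('cmp_ai.config')

cmp_ai:setup({
  provider = 'Ollama',
  provider_options = {
    model = 'codellama:7b-code',
    auto_unload = true, -- unload the model from Ollama's memory when leaving nvim
  },
  notify = true,
  notify_callback = function(msg)
    vim.notify(msg) -- surface completion-request status messages
  end,
})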

lua/cmp_ai/backends/ollama.lua (+8 -1)

@@ -13,7 +13,14 @@ function Ollama:new(o)
       temperature = 0.2,
     },
   })
-
+  if self.params.auto_unload then
+    vim.api.nvim_create_autocmd('VimLeave', {
+      callback = function()
+        self:Get(self.params.base_url, {}, { model = self.params.model, keep_alive = 0 }, function() end)
+      end,
+      group = vim.api.nvim_create_augroup('CmpAIOllama', { clear = true }),
+    })
+  end
   return o
 end
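The unload relies on Ollama's keep_alive parameter: a request to the generate endpoint with keep_alive = 0 asks the server to evict the model from memory immediately, and the VimLeave autocmd above reuses the backend's Get helper to send exactly such a request on exit. For illustration only, a standalone equivalent is sketched below; it is not part of this commit and assumes Neovim 0.10+ (for vim.system), curl on the PATH, and Ollama's default endpoint at http://localhost:11434/api/generate:

-- Ask Ollama to evict 'codellama:7b-code' from memory right away.
-- keep_alive = 0 tells the server not to keep the model loaded after this request.
local payload = vim.json.encode({
  model = 'codellama:7b-code',
  keep_alive = 0,
})

vim.system({
  'curl', '-s', 'http://localhost:11434/api/generate',
  '-d', payload,
}, { text = true }, function(result)
  -- result.stdout carries Ollama's JSON reply; nothing further is needed for an unload.
end)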
