ollama settings
commit 4d9313407b
parent d581be532d
@@ -829,7 +829,7 @@ require('lazy').setup({
   {
     'David-Kunz/gen.nvim',
     opts = {
-      model = 'dolphin-mistral', -- The default model to use.
+      model = 'dolphin-llama3', -- The default model to use.
       host = 'localhost', -- The host running the Ollama service.
       port = '11434', -- The port on which the Ollama service is listening.
       display_mode = 'split', -- The display mode. Can be "float" or "split".
@@ -863,6 +863,7 @@ require('lazy').setup({
         extract = '```$filetype\n(.-)```',
+        model = 'codellama:latest',
       }
       require('gen').select_model()
     end,
     vim.keymap.set('n', '<leader>lo', vim.cmd.Gen, { desc = '[O]llama AI' }),
   },
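For reference, a minimal standalone sketch of the gen.nvim settings this commit lands on, written as a plain require('gen').setup call rather than the file's actual lazy.nvim spec; the option names, values, and the keymap are the ones visible in the hunks above, and everything else (the flat layout, the comments) is assumed rather than taken from the file.

-- Minimal sketch only, assuming gen.nvim is installed and an Ollama server is
-- reachable locally; option names and values come from the diff above, the
-- flat setup-call layout does not mirror the file's actual structure.
require('gen').setup {
  model = 'dolphin-llama3',          -- new default model (was dolphin-mistral)
  host = 'localhost',                -- host running the Ollama service
  port = '11434',                    -- port the Ollama service listens on
  display_mode = 'split',            -- "float" or "split"
  extract = '```$filetype\n(.-)```', -- Lua pattern used to pull code out of replies
}

-- The second hunk additionally sets model = 'codellama:latest' in the block it
-- touches, then opens gen.nvim's interactive model picker:
require('gen').select_model()

-- Keymap from the diff: run :Gen from normal mode with <leader>lo.
vim.keymap.set('n', '<leader>lo', vim.cmd.Gen, { desc = '[O]llama AI' })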