-- Source listing metadata (scrape residue converted to comments so the file
-- parses as valid Lua):
-- gemini/lua/gemini/init.lua
-- 2025-03-16 21:30:11 +01:00
-- 126 lines, 3.8 KiB, Lua
-- lua/gemini/init.lua
local api = require("gemini.api")
local chat = require("gemini.chat")
local config = require("gemini.config")
local completion = require("gemini.completion")
local M = {}
--- Read the entire current buffer as one newline-joined string.
-- @treturn string the full buffer text
local function get_current_buffer_content()
  local buffer_lines = vim.api.nvim_buf_get_lines(0, 0, -1, false)
  return table.concat(buffer_lines, "\n")
end
--- Ask the user for a prompt via vim.ui.input and forward it to M.query.
-- A cancelled input (nil) is silently ignored.
-- @tparam string|nil context optional buffer text to attach to the query
function M.prompt_query(context)
  local label
  if context then
    label = "Gemini (with buffer context): "
  else
    label = "Gemini: "
  end
  vim.ui.input({ prompt = label }, function(answer)
    if answer then
      M.query(answer, context)
    end
  end)
end
--- Send a prompt to the Gemini API and render the exchange in the chat window.
-- A "Thinking..." placeholder is shown while the request is in flight; the
-- async callback then either replaces it with the reply or notifies on error.
-- @tparam string prompt the user's question
-- @tparam string|nil context optional buffer text attached to the request
function M.query(prompt, context)
  chat.set_context(context)
  chat.create_window()
  chat.update_content("User: " .. prompt .. "\n\nAssistant: Thinking...", true, true)
  api.get_response(prompt, context, function(response, err)
    if err then
      vim.notify("Failed to get response: " .. err, vim.log.levels.ERROR)
      return
    end
    chat.update_content("User: " .. prompt .. "\n\nAssistant: " .. response, true)
  end)
end
--- Initialize the plugin: apply user options, register the markdown parser,
-- install the global omnifunc, and define user commands and keymaps.
-- @tparam table|nil opts user configuration forwarded to config.setup
function M.setup(opts)
  config.setup(opts)

  -- Register the markdown treesitter parser (used for chat highlighting).
  -- `language.add` superseded the deprecated `require_language` in Neovim
  -- 0.9+; prefer it when available and keep pcall so a missing parser is
  -- non-fatal either way.
  local ts_language = vim.treesitter.language
  pcall(ts_language.add or ts_language.require_language, "markdown")

  -- Set up omnifunc globally.
  -- NOTE(review): this overrides omnifunc for every filetype, which can
  -- clobber an LSP-provided omnifunc — confirm this is intentional.
  vim.api.nvim_create_autocmd("FileType", {
    pattern = "*",
    callback = function()
      vim.bo.omnifunc = "v:lua.require'gemini'.complete"
    end,
  })

  -- Shared by :GeminiClearChat and the <leader>gq mapping (was duplicated).
  local function clear_chat()
    api.clear_conversation()
    chat.clear()
    vim.notify("Chat history cleared", vim.log.levels.INFO)
  end

  -- `cmd_opts` avoids shadowing setup()'s own `opts` parameter.
  vim.api.nvim_create_user_command("Gemini", function(cmd_opts)
    if cmd_opts.args == "" then
      vim.notify("Please provide a prompt for Gemini.", vim.log.levels.WARN)
      return
    end
    M.query(cmd_opts.args)
  end, {
    desc = "Query Google AI",
    nargs = "+",
    complete = "shellcmd",
  })

  vim.api.nvim_create_user_command("GeminiClearChat", clear_chat, {
    desc = "Clear Gemini chat history",
  })

  vim.keymap.set("n", "<leader>gc", function()
    M.prompt_query()
  end, { desc = "Chat with Gemini AI" })

  vim.keymap.set("n", "<leader>gs", function()
    M.prompt_query(get_current_buffer_content())
  end, { desc = "Chat with Gemini AI (with buffer context)" })

  vim.keymap.set("n", "<leader>gq", clear_chat, { desc = "Clear Gemini chat history" })

  -- Two-chord mapping so a single <C-g> stays free for other uses.
  vim.keymap.set("i", "<C-g><C-g>", function()
    -- Force Gemini's omnifunc, then open the omni-completion popup.
    vim.api.nvim_command("set omnifunc=v:lua.require'gemini'.complete")
    vim.api.nvim_feedkeys(vim.api.nvim_replace_termcodes("<C-x><C-o>", true, true, true), "n", true)
  end, { desc = "Trigger Gemini completion" })
end
--- Omnifunc-compatible completion entry point (see :help complete-functions).
-- Vim calls it twice: first with findstart == 1 to locate where the word
-- being completed starts, then with findstart == 0 and `base` set to that
-- word to produce candidates.
-- @tparam number findstart 1 on the first pass, 0 on the second
-- @tparam string base text before the cursor to complete (second pass only)
-- @return number|table start column (first pass) or completion items (second)
function M.complete(findstart, base)
if findstart == 1 then
-- Walk backwards from the cursor over word characters ([%w_]) to find
-- the 0-based byte column where the current word begins.
local line = vim.api.nvim_get_current_line()
local col = vim.api.nvim_win_get_cursor(0)[2]
local start = col
while start > 0 and string.match(line:sub(start, start), "[%w_]") do
start = start - 1
end
return start
else
-- Return a synchronous "Loading..." placeholder so the popup opens
-- immediately while real candidates are fetched asynchronously below.
-- NOTE(review): `kind` here is the numeric LSP enum, but complete-items
-- expect a short string — verify how this renders in the popup menu.
local items = {
{ word = "Loading...", kind = vim.lsp.protocol.CompletionItemKind.Text }
}
-- Start the async completion request; when results arrive, re-open the
-- popup via vim.fn.complete() anchored at the same start column
-- (current column minus the length of `base`).
completion.get_completion({ word = base }, function(completions)
if #completions > 0 then
vim.schedule(function()
-- Must run on the main loop: vim.fn.* is not callable from
-- the callback's (possibly fast/luv) context.
vim.fn.complete(vim.fn.col('.') - #base, completions)
end)
end
end)
return items
end
end
return M