From a42f903e8a4938adfa5bbe78b71d6695f5e70ce4 Mon Sep 17 00:00:00 2001 From: Jonas Widen Date: Sun, 16 Mar 2025 14:34:21 +0100 Subject: [PATCH] Added leader gs to sync the current buffer and query a question --- README.md | 11 +++- lua/gemini/api.lua | 31 ++++++---- lua/gemini/init.lua | 141 ++++++++++++++++++++++++-------------------- 3 files changed, 106 insertions(+), 77 deletions(-) diff --git a/README.md b/README.md index 076be22..d1c7cc0 100644 --- a/README.md +++ b/README.md @@ -38,17 +38,22 @@ vim.g.gemini_api_key = "your-api-key-here" ## Usage -The plugin provides two ways to interact with Gemini: +The plugin provides three ways to interact with Gemini: 1. Command Mode: ```vim :Gemini What is SOLID in software engineering? ``` -2. Keymap (default): -- Press `<leader>g` to open an input prompt +2. Simple Query (default): +- Press `<leader>gc` to open an input prompt (mnemonic: 'gemini chat') - Type your query and press Enter +3. Context-Aware Query: +- Press `<leader>gs` to open an input prompt (mnemonic: 'gemini sync') +- This will send your current buffer's content as context along with your query +- Useful for code-related questions about the file you're working on + ### Response Window The AI response appears in a floating window. 
You can close it using: diff --git a/lua/gemini/api.lua b/lua/gemini/api.lua index 2843f79..f2d82a8 100644 --- a/lua/gemini/api.lua +++ b/lua/gemini/api.lua @@ -18,7 +18,7 @@ local function get_api_key() return nil -- API key not found end -local function make_request(prompt) +local function make_request(prompt, context) local api_key = get_api_key() if not api_key then @@ -29,17 +29,26 @@ local function make_request(prompt) return nil end - local model = "gemini-2.0-flash" -- Updated to an available model - local payload = vim.json.encode({ - contents = { - { - parts = { - { - text = prompt, - }, + local model = "gemini-2.0-flash" + local contents = { + { + parts = { + { + text = prompt, }, }, }, + } + + -- If context is provided, add it to the contents + if context then + table.insert(contents[1].parts, 1, { + text = "Context:\n" .. context .. "\n\nQuery:\n", + }) + end + + local payload = vim.json.encode({ + contents = contents, }) -- Escape the payload for shell @@ -72,8 +81,8 @@ local function make_request(prompt) return decoded_result end -function M.get_response(prompt) - local result = make_request(prompt) +function M.get_response(prompt, context) + local result = make_request(prompt, context) if result then if result.error then diff --git a/lua/gemini/init.lua b/lua/gemini/init.lua index ed19f6f..fba6647 100644 --- a/lua/gemini/init.lua +++ b/lua/gemini/init.lua @@ -3,79 +3,94 @@ local api = require("gemini.api") local M = {} -local function gemini_query(prompt) - local response = api.get_response(prompt) +local function get_current_buffer_content() + local lines = vim.api.nvim_buf_get_lines(0, 0, -1, false) + return table.concat(lines, "\n") +end - if response then - -- Create a scratch buffer - local new_buf = vim.api.nvim_create_buf(false, true) - vim.api.nvim_buf_set_lines(new_buf, 0, 0, false, vim.split(response, "\n")) - - -- Set buffer options - vim.api.nvim_buf_set_option(new_buf, 'modifiable', false) - vim.api.nvim_buf_set_option(new_buf, 
'buftype', 'nofile') - - -- Create the window - local new_win = vim.api.nvim_open_win(new_buf, true, { - relative = "editor", - width = 80, - height = 20, - row = 5, - col = vim.o.columns / 2 - 40, - border = "rounded", - title = "Google AI Response", - style = "minimal" - }) +local function gemini_query(prompt, context) + local response = api.get_response(prompt, context) - -- Set window-local keymaps - local close_keys = {'q', '<Esc>', '<CR>'} - for _, key in ipairs(close_keys) do - vim.keymap.set('n', key, function() - vim.api.nvim_win_close(new_win, true) - end, { buffer = new_buf, nowait = true }) - end + if response then + -- Create a scratch buffer + local new_buf = vim.api.nvim_create_buf(false, true) + vim.api.nvim_buf_set_lines(new_buf, 0, 0, false, vim.split(response, "\n")) + + -- Set buffer options + vim.api.nvim_buf_set_option(new_buf, 'modifiable', false) + vim.api.nvim_buf_set_option(new_buf, 'buftype', 'nofile') + + -- Create the window + local new_win = vim.api.nvim_open_win(new_buf, true, { + relative = "editor", + width = 80, + height = 20, + row = 5, + col = vim.o.columns / 2 - 40, + border = "rounded", + title = "Google AI Response", + style = "minimal" + }) - -- Add autocmd to enable closing with :q - vim.api.nvim_create_autocmd("BufWinLeave", { - buffer = new_buf, - callback = function() - if vim.api.nvim_win_is_valid(new_win) then - vim.api.nvim_win_close(new_win, true) - end - end, - once = true, - }) - else - vim.notify("Failed to get a response from Gemini API", vim.log.levels.ERROR) - end + -- Set window-local keymaps + local close_keys = {'q', '<Esc>', '<CR>'} + for _, key in ipairs(close_keys) do + vim.keymap.set('n', key, function() + vim.api.nvim_win_close(new_win, true) + end, { buffer = new_buf, nowait = true }) + end + + -- Add autocmd to enable closing with :q + vim.api.nvim_create_autocmd("BufWinLeave", { + buffer = new_buf, + callback = function() + if vim.api.nvim_win_is_valid(new_win) then + vim.api.nvim_win_close(new_win, true) + end + end, 
+ once = true, + }) + else + vim.notify("Failed to get a response from Gemini API", vim.log.levels.ERROR) + end end -- Make gemini_query available in M so it can be used by the setup function M.gemini_query = gemini_query function M.setup() - -- Create the user command - vim.api.nvim_create_user_command("Gemini", function(opts) - local prompt = opts.args - if prompt == "" then - vim.notify("Please provide a prompt for Gemini.", vim.log.levels.WARN) - return - end - M.gemini_query(prompt) -- Use M.gemini_query instead of gemini_query - end, { - desc = "Query Google AI", - nargs = "+", - complete = "shellcmd", - }) + -- Create the user command + vim.api.nvim_create_user_command("Gemini", function(opts) + local prompt = opts.args + if prompt == "" then + vim.notify("Please provide a prompt for Gemini.", vim.log.levels.WARN) + return + end + M.gemini_query(prompt) + end, { + desc = "Query Google AI", + nargs = "+", + complete = "shellcmd", + }) - -- Set up keymapping - vim.keymap.set("n", "<leader>g", function() - vim.ui.input({ prompt = "Gemini Query: " }, function(input) - if input then - M.gemini_query(input) -- Use M.gemini_query instead of gemini_query - end - end) - end, { desc = "Query Google AI (via Input)" }) + -- Set up keymapping with 'gc' for 'gemini chat' + vim.keymap.set("n", "<leader>gc", function() + vim.ui.input({ prompt = "Gemini Query: " }, function(input) + if input then + M.gemini_query(input) + end + end) + end, { desc = "Query Google AI (via Input)" }) + + -- Set up keymapping with 'gs' for 'gemini sync' + vim.keymap.set("n", "<leader>gs", function() + vim.ui.input({ prompt = "Gemini Query (with buffer context): " }, function(input) + if input then + local buffer_content = get_current_buffer_content() + M.gemini_query(input, buffer_content) + end + end) + end, { desc = "Query Google AI (with buffer context)" }) end return M