Added <leader>gs to send the current buffer as context along with a query

Author: Jonas Widen, 2025-03-16 14:34:21 +01:00
Parent: 1f2671c329
Commit: a42f903e8a
3 changed files with 106 additions and 77 deletions

File 1 of 3 (usage documentation):

@@ -38,17 +38,22 @@ vim.g.gemini_api_key = "your-api-key-here"
 ## Usage
-The plugin provides two ways to interact with Gemini:
+The plugin provides three ways to interact with Gemini:
 1. Command Mode:
 ```vim
 :Gemini What is SOLID in software engineering?
 ```
-2. Keymap (default):
-   - Press `<leader>g` to open an input prompt
+2. Simple Query (default):
+   - Press `<leader>gc` to open an input prompt (mnemonic: 'gemini chat')
    - Type your query and press Enter
+3. Context-Aware Query:
+   - Press `<leader>gs` to open an input prompt (mnemonic: 'gemini sync')
+   - This will send your current buffer's content as context along with your query
+   - Useful for code-related questions about the file you're working on
 ### Response Window
 The AI response appears in a floating window. You can close it using:
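Taken together, the change is small from a user's point of view. Below is a minimal sketch of a config exercising all three entry points; the `gemini` module name is inferred from the `require("gemini.api")` call in the files further down, and the key value is a placeholder:

```lua
-- Minimal sketch of a user config after this change (illustrative, not from the repo).
vim.g.gemini_api_key = "your-api-key-here" -- placeholder; never commit a real key
require("gemini").setup()                  -- module name inferred from require("gemini.api")

-- 1. Command mode:        :Gemini What is SOLID in software engineering?
-- 2. Simple query:        <leader>gc, type the question, press Enter
-- 3. Context-aware query: <leader>gs, same prompt plus the whole current buffer as context
```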

File 2 of 3 (API request module):

@@ -18,7 +18,7 @@ local function get_api_key()
   return nil -- API key not found
 end
-local function make_request(prompt)
+local function make_request(prompt, context)
   local api_key = get_api_key()
   if not api_key then
@@ -29,9 +29,8 @@ local function make_request(prompt)
     return nil
   end
-  local model = "gemini-2.0-flash" -- Updated to an available model
-  local payload = vim.json.encode({
-    contents = {
+  local model = "gemini-2.0-flash"
+  local contents = {
     {
       parts = {
         {
@@ -39,7 +38,17 @@ local function make_request(prompt)
         },
       },
     },
-  },
+  }
+  -- If context is provided, add it to the contents
+  if context then
+    table.insert(contents[1].parts, 1, {
+      text = "Context:\n" .. context .. "\n\nQuery:\n",
+    })
+  end
+  local payload = vim.json.encode({
+    contents = contents,
   })
   -- Escape the payload for shell
@@ -72,8 +81,8 @@ local function make_request(prompt)
   return decoded_result
 end
-function M.get_response(prompt)
-  local result = make_request(prompt)
+function M.get_response(prompt, context)
+  local result = make_request(prompt, context)
   if result then
     if result.error then

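Before the third file, it is worth seeing the request body the reworked `make_request` builds when a context string is passed. The hunks above elide the prompt part of `contents`, so its `text = prompt` shape is an assumption here; the rest follows the diff:

```lua
-- Sketch of the body built by make_request("Explain this file", ctx).
local ctx = "local x = 1" -- hypothetical buffer content
local payload = vim.json.encode({
  contents = {
    {
      parts = {
        -- Prepended at position 1 by table.insert(...) when context ~= nil:
        { text = "Context:\n" .. ctx .. "\n\nQuery:\n" },
        -- Assumed shape of the original prompt part (elided in the hunks above):
        { text = "Explain this file" },
      },
    },
  },
})
-- The encoded payload is then shell-escaped and sent to the
-- gemini-2.0-flash model named earlier in this file.
```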
File 3 of 3 (plugin setup and keymaps):

@@ -3,8 +3,13 @@
 local api = require("gemini.api")
 local M = {}
-local function gemini_query(prompt)
-  local response = api.get_response(prompt)
+local function get_current_buffer_content()
+  local lines = vim.api.nvim_buf_get_lines(0, 0, -1, false)
+  return table.concat(lines, "\n")
+end
+
+local function gemini_query(prompt, context)
+  local response = api.get_response(prompt, context)
   if response then
     -- Create a scratch buffer
@@ -61,21 +66,31 @@ function M.setup()
       vim.notify("Please provide a prompt for Gemini.", vim.log.levels.WARN)
       return
     end
-    M.gemini_query(prompt) -- Use M.gemini_query instead of gemini_query
+    M.gemini_query(prompt)
   end, {
     desc = "Query Google AI",
     nargs = "+",
     complete = "shellcmd",
   })
-  -- Set up keymapping
-  vim.keymap.set("n", "<leader>g", function()
+  -- Set up keymapping with 'gc' for 'gemini chat'
+  vim.keymap.set("n", "<leader>gc", function()
     vim.ui.input({ prompt = "Gemini Query: " }, function(input)
       if input then
-        M.gemini_query(input) -- Use M.gemini_query instead of gemini_query
+        M.gemini_query(input)
       end
     end)
   end, { desc = "Query Google AI (via Input)" })
+  -- Set up keymapping with 'gs' for 'gemini sync'
+  vim.keymap.set("n", "<leader>gs", function()
+    vim.ui.input({ prompt = "Gemini Query (with buffer context): " }, function(input)
+      if input then
+        local buffer_content = get_current_buffer_content()
+        M.gemini_query(input, buffer_content)
+      end
+    end)
+  end, { desc = "Query Google AI (with buffer context)" })
 end
 return M
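For completeness, a sketch of what the new mapping captures for a small buffer; the sample lines are hypothetical, while the API calls are exactly those used by `get_current_buffer_content` above:

```lua
-- What <leader>gs assembles for, say, a three-line buffer.
-- nvim_buf_get_lines(0, 0, -1, false) returns every line of the current
-- buffer (0); the end index -1 means "through the last line", and false
-- disables strict index checking.
local lines = vim.api.nvim_buf_get_lines(0, 0, -1, false)
-- e.g. { "local M = {}", "function M.hello() end", "return M" }
local buffer_content = table.concat(lines, "\n")
-- gemini_query(input, buffer_content) then hands both strings to
-- api.get_response, which prepends the buffer under "Context:" (see file 2).
```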