diff --git a/plugin/gemini/api.lua b/plugin/gemini/api.lua
new file mode 100644
index 0000000..05ac65e
--- /dev/null
+++ b/plugin/gemini/api.lua
@@ -0,0 +1,90 @@
+-- plugin/gemini/api.lua
+
+local M = {}
+
+-- Read the API key lazily so users may set vim.g.gemini_api_key after
+-- this module has been required (the original captured it at load time).
+local function get_api_key()
+  return vim.g.gemini_api_key
+end
+
+--- Send `prompt` to the Gemini generateContent endpoint via curl.
+-- @tparam string prompt text to send to the model
+-- @treturn table|nil decoded JSON response, or nil (after notifying) on failure
+local function make_request(prompt)
+  local api_key = get_api_key()
+  if not api_key then
+    vim.notify(
+      "Google AI API key not set. Set g.gemini_api_key in your init.vim or init.lua.",
+      vim.log.levels.ERROR
+    )
+    return nil
+  end
+
+  local model = "gemini-2.0-flash"
+  local payload = vim.json.encode({
+    contents = {
+      {
+        parts = {
+          {
+            text = prompt,
+          },
+        },
+      },
+    },
+  })
+
+  -- FIX: the hosted endpoint is generativelanguage.googleapis.com;
+  -- generative-ai.googleapis.com does not serve generateContent.
+  local url = string.format(
+    "https://generativelanguage.googleapis.com/v1beta/models/%s:generateContent?key=%s",
+    model,
+    api_key
+  )
+
+  -- FIX: pass argv as a list (bypasses the shell entirely) and send the
+  -- payload on stdin via "-d @-"; the previous single-quoted string.format
+  -- command was shell-injectable through the prompt and broke on apostrophes.
+  local result = vim.fn.system(
+    { "curl", "-s", "-X", "POST", url, "-H", "Content-Type: application/json", "-d", "@-" },
+    payload
+  )
+
+  if vim.v.shell_error ~= 0 then
+    vim.notify("Error executing curl. Check your command and ensure curl is installed.", vim.log.levels.ERROR)
+    return nil
+  end
+
+  -- FIX: guard the decode; curl can succeed yet return non-JSON (e.g. an
+  -- HTML error page from a proxy), which previously raised inside decode.
+  local ok, decoded_result = pcall(vim.json.decode, result)
+  if not ok then
+    vim.notify("Could not decode API response as JSON.", vim.log.levels.ERROR)
+    return nil
+  end
+
+  return decoded_result
+end
+
+--- Extract the first candidate's reply text from a generateContent response.
+-- @tparam string prompt
+-- @treturn string|nil reply text, or nil (after notifying) on failure
+function M.get_response(prompt)
+  local result = make_request(prompt)
+
+  if
+    result
+    and result.candidates
+    and result.candidates[1]
+    and result.candidates[1].content
+    and result.candidates[1].content.parts
+    and result.candidates[1].content.parts[1]
+  then
+    return result.candidates[1].content.parts[1].text
+  else
+    vim.notify("No response from Google AI API or malformed response.", vim.log.levels.ERROR)
+    return nil
+  end
+end
+
+return M
diff --git a/plugin/gemini/init.lua b/plugin/gemini/init.lua
new file mode 100644
index 0000000..af0e5a7
--- /dev/null
+++ b/plugin/gemini/init.lua
@@ -0,0 +1,59 @@
+-- plugin/gemini/init.lua
+
+local api = require("gemini.api")
+
+--- Run a query and show the reply in a centered floating scratch window.
+-- @tparam string prompt
+local function gemini_query(prompt)
+  local response = api.get_response(prompt)
+
+  if response then
+    local new_buf = vim.api.nvim_create_buf(false, true) -- unlisted scratch buffer
+    -- FIX: replace the whole buffer (end index -1) instead of inserting
+    -- above the initial empty line, which left a stray trailing line.
+    vim.api.nvim_buf_set_lines(new_buf, 0, -1, false, vim.split(response, "\n"))
+    -- FIX: `col` must be an integer; floor the centering arithmetic.
+    vim.api.nvim_open_win(new_buf, true, {
+      relative = "editor",
+      width = 80,
+      height = 20,
+      row = 5,
+      col = math.floor(vim.o.columns / 2) - 40,
+      border = "rounded",
+      title = "Google AI Response",
+    })
+  end
+end
+
+vim.api.nvim_create_user_command("Gemini", function(opts)
+  local prompt = opts.args
+  if prompt == "" then
+    vim.notify("Please provide a prompt for Gemini.", vim.log.levels.WARN)
+    return
+  end
+  gemini_query(prompt)
+end, {
+  desc = "Query Google AI",
+  nargs = "+", -- require at least one argument
+  -- FIX: dropped `complete = "shellcmd"`; completing shell command names
+  -- inside a natural-language prompt was misleading.
+})
+
+-- FIX: the original mapped bare normal-mode "g", which shadowed every
+-- built-in g-prefixed command (gg, gd, gu, ...). Use a leader mapping.
+vim.keymap.set("n", "<leader>gq", function()
+  vim.ui.input({ prompt = "Gemini Query: " }, function(input)
+    if input then
+      gemini_query(input)
+    end
+  end)
+end, { desc = "Query Google AI (via Input)" })
+
+-- Optional setup hook; add configuration handling here if needed.
+local function setup()
+  -- Add any setup logic here (e.g., reading configuration options)
+end
+
+return {
+  setup = setup,
+}