From 9054ba178f6033a8637f89999f3a8cd39a5be4da Mon Sep 17 00:00:00 2001
From: Jonas Widen
Date: Sun, 16 Mar 2025 18:31:40 +0100
Subject: [PATCH] try to use nvim http client instead of curl

---
 lua/gemini/api.lua  | 189 ++++++++++++++++++++++++++------------------
 lua/gemini/init.lua |  37 +++++----
 2 files changed, 130 insertions(+), 96 deletions(-)

diff --git a/lua/gemini/api.lua b/lua/gemini/api.lua
index 0de6338..a9da6d8 100644
--- a/lua/gemini/api.lua
+++ b/lua/gemini/api.lua
@@ -5,37 +5,92 @@ local M = {}
 -- Store conversation history
 local conversation_history = {}
 
+-- Helper function to get API key
 local function get_api_key()
-  -- Check for environment variable
-  local api_key = os.getenv("GEMINI_API_KEY")
-  if api_key then
-    return api_key
-  end
-
-  -- Check for Neovim global variable
-  api_key = vim.g.gemini_api_key
-  if api_key then
-    return api_key
-  end
-
-  return nil -- API key not found
+  return vim.g.gemini_api_key or os.getenv("GEMINI_API_KEY")
 end
 
-local function make_request(prompt, context)
-  local api_key = get_api_key()
+-- Async HTTP request function
+local function async_request(url, payload, callback)
+  local host, path = url:match("^https://([^/]+)(/.*)$")
+  local client = vim.loop.new_tcp()
+
+  client:connect(host, 443, function(err)
+    if err then
+      vim.schedule(function()
+        callback(nil, "Connection error: " .. err)
+      end)
+      return
+    end
 
-  if not api_key then
-    vim.notify(
-      "Google AI API key not set. Set GEMINI_API_KEY environment variable or g.gemini_api_key in your init.vim or init.lua.",
-      vim.log.levels.ERROR
+    local ssl = vim.loop.new_tls()
+    ssl:wrap(client)
+
+    local request = string.format(
+      "POST %s HTTP/1.1\r\n" ..
+      "Host: %s\r\n" ..
+      "Content-Type: application/json\r\n" ..
+      "Content-Length: %d\r\n" ..
+      "Connection: close\r\n\r\n" ..
+      "%s",
+      path, host, #payload, payload
     )
-    return nil
+
+    local response = ""
+
+    ssl:write(request, function(err)
+      if err then
+        vim.schedule(function()
+          callback(nil, "Write error: " .. err)
+        end)
+        return
+      end
+
+      ssl:read_start(function(err, chunk)
+        if err then
+          vim.schedule(function()
+            callback(nil, "Read error: " .. err)
+          end)
+          return
+        end
+
+        if chunk then
+          response = response .. chunk
+        else
+          -- Connection closed, process response
+          local body = response:match("\r\n\r\n(.+)$")
+          local success, decoded = pcall(vim.json.decode, body)
+
+          vim.schedule(function()
+            if success then
+              callback(decoded)
+            else
+              callback(nil, "JSON decode error: " .. (body or response))
+            end
+          end)
+
+          ssl:close()
+          client:close()
+        end
+      end)
+    end)
+  end)
+end
+
+function M.get_response(prompt, context, callback)
+  local api_key = get_api_key()
+
+  if not api_key then
+    vim.schedule(function()
+      callback(nil, "API key not set")
+    end)
+    return
   end
 
   local model = "gemini-2.0-flash"
   local contents = {}
 
-  -- Add conversation history to the request
+  -- Add conversation history
   for _, message in ipairs(conversation_history) do
     table.insert(contents, {
       role = message.role,
@@ -45,7 +100,7 @@ local function make_request(prompt, context)
     })
   end
 
-  -- Add the current prompt
+  -- Add current prompt
   if context then
     table.insert(contents, {
       role = "user",
@@ -62,77 +117,53 @@ local function make_request(prompt, context)
     })
   end
 
-  local payload = vim.json.encode({
-    contents = contents,
-  })
-
-  -- Escape the payload for shell
-  payload = vim.fn.shellescape(payload)
-
-  local command = string.format(
-    "curl -s -X POST "
-      .. "'https://generativelanguage.googleapis.com/v1/models/%s:generateContent?key=%s' "
-      .. "-H 'Content-Type: application/json' "
-      .. "-d %s",
-    model,
-    api_key,
-    payload
-  )
-
-  local result = vim.fn.system(command)
-
-  -- Check for errors during the curl execution
-  if vim.v.shell_error ~= 0 then
-    vim.notify("Error executing curl. Check your command and ensure curl is installed.", vim.log.levels.ERROR)
-    return nil
-  end
-
-  local success, decoded_result = pcall(vim.json.decode, result)
-  if not success then
-    vim.notify("Failed to decode API response: " .. result, vim.log.levels.ERROR)
-    return nil
-  end
-
-  return decoded_result
-end
-
-function M.get_response(prompt, context)
-  -- Add user message to history
+  -- Store prompt in history
   table.insert(conversation_history, {
     role = "user",
     content = prompt
   })
 
-  local result = make_request(prompt, context)
+  local payload = vim.json.encode({
+    contents = contents,
+  })
 
-  if result then
-    if result.error then
-      vim.notify("API Error: " .. vim.inspect(result.error), vim.log.levels.ERROR)
-      return nil
+  local url = string.format(
+    "https://generativelanguage.googleapis.com/v1/models/%s:generateContent?key=%s",
+    model,
+    api_key
+  )
+
+  async_request(url, payload, function(result, error)
+    if error then
+      callback(nil, error)
+      return
     end
 
-    if
-      result.candidates
-      and result.candidates[1]
-      and result.candidates[1].content
-      and result.candidates[1].content.parts
-      and result.candidates[1].content.parts[1]
-      and result.candidates[1].content.parts[1].text
-    then
+    if result.error then
+      callback(nil, "API Error: " .. vim.inspect(result.error))
+      return
+    end
+
+    if result.candidates and
+      result.candidates[1] and
+      result.candidates[1].content and
+      result.candidates[1].content.parts and
+      result.candidates[1].content.parts[1] and
+      result.candidates[1].content.parts[1].text then
+
       local response_text = result.candidates[1].content.parts[1].text
-      -- Add assistant response to history
+
+      -- Store response in history
       table.insert(conversation_history, {
        role = "model",
         content = response_text
       })
-      return response_text
+
+      callback(response_text)
+    else
+      callback(nil, "Unexpected response structure")
     end
-
-    vim.notify("Unexpected response structure: " .. vim.inspect(result), vim.log.levels.ERROR)
-  end
-
-  vim.notify("No response from Google AI API or malformed response.", vim.log.levels.ERROR)
-  return nil
+  end)
 end
 
 -- Add function to clear conversation history
diff --git a/lua/gemini/init.lua b/lua/gemini/init.lua
index 49c3446..a968846 100644
--- a/lua/gemini/init.lua
+++ b/lua/gemini/init.lua
@@ -161,15 +161,31 @@ local function gemini_query(prompt, context)
   -- Store the context for subsequent queries
   current_context = context
 
-  -- Show initial message in chat window and ensure it's visible
+  -- Show initial message in chat window
   local initial_content = "User: " .. prompt .. "\n\nAssistant: Thinking..."
   update_chat_window(initial_content)
 
   -- Force Neovim to update the screen
   vim.cmd('redraw')
 
-  local response = api.get_response(prompt, context)
-  if response then
+  -- Make async request
+  api.get_response(prompt, context, function(response, error)
+    if error then
+      -- Replace "Thinking..." with error message
+      local lines = vim.api.nvim_buf_get_lines(chat_bufnr, 0, -1, false)
+      for i = 1, #lines do
+        if lines[i] == "Assistant: Thinking..." then
+          vim.api.nvim_buf_set_option(chat_bufnr, 'modifiable', true)
+          vim.api.nvim_buf_set_lines(chat_bufnr, i - 1, i, false,
+            {"Assistant: Error - " .. error})
+          vim.api.nvim_buf_set_option(chat_bufnr, 'modifiable', false)
+          break
+        end
+      end
+      vim.notify("Failed to get response: " .. error, vim.log.levels.ERROR)
+      return
+    end
+
     -- Make buffer modifiable
     vim.api.nvim_buf_set_option(chat_bufnr, 'modifiable', true)
 
@@ -204,20 +220,7 @@ local function gemini_query(prompt, context)
 
     -- Clear the command line
     vim.cmd('echo ""')
-  else
-    -- Replace "Thinking..." with error message
-    local lines = vim.api.nvim_buf_get_lines(chat_bufnr, 0, -1, false)
-    for i = 1, #lines do
-      if lines[i] == "Assistant: Thinking..." then
-        lines[i] = "Assistant: Failed to get response from Gemini API"
-        vim.api.nvim_buf_set_option(chat_bufnr, 'modifiable', true)
-        vim.api.nvim_buf_set_lines(chat_bufnr, 0, -1, false, lines)
-        vim.api.nvim_buf_set_option(chat_bufnr, 'modifiable', false)
-        break
-      end
-    end
-    vim.notify("Failed to get a response from Gemini API", vim.log.levels.ERROR)
-  end
+  end)
 end
 
 -- Make gemini_query available in M so it can be used by the setup function
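
Note: stock Neovim's vim.loop is a thin libuv wrapper with no TLS layer, so the
vim.loop.new_tls() / ssl:wrap() calls above are not provided by Neovim itself, and
tcp:connect() expects an IP address rather than a hostname. If the hand-rolled HTTPS
client turns out not to be workable, the non-blocking (url, payload, callback)
contract this patch introduces can still be kept by driving curl through
vim.fn.jobstart instead of the blocking vim.fn.system call being removed. A minimal
sketch, assuming only stock Neovim APIs; async_request_jobstart is an illustrative
name, not part of the plugin:

-- Fallback sketch: same callback contract as async_request, but runs curl as an
-- async job so the UI never blocks. The argv list bypasses the shell, so the JSON
-- payload needs no shellescape.
local function async_request_jobstart(url, payload, callback)
  local output = {}
  local job_id = vim.fn.jobstart({
    "curl", "-s", "-X", "POST",
    "-H", "Content-Type: application/json",
    "-d", payload,
    url,
  }, {
    stdout_buffered = true,
    on_stdout = function(_, data)
      -- data is a list of output lines; collect them and decode on exit
      if data then
        vim.list_extend(output, data)
      end
    end,
    on_exit = function(_, exit_code)
      if exit_code ~= 0 then
        callback(nil, "curl exited with code " .. exit_code)
        return
      end
      local ok, decoded = pcall(vim.json.decode, table.concat(output, "\n"))
      if ok then
        callback(decoded)
      else
        callback(nil, "JSON decode error: " .. table.concat(output, "\n"))
      end
    end,
  })
  if job_id <= 0 then
    callback(nil, "Failed to start curl (is it installed?)")
  end
end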