Fixed issues

This commit is contained in:
Jonas Widen 2025-03-16 14:14:12 +01:00
parent c366268b5c
commit 7bfeb48f7d

@@ -29,7 +29,7 @@ local function make_request(prompt)
return nil
end
local model = "gemini-2.0-flash" -- SPECIFY Gemini 2.0 Flash MODEL
local model = "gemini-pro" -- Using the standard model instead of flash
-- Construct the JSON payload
local payload = vim.json.encode({
contents = {
@@ -43,11 +43,14 @@ local function make_request(prompt)
},
})
-- Escape the payload for shell
payload = vim.fn.shellescape(payload)
local command = string.format(
"curl -s -X POST "
.. "'https://generative-ai.googleapis.com/v1beta/models/%s:generateContent?key=%s' "
.. "'https://generative-ai.googleapis.com/v1/models/%s:generateContent?key=%s' "
.. "-H 'Content-Type: application/json' "
.. "-d '%s'",
.. "-d %s",
model,
api_key,
payload
@@ -61,7 +64,11 @@ local function make_request(prompt)
return nil
end
local decoded_result = vim.json.decode(result)
local success, decoded_result = pcall(vim.json.decode, result)
if not success then
vim.notify("Failed to decode API response: " .. result, vim.log.levels.ERROR)
return nil
end
return decoded_result
end
@@ -83,6 +90,4 @@ function M.get_response(prompt)
end
end
print("api.lua loaded")
return M