Fixed issues: switch to the gemini-pro model, shell-escape the curl payload, decode the API response with pcall, and drop a debug print

Jonas Widen 2025-03-16 14:14:12 +01:00
parent c366268b5c
commit 7bfeb48f7d


@@ -29,7 +29,7 @@ local function make_request(prompt)
     return nil
   end
-  local model = "gemini-2.0-flash" -- SPECIFY Gemini 2.0 Flash MODEL
+  local model = "gemini-pro" -- Using the standard model instead of flash
   -- Construct the JSON payload
   local payload = vim.json.encode({
     contents = {
@@ -43,11 +43,14 @@ local function make_request(prompt)
     },
   })
+  -- Escape the payload for shell
+  payload = vim.fn.shellescape(payload)
   local command = string.format(
     "curl -s -X POST "
-      .. "'https://generative-ai.googleapis.com/v1beta/models/%s:generateContent?key=%s' "
+      .. "'https://generative-ai.googleapis.com/v1/models/%s:generateContent?key=%s' "
       .. "-H 'Content-Type: application/json' "
-      .. "-d '%s'",
+      .. "-d %s",
     model,
     api_key,
     payload
@@ -61,7 +64,11 @@ local function make_request(prompt)
     return nil
   end
-  local decoded_result = vim.json.decode(result)
+  local success, decoded_result = pcall(vim.json.decode, result)
+  if not success then
+    vim.notify("Failed to decode API response: " .. result, vim.log.levels.ERROR)
+    return nil
+  end
   return decoded_result
 end
@@ -83,6 +90,4 @@ function M.get_response(prompt)
   end
 end
-print("api.lua loaded")
 return M
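
For reference, a minimal sketch of how the patched make_request could read once these hunks are applied. It is assembled from the diff above; the API key lookup (here a GEMINI_API_KEY environment variable), the exact request body under contents, and the use of vim.fn.system to execute the curl command are assumptions, since those parts are not shown in the diff.

local function make_request(prompt)
  -- Assumed: the key comes from an environment variable; the diff does not
  -- show where api_key is obtained.
  local api_key = os.getenv("GEMINI_API_KEY")
  if not api_key or api_key == "" then
    vim.notify("GEMINI_API_KEY is not set", vim.log.levels.ERROR)
    return nil
  end

  local model = "gemini-pro" -- Using the standard model instead of flash

  -- Construct the JSON payload (inner structure of contents is assumed)
  local payload = vim.json.encode({
    contents = {
      { parts = { { text = prompt } } },
    },
  })

  -- Escape the payload for shell
  payload = vim.fn.shellescape(payload)

  local command = string.format(
    "curl -s -X POST "
      .. "'https://generative-ai.googleapis.com/v1/models/%s:generateContent?key=%s' "
      .. "-H 'Content-Type: application/json' "
      .. "-d %s",
    model,
    api_key,
    payload
  )

  -- Assumed: run curl synchronously; the diff does not show how the command
  -- is executed or how its errors are reported.
  local result = vim.fn.system(command)
  if vim.v.shell_error ~= 0 or result == "" then
    vim.notify("curl request failed: " .. result, vim.log.levels.ERROR)
    return nil
  end

  local success, decoded_result = pcall(vim.json.decode, result)
  if not success then
    vim.notify("Failed to decode API response: " .. result, vim.log.levels.ERROR)
    return nil
  end
  return decoded_result
end

Shell-escaping the encoded payload and passing it with -d %s (rather than -d '%s') avoids double quoting, so single quotes inside the JSON no longer break the command.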