debug completion

Jonas Widen 2025-03-17 07:00:12 +01:00
parent 8b6a2c7454
commit 9456cbd902


@@ -170,20 +170,30 @@ function M.trigger_completion()
   -- Add up to 10 previous lines for context
   local start_line = math.max(1, line - 10)
   for i = start_line, line - 1 do
+    if lines[i] and #lines[i] > 0 then
       table.insert(context, lines[i])
     end
+  end

   -- Add current line
+  if current_line and #current_line > 0 then
     table.insert(context, current_line)
+  end

   -- Add up to 10 lines after current line
   local end_line = math.min(#lines, line + 10)
   for i = line + 1, end_line do
+    if lines[i] and #lines[i] > 0 then
       table.insert(context, lines[i])
     end
+  end

-  -- Combine all lines
+  -- Combine all lines and validate
   local full_context = table.concat(context, "\n")
+  if #full_context == 0 then
+    debug_print("Empty context, skipping completion request")
+    return
+  end

   -- Construct prompt for Gemini
   local prompt = string.format(
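The hunk above builds a context window of up to ten non-blank lines on each side of the cursor and bails out before any API call when that window turns out to be empty. The following standalone sketch mirrors that logic so it can be run with plain Lua outside Neovim; the function name build_context, its plain-table arguments, and the sample input are illustrative only and not part of the plugin.

-- Standalone sketch of the context-window logic introduced above (illustrative only).
local function build_context(lines, line, current_line)
  local context = {}
  local start_line = math.max(1, line - 10)
  for i = start_line, line - 1 do
    if lines[i] and #lines[i] > 0 then      -- skip missing or blank lines before the cursor
      table.insert(context, lines[i])
    end
  end
  if current_line and #current_line > 0 then -- include the cursor line only when non-empty
    table.insert(context, current_line)
  end
  local end_line = math.min(#lines, line + 10)
  for i = line + 1, end_line do
    if lines[i] and #lines[i] > 0 then      -- skip missing or blank lines after the cursor
      table.insert(context, lines[i])
    end
  end
  return table.concat(context, "\n")        -- empty string means there is nothing to complete
end

-- Example: blank lines are dropped, only real content reaches the prompt.
print(build_context({ "local x = 1", "", "return x" }, 2, ""))
-- prints the two non-blank lines, joined with a newline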
@@ -191,6 +201,12 @@ function M.trigger_completion()
     full_context
   )

+  -- Validate prompt before sending
+  if #prompt == 0 then
+    debug_print("Empty prompt, skipping completion request")
+    return
+  end
+
   -- Get completion from Gemini
   api.get_response(prompt, nil, function(response, error)
     if error then
@@ -198,7 +214,7 @@ function M.trigger_completion()
       return
     end

-    if type(response) == "string" then
+    if type(response) == "string" and #response > 0 then
       -- Cache the response
       current_suggestion.cache[cache_key] = response
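The new guards report through debug_print, whose definition is not part of this diff. A minimal sketch of what such a helper could look like, assuming a plugin-level debug flag and Neovim's vim.notify API (both assumptions, not confirmed by this commit):

-- Hypothetical shape of the debug_print helper called by the new guards.
local M = { config = { debug = false } }  -- placeholder; the plugin's real config table is not shown here

local function debug_print(msg)
  if M.config.debug then
    -- vim.notify is Neovim's standard notification entry point; the prefix is illustrative
    vim.notify("[completion] " .. msg, vim.log.levels.DEBUG)
  end
end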