README.md (4 changes: 3 additions & 1 deletion)

@@ -445,10 +445,12 @@ Below are all available configuration options with their default values:
   tools = nil, -- Default tool or array of tools (or groups) to share with LLM (can be specified manually in prompt via @).
   sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat (can be specified manually in prompt via >).
 
+  resource_processing = false, -- Enable intelligent resource processing (skips unnecessary resources to save tokens)
+
   temperature = 0.1, -- Result temperature
   headless = false, -- Do not write to chat buffer and use history (useful for using custom processing)
   callback = nil, -- Function called when full response is received
-  remember_as_sticky = true, -- Remember model as sticky prompts when asking questions
+  remember_as_sticky = true, -- Remember config as sticky prompts when asking questions
 
   -- default selection
   -- see select.lua for implementation
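With this change, resource processing is off by default and must be enabled explicitly. A minimal sketch of opting in from a user config (the standard `setup` call; only the new option is shown):

```lua
require('CopilotChat').setup({
  -- Opt in to intelligent resource processing (skips unnecessary resources to save tokens).
  resource_processing = true,
})
```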
lua/CopilotChat/client.lua (20 changes: 3 additions & 17 deletions)

@@ -61,9 +61,7 @@ local class = utils.class
 --- Constants
 local RESOURCE_FORMAT = '# %s\n```%s\n%s\n```'
 local LINE_CHARACTERS = 100
-local BIG_FILE_THRESHOLD = 1000 * LINE_CHARACTERS
-local BIG_EMBED_THRESHOLD = 200 * LINE_CHARACTERS
-local TRUNCATED = '... (truncated)'
 
 --- Resolve provider function
 ---@param model string
@@ -103,16 +101,9 @@ end
 
 --- Generate content block with line numbers, truncating if necessary
 ---@param content string
----@param threshold number: The threshold for truncation
 ---@param start_line number?: The starting line number
 ---@return string
-local function generate_content_block(content, threshold, start_line)
-  local total_chars = #content
-  if total_chars > threshold then
-    content = content:sub(1, threshold)
-    content = content .. '\n' .. TRUNCATED
-  end
-
+local function generate_content_block(content, start_line)
   if start_line ~= nil then
     local lines = vim.split(content, '\n')
     local total_lines = #lines
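The diff collapses the rest of `generate_content_block`, but the visible context suggests the simplified function now only numbers lines, with no truncation path left. A sketch of the plausible full body; the numbering loop and its exact format are assumptions rather than the repository's confirmed code:

```lua
-- Sketch reconstructed from the visible diff; the numbering loop and its
-- output format are assumed, not taken from the source.
local function generate_content_block(content, start_line)
  if start_line ~= nil then
    local lines = vim.split(content, '\n')
    local total_lines = #lines
    for i = 1, total_lines do
      -- Prefix each line with its absolute line number in the file.
      lines[i] = string.format('%d: %s', start_line + i - 1, lines[i])
    end
    content = table.concat(lines, '\n')
  end
  return content
end
```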
@@ -144,12 +135,7 @@ local function generate_selection_message(selection)
   if selection.start_line and selection.end_line then
     out = out .. string.format('Excerpt from %s, lines %s to %s:\n', filename, selection.start_line, selection.end_line)
   end
-  out = out
-    .. string.format(
-      '```%s\n%s\n```',
-      filetype,
-      generate_content_block(content, BIG_FILE_THRESHOLD, selection.start_line)
-    )
+  out = out .. string.format('```%s\n%s\n```', filetype, generate_content_block(content, selection.start_line))
 
   return {
     content = out,
@@ -167,7 +153,7 @@ local function generate_resource_messages(resources)
       return resource.data and resource.data ~= ''
     end)
     :map(function(resource)
-      local content = generate_content_block(resource.data, BIG_FILE_THRESHOLD, 1)
+      local content = generate_content_block(resource.data, 1)
 
       return {
         content = string.format(RESOURCE_FORMAT, resource.name, resource.type, content),
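For reference, `RESOURCE_FORMAT` renders each surviving resource as a markdown heading plus a fenced block. An illustration with hypothetical values, where the numbered string stands in for the output of `generate_content_block(resource.data, 1)` (its exact format is assumed):

```lua
-- Illustration only: hypothetical resource name, type, and content.
local RESOURCE_FORMAT = '# %s\n```%s\n%s\n```'
local numbered = '1: local x = 1\n2: print(x)' -- assumed numbering format
print(string.format(RESOURCE_FORMAT, 'example.lua', 'lua', numbered))
-- # example.lua
-- ```lua
-- 1: local x = 1
-- 2: print(x)
-- ```
```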
lua/CopilotChat/config.lua (5 changes: 4 additions & 1 deletion)

@@ -17,6 +17,7 @@
 ---@field model string?
 ---@field tools string|table<string>|nil
 ---@field sticky string|table<string>|nil
+---@field resource_processing boolean?
 ---@field temperature number?
 ---@field headless boolean?
 ---@field callback nil|fun(response: string, source: CopilotChat.source)
@@ -57,10 +58,12 @@ return {
   tools = nil, -- Default tool or array of tools (or groups) to share with LLM (can be specified manually in prompt via @).
   sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat (can be specified manually in prompt via >).
 
+  resource_processing = false, -- Enable intelligent resource processing (skips unnecessary resources to save tokens)
+
   temperature = 0.1, -- Result temperature
   headless = false, -- Do not write to chat buffer and use history (useful for using custom processing)
   callback = nil, -- Function called when full response is received
-  remember_as_sticky = true, -- Remember model as sticky prompts when asking questions
+  remember_as_sticky = true, -- Remember config as sticky prompts when asking questions
 
   -- default selection
   selection = require('CopilotChat.select').visual,
lua/CopilotChat/init.lua (14 changes: 9 additions & 5 deletions)

@@ -903,11 +903,15 @@ function M.ask(prompt, config)
   local ok, err = pcall(async.run, function()
     local selected_tools, resolved_resources, resolved_tools, prompt = M.resolve_functions(prompt, config)
     local selected_model, prompt = M.resolve_model(prompt, config)
-    local query_ok, processed_resources = pcall(resources.process_resources, prompt, selected_model, resolved_resources)
-    if query_ok then
-      resolved_resources = processed_resources
-    else
-      log.warn('Failed to process resources', processed_resources)
+
+    if config.resource_processing then
+      local query_ok, processed_resources =
+        pcall(resources.process_resources, prompt, selected_model, resolved_resources)
+      if query_ok then
+        resolved_resources = processed_resources
+      else
+        log.warn('Failed to process resources', processed_resources)
+      end
     end
 
     prompt = vim.trim(prompt)
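The new guard makes resource processing opt-in while keeping the graceful fallback: if `resources.process_resources` throws, the original resources are kept and a warning is logged. The same pattern in isolation, as a standalone sketch with hypothetical names rather than the plugin's code:

```lua
-- Standalone sketch of the opt-in guard with pcall fallback; names are hypothetical.
local function maybe_process(enabled, process, input)
  if not enabled then
    return input -- feature disabled: pass input through unchanged
  end
  local ok, result = pcall(process, input)
  if ok then
    return result -- processed successfully
  end
  -- pcall returned the error in 'result'; warn and fall back to the original input.
  print('warning: processing failed: ' .. tostring(result))
  return input
end

-- Usage: with enabled = false, the processor never runs and input is returned as-is.
local out = maybe_process(true, function(t) return { t[1] } end, { 'a', 'b' })
```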