diff --git a/README.md b/README.md index d9d5d433..3355f1b7 100644 --- a/README.md +++ b/README.md @@ -171,14 +171,13 @@ The mappings can be customized by setting the `mappings` table in your configura - `normal`: Key for normal mode - `insert`: Key for insert mode -For example, to change the submit prompt mapping or show_diff full diff option: +For example, to change the complete mapping to Tab or show_diff full diff option: ```lua { mappings = { - submit_prompt = { - normal = 's', - insert = '' + complete = { + insert = '<Tab>' } show_diff = { full_diff = true @@ -444,6 +443,7 @@ Below are all available configuration options with their default values: model = 'gpt-4.1', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $). tools = nil, -- Default tool or array of tools (or groups) to share with LLM (can be specified manually in prompt via @). sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat (can be specified manually in prompt via >). + language = 'English', -- Default language to use for answers. resource_processing = false, -- Enable intelligent resource processing (skips unnecessary resources to save tokens) diff --git a/lua/CopilotChat/config.lua b/lua/CopilotChat/config.lua index 8809fdc6..30ad2bd8 100644 --- a/lua/CopilotChat/config.lua +++ b/lua/CopilotChat/config.lua @@ -18,6 +18,7 @@ ---@field model string? ---@field tools string|table|nil ---@field sticky string|table|nil +---@field language string? ---@field resource_processing boolean? ---@field temperature number? ---@field headless boolean? @@ -58,6 +59,7 @@ return { model = 'gpt-4.1', -- Default model to use, see ':CopilotChatModels' for available models (can be specified manually in prompt via $). tools = nil, -- Default tool or array of tools (or groups) to share with LLM (can be specified manually in prompt via @). 
sticky = nil, -- Default sticky prompt or array of sticky prompts to use at start of every new chat (can be specified manually in prompt via >). + language = 'English', -- Default language to use for answers. resource_processing = false, -- Enable intelligent resource processing (skips unnecessary resources to save tokens) diff --git a/lua/CopilotChat/config/prompts.lua b/lua/CopilotChat/config/prompts.lua index 8bb5efd9..8764f914 100644 --- a/lua/CopilotChat/config/prompts.lua +++ b/lua/CopilotChat/config/prompts.lua @@ -2,6 +2,7 @@ local COPILOT_BASE = [[ When asked for your name, you must respond with "GitHub Copilot". Follow the user's requirements carefully & to the letter. Keep your answers short and impersonal. +Always answer in {LANGUAGE} unless explicitly asked otherwise. The user works in editor called Neovim which has these core concepts: - Buffer: An in-memory text content that may be associated with a file diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua index 1c25eda5..67848dd8 100644 --- a/lua/CopilotChat/init.lua +++ b/lua/CopilotChat/init.lua @@ -473,6 +473,7 @@ function M.resolve_prompt(prompt, config) if config.system_prompt then config.system_prompt = config.system_prompt:gsub('{OS_NAME}', jit.os) + config.system_prompt = config.system_prompt:gsub('{LANGUAGE}', config.language) if state.source then config.system_prompt = config.system_prompt:gsub('{DIR}', state.source.cwd()) end