diff --git a/CHANGELOG.md b/CHANGELOG.md index 894a8113..a7d125ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [4.7.2](https://github.com/CopilotC-Nvim/CopilotChat.nvim/compare/v4.7.1...v4.7.2) (2025-09-17) + + +### Bug Fixes + +* **chat:** do not create multiple chat instances ([#1432](https://github.com/CopilotC-Nvim/CopilotChat.nvim/issues/1432)) ([74611b5](https://github.com/CopilotC-Nvim/CopilotChat.nvim/commit/74611b56e813f50e905122387b92fb832ac9616c)) + ## [4.7.1](https://github.com/CopilotC-Nvim/CopilotChat.nvim/compare/v4.7.0...v4.7.1) (2025-09-16) diff --git a/README.md b/README.md index 9f1cda76..8da6be23 100644 --- a/README.md +++ b/README.md @@ -19,11 +19,12 @@ https://github.com/user-attachments/assets/8cad5643-63b2-4641-a5c4-68bc313f20e6 CopilotChat.nvim brings GitHub Copilot Chat capabilities directly into Neovim with a focus on transparency and user control. - 🤖 **Multiple AI Models** - GitHub Copilot (including GPT-4o, Gemini 2.5 Pro, Claude 4 Sonnet, Claude 3.7 Sonnet, Claude 3.5 Sonnet, o3-mini, o4-mini) + custom providers (Ollama, Mistral.ai). The exact list of available models depends on your [GitHub Copilot settings](https://github.com/settings/copilot/features) and the models provided by GitHub's API. 
-- 🔧 **Tool Calling** - LLM can use workspace functions (file reading, git operations, search) with your explicit approval -- 🔒 **Explicit Control** - Only shares what you specifically request - no background data collection -- 📝 **Interactive Chat** - Rich UI with completion, diffs, and quickfix integration +- 🔧 **Tool Calling** - LLM can call workspace functions (file reading, git operations, search) with your explicit approval +- 🔒 **Privacy First** - Only shares what you explicitly request - no background data collection +- 📝 **Interactive Chat** - Interactive UI with completion, diffs, and quickfix integration - 🎯 **Smart Prompts** - Composable templates and sticky prompts for consistent context -- ⚡ **Efficient** - Smart token usage with tiktoken counting and history management +- ⚡ **Token Efficient** - Resource replacement prevents duplicate context, history management via tiktoken counting +- 🔗 **Scriptable** - Comprehensive Lua API for automation and headless mode operation - 🔌 **Extensible** - [Custom functions](https://github.com/CopilotC-Nvim/CopilotChat.nvim/discussions/categories/functions) and [providers](https://github.com/CopilotC-Nvim/CopilotChat.nvim/discussions/categories/providers), plus integrations like [mcphub.nvim](https://github.com/ravitemer/mcphub.nvim) # Installation diff --git a/doc/CopilotChat.txt b/doc/CopilotChat.txt index bf35ba50..b86efef6 100644 --- a/doc/CopilotChat.txt +++ b/doc/CopilotChat.txt @@ -1,4 +1,4 @@ -*CopilotChat.txt* For NVIM v0.8.0 Last change: 2025 September 16 +*CopilotChat.txt* For NVIM v0.8.0 Last change: 2025 September 17 ============================================================================== Table of Contents *CopilotChat-table-of-contents* @@ -41,11 +41,12 @@ CopilotChat.nvim brings GitHub Copilot Chat capabilities directly into Neovim with a focus on transparency and user control. 
- 🤖 **Multiple AI Models** - GitHub Copilot (including GPT-4o, Gemini 2.5 Pro, Claude 4 Sonnet, Claude 3.7 Sonnet, Claude 3.5 Sonnet, o3-mini, o4-mini) + custom providers (Ollama, Mistral.ai). The exact list of available models depends on your GitHub Copilot settings and the models provided by GitHub's API. -- 🔧 **Tool Calling** - LLM can use workspace functions (file reading, git operations, search) with your explicit approval -- 🔒 **Explicit Control** - Only shares what you specifically request - no background data collection -- 📝 **Interactive Chat** - Rich UI with completion, diffs, and quickfix integration +- 🔧 **Tool Calling** - LLM can call workspace functions (file reading, git operations, search) with your explicit approval +- 🔒 **Privacy First** - Only shares what you explicitly request - no background data collection +- 📝 **Interactive Chat** - Interactive UI with completion, diffs, and quickfix integration - 🎯 **Smart Prompts** - Composable templates and sticky prompts for consistent context -- ⚡ **Efficient** - Smart token usage with tiktoken counting and history management +- ⚡ **Token Efficient** - Resource replacement prevents duplicate context, history management via tiktoken counting +- 🔗 **Scriptable** - Comprehensive Lua API for automation and headless mode operation - 🔌 **Extensible** - Custom functions and providers , plus integrations like mcphub.nvim diff --git a/lua/CopilotChat/init.lua b/lua/CopilotChat/init.lua index 98c1c1e6..268405bd 100644 --- a/lua/CopilotChat/init.lua +++ b/lua/CopilotChat/init.lua @@ -1100,41 +1100,41 @@ function M.setup(config) if M.chat then M.chat:close(state.source.bufnr) M.chat:delete() - end - - M.chat = require('CopilotChat.ui.chat')(M.config, function(bufnr) - for name, _ in pairs(M.config.mappings) do - map_key(name, bufnr) - end - - require('CopilotChat.completion').enable(bufnr, M.config.chat_autocomplete) + else + M.chat = require('CopilotChat.ui.chat')(M.config, 
function(bufnr) + for name, _ in pairs(M.config.mappings) do + map_key(name, bufnr) + end - vim.api.nvim_create_autocmd({ 'BufEnter', 'BufLeave' }, { - buffer = bufnr, - callback = function(ev) - if ev.event == 'BufEnter' then - update_source() - end + require('CopilotChat.completion').enable(bufnr, M.config.chat_autocomplete) - vim.schedule(function() - select.highlight(state.source.bufnr, not (M.config.highlight_selection and M.chat:focused())) - end) - end, - }) - - if M.config.insert_at_end then - vim.api.nvim_create_autocmd({ 'InsertEnter' }, { + vim.api.nvim_create_autocmd({ 'BufEnter', 'BufLeave' }, { buffer = bufnr, - callback = function() - vim.cmd('normal! 0') - vim.cmd('normal! G$') - vim.v.char = 'x' + callback = function(ev) + if ev.event == 'BufEnter' then + update_source() + end + + vim.schedule(function() + select.highlight(state.source.bufnr, not (M.config.highlight_selection and M.chat:focused())) + end) end, }) - end - finish(true) - end) + if M.config.insert_at_end then + vim.api.nvim_create_autocmd({ 'InsertEnter' }, { + buffer = bufnr, + callback = function() + vim.cmd('normal! 0') + vim.cmd('normal! G$') + vim.v.char = 'x' + end, + }) + end + + finish(true) + end) + end for name, prompt in pairs(list_prompts()) do if prompt.prompt then diff --git a/lua/CopilotChat/ui/chat.lua b/lua/CopilotChat/ui/chat.lua index 267b8aed..8cd08c11 100644 --- a/lua/CopilotChat/ui/chat.lua +++ b/lua/CopilotChat/ui/chat.lua @@ -164,7 +164,7 @@ local Chat = class(function(self, config, on_buf_create) if not msg or msg == '' then self.chat_overlay:restore(self.winnr, self.bufnr) else - self:overlay({ text = msg }) + self.chat_overlay:show(msg, self.winnr) end end) end, Overlay) diff --git a/version.txt b/version.txt index 7c66fca5..af9764a5 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -4.7.1 +4.7.2