Skip to content

Commit ede3adf

Browse files
committed
Improve model listing and add info about :CopilotChatModels to config
- Deduplicate models based on version
- Sort models by name
- Use shortest model id based on version in output

Signed-off-by: Tomas Slusny <slusnucky@gmail.com>
1 parent 041465a commit ede3adf

4 files changed

Lines changed: 16 additions & 4 deletions

File tree

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -206,7 +206,7 @@ Also see [here](/lua/CopilotChat/config.lua):
206206
allow_insecure = false, -- Allow insecure server connections
207207

208208
system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
209-
model = 'gpt-4o', -- GPT model to use, 'gpt-3.5-turbo', 'gpt-4', or 'gpt-4o'
209+
model = 'gpt-4o', -- GPT model to use, see ':CopilotChatModels' for available models
210210
temperature = 0.1, -- GPT temperature
211211

212212
question_header = '## User ', -- Header to use for user questions

lua/CopilotChat/config.lua

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ return {
8484
allow_insecure = false, -- Allow insecure server connections
8585

8686
system_prompt = prompts.COPILOT_INSTRUCTIONS, -- System prompt to use
87-
model = 'gpt-4o-2024-05-13', -- GPT model to use, 'gpt-3.5-turbo', 'gpt-4', or `gpt-4o-2024-05-13`
87+
model = 'gpt-4o', -- GPT model to use, see ':CopilotChatModels' for available models
8888
temperature = 0.1, -- GPT temperature
8989

9090
question_header = '## User ', -- Header to use for user questions

lua/CopilotChat/copilot.lua

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -664,7 +664,19 @@ end
664664
function Copilot:list_models(callback)
665665
self:with_auth(function()
666666
self:with_models(function()
667-
callback(vim.tbl_keys(self.models))
667+
-- Group models by version and shortest ID
668+
local version_map = {}
669+
for id, model in pairs(self.models) do
670+
local version = model.version
671+
if not version_map[version] or #id < #version_map[version] then
672+
version_map[version] = id
673+
end
674+
end
675+
676+
-- Map to IDs and sort
677+
local result = vim.tbl_values(version_map)
678+
table.sort(result)
679+
callback(result)
668680
end)
669681
end)
670682
end

lua/CopilotChat/tiktoken.lua

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@ local function load_tiktoken_data(done, tokenizer)
2323
local tiktoken_url = 'https://openaipublic.blob.core.windows.net/encodings/'
2424
.. tokenizer
2525
.. '.tiktoken'
26-
log.info('Downloading tiktoken data from ' .. tiktoken_url)
2726
local cache_path = get_cache_path(tiktoken_url:match('.+/(.+)'))
2827

2928
local async
3029
async = vim.loop.new_async(function()
3130
if not file_exists(cache_path) then
31+
log.info('Downloading tiktoken data from ' .. tiktoken_url)
3232
vim.schedule(function()
3333
curl.get(tiktoken_url, {
3434
output = cache_path,

0 commit comments

Comments
 (0)