Skip to content
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
8f6c468
feat: add GitHub Enterprise Server/Cloud support
jkorsvik Oct 13, 2025
32e948d
fix: use copilot-api subdomain for enterprise Copilot endpoints
jkorsvik Oct 13, 2025
6f07c56
chore: clean up docs and bump version to 0.8.0
jkorsvik Oct 13, 2025
ee4670a
refactor: clean up enterprise URL handling and simplify code patterns
jkorsvik Oct 13, 2025
151646e
feat: add AGENTS.md for build, lint, test commands and code style guidelines
jkorsvik Oct 13, 2025
d9f3aad
Grafana
luisbrandao Oct 16, 2025
dc46615
Refactor code structure for improved readability and maintainability
luisbrandao Oct 28, 2025
265a2df
feat: include context size in token usage logs for chat completions
luisbrandao Oct 28, 2025
0f99cae
refactor: replace consola with console.log for token logging
luisbrandao Oct 28, 2025
1e6d259
feat: add tzdata installation for timezone support in Dockerfile
luisbrandao Oct 28, 2025
a7da48d
feat: add local timezone formatting for timestamps in chat completion…
luisbrandao Oct 28, 2025
29668ce
feat: support copilot reasoning_opaque and reasoning_text
caozhiyuan Nov 19, 2025
a2467d3
feat: add signature field to AnthropicThinkingBlock
caozhiyuan Nov 19, 2025
58f7a45
feat: add idleTimeout configuration for bun server
caozhiyuan Nov 19, 2025
3fa5519
feat: enhance reasoning handling in tool calls and change the thinkin…
caozhiyuan Nov 19, 2025
dfb40d2
feat: conditionally handle reasoningOpaque in handleFinish based on t…
caozhiyuan Nov 19, 2025
7657d87
fix: handleReasoningOpaqueInToolCalls add isToolBlockOpen judge
caozhiyuan Nov 20, 2025
968ff12
merge: integrate chat-completions-reasoning support
jkorsvik Nov 25, 2025
9b3f737
Merge branch 'feat/enterprise-and-reasoning'
luisbrandao Feb 19, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
refactor: replace consola with console.log for token logging
  • Loading branch information
luisbrandao committed Oct 28, 2025
commit 0f99cae915d0e3dd2474bb83e5e710a810661852
6 changes: 3 additions & 3 deletions src/routes/chat-completions/handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ export async function handleCompletion(c: Context) {
response.usage.prompt_tokens,
response.usage.completion_tokens,
)
consola.info(
console.log(
`${timestamp} - INFO - Tokens - Model: ${response.model}, In: ${response.usage.prompt_tokens}, Out: ${response.usage.completion_tokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
)
}
Expand Down Expand Up @@ -133,8 +133,8 @@ export async function handleCompletion(c: Context) {
selectedModel?.capabilities.limits.max_context_window_tokens ?? "N/A"

recordTokenUsage(payload.model, totalPromptTokens, totalCompletionTokens)
consola.info(
`${timestamp} - INFO - Tokens (streaming) - Model: ${payload.model}, In: ${totalPromptTokens}, Out: ${totalCompletionTokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
console.log(
`${timestamp} - INFO - Tokens - Model: ${payload.model}, In: ${totalPromptTokens}, Out: ${totalCompletionTokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
)
}
})
Expand Down
8 changes: 4 additions & 4 deletions src/routes/messages/handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,8 @@ export async function handleCompletion(c: Context) {
response.usage.prompt_tokens,
response.usage.completion_tokens,
)
consola.info(
`${timestamp} - INFO - Tokens (Anthropic) - Model: ${response.model}, In: ${response.usage.prompt_tokens}, Out: ${response.usage.completion_tokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
console.log(
`${timestamp} - INFO - Tokens - Model: ${response.model}, In: ${response.usage.prompt_tokens}, Out: ${response.usage.completion_tokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
)
}

Expand Down Expand Up @@ -155,8 +155,8 @@ export async function handleCompletion(c: Context) {
totalPromptTokens,
totalCompletionTokens,
)
consola.info(
`${timestamp} - INFO - Tokens (Anthropic streaming) - Model: ${openAIPayload.model}, In: ${totalPromptTokens}, Out: ${totalCompletionTokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
console.log(
`${timestamp} - INFO - Tokens - Model: ${openAIPayload.model}, In: ${totalPromptTokens}, Out: ${totalCompletionTokens}, Ctx: ${contextSize}, Time: ${timeFormatted}, Speed: ${speed.toFixed(2)} t/s`,
)
}
})
Expand Down