Skip to content

Commit 28ac89c

Browse files
committed
Limit references token usage
1 parent e43902a commit 28ac89c

1 file changed

Lines changed: 5 additions & 2 deletions

File tree

Tool/Sources/OpenAIService/Memory/AutoManagedChatGPTMemory.swift

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
3030
static let encoder: TokenEncoder = TiktokenCl100kBaseTokenEncoder()
3131

3232
var onHistoryChange: () -> Void = {}
33-
33+
3434
let composeHistory: HistoryComposer
3535

3636
public init(
@@ -251,14 +251,17 @@ extension AutoManagedChatGPTMemory {
251251
usage: Int,
252252
references: [ChatMessage.Reference]
253253
) {
254+
/// The available token count for retrieved content.
255+
let thresholdMaxTokenCount = min(maxTokenCount, configuration.maxTokens / 2)
256+
254257
var retrievedContentTokenCount = 0
255258
let separator = String(repeating: "=", count: 32) // only 1 token
256259
var message = ""
257260
var references = [ChatMessage.Reference]()
258261

259262
func appendToMessage(_ text: String) -> Bool {
260263
let tokensCount = encoder.countToken(text: text)
261-
if tokensCount + retrievedContentTokenCount > maxTokenCount { return false }
264+
if tokensCount + retrievedContentTokenCount > thresholdMaxTokenCount { return false }
262265
retrievedContentTokenCount += tokensCount
263266
message += text
264267
return true

0 commit comments

Comments (0)