forked from intitni/CopilotForXcode
-
Notifications
You must be signed in to change notification settings. (Fork 0)
Expand file tree
/
Copy path: AutoManagedChatGPTMemory.swift
More file actions
93 lines (77 loc) · 3.13 KB
/
AutoManagedChatGPTMemory.swift
File metadata and controls
93 lines (77 loc) · 3.13 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import Foundation
import GPTEncoder
import Preferences
/// A memory that automatically manages the history according to max tokens and max message count.
public actor AutoManagedChatGPTMemory: ChatGPTMemory {
    /// The trimmed history that is safe to send, oldest first, with the
    /// system prompt (when non-empty) leading.
    public var messages: [ChatMessage] { generateSendingHistory() }
    /// Reply-token headroom; currently always `nil` (left to the API).
    public var remainingTokens: Int? { generateRemainingTokens() }

    public var systemPrompt: ChatMessage
    public var history: [ChatMessage] = [] {
        didSet { onHistoryChange() }
    }
    public var configuration: ChatGPTConfiguration

    // Shared tokenizer; encoding is deterministic, so one instance serves
    // every memory.
    static let encoder: TokenEncoder = GPTEncoder()

    // Invoked on every mutation of `history`; replaced via
    // `observeHistoryChange(_:)`.
    var onHistoryChange: () -> Void = {}

    public init(systemPrompt: String, configuration: ChatGPTConfiguration) {
        self.systemPrompt = .init(role: .system, content: systemPrompt)
        self.configuration = configuration
    }

    /// Applies `update` to the stored history in place.
    /// The `didSet` observer fires once when the mutation completes.
    public func mutateHistory(_ update: (inout [ChatMessage]) -> Void) {
        update(&history)
    }

    /// Replaces the system prompt's text, keeping its `.system` role.
    /// Any previously cached token count on it is left untouched here;
    /// `generateSendingHistory` recomputes lazily via `tokensCount`.
    public func mutateSystemPrompt(_ newPrompt: String) {
        systemPrompt.content = newPrompt
    }

    /// Registers a callback for history changes from outside the actor.
    /// Installation hops onto the actor asynchronously, so the callback
    /// may not be active immediately upon return.
    public nonisolated
    func observeHistoryChange(_ onChange: @escaping () -> Void) {
        Task {
            await setOnHistoryChangeBlock(onChange)
        }
    }

    /// Builds the message list to send: walks the history newest-first,
    /// collecting messages until either the message-count limit or the
    /// token budget (`maxTokens - minimumReplyTokens`) would be exceeded,
    /// then returns the collected messages in chronological order with the
    /// system prompt (when non-empty) at the front.
    ///
    /// - Parameters:
    ///   - maxNumberOfMessages: Upper bound on history messages; values
    ///     <= 0 disable the count limit.
    ///   - encoder: Tokenizer used to measure messages.
    func generateSendingHistory(
        maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
        encoder: TokenEncoder = AutoManagedChatGPTMemory.encoder
    ) -> [ChatMessage] {
        // Returns the cached token count when available; otherwise measures
        // the message once and writes the result back into it.
        func cachedTokenCount(of message: inout ChatMessage) -> Int {
            if let cached = message.tokensCount { return cached }
            let measured = encoder.countToken(message: message)
            message.tokensCount = measured
            return measured
        }

        let systemTokens = cachedTokenCount(of: &systemPrompt)
        var usedTokens = systemPrompt.isEmpty ? 0 : systemTokens

        var collected: [ChatMessage] = []
        for index in history.indices.reversed() {
            if maxNumberOfMessages > 0, collected.count >= maxNumberOfMessages { break }
            // Snapshot BEFORE caching: the returned copy intentionally keeps
            // the element's original `tokensCount` (the cache write mutates
            // `history` in place, not this copy).
            let message = history[index]
            if message.isEmpty { continue }
            let messageTokens = cachedTokenCount(of: &history[index])
            if messageTokens + usedTokens >
                configuration.maxTokens - configuration.minimumReplyTokens
            {
                break
            }
            usedTokens += messageTokens
            collected.append(message)
        }

        // Appended last while `collected` is newest-first, so the final
        // `reversed()` places the system prompt at the very front.
        if !systemPrompt.isEmpty {
            collected.append(systemPrompt)
        }
        return collected.reversed()
    }

    /// Always `nil` for now: the API is allowed to decide the reply budget.
    /// The commented-out computation below is kept for reference.
    func generateRemainingTokens(
        maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
        encoder: TokenEncoder = AutoManagedChatGPTMemory.encoder
    ) -> Int? {
        // It should be fine to just let OpenAI decide.
        return nil
        // let tokensCount = generateSendingHistory(
        //     maxNumberOfMessages: maxNumberOfMessages,
        //     encoder: encoder
        // )
        // .reduce(0) { $0 + ($1.tokensCount ?? 0) }
        // return max(configuration.minimumReplyTokens, configuration.maxTokens - tokensCount)
    }

    // Actor-isolated setter backing the nonisolated observer API above.
    func setOnHistoryChangeBlock(_ onChange: @escaping () -> Void) {
        onHistoryChange = onChange
    }
}