forked from intitni/CopilotForXcode
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path AutoManagedChatGPTMemory.swift
More file actions
131 lines (114 loc) · 4.68 KB
/
AutoManagedChatGPTMemory.swift
File metadata and controls
131 lines (114 loc) · 4.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
import Foundation
import Preferences
import TokenEncoder
/// A memory that automatically manages the history according to max tokens and max message count.
///
/// Messages are trimmed from the oldest end so that the prompt stays within
/// `configuration.maxTokens - configuration.minimumReplyTokens`, and the number
/// of history messages sent never exceeds the user-configured maximum.
public actor AutoManagedChatGPTMemory: ChatGPTMemory {
// The trimmed view of `history` that should actually be sent to the model,
// with the system prompt (if non-empty) placed first.
public var messages: [ChatMessage] { generateSendingHistory() }
// Currently always nil; see `generateRemainingTokens` below.
public var remainingTokens: Int? { generateRemainingTokens() }
public var systemPrompt: ChatMessage
// Full conversation history. Any mutation fires `onHistoryChange`.
public var history: [ChatMessage] = [] {
didSet { onHistoryChange() }
}
public var configuration: ChatGPTConfiguration
public var functionProvider: ChatGPTFunctionProvider
// Shared encoder; force-initialized in `init` so the (presumably expensive)
// Tiktoken setup cost is paid up front rather than on first token count.
static let encoder: TokenEncoder = TiktokenCl100kBaseTokenEncoder()
// Callback installed via `observeHistoryChange`; defaults to a no-op.
var onHistoryChange: () -> Void = {}
/// - Parameters:
///   - systemPrompt: Text for the `.system` message placed at the top of every request.
///   - configuration: Supplies `maxTokens` and `minimumReplyTokens` used for trimming.
///   - functionProvider: Functions whose name/description/schema also consume prompt tokens.
public init(
systemPrompt: String,
configuration: ChatGPTConfiguration,
functionProvider: ChatGPTFunctionProvider
) {
self.systemPrompt = .init(role: .system, content: systemPrompt)
self.configuration = configuration
self.functionProvider = functionProvider
_ = Self.encoder // force pre-initialize
}
/// Mutates `history` in place; `onHistoryChange` fires via the property's `didSet`.
public func mutateHistory(_ update: (inout [ChatMessage]) -> Void) {
update(&history)
}
/// Replaces the system prompt's text, keeping its existing role and cached state
/// other than `content` untouched.
public func mutateSystemPrompt(_ newPrompt: String) {
systemPrompt.content = newPrompt
}
/// Registers a callback fired on every `history` mutation.
/// NOTE(review): the hop through an unstructured `Task` means the callback is
/// installed asynchronously — a mutation made immediately after this call could
/// be missed. Confirm callers tolerate that.
public nonisolated
func observeHistoryChange(_ onChange: @escaping () -> Void) {
Task {
await setOnHistoryChangeBlock(onChange)
}
}
/// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
/// Builds the message list to send, trimming from the oldest end:
/// walks `history` from the most recent message backwards, accumulating token
/// counts (plus function-declaration and system-prompt overhead), and stops once
/// either `maxNumberOfMessages` is reached (only enforced when > 0) or adding
/// the next message would leave fewer than `configuration.minimumReplyTokens`
/// for the reply. Computed counts are cached back into each message's
/// `tokensCount`. Returns messages in chronological order, system prompt first.
func generateSendingHistory(
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
encoder: TokenEncoder = AutoManagedChatGPTMemory.encoder
) -> [ChatMessage] {
// Returns the cached count when present; otherwise computes and caches it.
func countToken(_ message: inout ChatMessage) -> Int {
if let count = message.tokensCount { return count }
let count = encoder.countToken(message: message)
message.tokensCount = count
return count
}
var all: [ChatMessage] = []
let systemMessageTokenCount = countToken(&systemPrompt)
// Function declarations (name + description + JSON-encoded argument schema)
// count against the prompt budget too.
let functionTokenCount = functionProvider.functions.reduce(into: 0) { partial, function in
var count = encoder.countToken(text: function.name)
+ encoder.countToken(text: function.description)
if let data = try? JSONEncoder().encode(function.argumentSchema),
let string = String(data: data, encoding: .utf8)
{
count += encoder.countToken(text: string)
}
partial += count
}
var allTokensCount = functionTokenCount + 3 // every reply is primed with <|start|>assistant<|message|>
allTokensCount += systemPrompt.isEmpty ? 0 : systemMessageTokenCount
// Walk newest-to-oldest; `all` is built newest-first and the final
// `reversed()` restores chronological order.
for (index, message) in history.enumerated().reversed() {
if maxNumberOfMessages > 0, all.count >= maxNumberOfMessages { break }
if message.isEmpty { continue }
// Mutating `history[index]` only writes the cached token count; the
// `enumerated()` sequence iterates a value-semantics copy, so the loop
// itself is unaffected.
let tokensCount = countToken(&history[index])
if tokensCount + allTokensCount >
configuration.maxTokens - configuration.minimumReplyTokens
{
break
}
allTokensCount += tokensCount
all.append(message)
}
if !systemPrompt.isEmpty {
// Appended last so that after `reversed()` it becomes the first message.
all.append(systemPrompt)
}
return all.reversed()
}
/// Intentionally returns nil so the server decides the reply length;
/// the commented-out local estimate is kept for reference.
func generateRemainingTokens(
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
encoder: TokenEncoder = AutoManagedChatGPTMemory.encoder
) -> Int? {
// It should be fine to just let OpenAI decide.
return nil
// let tokensCount = generateSendingHistory(
// maxNumberOfMessages: maxNumberOfMessages,
// encoder: encoder
// )
// .reduce(0) { $0 + ($1.tokensCount ?? 0) }
// return max(configuration.minimumReplyTokens, configuration.maxTokens - tokensCount)
}
/// Actor-isolated setter used by `observeHistoryChange` to install the callback.
func setOnHistoryChangeBlock(_ onChange: @escaping () -> Void) {
onHistoryChange = onChange
}
}
extension TokenEncoder {
    /// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
    /// Estimates the token cost of a single chat message: a fixed 3-token
    /// per-message framing overhead, plus the encoded lengths of the content,
    /// the name (which costs one extra token), and any function call's name
    /// and arguments.
    func countToken(message: ChatMessage) -> Int {
        // Every message carries a fixed 3-token framing overhead.
        let overhead = 3
        let contentTokens = message.content.map { encode(text: $0).count } ?? 0
        // A name costs its encoded length plus 1 extra token.
        let nameTokens = message.name.map { encode(text: $0).count + 1 } ?? 0
        let functionCallTokens = message.functionCall.map {
            encode(text: $0.name).count + encode(text: $0.arguments).count
        } ?? 0
        return overhead + contentTokens + nameTokens + functionCallTokens
    }
}