Skip to content

Commit 4eae653

Browse files

Merge branch 'feature/auto-token-management' into develop

2 parents: 65dc849 + 8589d54 — commit 4eae653

6 files changed

Lines changed: 216 additions & 20 deletions

File tree

Copilot for Xcode.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved

Lines changed: 9 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Copilot for Xcode/OpenAIView.swift

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,8 @@ final class OpenAIViewSettings: ObservableObject {
1111
@AppStorage(\.chatGPTEndpoint) var chatGPTEndpoint: String
1212
@AppStorage(\.chatGPTLanguage) var chatGPTLanguage: String
1313
@AppStorage(\.chatGPTMaxToken) var chatGPTMaxToken: Int
14+
@AppStorage(\.chatGPTTemperature) var chatGPTTemperature: Double
15+
@AppStorage(\.chatGPTMaxMessageCount) var chatGPTMaxMessageCount: Int
1416
init() {}
1517
}
1618

@@ -112,16 +114,34 @@ struct OpenAIView: View {
112114
.textFieldStyle(.roundedBorder)
113115
}
114116
}
117+
118+
HStack {
119+
Slider(value: $settings.chatGPTTemperature, in: 0...2, step: 0.1) {
120+
Text("Temperature")
121+
}
122+
123+
Text(
124+
"\(settings.chatGPTTemperature.formatted(.number.precision(.fractionLength(1))))"
125+
)
126+
.monospacedDigit()
127+
}
128+
129+
Picker("Max Message Count Sending to the Bot", selection: $settings.chatGPTMaxMessageCount) {
130+
Text("No Limit").tag(0)
131+
Text("3 Messages").tag(3)
132+
Text("5 Messages").tag(5)
133+
Text("7 Messages").tag(7)
134+
}
115135
}
116136
}
117137
}
118138
}
119-
139+
120140
var languagePicker: some View {
121141
Menu {
122142
if !settings.chatGPTLanguage.isEmpty,
123143
!OpenAIViewSettings.availableLocalizedLocales
124-
.contains(settings.chatGPTLanguage)
144+
.contains(settings.chatGPTLanguage)
125145
{
126146
Button(
127147
settings.chatGPTLanguage,
@@ -144,8 +164,8 @@ struct OpenAIView: View {
144164
} label: {
145165
Text(
146166
settings.chatGPTLanguage.isEmpty
147-
? "Auto-detected by ChatGPT"
148-
: settings.chatGPTLanguage
167+
? "Auto-detected by ChatGPT"
168+
: settings.chatGPTLanguage
149169
)
150170
}
151171
}

Core/Package.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ let package = Package(
3939
.package(url: "https://github.com/nmdias/FeedKit", from: "9.1.2"),
4040
.package(url: "https://github.com/gonzalezreal/swift-markdown-ui", from: "2.1.0"),
4141
.package(url: "https://github.com/sparkle-project/Sparkle", from: "2.0.0"),
42+
.package(url: "https://github.com/alfianlosari/GPTEncoder", from: "1.0.4"),
4243
],
4344
targets: [
4445
.target(name: "CGEventObserver"),
@@ -144,6 +145,7 @@ let package = Package(
144145
dependencies: [
145146
"Logger",
146147
"Preferences",
148+
"GPTEncoder",
147149
.product(name: "AsyncAlgorithms", package: "swift-async-algorithms"),
148150
]
149151
),

Core/Sources/OpenAIService/ChatGPTService.swift

Lines changed: 42 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import AsyncAlgorithms
22
import Foundation
3+
import GPTEncoder
34
import Preferences
45

56
public protocol ChatGPTServiceType: ObservableObject {
@@ -54,7 +55,12 @@ public struct ChatGPTError: Error, Codable, LocalizedError {
5455

5556
public actor ChatGPTService: ChatGPTServiceType {
5657
public var systemPrompt: String
57-
public var temperature: Double
58+
59+
public var defaultTemperature: Double {
60+
min(max(0, UserDefaults.shared.value(for: \.chatGPTTemperature)), 2)
61+
}
62+
63+
var temperature: Double?
5864

5965
public var model: String {
6066
let value = UserDefaults.shared.value(for: \.chatGPTModel)
@@ -92,7 +98,7 @@ public actor ChatGPTService: ChatGPTServiceType {
9298

9399
public init(
94100
systemPrompt: String = "",
95-
temperature: Double = 0.7
101+
temperature: Double? = nil
96102
) {
97103
self.systemPrompt = systemPrompt
98104
self.temperature = temperature
@@ -112,12 +118,14 @@ public actor ChatGPTService: ChatGPTServiceType {
112118
)
113119
history.append(newMessage)
114120

121+
let (messages, remainingTokens) = combineHistoryWithSystemPrompt()
122+
115123
let requestBody = CompletionRequestBody(
116124
model: model,
117-
messages: combineHistoryWithSystemPrompt(),
118-
temperature: temperature,
125+
messages: messages,
126+
temperature: temperature ?? defaultTemperature,
119127
stream: true,
120-
max_tokens: maxToken
128+
max_tokens: remainingTokens
121129
)
122130

123131
isReceivingMessage = true
@@ -190,12 +198,14 @@ public actor ChatGPTService: ChatGPTServiceType {
190198
)
191199
history.append(newMessage)
192200

201+
let (messages, remainingTokens) = combineHistoryWithSystemPrompt()
202+
193203
let requestBody = CompletionRequestBody(
194204
model: model,
195-
messages: combineHistoryWithSystemPrompt(),
196-
temperature: temperature,
205+
messages: messages,
206+
temperature: temperature ?? defaultTemperature,
197207
stream: true,
198-
max_tokens: maxToken
208+
max_tokens: remainingTokens
199209
)
200210

201211
isReceivingMessage = true
@@ -210,10 +220,10 @@ public actor ChatGPTService: ChatGPTServiceType {
210220
role: choice.message.role,
211221
content: choice.message.content
212222
))
213-
223+
214224
return choice.message.content
215225
}
216-
226+
217227
return nil
218228
}
219229

@@ -250,17 +260,34 @@ extension ChatGPTService {
250260
uuidGenerator = generator
251261
}
252262

253-
func combineHistoryWithSystemPrompt() -> [CompletionRequestBody.Message] {
263+
func combineHistoryWithSystemPrompt(
264+
minimumReplyTokens: Int = 200,
265+
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
266+
maxTokens: Int = UserDefaults.shared.value(for: \.chatGPTMaxToken),
267+
encoder: TokenEncoder = GPTEncoder()
268+
)
269+
-> (messages: [CompletionRequestBody.Message], remainingTokens: Int)
270+
{
254271
var all: [CompletionRequestBody.Message] = []
255-
var count = 0
272+
var allTokensCount = encoder.encode(text: systemPrompt).count
256273
for message in history.reversed() {
257-
if count >= 5 { break }
274+
if maxNumberOfMessages > 0, all.count >= maxNumberOfMessages { break }
258275
if message.content.isEmpty { continue }
276+
let tokensCount = encoder.encode(text: message.content).count
277+
if tokensCount + allTokensCount > maxTokens - minimumReplyTokens {
278+
break
279+
}
280+
allTokensCount += tokensCount
259281
all.append(.init(role: message.role, content: message.content))
260-
count += 1
261282
}
262283

263284
all.append(.init(role: .system, content: systemPrompt))
264-
return all.reversed()
285+
return (all.reversed(), max(minimumReplyTokens, maxTokens - allTokensCount))
265286
}
266287
}
288+
289+
protocol TokenEncoder {
290+
func encode(text: String) -> [Int]
291+
}
292+
293+
extension GPTEncoder: TokenEncoder {}

Core/Sources/Preferences/Keys.swift

Lines changed: 15 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -193,7 +193,7 @@ public extension UserDefaultPreferenceKeys {
193193
var chatGPTModel: ChatGPTModel { .init() }
194194

195195
struct ChatGPTMaxToken: UserDefaultPreferenceKey {
196-
public let defaultValue = 2048
196+
public let defaultValue = 4000
197197
public let key = "ChatGPTMaxToken"
198198
}
199199

@@ -205,6 +205,20 @@ public extension UserDefaultPreferenceKeys {
205205
}
206206

207207
var chatGPTLanguage: ChatGPTLanguage { .init() }
208+
209+
struct ChatGPTMaxMessageCount: UserDefaultPreferenceKey {
210+
public let defaultValue = 5
211+
public let key = "ChatGPTMaxMessageCount"
212+
}
213+
214+
var chatGPTMaxMessageCount: ChatGPTMaxMessageCount { .init() }
215+
216+
struct ChatGPTTemperature: UserDefaultPreferenceKey {
217+
public let defaultValue = 0.7
218+
public let key = "ChatGPTTemperature"
219+
}
220+
221+
var chatGPTTemperature: ChatGPTTemperature { .init() }
208222
}
209223

210224
// MARK: - Custom Commands
Lines changed: 124 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,124 @@
1+
import Foundation
2+
import XCTest
3+
4+
@testable import OpenAIService
5+
6+
final class LimitMessagesTests: XCTestCase {
7+
func test_send_all_messages_if_not_reached_token_limit() async {
8+
let service = await createService(systemPrompt: "system", messages: [
9+
"hi",
10+
"hello",
11+
"world",
12+
])
13+
14+
let (messages, remainingTokens) = await runService(
15+
service,
16+
minimumReplyTokens: 200,
17+
maxNumberOfMessages: 0, // smaller than 1 means no limit
18+
maxTokens: 10000
19+
)
20+
XCTAssertEqual(messages, [
21+
"system",
22+
"hi",
23+
"hello",
24+
"world",
25+
])
26+
27+
XCTAssertEqual(remainingTokens, 10000 - 12 - 6)
28+
}
29+
30+
func test_send_max_message_if_not_reached_token_limit() async {
31+
let service = await createService(systemPrompt: "system", messages: [
32+
"hi",
33+
"hello",
34+
"world",
35+
])
36+
37+
let (messages, remainingTokens) = await runService(
38+
service,
39+
minimumReplyTokens: 200,
40+
maxNumberOfMessages: 2,
41+
maxTokens: 10000
42+
)
43+
XCTAssertEqual(messages, [
44+
"system",
45+
"hello",
46+
"world",
47+
], "Count from end to start.")
48+
49+
XCTAssertEqual(remainingTokens, 10000 - 10 - 6)
50+
}
51+
52+
func test_reached_token_limit() async {
53+
let service = await createService(systemPrompt: "system", messages: [
54+
"hi",
55+
"hello",
56+
"world",
57+
])
58+
59+
let (messages, remainingTokens) = await runService(
60+
service,
61+
minimumReplyTokens: 200,
62+
maxNumberOfMessages: 100,
63+
maxTokens: 212
64+
)
65+
XCTAssertEqual(messages, [
66+
"system",
67+
"world",
68+
])
69+
70+
XCTAssertEqual(remainingTokens, 201)
71+
}
72+
73+
func test_minimum_reply_tokens_count() async {
74+
let service = await createService(systemPrompt: "system", messages: [
75+
"hi",
76+
"hello",
77+
"world",
78+
])
79+
80+
let (messages, remainingTokens) = await runService(
81+
service,
82+
minimumReplyTokens: 200,
83+
maxNumberOfMessages: 100,
84+
maxTokens: 200
85+
)
86+
XCTAssertEqual(messages, [
87+
"system",
88+
])
89+
90+
XCTAssertEqual(remainingTokens, 200)
91+
}
92+
}
93+
94+
class MockEncoder: TokenEncoder {
95+
func encode(text: String) -> [Int] {
96+
return .init(repeating: 0, count: text.count)
97+
}
98+
}
99+
100+
private func createService(systemPrompt: String, messages: [String]) async -> ChatGPTService {
101+
let service = ChatGPTService(systemPrompt: systemPrompt)
102+
await service.mutateHistory { history in
103+
messages.forEach { message in
104+
history.append(.init(role: .user, content: message))
105+
}
106+
}
107+
return service
108+
}
109+
110+
private func runService(
111+
_ service: ChatGPTService,
112+
minimumReplyTokens: Int,
113+
maxNumberOfMessages: Int,
114+
maxTokens: Int
115+
) async -> (messages: [String], remainingTokens: Int) {
116+
let (messages, remainingTokens) = await service.combineHistoryWithSystemPrompt(
117+
minimumReplyTokens: minimumReplyTokens,
118+
maxNumberOfMessages: maxNumberOfMessages,
119+
maxTokens: maxTokens,
120+
encoder: MockEncoder()
121+
)
122+
123+
return (messages.map(\.content), remainingTokens)
124+
}

0 commit comments

Comments (0)