Skip to content

Commit 8589d54

Browse files
committed
Add settings for temperature and max message count
1 parent 19b33b5 commit 8589d54

File tree

4 files changed

+54
-15
lines changed

4 files changed

+54
-15
lines changed

Copilot for Xcode/OpenAIView.swift

Lines changed: 24 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -11,6 +11,8 @@ final class OpenAIViewSettings: ObservableObject {
1111
@AppStorage(\.chatGPTEndpoint) var chatGPTEndpoint: String
1212
@AppStorage(\.chatGPTLanguage) var chatGPTLanguage: String
1313
@AppStorage(\.chatGPTMaxToken) var chatGPTMaxToken: Int
14+
@AppStorage(\.chatGPTTemperature) var chatGPTTemperature: Double
15+
@AppStorage(\.chatGPTMaxMessageCount) var chatGPTMaxMessageCount: Int
1416
init() {}
1517
}
1618

@@ -112,16 +114,34 @@ struct OpenAIView: View {
112114
.textFieldStyle(.roundedBorder)
113115
}
114116
}
117+
118+
HStack {
119+
Slider(value: $settings.chatGPTTemperature, in: 0...2, step: 0.1) {
120+
Text("Temperature")
121+
}
122+
123+
Text(
124+
"\(settings.chatGPTTemperature.formatted(.number.precision(.fractionLength(1))))"
125+
)
126+
.monospacedDigit()
127+
}
128+
129+
Picker("Max Message Count Sending to the Bot", selection: $settings.chatGPTMaxMessageCount) {
130+
Text("No Limit").tag(0)
131+
Text("3 Messages").tag(3)
132+
Text("5 Messages").tag(5)
133+
Text("7 Messages").tag(7)
134+
}
115135
}
116136
}
117137
}
118138
}
119-
139+
120140
var languagePicker: some View {
121141
Menu {
122142
if !settings.chatGPTLanguage.isEmpty,
123143
!OpenAIViewSettings.availableLocalizedLocales
124-
.contains(settings.chatGPTLanguage)
144+
.contains(settings.chatGPTLanguage)
125145
{
126146
Button(
127147
settings.chatGPTLanguage,
@@ -144,8 +164,8 @@ struct OpenAIView: View {
144164
} label: {
145165
Text(
146166
settings.chatGPTLanguage.isEmpty
147-
? "Auto-detected by ChatGPT"
148-
: settings.chatGPTLanguage
167+
? "Auto-detected by ChatGPT"
168+
: settings.chatGPTLanguage
149169
)
150170
}
151171
}

Core/Sources/OpenAIService/ChatGPTService.swift

Lines changed: 14 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -55,7 +55,12 @@ public struct ChatGPTError: Error, Codable, LocalizedError {
5555

5656
public actor ChatGPTService: ChatGPTServiceType {
5757
public var systemPrompt: String
58-
public var temperature: Double
58+
59+
public var defaultTemperature: Double {
60+
min(max(0, UserDefaults.shared.value(for: \.chatGPTTemperature)), 2)
61+
}
62+
63+
var temperature: Double?
5964

6065
public var model: String {
6166
let value = UserDefaults.shared.value(for: \.chatGPTModel)
@@ -93,7 +98,7 @@ public actor ChatGPTService: ChatGPTServiceType {
9398

9499
public init(
95100
systemPrompt: String = "",
96-
temperature: Double = 0.7
101+
temperature: Double? = nil
97102
) {
98103
self.systemPrompt = systemPrompt
99104
self.temperature = temperature
@@ -114,11 +119,11 @@ public actor ChatGPTService: ChatGPTServiceType {
114119
history.append(newMessage)
115120

116121
let (messages, remainingTokens) = combineHistoryWithSystemPrompt()
117-
122+
118123
let requestBody = CompletionRequestBody(
119124
model: model,
120125
messages: messages,
121-
temperature: temperature,
126+
temperature: temperature ?? defaultTemperature,
122127
stream: true,
123128
max_tokens: remainingTokens
124129
)
@@ -194,11 +199,11 @@ public actor ChatGPTService: ChatGPTServiceType {
194199
history.append(newMessage)
195200

196201
let (messages, remainingTokens) = combineHistoryWithSystemPrompt()
197-
202+
198203
let requestBody = CompletionRequestBody(
199204
model: model,
200205
messages: messages,
201-
temperature: temperature,
206+
temperature: temperature ?? defaultTemperature,
202207
stream: true,
203208
max_tokens: remainingTokens
204209
)
@@ -257,16 +262,16 @@ extension ChatGPTService {
257262

258263
func combineHistoryWithSystemPrompt(
259264
minimumReplyTokens: Int = 200,
260-
maxNumberOfMessages: Int = 5,
261-
maxTokens: Int = UserDefaults.shared.value(for: \.chatGPTMaxToken),
265+
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
266+
maxTokens: Int = UserDefaults.shared.value(for: \.chatGPTMaxToken),
262267
encoder: TokenEncoder = GPTEncoder()
263268
)
264269
-> (messages: [CompletionRequestBody.Message], remainingTokens: Int)
265270
{
266271
var all: [CompletionRequestBody.Message] = []
267272
var allTokensCount = encoder.encode(text: systemPrompt).count
268273
for message in history.reversed() {
269-
if all.count >= maxNumberOfMessages { break }
274+
if maxNumberOfMessages > 0, all.count >= maxNumberOfMessages { break }
270275
if message.content.isEmpty { continue }
271276
let tokensCount = encoder.encode(text: message.content).count
272277
if tokensCount + allTokensCount > maxTokens - minimumReplyTokens {

Core/Sources/Preferences/Keys.swift

Lines changed: 15 additions & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -193,7 +193,7 @@ public extension UserDefaultPreferenceKeys {
193193
var chatGPTModel: ChatGPTModel { .init() }
194194

195195
struct ChatGPTMaxToken: UserDefaultPreferenceKey {
196-
public let defaultValue = 2048
196+
public let defaultValue = 4000
197197
public let key = "ChatGPTMaxToken"
198198
}
199199

@@ -205,6 +205,20 @@ public extension UserDefaultPreferenceKeys {
205205
}
206206

207207
var chatGPTLanguage: ChatGPTLanguage { .init() }
208+
209+
struct ChatGPTMaxMessageCount: UserDefaultPreferenceKey {
210+
public let defaultValue = 5
211+
public let key = "ChatGPTMaxMessageCount"
212+
}
213+
214+
var chatGPTMaxMessageCount: ChatGPTMaxMessageCount { .init() }
215+
216+
struct ChatGPTTemperature: UserDefaultPreferenceKey {
217+
public let defaultValue = 0.7
218+
public let key = "ChatGPTTemperature"
219+
}
220+
221+
var chatGPTTemperature: ChatGPTTemperature { .init() }
208222
}
209223

210224
// MARK: - Custom Commands

Core/Tests/OpenAIServiceTests/LimitMessagesTests.swift

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -14,7 +14,7 @@ final class LimitMessagesTests: XCTestCase {
1414
let (messages, remainingTokens) = await runService(
1515
service,
1616
minimumReplyTokens: 200,
17-
maxNumberOfMessages: 100,
17+
maxNumberOfMessages: 0, // smaller than 1 means no limit
1818
maxTokens: 10000
1919
)
2020
XCTAssertEqual(messages, [

0 commit comments

Comments (0)