Skip to content

Commit 8330640

Browse files
committed
Move the context system prompt so it sits immediately before the latest message
1 parent 4974b20 commit 8330640

2 files changed

Lines changed: 27 additions & 6 deletions

File tree

Core/Sources/ChatService/DynamicContextController.swift

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,7 +92,7 @@ final class DynamicContextController {
9292
return contexts
9393
}
9494

95-
let extraSystemPrompt = contexts
95+
let contextSystemPrompt = contexts
9696
.map(\.systemPrompt)
9797
.filter { !$0.isEmpty }
9898
.joined(separator: "\n\n")
@@ -104,9 +104,10 @@ final class DynamicContextController {
104104

105105
let contextualSystemPrompt = """
106106
\(language.isEmpty ? "" : "You must always reply in \(language)")
107-
\(systemPrompt)\(extraSystemPrompt.isEmpty ? "" : "\n\(extraSystemPrompt)")
107+
\(systemPrompt)
108108
"""
109109
await memory.mutateSystemPrompt(contextualSystemPrompt)
110+
await memory.mutateContextSystemPrompt(contextSystemPrompt)
110111
await memory.mutateRetrievedContent(contextPrompts.map(\.content))
111112
functionProvider.append(functions: contexts.flatMap(\.functions))
112113
}

Tool/Sources/OpenAIService/Memory/AutoManagedChatGPTMemory.swift

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
99
public private(set) var remainingTokens: Int?
1010

1111
public var systemPrompt: String
12+
public var contextSystemPrompt: String
1213
public var retrievedContent: [String] = []
1314
public var history: [ChatMessage] = [] {
1415
didSet { onHistoryChange() }
@@ -27,6 +28,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
2728
functionProvider: ChatGPTFunctionProvider
2829
) {
2930
self.systemPrompt = systemPrompt
31+
contextSystemPrompt = ""
3032
self.configuration = configuration
3133
self.functionProvider = functionProvider
3234
_ = Self.encoder // force pre-initialize
@@ -40,6 +42,10 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
4042
systemPrompt = newPrompt
4143
}
4244

45+
public func mutateContextSystemPrompt(_ newPrompt: String) {
46+
contextSystemPrompt = newPrompt
47+
}
48+
4349
public func mutateRetrievedContent(_ newContent: [String]) {
4450
retrievedContent = newContent
4551
}
@@ -67,6 +73,8 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
6773
/// [Retrieved Content B]
6874
/// [Functions] priority: high
6975
/// [Message History] priority: medium
76+
/// [Context System Prompt] priority: high
77+
/// [Latest Message] priority: high
7078
/// ```
7179
func generateSendingHistory(
7280
maxNumberOfMessages: Int = UserDefaults.shared.value(for: \.chatGPTMaxMessageCount),
@@ -80,7 +88,12 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
8088
}
8189

8290
var smallestSystemPromptMessage = ChatMessage(role: .system, content: systemPrompt)
91+
var contextSystemPromptMessage = ChatMessage(role: .system, content: contextSystemPrompt)
8392
let smallestSystemMessageTokenCount = countToken(&smallestSystemPromptMessage)
93+
let contextSystemPromptTokenCount = !contextSystemPrompt.isEmpty
94+
? countToken(&contextSystemPromptMessage)
95+
: 0
96+
8497
let functionTokenCount = functionProvider.functions.reduce(into: 0) { partial, function in
8598
var count = encoder.countToken(text: function.name)
8699
+ encoder.countToken(text: function.description)
@@ -92,6 +105,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
92105
partial += count
93106
}
94107
let mandatoryContentTokensCount = smallestSystemMessageTokenCount
108+
+ contextSystemPromptTokenCount
95109
+ functionTokenCount
96110
+ 3 // every reply is primed with <|start|>assistant<|message|>
97111

@@ -135,13 +149,13 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
135149
for (index, content) in retrievedContent.filter({ !$0.isEmpty }).enumerated() {
136150
if index == 0 {
137151
if !appendToSystemPrompt("""
138-
139-
152+
153+
140154
## Relevant Content
141-
155+
142156
Below are information related to the conversation, separated by \(separator)
143157
144-
158+
145159
""") { break }
146160
} else {
147161
if !appendToSystemPrompt("\n\(separator)\n") { break }
@@ -154,16 +168,22 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
154168
let message = ChatMessage(role: .system, content: systemPrompt)
155169
allMessages.append(message)
156170
}
171+
172+
if !contextSystemPrompt.isEmpty {
173+
allMessages.insert(contextSystemPromptMessage, at: 1)
174+
}
157175

158176
#if DEBUG
159177
Logger.service.info("""
160178
Sending tokens count
161179
- system prompt: \(smallestSystemMessageTokenCount)
180+
- context system prompt: \(contextSystemPromptTokenCount)
162181
- functions: \(functionTokenCount)
163182
- messages: \(messageTokenCount)
164183
- retrieved content: \(retrievedContentTokenCount)
165184
- total: \(
166185
smallestSystemMessageTokenCount
186+
+ contextSystemPromptTokenCount
167187
+ functionTokenCount
168188
+ messageTokenCount
169189
+ retrievedContentTokenCount

0 commit comments

Comments (0)