@@ -5,16 +5,14 @@ import TokenEncoder
55
66/// A memory that automatically manages the history according to max tokens and max message count.
77public actor AutoManagedChatGPTMemory : ChatGPTMemory {
8- public private( set) var messages : [ ChatMessage ] = [ ]
8+ public private( set) var history : [ ChatMessage ] = [ ] {
9+ didSet { onHistoryChange ( ) }
10+ }
911 public private( set) var remainingTokens : Int ?
1012
1113 public var systemPrompt : String
1214 public var contextSystemPrompt : String
13- public var retrievedContent : [ String ] = [ ]
14- public var history : [ ChatMessage ] = [ ] {
15- didSet { onHistoryChange ( ) }
16- }
17-
15+ public var retrievedContent : [ ChatMessage . Reference ] = [ ]
1816 public var configuration : ChatGPTConfiguration
1917 public var functionProvider : ChatGPTFunctionProvider
2018
@@ -46,7 +44,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
4644 contextSystemPrompt = newPrompt
4745 }
4846
49- public func mutateRetrievedContent( _ newContent: [ String ] ) {
47+ public func mutateRetrievedContent( _ newContent: [ ChatMessage . Reference ] ) {
5048 retrievedContent = newContent
5149 }
5250
@@ -57,9 +55,8 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
5755 }
5856 }
5957
60- public func refresh( ) async {
61- messages = generateSendingHistory ( )
62- remainingTokens = generateRemainingTokens ( )
58+ public func generatePrompt( ) async -> ChatGPTPrompt {
59+ return generateSendingHistory ( )
6360 }
6461
6562 /// https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
@@ -79,7 +76,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
7976 func generateSendingHistory(
8077 maxNumberOfMessages: Int = UserDefaults . shared. value ( for: \. chatGPTMaxMessageCount) ,
8178 encoder: TokenEncoder = AutoManagedChatGPTMemory . encoder
82- ) -> [ ChatMessage ] {
79+ ) -> ChatGPTPrompt {
8380 let (
8481 systemPromptMessage,
8582 contextSystemPromptMessage,
@@ -102,7 +99,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
10299 retrievedContentMessage,
103100 _,
104101 retrievedContentUsage,
105- _
102+ retrievedContent
106103 ) = generateRetrievedContentMessage (
107104 maxTokenCount: availableTokenCountForRetrievedContent,
108105 encoder: encoder
@@ -134,15 +131,7 @@ public actor AutoManagedChatGPTMemory: ChatGPTMemory {
134131 """ )
135132 #endif
136133
137- return allMessages
138- }
139-
140- func generateRemainingTokens(
141- maxNumberOfMessages: Int = UserDefaults . shared. value ( for: \. chatGPTMaxMessageCount) ,
142- encoder: TokenEncoder = AutoManagedChatGPTMemory . encoder
143- ) -> Int ? {
144- // It should be fine to just let OpenAI decide.
145- return nil
134+ return . init( history: allMessages, references: retrievedContent)
146135 }
147136
148137 func setOnHistoryChangeBlock( _ onChange: @escaping ( ) -> Void ) {
@@ -240,41 +229,42 @@ extension AutoManagedChatGPTMemory {
240229 retrievedContent: ChatMessage ,
241230 remainingTokenCount: Int ,
242231 usage: Int ,
243- includedRetrievedContent : [ String ]
232+ references : [ ChatMessage . Reference ]
244233 ) {
245234 var retrievedContentTokenCount = 0
246235 let separator = String ( repeating: " = " , count: 32 ) // only 1 token
247236 var message = " "
248- var includedRetrievedContent = [ String ] ( )
237+ var references = [ ChatMessage . Reference ] ( )
249238
250239 func appendToMessage( _ text: String ) -> Bool {
251240 let tokensCount = encoder. countToken ( text: text)
252241 if tokensCount + retrievedContentTokenCount > maxTokenCount { return false }
253242 retrievedContentTokenCount += tokensCount
254243 message += text
255- includedRetrievedContent. append ( text)
256244 return true
257245 }
258246
259- for (index, content) in retrievedContent. filter ( { !$0. isEmpty } ) . enumerated ( ) {
247+ for (index, content) in retrievedContent. filter ( { !$0. content . isEmpty } ) . enumerated ( ) {
260248 if index == 0 {
261249 if !appendToMessage( """
262- Here are the information you know about the system and the project, separated by \( separator)
250+ Here are the information you know about the system and the project, \
251+ separated by \( separator)
263252
264253
265254 """ ) { break }
266255 } else {
267256 if !appendToMessage( " \n \( separator) \n " ) { break }
268257 }
269258
270- if !appendToMessage( content) { break }
259+ if !appendToMessage( content. content) { break }
260+ references. append ( content)
271261 }
272262
273263 return (
274264 . init( role: . user, content: message) ,
275265 maxTokenCount - retrievedContentTokenCount,
276266 retrievedContentTokenCount,
277- includedRetrievedContent
267+ references
278268 )
279269 }
280270}