@@ -125,7 +125,7 @@ public actor ChatGPTService: ChatGPTServiceType {
125125 messages: messages,
126126 temperature: temperature ?? defaultTemperature,
127127 stream: true ,
128- max_tokens: remainingTokens
128+ max_tokens: maxTokenForReply ( model : model , remainingTokens: remainingTokens )
129129 )
130130
131131 isReceivingMessage = true
@@ -205,7 +205,7 @@ public actor ChatGPTService: ChatGPTServiceType {
205205 messages: messages,
206206 temperature: temperature ?? defaultTemperature,
207207 stream: true ,
208- max_tokens: remainingTokens
208+ max_tokens: maxTokenForReply ( model : model , remainingTokens: remainingTokens )
209209 )
210210
211211 isReceivingMessage = true
@@ -261,7 +261,7 @@ extension ChatGPTService {
261261 }
262262
263263 func combineHistoryWithSystemPrompt(
264- minimumReplyTokens: Int = 200 ,
264+ minimumReplyTokens: Int = 300 ,
265265 maxNumberOfMessages: Int = UserDefaults . shared. value ( for: \. chatGPTMaxMessageCount) ,
266266 maxTokens: Int = UserDefaults . shared. value ( for: \. chatGPTMaxToken) ,
267267 encoder: TokenEncoder = GPTEncoder ( )
@@ -292,3 +292,8 @@ protocol TokenEncoder {
292292}
293293
294294extension GPTEncoder : TokenEncoder { }
295+
/// Computes the token budget for a single reply.
///
/// The budget is capped at half of the model's `maxToken` so the prompt
/// retains at least as much room as the reply, and never exceeds the
/// tokens still remaining in the context window.
/// - Parameters:
///   - model: Raw model identifier; looked up via `ChatGPTModel(rawValue:)`.
///   - remainingTokens: Tokens left after the prompt has been counted.
/// - Returns: The capped budget, or `remainingTokens` unchanged when the
///   model identifier is not a known `ChatGPTModel`.
func maxTokenForReply(model: String, remainingTokens: Int) -> Int {
    guard let knownModel = ChatGPTModel(rawValue: model) else {
        // Unknown model: no per-model cap is available, so pass through.
        return remainingTokens
    }
    let halfContext = knownModel.maxToken / 2
    return remainingTokens < halfContext ? remainingTokens : halfContext
}
0 commit comments