@@ -182,7 +182,7 @@ extension ChatGPTService {
182182 func sendMemory( ) async throws -> AsyncThrowingStream < StreamContent , Error > {
183183 guard let url = URL ( string: configuration. endpoint)
184184 else { throw ChatGPTServiceError . endpointIncorrect }
185-
185+
186186 await memory. refresh ( )
187187
188188 let messages = await memory. messages. map {
@@ -282,7 +282,7 @@ extension ChatGPTService {
282282 func sendMemoryAndWait( ) async throws -> ChatMessage ? {
283283 guard let url = URL ( string: configuration. endpoint)
284284 else { throw ChatGPTServiceError . endpointIncorrect }
285-
285+
286286 await memory. refresh ( )
287287
288288 let messages = await memory. messages. map {
@@ -368,16 +368,7 @@ extension ChatGPTService {
368368 let messageId = messageId ?? uuidGenerator ( )
369369
370370 guard var function = functionProvider. function ( named: call. name) else {
371- let content = " Error: function not found "
372- let responseMessage = ChatMessage (
373- id: messageId,
374- role: . function,
375- content: content,
376- name: call. name,
377- summary: " Function ` \( call. name) ` not found. "
378- )
379- await memory. appendMessage ( responseMessage)
380- return content
371+ return await fallbackFunctionCall ( call, messageId: messageId)
381372 }
382373
383374 // Insert the chat message into memory to indicate the start of the function.
@@ -414,6 +405,56 @@ extension ChatGPTService {
414405 return content
415406 }
416407 }
408+
/// Mock a function call result when the bot is calling a function that is not implemented.
///
/// Spins up a one-off `ChatGPTService` whose system prompt asks the model to
/// *simulate* the missing function (or act as a Python interpreter when the
/// call is named `python`), sends the call's arguments to it, and records the
/// simulated output in the conversation as a `.function` message.
///
/// - Parameters:
///   - call: The function call the model attempted; `name` selects the
///     simulator prompt and `arguments` is forwarded verbatim as the input.
///   - messageId: The id to assign to the resulting `.function` chat message.
/// - Returns: The simulated function output, or `"No result."` when the
///   simulator produced nothing or threw.
func fallbackFunctionCall(
    _ call: ChatMessage.FunctionCall,
    messageId: String
) async -> String {
    // Dedicated throwaway memory for the simulation run, so the simulator
    // system prompt never leaks into the real conversation. Deliberately NOT
    // named `memory` — shadowing the instance property here previously caused
    // the response message below to be appended to this throwaway memory and
    // silently dropped from the actual chat.
    let simulatorMemory = ConversationChatGPTMemory(systemPrompt: {
        if call.name == "python" {
            return """
            Act like a Python interpreter.
            I will give you Python code and you will execute it.
            Reply with output of the code and tell me it's an answer generated by LLM.
            """
        } else {
            return """
            You are a function simulator. Your name is \(call.name).
            Act like a function.
            I will send you the arguments.
            Reply with output of the function and tell me it's an answer generated by LLM.
            """
        }
    }())

    // Temperature 0 keeps the simulated output as deterministic as possible;
    // no functions are exposed so the simulator can't recurse into calls.
    let service = ChatGPTService(
        memory: simulatorMemory,
        configuration: OverridingChatGPTConfiguration(overriding: configuration, with: .init(
            temperature: 0
        )),
        functionProvider: NoChatGPTFunctionProvider()
    )

    let content: String = await {
        do {
            return try await service.sendAndWait(content: """
            \(call.arguments)
            """) ?? "No result."
        } catch {
            // Best-effort simulation: any failure degrades to a placeholder
            // result instead of propagating.
            return "No result."
        }
    }()

    let responseMessage = ChatMessage(
        id: messageId,
        role: .function,
        content: content,
        name: call.name,
        summary: "Finished running function."
    )
    // Append to the *chat* memory (`self.memory`), not the simulator's,
    // so the model sees the function result in the ongoing conversation.
    await memory.appendMessage(responseMessage)
    return content
}
417458}
418459
419460extension ChatGPTService {
/// Caps the token budget available for a single reply.
///
/// - Parameters:
///   - model: The raw model identifier; unrecognized names impose no
///     model-specific cap.
///   - remainingTokens: Tokens left in the context window, if known.
/// - Returns: `nil` when the remaining budget is unknown; otherwise the
///   smaller of the remaining budget and half of the model's maximum
///   context size (the full remaining budget for unknown models).
func maxTokenForReply(model: String, remainingTokens: Int?) -> Int? {
    guard let remainingTokens else { return nil }
    if let knownModel = ChatGPTModel(rawValue: model) {
        // Reserve roughly half the model's window for the prompt side.
        return min(knownModel.maxToken / 2, remainingTokens)
    }
    return remainingTokens
}
434475
0 commit comments