Skip to content

Commit a412c23

Browse files
committed
Move tool call responses into their source assistant messages
1 parent c962212 commit a412c23

17 files changed

Lines changed: 345 additions & 203 deletions

Core/Sources/ChatContextCollectors/WebChatContextCollector/WebChatContextCollector.swift

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,8 @@ extension WebChatContextCollector {
3232
static func detectLinks(from messages: [ChatMessage]) -> [String] {
3333
return messages.lazy
3434
.compactMap {
35-
$0.content ?? $0.toolCalls?.map(\.function.arguments).joined(separator: " ") ?? ""
35+
$0.content ?? $0.toolCallContext?.toolCalls.map(\.function.arguments)
36+
.joined(separator: " ") ?? ""
3637
}
3738
.map(detectLinks(from:))
3839
.flatMap { $0 }

Core/Sources/ChatGPTChatTab/Chat.swift

Lines changed: 20 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ public struct DisplayedChatMessage: Equatable {
1515

1616
public struct Reference: Equatable {
1717
public typealias Kind = ChatMessage.Reference.Kind
18-
18+
1919
public var title: String
2020
public var subtitle: String
2121
public var uri: String
@@ -135,7 +135,7 @@ struct Chat: ReducerProtocol {
135135
await send(.focusOnTextField)
136136
await send(.refresh)
137137
}
138-
138+
139139
case .refresh:
140140
return .run { send in
141141
await send(.chatMenu(.refresh))
@@ -298,8 +298,9 @@ struct Chat: ReducerProtocol {
298298
}.cancellable(id: CancelID.observeDefaultScopesChange(id), cancelInFlight: true)
299299

300300
case .historyChanged:
301-
state.history = service.chatHistory.map { message in
302-
.init(
301+
state.history = service.chatHistory.flatMap { message in
302+
var all = [DisplayedChatMessage]()
303+
all.append(.init(
303304
id: message.id,
304305
role: {
305306
switch message.role {
@@ -312,7 +313,6 @@ struct Chat: ReducerProtocol {
312313
return .assistant
313314
}
314315
return .ignored
315-
case .tool: return .tool
316316
}
317317
}(),
318318
text: message.summary ?? message.content ?? "",
@@ -325,7 +325,20 @@ struct Chat: ReducerProtocol {
325325
kind: $0.kind
326326
)
327327
}
328-
)
328+
))
329+
330+
if let responses = message.toolCallContext?.responses {
331+
for response in responses {
332+
all.append(.init(
333+
id: message.id + response.id,
334+
role: .tool,
335+
text: response.summary ?? response.content,
336+
references: []
337+
))
338+
}
339+
}
340+
341+
return all
329342
}
330343

331344
state.title = {
@@ -401,7 +414,7 @@ struct ChatMenu: ReducerProtocol {
401414
return .run {
402415
await $0(.refresh)
403416
}
404-
417+
405418
case .refresh:
406419
state.temperatureOverride = service.configuration.overriding.temperature
407420
state.chatModelIdOverride = service.configuration.overriding.modelId

Core/Sources/ChatService/ChatService.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ public final class ChatService: ObservableObject {
126126

127127
// if it's stopped before the tool calls finish, remove the message.
128128
await memory.mutateHistory { history in
129-
if history.last?.role == .assistant, history.last?.toolCalls != nil {
129+
if history.last?.role == .assistant, history.last?.toolCallContext?.toolCalls != nil {
130130
history.removeLast()
131131
}
132132
}

Core/Sources/ChatService/ContextAwareAutoManagedChatGPTMemory.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ public final class ContextAwareAutoManagedChatGPTMemory: ChatGPTMemory {
3939

4040
public func generatePrompt() async -> ChatGPTPrompt {
4141
let content = (await memory.history)
42-
.last(where: { $0.role == .user || $0.role == .tool })?.content
42+
.last(where: { $0.role == .user })?.content
4343
try? await contextController.collectContextInformation(
4444
systemPrompt: """
4545
\(chatService?.systemPrompt ?? "")

Pro

Submodule Pro updated from 49bbd4a to a2e8aa5

Tool/Sources/LangChain/Chains/LLMChain.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ public class ChatModelChain<Input>: Chain {
3333
public func parseOutput(_ output: Output) -> String {
3434
if let content = output.content {
3535
return content
36-
} else if let toolCalls = output.toolCalls {
36+
} else if let toolCalls = output.toolCallContext?.toolCalls {
3737
return toolCalls.map { "[\($0.id)] \($0.function.name): \($0.function.arguments)" }
3838
.joined(separator: "\n")
3939
}

Tool/Sources/LangChain/Chains/RefineDocumentChain.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ public final class RefineDocumentChain: Chain {
153153
}
154154

155155
func extractAnswer(_ chatMessage: ChatMessage) -> IntermediateAnswer {
156-
for functionCall in chatMessage.toolCalls?.map(\.function) ?? [] {
156+
for functionCall in chatMessage.toolCallContext?.toolCalls.map(\.function) ?? [] {
157157
do {
158158
let intermediateAnswer = try JSONDecoder().decode(
159159
IntermediateAnswer.self,

Tool/Sources/LangChain/Chains/RelevantInformationExtractionChain.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ public final class RelevantInformationExtractionChain: Chain {
104104
callbackManagers: callbackManagers
105105
)
106106

107-
if let functionCall = output.toolCalls?
107+
if let functionCall = output.toolCallContext?.toolCalls
108108
.first(where: { $0.function.name == FinalAnswer().name })?.function
109109
{
110110
do {

Tool/Sources/LangChain/Chains/StructuredOutputChatModelChain.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,7 @@ public class StructuredOutputChatModelChain<Output: Decodable>: Chain {
108108
}
109109

110110
public func parseOutput(_ message: ChatMessage) async -> Output? {
111-
if let functionCall = message.toolCalls?.first?.function {
111+
if let functionCall = message.toolCallContext?.toolCalls.first?.function {
112112
do {
113113
let result = try JSONDecoder().decode(
114114
EndFunction.Arguments.self,

Tool/Sources/OpenAIService/APIs/ChatCompletionsAPIDefinition.swift

Lines changed: 24 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,28 @@ import Preferences
55

66
struct ChatCompletionsRequestBody: Codable, Equatable {
77
struct Message: Codable, Equatable {
8+
enum Role: String, Codable, Equatable {
9+
case system
10+
case user
11+
case assistant
12+
case tool
13+
14+
var asChatMessageRole: ChatMessage.Role {
15+
switch self {
16+
case .system:
17+
return .system
18+
case .user:
19+
return .user
20+
case .assistant:
21+
return .assistant
22+
case .tool:
23+
return .user
24+
}
25+
}
26+
}
27+
828
/// The role of the message.
9-
var role: ChatMessage.Role
29+
var role: Role
1030
/// The content of the message.
1131
var content: String
1232
/// When we want to reply to a function call with the result, we have to provide the
@@ -149,12 +169,13 @@ struct ChatCompletionsStreamDataChunk {
149169
}
150170

151171
struct ToolCall {
172+
var index: Int?
152173
var id: String?
153174
var type: String?
154175
var function: FunctionCall?
155176
}
156177

157-
var role: ChatMessage.Role?
178+
var role: ChatCompletionsRequestBody.Message.Role?
158179
var content: String?
159180
var toolCalls: [ToolCall]?
160181
}
@@ -174,7 +195,7 @@ protocol ChatCompletionsAPI {
174195

175196
struct ChatCompletionResponseBody: Codable, Equatable {
176197
typealias Message = ChatCompletionsRequestBody.Message
177-
198+
178199
var id: String?
179200
var object: String
180201
var model: String

0 commit comments

Comments
 (0)