Skip to content

Commit 266e7ca

Browse files
committed
Merge branch 'feature/fix-deep-seek' into hotfix/0.35.4
2 parents dbd4194 + c86f5c8 commit 266e7ca

File tree

4 files changed

+161
-66
lines changed

4 files changed

+161
-66
lines changed

Core/Sources/HostApp/AccountSettings/ChatModelManagement/ChatModelEdit.swift

Lines changed: 41 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -32,6 +32,7 @@ struct ChatModelEdit {
3232
var openAIOrganizationID: String = ""
3333
var openAIProjectID: String = ""
3434
var customHeaders: [ChatModel.Info.CustomHeaderInfo.HeaderField] = []
35+
var openAICompatibleSupportsMultipartMessageContent = true
3536
}
3637

3738
enum Action: Equatable, BindableAction {
@@ -88,20 +89,40 @@ struct ChatModelEdit {
8889
let model = ChatModel(state: state)
8990
return .run { send in
9091
do {
91-
let service = LegacyChatGPTService(
92-
configuration: UserPreferenceChatGPTConfiguration()
93-
.overriding {
94-
$0.model = model
95-
}
96-
)
97-
let reply = try await service
98-
.sendAndWait(content: "Respond with \"Test succeeded\"")
99-
await send(.testSucceeded(reply ?? "No Message"))
100-
let stream = try await service
101-
.send(content: "Respond with \"Stream response is working\"")
92+
let configuration = UserPreferenceChatGPTConfiguration().overriding {
93+
$0.model = model
94+
}
95+
let service = ChatGPTService(configuration: configuration)
96+
let reply = try await service.send(TemplateChatGPTMemory(
97+
memoryTemplate: .init(messages: [
98+
.init(chatMessage: .init(
99+
role: .user,
100+
content: "Respond with \"Test succeeded\""
101+
)),
102+
]),
103+
configuration: configuration,
104+
functionProvider: NoChatGPTFunctionProvider()
105+
)).asText()
106+
107+
await send(.testSucceeded(reply))
108+
let stream = service.send(TemplateChatGPTMemory(
109+
memoryTemplate: .init(messages: [
110+
.init(chatMessage: .init(
111+
role: .user,
112+
content: "Respond with \"Stream response is working\""
113+
)),
114+
]),
115+
configuration: configuration,
116+
functionProvider: NoChatGPTFunctionProvider()
117+
))
102118
var streamReply = ""
103119
for try await chunk in stream {
104-
streamReply += chunk
120+
switch chunk {
121+
case let .partialText(text):
122+
streamReply += text
123+
default:
124+
continue
125+
}
105126
}
106127
await send(.testSucceeded(streamReply))
107128
} catch {
@@ -206,7 +227,11 @@ extension ChatModel {
206227
),
207228
ollamaInfo: .init(keepAlive: state.ollamaKeepAlive),
208229
googleGenerativeAIInfo: .init(apiVersion: state.apiVersion),
209-
openAICompatibleInfo: .init(enforceMessageOrder: state.enforceMessageOrder),
230+
openAICompatibleInfo: .init(
231+
enforceMessageOrder: state.enforceMessageOrder,
232+
supportsMultipartMessageContent: state
233+
.openAICompatibleSupportsMultipartMessageContent
234+
),
210235
customHeaderInfo: .init(headers: state.customHeaders)
211236
)
212237
)
@@ -230,7 +255,9 @@ extension ChatModel {
230255
enforceMessageOrder: info.openAICompatibleInfo.enforceMessageOrder,
231256
openAIOrganizationID: info.openAIInfo.organizationID,
232257
openAIProjectID: info.openAIInfo.projectID,
233-
customHeaders: info.customHeaderInfo.headers
258+
customHeaders: info.customHeaderInfo.headers,
259+
openAICompatibleSupportsMultipartMessageContent: info.openAICompatibleInfo
260+
.supportsMultipartMessageContent
234261
)
235262
}
236263
}

Core/Sources/HostApp/AccountSettings/ChatModelManagement/ChatModelEditView.swift

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -322,6 +322,10 @@ struct ChatModelEditView: View {
322322
Text("Enforce message order to be user/assistant alternated")
323323
}
324324

325+
Toggle(isOn: $store.openAICompatibleSupportsMultipartMessageContent) {
326+
Text("Support multi-part message content")
327+
}
328+
325329
Button("Custom Headers") {
326330
isEditingCustomHeader.toggle()
327331
}

Tool/Sources/AIModel/ChatModel.swift

Lines changed: 17 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -46,16 +46,22 @@ public struct ChatModel: Codable, Equatable, Identifiable {
4646
self.projectID = projectID
4747
}
4848
}
49-
49+
5050
public struct OpenAICompatibleInfo: Codable, Equatable {
5151
@FallbackDecoding<EmptyBool>
5252
public var enforceMessageOrder: Bool
53+
@FallbackDecoding<EmptyTrue>
54+
public var supportsMultipartMessageContent: Bool
5355

54-
public init(enforceMessageOrder: Bool = false) {
56+
public init(
57+
enforceMessageOrder: Bool = false,
58+
supportsMultipartMessageContent: Bool = true
59+
) {
5560
self.enforceMessageOrder = enforceMessageOrder
61+
self.supportsMultipartMessageContent = supportsMultipartMessageContent
5662
}
5763
}
58-
64+
5965
public struct GoogleGenerativeAIInfo: Codable, Equatable {
6066
@FallbackDecoding<EmptyString>
6167
public var apiVersion: String
@@ -64,21 +70,21 @@ public struct ChatModel: Codable, Equatable, Identifiable {
6470
self.apiVersion = apiVersion
6571
}
6672
}
67-
73+
6874
public struct CustomHeaderInfo: Codable, Equatable {
6975
public struct HeaderField: Codable, Equatable {
7076
public var key: String
7177
public var value: String
72-
78+
7379
public init(key: String, value: String) {
7480
self.key = key
7581
self.value = value
7682
}
7783
}
78-
84+
7985
@FallbackDecoding<EmptyArray>
8086
public var headers: [HeaderField]
81-
87+
8288
public init(headers: [HeaderField] = []) {
8389
self.headers = headers
8490
}
@@ -203,3 +209,7 @@ public struct EmptyChatModelOpenAICompatibleInfo: FallbackValueProvider {
203209
public struct EmptyChatModelCustomHeaderInfo: FallbackValueProvider {
204210
public static var defaultValue: ChatModel.Info.CustomHeaderInfo { .init() }
205211
}
212+
213+
public struct EmptyTrue: FallbackValueProvider {
214+
public static var defaultValue: Bool { true }
215+
}

Tool/Sources/OpenAIService/APIs/OpenAIChatCompletionsService.swift

Lines changed: 99 additions & 45 deletions
Original file line number | Diff line number | Diff line change
@@ -299,6 +299,8 @@ actor OpenAIChatCompletionsService: ChatCompletionsStreamAPI, ChatCompletionsAPI
299299
requestBody,
300300
endpoint: endpoint,
301301
enforceMessageOrder: model.info.openAICompatibleInfo.enforceMessageOrder,
302+
supportsMultipartMessageContent: model.info.openAICompatibleInfo
303+
.supportsMultipartMessageContent,
302304
canUseTool: model.info.supportsFunctionCalling,
303305
supportsImage: model.info.supportsImage,
304306
supportsAudio: model.info.supportsAudio
@@ -651,25 +653,43 @@ extension OpenAIChatCompletionsService.RequestBody {
651653
_ message: inout Message,
652654
content: String,
653655
images: [ChatCompletionsRequestBody.Message.Image],
654-
audios: [ChatCompletionsRequestBody.Message.Audio]
656+
audios: [ChatCompletionsRequestBody.Message.Audio],
657+
supportsMultipartMessageContent: Bool
655658
) {
656-
switch message.role {
657-
case .system, .assistant, .user:
658-
let newParts = Self.convertContentPart(
659-
content: content,
660-
images: images,
661-
audios: audios
662-
)
663-
if case let .contentParts(existingParts) = message.content {
664-
message.content = .contentParts(existingParts + newParts)
665-
} else {
666-
message.content = .contentParts(newParts)
659+
if supportsMultipartMessageContent {
660+
switch message.role {
661+
case .system, .assistant, .user:
662+
let newParts = Self.convertContentPart(
663+
content: content,
664+
images: images,
665+
audios: audios
666+
)
667+
if case let .contentParts(existingParts) = message.content {
668+
message.content = .contentParts(existingParts + newParts)
669+
} else {
670+
message.content = .contentParts(newParts)
671+
}
672+
case .tool, .function:
673+
if case let .text(existingText) = message.content {
674+
message.content = .text(existingText + "\n\n" + content)
675+
} else {
676+
message.content = .text(content)
677+
}
667678
}
668-
case .tool, .function:
669-
if case let .text(existingText) = message.content {
670-
message.content = .text(existingText + "\n\n" + content)
671-
} else {
672-
message.content = .text(content)
679+
} else {
680+
switch message.role {
681+
case .system, .assistant, .user:
682+
if case let .text(existingText) = message.content {
683+
message.content = .text(existingText + "\n\n" + content)
684+
} else {
685+
message.content = .text(content)
686+
}
687+
case .tool, .function:
688+
if case let .text(existingText) = message.content {
689+
message.content = .text(existingText + "\n\n" + content)
690+
} else {
691+
message.content = .text(content)
692+
}
673693
}
674694
}
675695
}
@@ -678,6 +698,7 @@ extension OpenAIChatCompletionsService.RequestBody {
678698
_ body: ChatCompletionsRequestBody,
679699
endpoint: URL,
680700
enforceMessageOrder: Bool,
701+
supportsMultipartMessageContent: Bool,
681702
canUseTool: Bool,
682703
supportsImage: Bool,
683704
supportsAudio: Bool
@@ -702,7 +723,7 @@ extension OpenAIChatCompletionsService.RequestBody {
702723
model = body.model
703724

704725
// Special case for Claude through OpenRouter
705-
726+
706727
if endpoint.absoluteString.contains("openrouter.ai"), model.hasPrefix("anthropic/") {
707728
var body = body
708729
body.model = model.replacingOccurrences(of: "anthropic/", with: "")
@@ -731,7 +752,7 @@ extension OpenAIChatCompletionsService.RequestBody {
731752
}
732753
return
733754
}
734-
755+
735756
// Enforce message order
736757

737758
if enforceMessageOrder {
@@ -752,16 +773,22 @@ extension OpenAIChatCompletionsService.RequestBody {
752773
&nonSystemMessages[nonSystemMessages.endIndex - 1],
753774
content: message.content,
754775
images: supportsImage ? message.images : [],
755-
audios: supportsAudio ? message.audios : []
776+
audios: supportsAudio ? message.audios : [],
777+
supportsMultipartMessageContent: supportsMultipartMessageContent
756778
)
757779
} else {
758780
nonSystemMessages.append(.init(
759781
role: .tool,
760-
content: .contentParts(Self.convertContentPart(
761-
content: message.content,
762-
images: supportsImage ? message.images : [],
763-
audios: supportsAudio ? message.audios : []
764-
)),
782+
content: {
783+
if supportsMultipartMessageContent {
784+
return .contentParts(Self.convertContentPart(
785+
content: message.content,
786+
images: supportsImage ? message.images : [],
787+
audios: supportsAudio ? message.audios : []
788+
))
789+
}
790+
return .text(message.content)
791+
}(),
765792
tool_calls: message.toolCalls?.map { tool in
766793
MessageToolCall(
767794
id: tool.id,
@@ -780,16 +807,22 @@ extension OpenAIChatCompletionsService.RequestBody {
780807
&nonSystemMessages[nonSystemMessages.endIndex - 1],
781808
content: message.content,
782809
images: supportsImage ? message.images : [],
783-
audios: supportsAudio ? message.audios : []
810+
audios: supportsAudio ? message.audios : [],
811+
supportsMultipartMessageContent: supportsMultipartMessageContent
784812
)
785813
} else {
786814
nonSystemMessages.append(.init(
787815
role: .assistant,
788-
content: .contentParts(Self.convertContentPart(
789-
content: message.content,
790-
images: supportsImage ? message.images : [],
791-
audios: supportsAudio ? message.audios : []
792-
))
816+
content: {
817+
if supportsMultipartMessageContent {
818+
return .contentParts(Self.convertContentPart(
819+
content: message.content,
820+
images: supportsImage ? message.images : [],
821+
audios: supportsAudio ? message.audios : []
822+
))
823+
}
824+
return .text(message.content)
825+
}()
793826
))
794827
}
795828
case (.user, _):
@@ -798,16 +831,22 @@ extension OpenAIChatCompletionsService.RequestBody {
798831
&nonSystemMessages[nonSystemMessages.endIndex - 1],
799832
content: message.content,
800833
images: supportsImage ? message.images : [],
801-
audios: supportsAudio ? message.audios : []
834+
audios: supportsAudio ? message.audios : [],
835+
supportsMultipartMessageContent: supportsMultipartMessageContent
802836
)
803837
} else {
804838
nonSystemMessages.append(.init(
805839
role: .user,
806-
content: .contentParts(Self.convertContentPart(
807-
content: message.content,
808-
images: supportsImage ? message.images : [],
809-
audios: supportsAudio ? message.audios : []
810-
)),
840+
content: {
841+
if supportsMultipartMessageContent {
842+
return .contentParts(Self.convertContentPart(
843+
content: message.content,
844+
images: supportsImage ? message.images : [],
845+
audios: supportsAudio ? message.audios : []
846+
))
847+
}
848+
return .text(message.content)
849+
}(),
811850
name: message.name,
812851
tool_call_id: message.toolCallId
813852
))
@@ -817,15 +856,25 @@ extension OpenAIChatCompletionsService.RequestBody {
817856
messages = [
818857
.init(
819858
role: .system,
820-
content: .contentParts(systemPrompts)
859+
content: {
860+
if supportsMultipartMessageContent {
861+
return .contentParts(systemPrompts)
862+
}
863+
let textParts = systemPrompts.compactMap {
864+
if case let .text(text) = $0 { return text.text }
865+
return nil
866+
}
867+
868+
return .text(textParts.joined(separator: "\n\n"))
869+
}()
821870
),
822871
] + nonSystemMessages
823872

824873
return
825874
}
826-
875+
827876
// Default
828-
877+
829878
messages = body.messages.map { message in
830879
.init(
831880
role: {
@@ -840,11 +889,16 @@ extension OpenAIChatCompletionsService.RequestBody {
840889
return .tool
841890
}
842891
}(),
843-
content: .contentParts(Self.convertContentPart(
844-
content: message.content,
845-
images: supportsImage ? message.images : [],
846-
audios: supportsAudio ? message.audios : []
847-
)),
892+
content: {
893+
if supportsMultipartMessageContent {
894+
return .contentParts(Self.convertContentPart(
895+
content: message.content,
896+
images: supportsImage ? message.images : [],
897+
audios: supportsAudio ? message.audios : []
898+
))
899+
}
900+
return .text(message.content)
901+
}(),
848902
name: message.name,
849903
tool_calls: message.toolCalls?.map { tool in
850904
MessageToolCall(

0 commit comments

Comments (0)