Skip to content

Commit 65aa3c2

Browse files
committed
Fix model name and error handling
1 parent 8a99568 commit 65aa3c2

3 files changed

Lines changed: 67 additions & 27 deletions

File tree

Tool/Sources/OpenAIService/APIs/GoogleAICompletionAPI.swift

Lines changed: 46 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ struct GoogleCompletionAPI: CompletionAPI {
1010

1111
func callAsFunction() async throws -> CompletionResponseBody {
1212
let aiModel = GenerativeModel(
13-
name: model.name,
13+
name: model.info.modelName,
1414
apiKey: apiKey,
1515
generationConfig: .init(GenerationConfig(
1616
temperature: requestBody.temperature.map(Float.init),
@@ -30,30 +30,52 @@ struct GoogleCompletionAPI: CompletionAPI {
3030
)
3131
}
3232

33-
let response = try await aiModel.generateContent(history)
34-
35-
return .init(
36-
object: "chat.completion",
37-
model: model.name,
38-
usage: .init(prompt_tokens: 0, completion_tokens: 0, total_tokens: 0),
39-
choices: response.candidates.enumerated().map {
40-
let (index, candidate) = $0
41-
return .init(
42-
message: .init(
43-
role: .assistant,
44-
content: candidate.content.parts.first(where: { part in
45-
if let text = part.text {
46-
return !text.isEmpty
47-
} else {
48-
return false
49-
}
50-
})?.text ?? ""
51-
),
52-
index: index,
53-
finish_reason: candidate.finishReason?.rawValue ?? ""
54-
)
33+
do {
34+
let response = try await aiModel.generateContent(history)
35+
36+
return .init(
37+
object: "chat.completion",
38+
model: model.info.modelName,
39+
usage: .init(prompt_tokens: 0, completion_tokens: 0, total_tokens: 0),
40+
choices: response.candidates.enumerated().map {
41+
let (index, candidate) = $0
42+
return .init(
43+
message: .init(
44+
role: .assistant,
45+
content: candidate.content.parts.first(where: { part in
46+
if let text = part.text {
47+
return !text.isEmpty
48+
} else {
49+
return false
50+
}
51+
})?.text ?? ""
52+
),
53+
index: index,
54+
finish_reason: candidate.finishReason?.rawValue ?? ""
55+
)
56+
}
57+
)
58+
} catch let error as GenerateContentError {
59+
struct ErrorWrapper: Error, LocalizedError {
60+
let error: Error
61+
var errorDescription: String? {
62+
var s = ""
63+
dump(error, to: &s)
64+
return "Internal Error: \(s)"
65+
}
5566
}
56-
)
67+
68+
switch error {
69+
case let .internalError(underlying):
70+
throw ErrorWrapper(error: underlying)
71+
case .promptBlocked:
72+
throw error
73+
case .responseStoppedEarly:
74+
throw error
75+
}
76+
} catch {
77+
throw error
78+
}
5779
}
5880
}
5981

Tool/Sources/OpenAIService/APIs/GoogleAICompletionStreamAPI.swift

Lines changed: 20 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ struct GoogleCompletionStreamAPI: CompletionStreamAPI {
1010

1111
func callAsFunction() async throws -> AsyncThrowingStream<CompletionStreamDataChunk, Error> {
1212
let aiModel = GenerativeModel(
13-
name: model.name,
13+
name: model.info.modelName,
1414
apiKey: apiKey,
1515
generationConfig: .init(GenerationConfig(
1616
temperature: requestBody.temperature.map(Float.init),
@@ -38,7 +38,7 @@ struct GoogleCompletionStreamAPI: CompletionStreamAPI {
3838
if Task.isCancelled { break }
3939
let chunk = CompletionStreamDataChunk(
4040
object: "",
41-
model: model.name,
41+
model: model.info.modelName,
4242
choices: response.candidates.map { candidate in
4343
.init(delta: .init(
4444
role: .assistant,
@@ -50,6 +50,24 @@ struct GoogleCompletionStreamAPI: CompletionStreamAPI {
5050
continuation.yield(chunk)
5151
}
5252
continuation.finish()
53+
} catch let error as GenerateContentError {
54+
struct ErrorWrapper: Error, LocalizedError {
55+
let error: Error
56+
var errorDescription: String? {
57+
var s = ""
58+
dump(error, to: &s)
59+
return "Internal Error: \(s)"
60+
}
61+
}
62+
63+
switch error {
64+
case let .internalError(underlying):
65+
continuation.finish(throwing: ErrorWrapper(error: underlying))
66+
case .promptBlocked:
67+
continuation.finish(throwing: error)
68+
case .responseStoppedEarly:
69+
continuation.finish(throwing: error)
70+
}
5371
} catch {
5472
continuation.finish(throwing: error)
5573
}

Tool/Sources/OpenAIService/Memory/AutoManagedChatGPTMemoryStrategy/AutoManagedChatGPTMemoryGoogleAIStrategy.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ extension AutoManagedChatGPTMemory {
1010
guard let model = configuration.model else {
1111
return 0
1212
}
13-
let aiModel = GenerativeModel(name: model.name, apiKey: configuration.apiKey)
13+
let aiModel = GenerativeModel(name: model.info.modelName, apiKey: configuration.apiKey)
1414
if message.isEmpty { return 0 }
1515
let modelMessage = ModelContent(message)
1616
return (try? await aiModel.countTokens([modelMessage]).totalTokens) ?? 0

0 commit comments

Comments (0)