File tree Expand file tree Collapse file tree
Tool/Sources/OpenAIService/APIs Expand file tree Collapse file tree Original file line number Diff line number Diff line change 11import AIModel
22import Foundation
33import Preferences
4+ import CodableWrappers
45
56protocol EmbeddingAPI {
67 func embed( text: String ) async throws -> EmbeddingResponse
/// Decoded payload of an OpenAI-style `/embeddings` response.
///
/// Every field is guarded with `@FallbackDecoding` (CodableWrappers) so a
/// partially malformed or schema-drifted payload degrades to empty values
/// instead of failing the whole decode.
public struct EmbeddingResponse: Decodable {
    /// One embedding entry of the response `data` array.
    public struct Object: Decodable {
        public var embedding: [Float]
        public var index: Int
        /// The entry's type tag; decodes to "" when absent or malformed.
        @FallbackDecoding<EmptyString>
        public var object: String
    }

    /// Embedding entries; decodes to [] when absent or malformed.
    @FallbackDecoding<EmptyArray>
    public var data: [Object]
    /// Model identifier echoed by the server; decodes to "" when absent.
    @FallbackDecoding<EmptyString>
    public var model: String

    /// Token accounting reported by the server.
    public struct Usage: Decodable {
        // Field names intentionally stay snake_case to match the wire format
        // under synthesized Codable keys.
        @FallbackDecoding<EmptyInt>
        public var prompt_tokens: Int
        @FallbackDecoding<EmptyInt>
        public var total_tokens: Int

        /// Zeroed `Usage` used when the whole `usage` object is missing.
        public struct Fallback: FallbackValueProvider {
            public static var defaultValue: Usage { Usage(prompt_tokens: 0, total_tokens: 0) }
        }
    }

    /// Token usage; decodes to `Usage.Fallback.defaultValue` when absent.
    @FallbackDecoding<Usage.Fallback>
    public var usage: Usage
}
2839
40+
Original file line number Diff line number Diff line change @@ -50,15 +50,6 @@ struct OpenAIEmbeddingService: EmbeddingAPI {
5050 }
5151
5252 let embeddingResponse = try JSONDecoder ( ) . decode ( EmbeddingResponse . self, from: result)
53- #if DEBUG
54- Logger . service. info ( """
55- Embedding usage
56- - number of strings: \( text. count)
57- - prompt tokens: \( embeddingResponse. usage. prompt_tokens)
58- - total tokens: \( embeddingResponse. usage. total_tokens)
59-
60- """ )
61- #endif
6253 return embeddingResponse
6354 }
6455
@@ -92,15 +83,6 @@ struct OpenAIEmbeddingService: EmbeddingAPI {
9283 }
9384
9485 let embeddingResponse = try JSONDecoder ( ) . decode ( EmbeddingResponse . self, from: result)
95- #if DEBUG
96- Logger . service. info ( """
97- Embedding usage
98- - number of strings: \( tokens. count)
99- - prompt tokens: \( embeddingResponse. usage. prompt_tokens)
100- - total tokens: \( embeddingResponse. usage. total_tokens)
101-
102- """ )
103- #endif
10486 return embeddingResponse
10587 }
10688
You can’t perform that action at this time.
0 commit comments