11import AsyncAlgorithms
22import Foundation
3+ import Logger
34
45public protocol ChatGPTServiceType {
56 func send( content: String , summary: String ? ) async throws -> AsyncThrowingStream < String , Error >
@@ -11,7 +12,7 @@ public protocol ChatGPTServiceType {
1112public enum ChatGPTServiceError : Error , LocalizedError {
1213 case endpointIncorrect
1314 case responseInvalid
14-
15+
1516 public var errorDescription : String ? {
1617 switch self {
1718 case . endpointIncorrect:
@@ -49,7 +50,7 @@ public struct ChatGPTError: Error, Codable, LocalizedError {
4950
5051public actor ChatGPTService : ChatGPTServiceType , ObservableObject {
5152 public var temperature : Double
52- public var model : ChatGPTModel
53+ public var model : String
5354 public var endpoint : String
5455 public var apiKey : String
5556 public var systemPrompt : String
@@ -62,20 +63,24 @@ public actor ChatGPTService: ChatGPTServiceType, ObservableObject {
6263 var cancelTask : Cancellable ?
6364 var buildCompletionStreamAPI : CompletionStreamAPIBuilder = OpenAICompletionStreamAPI . init
6465
deinit {
    // NOTE(review): removed a leftover debug `print(" deinit ")` that logged
    // unconditionally to stdout in release builds. If lifetime tracing is
    // still wanted, route it through the file's `Logger` instead of `print`.
}
69+
/// Creates a chat service bound to an OpenAI-compatible completion endpoint.
///
/// - Parameters:
///   - systemPrompt: Text sent as the system message ahead of every request.
///   - apiKey: Bearer token used to authenticate with the API.
///   - endpoint: Completion endpoint URL; `nil` falls back to the official
///     OpenAI chat-completions URL.
///   - model: Model identifier string; `nil` falls back to `"gpt-3.5-turbo"`.
///   - temperature: Sampling temperature, forwarded verbatim to the API.
///   - maxToken: Upper bound on tokens the API may generate per reply.
public init(
    systemPrompt: String,
    apiKey: String,
    endpoint: String? = nil,
    model: String? = nil,
    temperature: Double = 1,
    maxToken: Int = 2048
) {
    self.systemPrompt = systemPrompt
    self.apiKey = apiKey
    self.model = model ?? "gpt-3.5-turbo"
    self.temperature = temperature
    self.maxToken = maxToken
    self.endpoint = endpoint ?? "https://api.openai.com/v1/chat/completions"
}
8085
8186 public func send(
@@ -88,23 +93,23 @@ public actor ChatGPTService: ChatGPTServiceType, ObservableObject {
8893 history. append ( newMessage)
8994
9095 let requestBody = CompletionRequestBody (
91- model: model. rawValue ,
96+ model: model,
9297 messages: combineHistoryWithSystemPrompt ( ) ,
9398 temperature: temperature,
9499 stream: true ,
95100 max_tokens: maxToken
96101 )
97102
98103 isReceivingMessage = true
99-
104+
100105 do {
101106 let api = buildCompletionStreamAPI ( apiKey, url, requestBody)
102- let ( trunks, cancel) = try await api ( )
103- cancelTask = cancel
104107
105108 return AsyncThrowingStream < String , Error > { continuation in
106109 Task {
107110 do {
111+ let ( trunks, cancel) = try await api ( )
112+ cancelTask = cancel
108113 for try await trunk in trunks {
109114 guard let delta = trunk. choices. first? . delta else { continue }
110115
@@ -117,9 +122,9 @@ public actor ChatGPTService: ChatGPTServiceType, ObservableObject {
117122 }
118123 } else {
119124 history. append ( . init(
125+ id: trunk. id,
120126 role: delta. role ?? . assistant,
121- content: delta. content ?? " " ,
122- id: trunk. id
127+ content: delta. content ?? " "
123128 ) )
124129 }
125130
@@ -131,13 +136,16 @@ public actor ChatGPTService: ChatGPTServiceType, ObservableObject {
131136 continuation. finish ( )
132137 isReceivingMessage = false
133138 } catch {
139+ Logger . service. error ( error)
140+ history. append ( . init(
141+ role: . assistant,
142+ content: error. localizedDescription
143+ ) )
144+ isReceivingMessage = false
134145 continuation. finish ( throwing: error)
135146 }
136147 }
137148 }
138- } catch {
139- isReceivingMessage = false
140- throw error
141149 }
142150 }
143151
@@ -160,12 +168,16 @@ extension ChatGPTService {
/// Test hook: replaces the factory used to build the streaming completion API.
/// - Parameter builder: Factory invoked for each outgoing request in place of
///   the default `OpenAICompletionStreamAPI.init`.
func changeBuildCompletionStreamAPI(_ builder: @escaping CompletionStreamAPIBuilder) {
    buildCompletionStreamAPI = builder
}
/// Builds the message list sent to the completion API: the system prompt
/// followed by at most the four most recent history messages.
///
/// `suffix(4)` returns the whole history when it holds four or fewer
/// messages and the last four otherwise, so the previous two duplicated
/// branches (and the manual `endIndex - 4` slice arithmetic) collapse into
/// one expression with identical results for empty, short, and long
/// histories.
func combineHistoryWithSystemPrompt() -> [CompletionRequestBody.Message] {
    [.init(role: .system, content: systemPrompt)] + history.suffix(4).map {
        .init(role: $0.role, content: $0.content)
    }
}
171183}
0 commit comments