Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Core/Package.swift
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,7 @@ let package = Package(
.product(name: "AppMonitoring", package: "Tool"),
.product(name: "ChatTab", package: "Tool"),
.product(name: "Logger", package: "Tool"),
.product(name: "CustomAsyncAlgorithms", package: "Tool"),
.product(name: "AsyncAlgorithms", package: "swift-async-algorithms"),
.product(name: "MarkdownUI", package: "swift-markdown-ui"),
.product(name: "ComposableArchitecture", package: "swift-composable-architecture"),
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import AIModel
import Toast
import ComposableArchitecture
import Dependencies
import Keychain
Expand Down Expand Up @@ -40,7 +41,12 @@ struct ChatModelEdit: ReducerProtocol {
case baseURLSelection(BaseURLSelection.Action)
}

@Dependency(\.toast) var toast
var toast: (String, ToastType) -> Void {
@Dependency(\.namespacedToast) var toast
return {
toast($0, $1, "ChatModelEdit")
}
}
@Dependency(\.apiKeyKeychain) var keychain

var body: some ReducerProtocol<State, Action> {
Expand Down Expand Up @@ -86,27 +92,35 @@ struct ChatModelEdit: ReducerProtocol {
)
return .run { send in
do {
let reply =
try await ChatGPTService(
configuration: UserPreferenceChatGPTConfiguration()
.overriding {
$0.model = model
}
).sendAndWait(content: "Hello")
let service = ChatGPTService(
configuration: UserPreferenceChatGPTConfiguration()
.overriding {
$0.model = model
}
)
let reply = try await service
.sendAndWait(content: "Respond with \"Test succeeded\"")
await send(.testSucceeded(reply ?? "No Message"))
let stream = try await service
.send(content: "Respond with \"Stream response is working\"")
var streamReply = ""
for try await chunk in stream {
streamReply += chunk
}
await send(.testSucceeded(streamReply))
} catch {
await send(.testFailed(error.localizedDescription))
}
}

case let .testSucceeded(message):
state.isTesting = false
toast(message, .info)
toast(message.trimmingCharacters(in: .whitespacesAndNewlines), .info)
return .none

case let .testFailed(message):
state.isTesting = false
toast(message, .error)
toast(message.trimmingCharacters(in: .whitespacesAndNewlines), .error)
return .none

case .refreshAvailableModelNames:
Expand All @@ -132,6 +146,15 @@ struct ChatModelEdit: ReducerProtocol {
state.suggestedMaxTokens = nil
}
return .none
case .claude:
if let knownModel = ClaudeChatCompletionsService
.KnownModel(rawValue: state.modelName)
{
state.suggestedMaxTokens = knownModel.contextWindow
} else {
state.suggestedMaxTokens = nil
}
return .none
default:
state.suggestedMaxTokens = nil
return .none
Expand Down Expand Up @@ -192,13 +215,12 @@ extension ChatModel {
isFullURL: state.isFullURL,
maxTokens: state.maxTokens,
supportsFunctionCalling: {
if case .googleAI = state.format {
return false
}
if case .ollama = state.format {
switch state.format {
case .googleAI, .ollama, .claude:
return false
case .azureOpenAI, .openAI, .openAICompatible:
return state.supportsFunctionCalling
}
return state.supportsFunctionCalling
}(),
modelName: state.modelName.trimmingCharacters(in: .whitespacesAndNewlines),
ollamaInfo: .init(keepAlive: state.ollamaKeepAlive)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import AIModel
import ComposableArchitecture
import OpenAIService
import Preferences
import SwiftUI

Expand All @@ -26,6 +27,8 @@ struct ChatModelEditView: View {
googleAI
case .ollama:
ollama
case .claude:
claude
}
}
}
Expand Down Expand Up @@ -68,6 +71,7 @@ struct ChatModelEditView: View {
store.send(.appear)
}
.fixedSize(horizontal: false, vertical: true)
.handleToast(namespace: "ChatModelEdit")
}

var nameTextField: some View {
Expand Down Expand Up @@ -96,6 +100,8 @@ struct ChatModelEditView: View {
Text("Google Generative AI").tag(format)
case .ollama:
Text("Ollama").tag(format)
case .claude:
Text("Claude").tag(format)
}
}
},
Expand Down Expand Up @@ -348,7 +354,7 @@ struct ChatModelEditView: View {

maxTokensTextField
}

@ViewBuilder
var ollama: some View {
baseURLTextField(prompt: Text("http://127.0.0.1:11434")) {
Expand All @@ -363,7 +369,7 @@ struct ChatModelEditView: View {
}

maxTokensTextField

WithViewStore(
store,
removeDuplicates: { $0.ollamaKeepAlive == $1.ollamaKeepAlive }
Expand All @@ -380,6 +386,51 @@ struct ChatModelEditView: View {
}
.padding(.vertical)
}

    /// Form section for configuring a Claude (Anthropic) chat model:
    /// base URL, API key, model name (with a known-model picker),
    /// max tokens, and a link to Anthropic's site.
    @ViewBuilder
    var claude: some View {
        // Default endpoint shown as placeholder; the path suffix is fixed.
        baseURLTextField(prompt: Text("https://api.anthropic.com")) {
            Text("/v1/messages")
        }

        apiKeyNamePicker

        WithViewStore(
            store,
            removeDuplicates: { $0.modelName == $1.modelName }
        ) { viewStore in
            // Free-form model name field with a trailing picker that can
            // fill in one of the known Claude model identifiers.
            TextField("Model Name", text: viewStore.$modelName)
                .overlay(alignment: .trailing) {
                    Picker(
                        "",
                        selection: viewStore.$modelName,
                        content: {
                            // Keep the current (unrecognized) name selectable so the
                            // picker's selection stays valid for custom model names.
                            if ClaudeChatCompletionsService
                                .KnownModel(rawValue: viewStore.state.modelName) == nil
                            {
                                Text("Custom Model").tag(viewStore.state.modelName)
                            }
                            ForEach(
                                ClaudeChatCompletionsService.KnownModel.allCases,
                                id: \.self
                            ) { model in
                                Text(model.rawValue).tag(model.rawValue)
                            }
                        }
                    )
                    // Collapse the picker to just its chevron; the text field
                    // displays the actual value.
                    .frame(width: 20)
                }
        }

        maxTokensTextField

        VStack(alignment: .leading, spacing: 8) {
            Text(Image(systemName: "exclamationmark.triangle.fill")) + Text(
                " For more details, please visit [https://anthropic.com](https://anthropic.com)."
            )
        }
        .padding(.vertical)
    }
}

#Preview("OpenAI") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ extension ChatModel: ManageableAIModel {
case .openAICompatible: return "OpenAI Compatible"
case .googleAI: return "Google Generative AI"
case .ollama: return "Ollama"
case .claude: return "Claude"
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import AIModel
import Toast
import ComposableArchitecture
import Dependencies
import Keychain
Expand Down Expand Up @@ -39,7 +40,12 @@ struct EmbeddingModelEdit: ReducerProtocol {
case baseURLSelection(BaseURLSelection.Action)
}

@Dependency(\.toast) var toast
var toast: (String, ToastType) -> Void {
@Dependency(\.namespacedToast) var toast
return {
toast($0, $1, "EmbeddingModelEdit")
}
}
@Dependency(\.apiKeyKeychain) var keychain

var body: some ReducerProtocol<State, Action> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ struct EmbeddingModelEditView: View {
store.send(.appear)
}
.fixedSize(horizontal: false, vertical: true)
.handleToast(namespace: "EmbeddingModelEdit")
}

var nameTextField: some View {
Expand Down
20 changes: 10 additions & 10 deletions Core/Sources/HostApp/FeatureSettings/PromptToCodeSettingsView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@ import ProHostApp

struct PromptToCodeSettingsView: View {
final class Settings: ObservableObject {
@AppStorage(\.hideCommonPrecedingSpacesInSuggestion)
var hideCommonPrecedingSpacesInSuggestion
@AppStorage(\.suggestionCodeFontSize)
var suggestionCodeFontSize
@AppStorage(\.hideCommonPrecedingSpacesInPromptToCode)
var hideCommonPrecedingSpaces
@AppStorage(\.promptToCodeCodeFontSize)
var fontSize
@AppStorage(\.promptToCodeGenerateDescription)
var promptToCodeGenerateDescription
@AppStorage(\.promptToCodeGenerateDescriptionInUserPreferredLanguage)
Expand Down Expand Up @@ -84,25 +84,25 @@ struct PromptToCodeSettingsView: View {
}
}

SettingsDivider("Mirroring Settings of Suggestion Feature")
SettingsDivider("UI")

Form {
Toggle(isOn: $settings.hideCommonPrecedingSpacesInSuggestion) {
Toggle(isOn: $settings.hideCommonPrecedingSpaces) {
Text("Hide Common Preceding Spaces")
}.disabled(true)
}

HStack {
TextField(text: .init(get: {
"\(Int(settings.suggestionCodeFontSize))"
"\(Int(settings.fontSize))"
}, set: {
settings.suggestionCodeFontSize = Double(Int($0) ?? 0)
settings.fontSize = Double(Int($0) ?? 0)
})) {
Text("Font size of suggestion code")
}
.textFieldStyle(.roundedBorder)

Text("pt")
}.disabled(true)
}
}

ScopeForm()
Expand Down
49 changes: 49 additions & 0 deletions Core/Sources/HostApp/HandleToast.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import Dependencies
import SwiftUI
import Toast

/// Renders the stack of active toast messages for a given namespace,
/// anchored by the `handleToast` view modifier.
///
/// A message is shown when it carries no namespace, or when its namespace
/// matches this handler's `namespace`. Previously, mismatched messages were
/// rendered as `EmptyView()` placeholders inside the `ForEach`; filtering the
/// data up front keeps the view hierarchy free of dead entries.
struct ToastHandler: View {
    @ObservedObject var toastController: ToastController
    /// Namespace this handler displays; `nil` shows only un-namespaced toasts.
    let namespace: String?

    init(toastController: ToastController, namespace: String?) {
        _toastController = .init(wrappedValue: toastController)
        self.namespace = namespace
    }

    var body: some View {
        VStack(spacing: 4) {
            // Only messages belonging to this namespace (or to none) are laid out.
            ForEach(toastController.messages.filter { message in
                message.namespace == nil || message.namespace == namespace
            }) { message in
                message.content
                    .foregroundColor(.white)
                    .padding(8)
                    .background(
                        color(for: message.type),
                        in: RoundedRectangle(cornerRadius: 8)
                    )
                    .shadow(color: Color.black.opacity(0.2), radius: 4)
            }
        }
        .padding()
        // Toasts are purely informational overlays; never intercept clicks.
        .allowsHitTesting(false)
    }

    /// Background tint for a toast severity level.
    private func color(for type: ToastType) -> Color {
        switch type {
        case .info: return Color.accentColor
        case .error: return Color(nsColor: .systemRed)
        case .warning: return Color(nsColor: .systemOrange)
        }
    }
}

extension View {
    /// Attaches toast presentation to this view: overlays the active toast
    /// messages for `namespace` at the bottom edge, and routes `\.toast`
    /// environment calls into the shared `ToastController` under the same
    /// namespace.
    func handleToast(namespace: String? = nil) -> some View {
        @Dependency(\.toastController) var toastController
        let handler = ToastHandler(toastController: toastController, namespace: namespace)
        return self
            .overlay(alignment: .bottom) { handler }
            .environment(\.toast) { [toastController] content, type in
                toastController.toast(content: content, type: type, namespace: namespace)
            }
    }
}

23 changes: 1 addition & 22 deletions Core/Sources/HostApp/TabContainer.swift
Original file line number Diff line number Diff line change
Expand Up @@ -75,33 +75,12 @@ public struct TabContainer: View {
}
.environment(\.tabBarTabTag, tag)
.frame(minHeight: 400)
.overlay(alignment: .bottom) {
VStack(spacing: 4) {
ForEach(toastController.messages) { message in
message.content
.foregroundColor(.white)
.padding(8)
.background({
switch message.type {
case .info: return Color.accentColor
case .error: return Color(nsColor: .systemRed)
case .warning: return Color(nsColor: .systemOrange)
}
}() as Color, in: RoundedRectangle(cornerRadius: 8))
.shadow(color: Color.black.opacity(0.2), radius: 4)
}
}
.padding()
.allowsHitTesting(false)
}
}
.focusable(false)
.padding(.top, 8)
.background(.ultraThinMaterial.opacity(0.01))
.background(Color(nsColor: .controlBackgroundColor).opacity(0.4))
.environment(\.toast) { [toastController] content, type in
toastController.toast(content: content, type: type)
}
.handleToast()
.onPreferenceChange(TabBarItemPreferenceKey.self) { items in
tabBarItems = items
}
Expand Down
Loading