Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
46 commits
Select commit Hold shift + click to select a range
4cbd5c3
Merge tag '0.30.5' into develop
intitni Feb 22, 2024
9de4774
Add ResponseStream
intitni Mar 1, 2024
99dbf8f
Move definitions to CompletionsAPIDefinition
intitni Mar 1, 2024
1b213af
Rename file
intitni Mar 1, 2024
fd7f92d
Rename types
intitni Mar 1, 2024
d3c02a1
Convert API types to OpenAIService
intitni Mar 1, 2024
6d6fc49
Convert API types to GoogleAIService
intitni Mar 1, 2024
d741333
Reset ChatCompletionsStreamAPI to return AsyncThrowingStream
intitni Mar 1, 2024
16a770e
Add UI for ollama models
intitni Mar 1, 2024
ef6dd45
Implement Ollama non stream chat API
intitni Mar 1, 2024
58483b0
Update
intitni Mar 1, 2024
ed811e8
Add OpenAIEmbeddingService
intitni Mar 1, 2024
9f137ee
Add OllamaEmbeddingService
intitni Mar 1, 2024
6282aa1
Update test success message
intitni Mar 1, 2024
96bf018
Change Max Tokens to Context Window for better clarity
intitni Mar 2, 2024
7bc8ea7
Merge branch 'feature/ollama-support' into develop
intitni Mar 2, 2024
37111d2
Update
intitni Mar 2, 2024
ecf740e
Migrate service specific info to their own structs
intitni Mar 2, 2024
42a8efb
Support tool call
intitni Mar 2, 2024
c962212
Remove function role
intitni Mar 2, 2024
a412c23
Move tool call responses into its source assistant message
intitni Mar 3, 2024
ca9e981
Use ResponseStream to handle stream responses
intitni Mar 3, 2024
21d7ae4
Put tool call responses into the tool call struct
intitni Mar 3, 2024
143a92f
Parse Mistral.AI errors
intitni Mar 3, 2024
ef396be
Remove useless fields
intitni Mar 3, 2024
2ad4e6d
Remove tool calling content if function calling is not supported
intitni Mar 3, 2024
36f51bb
Fix unit test
intitni Mar 3, 2024
d0102f8
Remove `id` from ToolCallResponse
intitni Mar 3, 2024
21d5d68
Fix error decoding
intitni Mar 3, 2024
0dccd99
Merge branch 'feature/update-openai-api-to-latest-format' into develop
intitni Mar 3, 2024
e9b88b7
Remove warnings
intitni Mar 3, 2024
f2ed55d
Fix function calling settings for ollama
intitni Mar 3, 2024
84058ec
Update
intitni Mar 3, 2024
59ed0da
Merge branch 'main' into develop
intitni Mar 3, 2024
5927fff
Merge tag '0.31.0.beta' into develop
intitni Mar 4, 2024
6063d41
Give response role the default value `assistant`
intitni Mar 4, 2024
01c7cad
Merge branch 'feature/role-fallback-for-bad-stream-response' into dev…
intitni Mar 4, 2024
6fb6360
Hide the circle when Xcode is not active
intitni Mar 6, 2024
d6afaab
Update
intitni Mar 6, 2024
a9b25c8
Bump Copilot.vim to 1.25.0
intitni Mar 7, 2024
8808b47
Merge branch 'feature/bump-github-copilot-1.25.0' into develop
intitni Mar 7, 2024
1285933
Bump Codeium language server to 1.8.5
intitni Mar 7, 2024
cf54d2a
Merge branch 'feature/bump-codeium-language-server-to-1.8.5' into dev…
intitni Mar 7, 2024
6f8091b
Bump version to 0.31.0
intitni Mar 4, 2024
a1604dd
Update appcast.xml
intitni Mar 8, 2024
7e91a10
Merge branch 'release/0.31.0'
intitni Mar 8, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ extension WebChatContextCollector {
static func detectLinks(from messages: [ChatMessage]) -> [String] {
return messages.lazy
.compactMap {
$0.content ?? $0.functionCall?.arguments
$0.content ?? $0.toolCalls?.map(\.function.arguments).joined(separator: " ") ?? ""
}
.map(detectLinks(from:))
.flatMap { $0 }
Expand Down
27 changes: 19 additions & 8 deletions Core/Sources/ChatGPTChatTab/Chat.swift
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ public struct DisplayedChatMessage: Equatable {
public enum Role: Equatable {
case user
case assistant
case function
case tool
case ignored
}

public struct Reference: Equatable {
public typealias Kind = ChatMessage.Reference.Kind

public var title: String
public var subtitle: String
public var uri: String
Expand Down Expand Up @@ -135,7 +135,7 @@ struct Chat: ReducerProtocol {
await send(.focusOnTextField)
await send(.refresh)
}

case .refresh:
return .run { send in
await send(.chatMenu(.refresh))
Expand Down Expand Up @@ -298,8 +298,9 @@ struct Chat: ReducerProtocol {
}.cancellable(id: CancelID.observeDefaultScopesChange(id), cancelInFlight: true)

case .historyChanged:
state.history = service.chatHistory.map { message in
.init(
state.history = service.chatHistory.flatMap { message in
var all = [DisplayedChatMessage]()
all.append(.init(
id: message.id,
role: {
switch message.role {
Expand All @@ -312,7 +313,6 @@ struct Chat: ReducerProtocol {
return .assistant
}
return .ignored
case .function: return .function
}
}(),
text: message.summary ?? message.content ?? "",
Expand All @@ -325,7 +325,18 @@ struct Chat: ReducerProtocol {
kind: $0.kind
)
}
)
))

for call in message.toolCalls ?? [] {
all.append(.init(
id: message.id + call.id,
role: .tool,
text: call.response.summary ?? call.response.content,
references: []
))
}

return all
}

state.title = {
Expand Down Expand Up @@ -401,7 +412,7 @@ struct ChatMenu: ReducerProtocol {
return .run {
await $0(.refresh)
}

case .refresh:
state.temperatureOverride = service.configuration.overriding.temperature
state.chatModelIdOverride = service.configuration.overriding.modelId
Expand Down
4 changes: 2 additions & 2 deletions Core/Sources/ChatGPTChatTab/ChatPanel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ struct ChatHistory: View {
trailing: -8
))
.padding(.vertical, 4)
case .function:
case .tool:
FunctionMessage(id: message.id, text: text)
case .ignored:
EmptyView()
Expand Down Expand Up @@ -453,7 +453,7 @@ struct ChatPanel_Preview: PreviewProvider {
),
.init(
id: "6",
role: .function,
role: .tool,
text: """
Searching for something...
- abc
Expand Down
4 changes: 2 additions & 2 deletions Core/Sources/ChatService/ChatService.swift
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,9 @@ public final class ChatService: ObservableObject {
await chatGPTService.stopReceivingMessage()
isReceivingMessage = false

// if it's stopped before the function finishes, remove the function call.
// if it's stopped before the tool calls finish, remove the message.
await memory.mutateHistory { history in
if history.last?.role == .assistant, history.last?.functionCall != nil {
if history.last?.role == .assistant, history.last?.toolCalls != nil {
history.removeLast()
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public final class ContextAwareAutoManagedChatGPTMemory: ChatGPTMemory {

public func generatePrompt() async -> ChatGPTPrompt {
let content = (await memory.history)
.last(where: { $0.role == .user || $0.role == .function })?.content
.last(where: { $0.role == .user })?.content
try? await contextController.collectContextInformation(
systemPrompt: """
\(chatService?.systemPrompt ?? "")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ struct ChatModelEdit: ReducerProtocol {
@BindingState var maxTokens: Int = 4000
@BindingState var supportsFunctionCalling: Bool = true
@BindingState var modelName: String = ""
@BindingState var ollamaKeepAlive: String = ""
var apiKeyName: String { apiKeySelection.apiKeyName }
var baseURL: String { baseURLSelection.baseURL }
var isFullURL: Bool { baseURLSelection.isFullURL }
Expand Down Expand Up @@ -48,7 +49,7 @@ struct ChatModelEdit: ReducerProtocol {
Scope(state: \.apiKeySelection, action: /Action.apiKeySelection) {
APIKeySelection()
}

Scope(state: \.baseURLSelection, action: /Action.baseURLSelection) {
BaseURLSelection()
}
Expand Down Expand Up @@ -135,10 +136,10 @@ struct ChatModelEdit: ReducerProtocol {
state.suggestedMaxTokens = nil
return .none
}

case .apiKeySelection:
return .none

case .baseURLSelection:
return .none

Expand Down Expand Up @@ -169,6 +170,7 @@ extension ChatModelEdit.State {
maxTokens: model.info.maxTokens,
supportsFunctionCalling: model.info.supportsFunctionCalling,
modelName: model.info.modelName,
ollamaKeepAlive: model.info.ollamaInfo.keepAlive,
apiKeySelection: .init(
apiKeyName: model.info.apiKeyName,
apiKeyManagement: .init(availableAPIKeyNames: [model.info.apiKeyName])
Expand All @@ -193,9 +195,13 @@ extension ChatModel {
if case .googleAI = state.format {
return false
}
if case .ollama = state.format {
return false
}
return state.supportsFunctionCalling
}(),
modelName: state.modelName.trimmingCharacters(in: .whitespacesAndNewlines)
modelName: state.modelName.trimmingCharacters(in: .whitespacesAndNewlines),
ollamaInfo: .init(keepAlive: state.ollamaKeepAlive)
)
)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ struct ChatModelEditView: View {
openAICompatible
case .googleAI:
googleAI
case .ollama:
ollama
}
}
}
Expand Down Expand Up @@ -92,6 +94,8 @@ struct ChatModelEditView: View {
Text("OpenAI Compatible").tag(format)
case .googleAI:
Text("Google Generative AI").tag(format)
case .ollama:
Text("Ollama").tag(format)
}
}
},
Expand Down Expand Up @@ -171,7 +175,7 @@ struct ChatModelEditView: View {
)

TextField(text: textFieldBinding) {
Text("Max Tokens (Including Reply)")
Text("Context Window")
.multilineTextAlignment(.trailing)
}
.overlay(alignment: .trailing) {
Expand Down Expand Up @@ -344,6 +348,38 @@ struct ChatModelEditView: View {

maxTokensTextField
}

/// Settings fields shown when the selected chat-model format is Ollama:
/// base URL, model name, the shared context-window field, and the
/// Ollama-specific "Keep Alive" override.
@ViewBuilder
var ollama: some View {
// Server base URL; the trailing label shows the chat endpoint path.
baseURLTextField(prompt: Text("http://127.0.0.1:11434")) {
Text("/api/chat")
}

// Scope observation to `modelName` so this field does not re-render on
// unrelated state changes.
WithViewStore(
store,
removeDuplicates: { $0.modelName == $1.modelName }
) { viewStore in
TextField("Model Name", text: viewStore.$modelName)
}

maxTokensTextField

// Ollama keep-alive override; the "Default Value" prompt indicates an empty
// field defers to the server's default.
// NOTE(review): the accepted value format (e.g. "5m", seconds) is not visible
// here — confirm against the Ollama API docs before relying on it.
WithViewStore(
store,
removeDuplicates: { $0.ollamaKeepAlive == $1.ollamaKeepAlive }
) { viewStore in
TextField(text: viewStore.$ollamaKeepAlive, prompt: Text("Default Value")) {
Text("Keep Alive")
}
}

// Footer hint pointing users at the Ollama website for details.
VStack(alignment: .leading, spacing: 8) {
Text(Image(systemName: "exclamationmark.triangle.fill")) + Text(
" For more details, please visit [https://ollama.com](https://ollama.com)."
)
}
.padding(.vertical)
}
}

#Preview("OpenAI") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ extension ChatModel: ManageableAIModel {
case .azureOpenAI: return "Azure OpenAI"
case .openAICompatible: return "OpenAI Compatible"
case .googleAI: return "Google Generative AI"
case .ollama: return "Ollama"
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ struct EmbeddingModelEdit: ReducerProtocol {
@BindingState var format: EmbeddingModel.Format
@BindingState var maxTokens: Int = 8191
@BindingState var modelName: String = ""
@BindingState var ollamaKeepAlive: String = ""
var apiKeyName: String { apiKeySelection.apiKeyName }
var baseURL: String { baseURLSelection.baseURL }
var isFullURL: Bool { baseURLSelection.isFullURL }
Expand Down Expand Up @@ -83,14 +84,13 @@ struct EmbeddingModelEdit: ReducerProtocol {
)
return .run { send in
do {
let tokenUsage =
try await EmbeddingService(
configuration: UserPreferenceEmbeddingConfiguration()
.overriding {
$0.model = model
}
).embed(text: "Hello").usage.total_tokens
await send(.testSucceeded("Used \(tokenUsage) tokens."))
_ = try await EmbeddingService(
configuration: UserPreferenceEmbeddingConfiguration()
.overriding {
$0.model = model
}
).embed(text: "Hello")
await send(.testSucceeded("Succeeded!"))
} catch {
await send(.testFailed(error.localizedDescription))
}
Expand Down Expand Up @@ -155,6 +155,7 @@ extension EmbeddingModelEdit.State {
format: model.format,
maxTokens: model.info.maxTokens,
modelName: model.info.modelName,
ollamaKeepAlive: model.info.ollamaInfo.keepAlive,
apiKeySelection: .init(
apiKeyName: model.info.apiKeyName,
apiKeyManagement: .init(availableAPIKeyNames: [model.info.apiKeyName])
Expand All @@ -175,7 +176,8 @@ extension EmbeddingModel {
baseURL: state.baseURL.trimmingCharacters(in: .whitespacesAndNewlines),
isFullURL: state.isFullURL,
maxTokens: state.maxTokens,
modelName: state.modelName.trimmingCharacters(in: .whitespacesAndNewlines)
modelName: state.modelName.trimmingCharacters(in: .whitespacesAndNewlines),
ollamaInfo: .init(keepAlive: state.ollamaKeepAlive)
)
)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ struct EmbeddingModelEditView: View {
azureOpenAI
case .openAICompatible:
openAICompatible
case .ollama:
ollama
}
}
}
Expand Down Expand Up @@ -88,6 +90,8 @@ struct EmbeddingModelEditView: View {
Text("Azure OpenAI").tag(format)
case .openAICompatible:
Text("OpenAI Compatible").tag(format)
case .ollama:
Text("Ollama").tag(format)
}
}
},
Expand Down Expand Up @@ -289,6 +293,38 @@ struct EmbeddingModelEditView: View {

maxTokensTextField
}

/// Settings fields shown when the selected embedding-model format is Ollama:
/// base URL, model name, the shared max-tokens field, and the
/// Ollama-specific "Keep Alive" override.
@ViewBuilder
var ollama: some View {
// Server base URL; the trailing label shows the embeddings endpoint path.
baseURLTextField(prompt: Text("http://127.0.0.1:11434")) {
Text("/api/embeddings")
}

// Scope observation to `modelName` so this field does not re-render on
// unrelated state changes.
WithViewStore(
store,
removeDuplicates: { $0.modelName == $1.modelName }
) { viewStore in
TextField("Model Name", text: viewStore.$modelName)
}

maxTokensTextField

// Ollama keep-alive override; the "Default Value" prompt indicates an empty
// field defers to the server's default.
// NOTE(review): the accepted value format (e.g. "5m", seconds) is not visible
// here — confirm against the Ollama API docs before relying on it.
WithViewStore(
store,
removeDuplicates: { $0.ollamaKeepAlive == $1.ollamaKeepAlive }
) { viewStore in
TextField(text: viewStore.$ollamaKeepAlive, prompt: Text("Default Value")) {
Text("Keep Alive")
}
}

// Footer hint pointing users at the Ollama website for details.
VStack(alignment: .leading, spacing: 8) {
Text(Image(systemName: "exclamationmark.triangle.fill")) + Text(
" For more details, please visit [https://ollama.com](https://ollama.com)."
)
}
.padding(.vertical)
}
}

class EmbeddingModelManagementView_Editing_Previews: PreviewProvider {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ extension EmbeddingModel: ManageableAIModel {
case .openAI: return "OpenAI"
case .azureOpenAI: return "Azure OpenAI"
case .openAICompatible: return "OpenAI Compatible"
case .ollama: return "Ollama"
}
}

Expand Down
11 changes: 10 additions & 1 deletion Core/Sources/SuggestionWidget/WidgetWindowsController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,7 @@ actor WidgetWindowsController: NSObject {
let xcodeInspector = self.xcodeInspector
let activeApp = await xcodeInspector.safe.activeApplication
let latestActiveXcode = await xcodeInspector.safe.latestActiveXcode
let previousActiveApplication = xcodeInspector.previousActiveApplication
await MainActor.run {
let state = store.withState { $0 }
let isChatPanelDetached = state.chatPanelState.chatPanelInASeparateWindow
Expand Down Expand Up @@ -123,9 +124,17 @@ actor WidgetWindowsController: NSObject {
return true
}()

let previousAppIsXcode = previousActiveApplication?.isXcode ?? false

windows.sharedPanelWindow.alphaValue = noFocus ? 0 : 1
windows.suggestionPanelWindow.alphaValue = noFocus ? 0 : 1
windows.widgetWindow.alphaValue = noFocus ? 0 : 1
windows.widgetWindow.alphaValue = if noFocus {
0
} else if previousAppIsXcode {
1
} else {
0
}
windows.toastWindow.alphaValue = noFocus ? 0 : 1
if isChatPanelDetached {
windows.chatPanelWindow.isWindowHidden = !hasChat
Expand Down
1 change: 1 addition & 0 deletions ExtensionService/AppDelegate+Menu.swift
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ extension AppDelegate {
.init("sourceEditorDebugMenu")
}

@MainActor
@objc func buildStatusBarMenu() {
let statusBar = NSStatusBar.system
statusBarItem = statusBar.statusItem(
Expand Down
Loading