Bug fixes, features added, GUI updates and more

This commit is contained in:
2026-02-12 14:29:35 +01:00
parent 52447b5e17
commit 7265d22438
21 changed files with 2187 additions and 123 deletions

View File

@@ -32,16 +32,43 @@ class ChatViewModel {
var showStats: Bool = false
var showHelp: Bool = false
var showCredits: Bool = false
var showHistory: Bool = false
var modelInfoTarget: ModelInfo? = nil
var commandHistory: [String] = []
var historyIndex: Int = 0
// MARK: - Private State
private var commandHistory: [String] = []
private var historyIndex: Int = -1
private var streamingTask: Task<Void, Never>?
private let settings = SettingsService.shared
private let providerRegistry = ProviderRegistry.shared
// Default system prompt
private let defaultSystemPrompt = """
You are a helpful AI assistant. Follow these guidelines:
1. **Accuracy First**: Never invent information or make assumptions. If you're unsure about something, say so clearly.
2. **Ask for Clarification**: When a request is ambiguous or lacks necessary details, ask clarifying questions before proceeding.
3. **Be Honest About Limitations**: If you cannot or should not help with a request, explain why clearly and politely. Don't attempt tasks outside your capabilities.
4. **Stay Grounded**: Base responses on facts and known information. Clearly distinguish between facts, informed opinions, and speculation.
5. **Be Direct**: Provide concise, relevant answers. Avoid unnecessary preambles or apologies.
Remember: It's better to ask questions or admit uncertainty than to provide incorrect or fabricated information.
"""
/// Builds the complete system prompt by combining the built-in default prompt
/// with the user's custom prompt from settings, when one is configured.
///
/// The result is always `defaultSystemPrompt`; if `settings.systemPrompt` is
/// non-nil and non-empty, it is appended after a `---` separator under an
/// "Additional Instructions" heading so the custom text cannot silently
/// override the default guidelines.
private var effectiveSystemPrompt: String {
var prompt = defaultSystemPrompt
// Only append when a custom prompt exists AND is non-empty, so an empty
// settings value does not add a dangling separator/heading.
if let customPrompt = settings.systemPrompt, !customPrompt.isEmpty {
prompt += "\n\n---\n\nAdditional Instructions:\n" + customPrompt
}
return prompt
}
// MARK: - Initialization
init() {
@@ -50,6 +77,17 @@ class ChatViewModel {
self.onlineMode = settings.onlineMode
self.memoryEnabled = settings.memoryEnabled
self.mcpEnabled = settings.mcpEnabled
// Load command history from database
if let history = try? DatabaseService.shared.loadCommandHistory() {
self.commandHistory = history.map { $0.input }
self.historyIndex = self.commandHistory.count
}
// Load models on startup
Task {
await loadAvailableModels()
}
}
// MARK: - Public Methods
@@ -102,7 +140,12 @@ class ChatViewModel {
let models = try await provider.listModels()
availableModels = models
if selectedModel == nil, let firstModel = models.first {
// Select model priority: saved default > current selection > first available
if let defaultModelId = settings.defaultModel,
let defaultModel = models.first(where: { $0.id == defaultModelId }) {
selectedModel = defaultModel
} else if selectedModel == nil, let firstModel = models.first {
selectedModel = firstModel
}
isLoadingModels = false
@@ -116,18 +159,22 @@ class ChatViewModel {
func sendMessage() {
guard !inputText.trimmingCharacters(in: .whitespaces).isEmpty else { return }
let trimmedInput = inputText.trimmingCharacters(in: .whitespaces)
// Check if it's a slash command
if trimmedInput.hasPrefix("/") {
handleCommand(trimmedInput)
// Handle slash escape: "//" becomes "/"
var effectiveInput = trimmedInput
if effectiveInput.hasPrefix("//") {
effectiveInput = String(effectiveInput.dropFirst())
} else if effectiveInput.hasPrefix("/") {
// Check if it's a slash command
handleCommand(effectiveInput)
inputText = ""
return
}
// Parse file attachments
let (cleanText, filePaths) = trimmedInput.parseFileAttachments()
let (cleanText, filePaths) = effectiveInput.parseFileAttachments()
// Read file attachments from disk
let attachments: [FileAttachment]? = filePaths.isEmpty ? nil : readFileAttachments(filePaths)
@@ -141,17 +188,18 @@ class ChatViewModel {
timestamp: Date(),
attachments: attachments
)
messages.append(userMessage)
sessionStats.addMessage(inputTokens: userMessage.tokens, outputTokens: nil, cost: nil)
// Clear input
inputText = ""
// Add to command history
// Add to command history (in-memory and database)
commandHistory.append(trimmedInput)
historyIndex = commandHistory.count
DatabaseService.shared.saveCommandHistory(input: trimmedInput)
// Clear input
inputText = ""
// Generate real AI response
generateAIResponse(to: cleanText, attachments: userMessage.attachments)
}
@@ -220,6 +268,9 @@ class ChatViewModel {
case "/help":
showHelp = true
case "/history":
showHistory = true
case "/model":
showModelSelector = true
@@ -367,6 +418,8 @@ class ChatViewModel {
// Start streaming
streamingTask = Task {
let startTime = Date()
var messageId: UUID?
do {
// Create empty assistant message for streaming
let assistantMessage = Message(
@@ -378,7 +431,8 @@ class ChatViewModel {
attachments: nil,
isStreaming: true
)
messageId = assistantMessage.id
// Already on MainActor
messages.append(assistantMessage)
@@ -411,14 +465,12 @@ class ChatViewModel {
maxTokens: settings.maxTokens > 0 ? settings.maxTokens : nil,
temperature: settings.temperature > 0 ? settings.temperature : nil,
topP: nil,
systemPrompt: nil,
systemPrompt: effectiveSystemPrompt,
tools: nil,
onlineMode: onlineMode,
imageGeneration: isImageGen
)
let messageId = assistantMessage.id
if isImageGen {
// Image generation: use non-streaming request
// Image models don't reliably support streaming
@@ -435,11 +487,13 @@ class ChatViewModel {
imageGeneration: true
)
let response = try await provider.chat(request: nonStreamRequest)
let responseTime = Date().timeIntervalSince(startTime)
if let index = messages.firstIndex(where: { $0.id == messageId }) {
messages[index].content = response.content
messages[index].isStreaming = false
messages[index].generatedImages = response.generatedImages
messages[index].responseTime = responseTime
if let usage = response.usage {
messages[index].tokens = usage.completionTokens
@@ -455,9 +509,13 @@ class ChatViewModel {
// Regular text: stream response
var fullContent = ""
var totalTokens: ChatResponse.Usage? = nil
var wasCancelled = false
for try await chunk in provider.streamChat(request: chatRequest) {
if Task.isCancelled { break }
if Task.isCancelled {
wasCancelled = true
break
}
if let content = chunk.deltaContent {
fullContent += content
@@ -471,9 +529,19 @@ class ChatViewModel {
}
}
// Check for cancellation one more time after loop exits
// (in case it was cancelled after the last chunk)
if Task.isCancelled {
wasCancelled = true
}
let responseTime = Date().timeIntervalSince(startTime)
if let index = messages.firstIndex(where: { $0.id == messageId }) {
messages[index].content = fullContent
messages[index].isStreaming = false
messages[index].responseTime = responseTime
messages[index].wasInterrupted = wasCancelled
if let usage = totalTokens {
messages[index].tokens = usage.completionTokens
@@ -491,13 +559,27 @@ class ChatViewModel {
streamingTask = nil
} catch {
// Remove the empty streaming message
if let index = messages.lastIndex(where: { $0.role == .assistant && $0.content.isEmpty }) {
messages.remove(at: index)
let responseTime = Date().timeIntervalSince(startTime)
// Check if this was a cancellation (either by checking Task state or error type)
let isCancellation = Task.isCancelled || error is CancellationError
if isCancellation, let msgId = messageId {
// Mark the message as interrupted instead of removing it
if let index = messages.firstIndex(where: { $0.id == msgId }) {
messages[index].isStreaming = false
messages[index].wasInterrupted = true
messages[index].responseTime = responseTime
}
} else if let msgId = messageId {
// For real errors, remove the empty streaming message
if let index = messages.firstIndex(where: { $0.id == msgId && $0.content.isEmpty }) {
messages.remove(at: index)
}
Log.api.error("Generation failed: \(error.localizedDescription)")
showSystemMessage("\(friendlyErrorMessage(from: error))")
}
Log.api.error("Generation failed: \(error.localizedDescription)")
showSystemMessage("\(friendlyErrorMessage(from: error))")
isGenerating = false
streamingTask = nil
}
@@ -682,6 +764,8 @@ class ChatViewModel {
streamingTask?.cancel()
streamingTask = Task {
let startTime = Date()
var wasCancelled = false
do {
let tools = mcp.getToolSchemas()
@@ -702,7 +786,10 @@ class ChatViewModel {
if !writeCapabilities.isEmpty {
capabilities += " You can also \(writeCapabilities.joined(separator: ", "))."
}
let systemContent = "You have access to the user's filesystem through tool calls. \(capabilities) The user has granted you access to these folders:\n - \(folderList)\n\nWhen the user asks about their files, use the tools proactively with the allowed paths. Always use absolute paths."
var systemContent = "You have access to the user's filesystem through tool calls. \(capabilities) The user has granted you access to these folders:\n - \(folderList)\n\nWhen the user asks about their files, use the tools proactively with the allowed paths. Always use absolute paths."
// Append the complete system prompt (default + custom)
systemContent += "\n\n---\n\n" + effectiveSystemPrompt
var messagesToSend: [Message] = memoryEnabled
? messages.filter { $0.role != .system }
@@ -736,7 +823,10 @@ class ChatViewModel {
var totalUsage: ChatResponse.Usage?
for iteration in 0..<maxIterations {
if Task.isCancelled { break }
if Task.isCancelled {
wasCancelled = true
break
}
let response = try await provider.chatWithToolMessages(
model: effectiveModelId,
@@ -779,7 +869,10 @@ class ChatViewModel {
// Execute each tool and append results
for tc in toolCalls {
if Task.isCancelled { break }
if Task.isCancelled {
wasCancelled = true
break
}
let result = mcp.executeTool(name: tc.functionName, arguments: tc.arguments)
let resultJSON: String
@@ -806,13 +899,22 @@ class ChatViewModel {
}
}
// Check for cancellation one more time after loop exits
if Task.isCancelled {
wasCancelled = true
}
// Display the final response as an assistant message
let responseTime = Date().timeIntervalSince(startTime)
let assistantMessage = Message(
role: .assistant,
content: finalContent,
tokens: totalUsage?.completionTokens,
cost: nil,
timestamp: Date()
timestamp: Date(),
attachments: nil,
responseTime: responseTime,
wasInterrupted: wasCancelled
)
messages.append(assistantMessage)
@@ -834,8 +936,26 @@ class ChatViewModel {
streamingTask = nil
} catch {
Log.api.error("Tool generation failed: \(error.localizedDescription)")
showSystemMessage("\(friendlyErrorMessage(from: error))")
let responseTime = Date().timeIntervalSince(startTime)
// Check if this was a cancellation
let isCancellation = Task.isCancelled || wasCancelled || error is CancellationError
if isCancellation {
// Create an interrupted message
let assistantMessage = Message(
role: .assistant,
content: "",
timestamp: Date(),
responseTime: responseTime,
wasInterrupted: true
)
messages.append(assistantMessage)
} else {
Log.api.error("Tool generation failed: \(error.localizedDescription)")
showSystemMessage("\(friendlyErrorMessage(from: error))")
}
isGenerating = false
streamingTask = nil
}