Small feature changes and bug fixes

2026-02-16 13:17:08 +01:00
parent 04c9b8da1e
commit 25bcca213e
20 changed files with 2193 additions and 125 deletions

@@ -266,6 +266,9 @@ Don't narrate future actions ("Let me...") - just use the tools.
messages.append(userMessage)
sessionStats.addMessage(inputTokens: userMessage.tokens, outputTokens: nil, cost: nil)
// Generate embedding for user message
generateEmbeddingForMessage(userMessage)
// Add to command history (in-memory and database)
commandHistory.append(trimmedInput)
historyIndex = commandHistory.count
@@ -374,15 +377,28 @@ Don't narrate future actions ("Let me...") - just use the tools.
showSystemMessage("No previous message to retry")
return
}
// Remove last assistant response if exists
if let lastMessage = messages.last, lastMessage.role == .assistant {
messages.removeLast()
}
generateAIResponse(to: lastUserMessage.content, attachments: lastUserMessage.attachments)
}
func toggleMessageStar(messageId: UUID) {
// Get current starred status
let isStarred = (try? DatabaseService.shared.getMessageMetadata(messageId: messageId)?.user_starred == 1) ?? false
// Toggle starred status
do {
try DatabaseService.shared.setMessageStarred(messageId: messageId, starred: !isStarred)
Log.ui.info("Message \(messageId) starred: \(!isStarred)")
} catch {
Log.ui.error("Failed to toggle star for message \(messageId): \(error)")
}
}
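// A minimal sketch of the persistence contract toggleMessageStar relies on.
// getMessageMetadata/setMessageStarred and the user_starred column appear in
// the diff above; the in-memory storage below is a hypothetical stand-in for
// the real SQLite-backed DatabaseService, shown only to document the contract.
import Foundation
struct MessageMetadata {
let message_id: UUID
var user_starred: Int // 0 = unstarred, 1 = starred (matches the == 1 check above)
}
final class StarStoreSketch {
private var rows: [UUID: MessageMetadata] = [:] // stands in for the metadata table
func getMessageMetadata(messageId: UUID) throws -> MessageMetadata? {
rows[messageId]
}
func setMessageStarred(messageId: UUID, starred: Bool) throws {
var row = rows[messageId] ?? MessageMetadata(message_id: messageId, user_starred: 0)
row.user_starred = starred ? 1 : 0
rows[messageId] = row
}
}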
// MARK: - Command Handling
private func handleCommand(_ command: String) {
@@ -586,14 +602,46 @@ Don't narrate future actions ("Let me...") - just use the tools.
if isImageGen {
Log.ui.info("Image generation mode for model \(modelId)")
}
// Smart context selection
let contextStrategy: SelectionStrategy
if !memoryEnabled {
contextStrategy = .lastMessageOnly
} else if settings.contextSelectionEnabled {
contextStrategy = .smart
} else {
contextStrategy = .allMessages
}
let contextWindow = ContextSelectionService.shared.selectContext(
allMessages: messagesToSend,
strategy: contextStrategy,
maxTokens: selectedModel?.contextLength ?? settings.contextMaxTokens,
currentQuery: messagesToSend.last?.content
)
if contextWindow.excludedCount > 0 {
Log.ui.info("Smart context: selected \(contextWindow.messages.count) messages (\(contextWindow.totalTokens) tokens), excluded \(contextWindow.excludedCount)")
}
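// Shapes assumed by the selectContext call above. The enum cases and the
// ContextWindow fields (messages, summaries, totalTokens, excludedCount) all
// appear in this diff; their exact types are inferred, not confirmed here.
enum SelectionStrategy {
case lastMessageOnly // memory off: send only the newest message
case smart           // relevance/recency selection within the token budget
case allMessages     // previous behavior: send the full history
}
struct ContextWindow {
let messages: [Message]  // messages chosen for the request
let summaries: [String]  // summaries standing in for excluded older messages
let totalTokens: Int     // estimated token count of the selected messages
let excludedCount: Int   // how many messages were dropped from the window
}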
// Build system prompt with summaries (if any)
var finalSystemPrompt = effectiveSystemPrompt
if !contextWindow.summaries.isEmpty {
let summariesText = contextWindow.summaries.enumerated().map { index, summary in
"[Previous conversation summary (part \(index + 1)):]\\n\(summary)"
}.joined(separator: "\n\n")
finalSystemPrompt = summariesText + "\n\n---\n\n" + effectiveSystemPrompt
}
let chatRequest = ChatRequest(
- messages: Array(memoryEnabled ? messagesToSend : [messagesToSend.last!]),
+ messages: contextWindow.messages,
model: modelId,
stream: settings.streamEnabled,
maxTokens: settings.maxTokens > 0 ? settings.maxTokens : nil,
temperature: settings.temperature > 0 ? settings.temperature : nil,
topP: nil,
- systemPrompt: effectiveSystemPrompt,
+ systemPrompt: finalSystemPrompt,
tools: nil,
onlineMode: onlineMode,
imageGeneration: isImageGen
@@ -680,6 +728,9 @@ Don't narrate future actions ("Let me...") - just use the tools.
sessionStats.addMessage(inputTokens: usage.promptTokens, outputTokens: usage.completionTokens, cost: cost)
}
}
// Generate embedding for assistant message
generateEmbeddingForMessage(messages[index])
}
}
@@ -1363,6 +1414,25 @@ Don't narrate future actions ("Let me...") - just use the tools.
Log.ui.info("Auto-saved conversation: \(conversationName)")
// Check if progressive summarization is needed
Task {
await checkAndSummarizeOldMessages(conversationId: conversation.id)
}
// Generate embeddings for messages that don't have them yet
if settings.embeddingsEnabled {
Task {
for message in chatMessages {
// Skip if already embedded
if let _ = try? EmbeddingService.shared.getMessageEmbedding(messageId: message.id) {
continue
}
// Generate embedding (this will now succeed since message is in DB)
generateEmbeddingForMessage(message)
}
}
}
// Mark as saved to prevent duplicate saves. Despite the name, this is a cheap
// content fingerprint (concatenated message text), not a cryptographic hash.
let conversationHash = chatMessages.map { $0.content }.joined()
settings.syncLastAutoSaveConversationId = conversationHash
@@ -1480,4 +1550,207 @@ Don't narrate future actions ("Let me...") - just use the tools.
await autoSaveConversation()
}
}
// MARK: - Embedding Generation
/// Generate embedding for a message in the background
func generateEmbeddingForMessage(_ message: Message) {
guard settings.embeddingsEnabled else { return }
guard message.content.count > 20 else { return } // Skip very short messages
Task {
do {
// Use user's selected provider, or fall back to best available
guard let provider = EmbeddingService.shared.getSelectedProvider() else {
Log.api.warning("No embedding providers available - skipping embedding generation")
return
}
let embedding = try await EmbeddingService.shared.generateEmbedding(
text: message.content,
provider: provider
)
try EmbeddingService.shared.saveMessageEmbedding(
messageId: message.id,
embedding: embedding,
model: provider.defaultModel
)
Log.api.info("Generated embedding for message \(message.id) using \(provider.displayName)")
} catch {
// Check if it's a foreign key constraint error (message not saved to DB yet)
let errorString = String(describing: error)
if errorString.contains("FOREIGN KEY constraint failed") {
Log.api.debug("Message \(message.id) not in database yet - will embed later during save or batch operation")
} else {
Log.api.error("Failed to generate embedding for message \(message.id): \(error)")
}
}
}
}
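// The EmbeddingService surface this file relies on, gathered in one place.
// Method names come from the calls in this diff; the signatures are best-effort
// reconstructions, and the provider protocol and [Float] vector type are
// assumptions of this sketch, not confirmed by the commit.
protocol EmbeddingProviderInfo {
var displayName: String { get }
var defaultModel: String { get }
}
protocol EmbeddingServiceAPI {
func getSelectedProvider() -> EmbeddingProviderInfo?
func generateEmbedding(text: String, provider: EmbeddingProviderInfo) async throws -> [Float]
func getMessageEmbedding(messageId: UUID) throws -> [Float]?
func saveMessageEmbedding(messageId: UUID, embedding: [Float], model: String) throws
func generateConversationEmbedding(conversationId: UUID) async throws
}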
/// Batch generate embeddings for all messages in all conversations
func batchEmbedAllConversations() async {
guard settings.embeddingsEnabled else {
showSystemMessage("Embeddings are disabled. Enable them in Settings > Advanced.")
return
}
// Check if we have an embedding provider available
guard let provider = EmbeddingService.shared.getSelectedProvider() else {
showSystemMessage("⚠️ No embedding provider available. Please configure an API key for OpenAI, OpenRouter, or Google.")
return
}
showSystemMessage("Starting batch embedding generation using \(provider.displayName)...")
let conversations = (try? DatabaseService.shared.listConversations()) ?? []
var processedMessages = 0
var skippedMessages = 0
for conv in conversations {
guard let (_, messages) = try? DatabaseService.shared.loadConversation(id: conv.id) else {
continue
}
for message in messages {
// Skip if already embedded
if let _ = try? EmbeddingService.shared.getMessageEmbedding(messageId: message.id) {
skippedMessages += 1
continue
}
// Skip very short messages
guard message.content.count > 20 else {
skippedMessages += 1
continue
}
do {
let embedding = try await EmbeddingService.shared.generateEmbedding(
text: message.content,
provider: provider
)
try EmbeddingService.shared.saveMessageEmbedding(
messageId: message.id,
embedding: embedding,
model: provider.defaultModel
)
processedMessages += 1
// Rate limit: 10 embeddings/sec
try? await Task.sleep(for: .milliseconds(100))
} catch {
Log.api.error("Failed to generate embedding for message \(message.id): \(error)")
}
}
// Generate conversation embedding
do {
try await EmbeddingService.shared.generateConversationEmbedding(conversationId: conv.id)
} catch {
Log.api.error("Failed to generate conversation embedding for \(conv.id): \(error)")
}
}
showSystemMessage("Batch embedding complete: \(processedMessages) messages processed, \(skippedMessages) skipped")
Log.ui.info("Batch embedding complete: \(processedMessages) messages, \(skippedMessages) skipped")
}
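// generateConversationEmbedding(conversationId:) is invoked above but not shown
// in this commit. One plausible implementation is mean-pooling the stored
// per-message vectors; this helper is purely illustrative:
func meanPooled(_ vectors: [[Float]]) -> [Float]? {
guard let dim = vectors.first?.count,
vectors.allSatisfy({ $0.count == dim }) else { return nil }
var sum = [Float](repeating: 0, count: dim)
for v in vectors {
for i in 0..<dim { sum[i] += v[i] }
}
return sum.map { $0 / Float(vectors.count) }
}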
// MARK: - Progressive Summarization
/// Check if conversation needs summarization and create summaries if needed
func checkAndSummarizeOldMessages(conversationId: UUID) async {
guard settings.progressiveSummarizationEnabled else { return }
let chatMessages = messages.filter { $0.role == .user || $0.role == .assistant }
let threshold = settings.summarizationThreshold
guard chatMessages.count > threshold else { return }
// Calculate which chunk to summarize (messages 0 to threshold-20)
let chunkEnd = threshold - 20
guard chunkEnd > 30 else { return } // Only summarize chunks of more than 30 messages
// Check if already summarized
if let hasSummary = try? DatabaseService.shared.hasSummaryForRange(
conversationId: conversationId,
startIndex: 0,
endIndex: chunkEnd
), hasSummary {
return // Already summarized
}
// Get messages to summarize
let messagesToSummarize = Array(chatMessages.prefix(chunkEnd))
Log.ui.info("Summarizing messages 0-\(chunkEnd) for conversation \(conversationId)")
// Generate summary
guard let summary = await summarizeMessageChunk(messagesToSummarize) else {
Log.ui.error("Failed to generate summary for conversation \(conversationId)")
return
}
// Save summary
do {
try DatabaseService.shared.saveConversationSummary(
conversationId: conversationId,
startIndex: 0,
endIndex: chunkEnd,
summary: summary,
model: selectedModel?.id,
tokenCount: summary.estimateTokens()
)
Log.ui.info("Saved summary for messages 0-\(chunkEnd)")
} catch {
Log.ui.error("Failed to save summary: \(error)")
}
}
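// Worked example of the windowing above: with summarizationThreshold = 100
// (a hypothetical setting value), nothing happens until the conversation has
// more than 100 chat messages; then messages 0..<80 (threshold - 20) are
// summarized as one chunk while the newest 20+ messages stay verbatim. The
// chunkEnd > 30 guard skips conversations whose summarizable prefix is too
// small to justify a model call.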
/// Summarize a chunk of messages into a concise summary
private func summarizeMessageChunk(_ messages: [Message]) async -> String? {
guard let provider = providerRegistry.getProvider(for: currentProvider),
let modelId = selectedModel?.id else {
return nil
}
// Combine messages into text
let combinedText = messages.map { msg in
let role = msg.role == .user ? "User" : "Assistant"
return "[\(role)]: \(msg.content)"
}.joined(separator: "\n\n")
// Create summarization prompt
let summaryPrompt = """
Please create a concise 2-3 paragraph summary of the following conversation.
Focus on the main topics discussed, key decisions made, and important information exchanged.
Do not include unnecessary details or greetings.
Conversation:
\(combinedText)
"""
let summaryMessage = Message(role: .user, content: summaryPrompt)
let request = ChatRequest(
messages: [summaryMessage],
model: modelId,
stream: false,
maxTokens: 500,
temperature: 0.3,
topP: nil,
systemPrompt: "You are a helpful assistant that creates concise, informative summaries of conversations.",
tools: nil,
onlineMode: false,
imageGeneration: false
)
do {
let response = try await provider.chat(request: request)
return response.content
} catch {
Log.api.error("Summary generation failed: \(error)")
return nil
}
}
}
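// estimateTokens() (used when saving summaries above) is not defined in this
// diff. A common heuristic, assuming roughly four characters per token for
// English text — an approximation, not the app's confirmed implementation:
extension String {
func estimateTokens() -> Int {
max(1, count / 4)
}
}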