Small feature changes and bug fixes
This commit is contained in:
247
oAI/Services/ContextSelectionService.swift
Normal file
247
oAI/Services/ContextSelectionService.swift
Normal file
@@ -0,0 +1,247 @@
|
||||
//
|
||||
// ContextSelectionService.swift
|
||||
// oAI
|
||||
//
|
||||
// Smart context selection for AI conversations
|
||||
// Selects relevant messages instead of sending entire history
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os
|
||||
|
||||
// MARK: - Context Window
|
||||
|
||||
/// The result of context selection: the messages (and optional summaries of
/// excluded ranges) that will actually be sent to the model.
struct ContextWindow {
    /// Messages chosen for the context, in chronological order.
    let messages: [Message]
    /// Summaries standing in for excluded message ranges (may be empty).
    let summaries: [String]
    /// Estimated token count of `messages`.
    let totalTokens: Int
    /// Number of chat messages that were left out of the window.
    let excludedCount: Int
}
|
||||
|
||||
// MARK: - Selection Strategy
|
||||
|
||||
/// How conversation history is turned into model context.
enum SelectionStrategy {
    case allMessages // Memory ON (old behavior): send all messages
    case lastMessageOnly // Memory OFF: send only last message
    case smart // NEW: intelligent selection (recent + starred + important, within a token budget)
}
|
||||
|
||||
// MARK: - Context Selection Service
|
||||
|
||||
/// Selects which conversation messages to send as AI context.
///
/// Instead of always sending the entire history, `.smart` keeps the most
/// recent messages, then adds user-starred and "important" older messages
/// while staying within a token budget, and attaches stored summaries for
/// whatever was excluded.
final class ContextSelectionService {
    static let shared = ContextSelectionService()

    private init() {}

    /// Select context messages using the specified strategy.
    ///
    /// - Parameters:
    ///   - allMessages: Full message history, in chronological order.
    ///   - strategy: Selection strategy to apply.
    ///   - maxTokens: Token budget; required for `.smart` (falls back to all
    ///     messages when nil).
    ///   - currentQuery: Reserved for future relevance scoring; currently unused.
    ///   - conversationId: Needed to look up summaries for excluded ranges.
    /// - Returns: The assembled `ContextWindow`.
    func selectContext(
        allMessages: [Message],
        strategy: SelectionStrategy,
        maxTokens: Int?,
        currentQuery: String? = nil,
        conversationId: UUID? = nil
    ) -> ContextWindow {
        switch strategy {
        case .allMessages:
            return allMessagesContext(allMessages)

        case .lastMessageOnly:
            return lastMessageOnlyContext(allMessages)

        case .smart:
            guard let maxTokens = maxTokens else {
                // No token limit provided: fall back to the full history.
                return allMessagesContext(allMessages)
            }
            return smartSelection(allMessages: allMessages, maxTokens: maxTokens, conversationId: conversationId)
        }
    }

    // MARK: - Simple Strategies

    /// Old behavior: every message, nothing excluded.
    private func allMessagesContext(_ messages: [Message]) -> ContextWindow {
        ContextWindow(
            messages: messages,
            summaries: [],
            totalTokens: estimateTokens(messages),
            excludedCount: 0
        )
    }

    /// Memory-off behavior: only the most recent message.
    private func lastMessageOnlyContext(_ messages: [Message]) -> ContextWindow {
        guard let last = messages.last else {
            return ContextWindow(messages: [], summaries: [], totalTokens: 0, excludedCount: 0)
        }
        return ContextWindow(
            messages: [last],
            summaries: [],
            totalTokens: estimateTokens([last]),
            excludedCount: messages.count - 1
        )
    }

    // MARK: - Smart Selection Algorithm

    /// Build a context window from recent + starred + important messages,
    /// staying within `maxTokens` (recent messages are always included, even
    /// if they alone exceed the budget).
    private func smartSelection(allMessages: [Message], maxTokens: Int, conversationId: UUID? = nil) -> ContextWindow {
        guard !allMessages.isEmpty else {
            return ContextWindow(messages: [], summaries: [], totalTokens: 0, excludedCount: 0)
        }

        // Only user/assistant turns participate; tool/system messages are dropped.
        let chatMessages = allMessages.filter { $0.role == .user || $0.role == .assistant }

        // Step 1: Always include the last N messages (recent context).
        let recentCount = min(10, chatMessages.count)
        let recentMessages = Array(chatMessages.suffix(recentCount))
        var currentTokens = estimateTokens(recentMessages)

        Log.ui.debug("Smart selection: starting with last \(recentCount) messages (\(currentTokens) tokens)")

        // Step 2: Add starred messages from earlier in the conversation.
        // NOTE: this issues one metadata lookup per older message; consider
        // batching via DatabaseService.getStarredMessages if this gets hot.
        let olderMessages = chatMessages.dropLast(recentCount)
        var starredMessages: [Message] = []

        for message in olderMessages where isMessageStarred(message) {
            let msgTokens = estimateTokens([message])
            if currentTokens + msgTokens <= maxTokens {
                starredMessages.append(message)
                currentTokens += msgTokens
            } else {
                Log.ui.debug("Smart selection: token budget exceeded, stopping at \(recentMessages.count + starredMessages.count) messages")
                break
            }
        }

        // Step 3: Add important messages (scored by cost / length / tokens).
        // Set lookup avoids the O(n^2) contains(where:) scan per message.
        let starredIDs = Set(starredMessages.map(\.id))
        var importantMessages: [Message] = []
        if currentTokens < maxTokens {
            for message in olderMessages {
                // Skip messages already included as starred.
                if starredIDs.contains(message.id) {
                    continue
                }

                let importance = getImportanceScore(message)
                if importance > 0.5 { // Threshold for "important"
                    let msgTokens = estimateTokens([message])
                    if currentTokens + msgTokens <= maxTokens {
                        importantMessages.append(message)
                        currentTokens += msgTokens
                    } else {
                        break
                    }
                }
            }
        }

        // Combine starred + important + recent in chronological order, then
        // de-duplicate by id while preserving that order.
        let allSelected = (starredMessages + importantMessages + recentMessages)
            .sorted { $0.timestamp < $1.timestamp }
        var seen = Set<UUID>()
        let selectedMessages = allSelected.filter { seen.insert($0.id).inserted }

        let excludedCount = chatMessages.count - selectedMessages.count

        // Attach summaries covering the excluded range, when we know the conversation.
        var summaries: [String] = []
        if excludedCount > 0, let conversationId = conversationId {
            summaries = getSummariesForExcludedRange(
                conversationId: conversationId,
                totalMessages: chatMessages.count,
                selectedCount: selectedMessages.count
            )
        }

        Log.ui.info("Smart selection: selected \(selectedMessages.count)/\(chatMessages.count) messages (\(currentTokens) tokens, excluded: \(excludedCount), summaries: \(summaries.count))")

        return ContextWindow(
            messages: selectedMessages,
            summaries: summaries,
            totalTokens: currentTokens,
            excludedCount: excludedCount
        )
    }

    /// Get summaries for excluded message ranges.
    /// NOTE(review): this assumes the excluded messages form a prefix of the
    /// conversation (summary covers indices below `totalMessages - selectedCount`);
    /// starred/important picks can make exclusion non-contiguous — confirm.
    private func getSummariesForExcludedRange(
        conversationId: UUID,
        totalMessages: Int,
        selectedCount: Int
    ) -> [String] {
        guard let summaryRecords = try? DatabaseService.shared.getConversationSummaries(conversationId: conversationId) else {
            return []
        }

        var summaries: [String] = []
        for record in summaryRecords {
            // Only include summaries for messages that were excluded.
            if record.end_message_index < (totalMessages - selectedCount) {
                summaries.append(record.summary)
            }
        }
        return summaries
    }

    // MARK: - Importance Scoring

    /// Calculate importance score (0.0 - 1.0) for a message.
    /// Weights: cost 0.5, content length 0.3, token count 0.2.
    private func getImportanceScore(_ message: Message) -> Double {
        var score = 0.0

        // Factor 1: Cost (expensive calls are important)
        if let cost = message.cost {
            let costScore = min(1.0, cost / 0.01) // $0.01+ = max score
            score += costScore * 0.5
        }

        // Factor 2: Length (detailed messages are important)
        let contentLength = Double(message.content.count)
        let lengthScore = min(1.0, contentLength / 2000.0) // 2000+ chars = max score
        score += lengthScore * 0.3

        // Factor 3: Token count (if available)
        if let tokens = message.tokens {
            let tokenScore = min(1.0, Double(tokens) / 1000.0) // 1000+ tokens = max score
            score += tokenScore * 0.2
        }

        return min(1.0, score)
    }

    /// Check if a message is starred by the user (missing metadata counts as not starred).
    private func isMessageStarred(_ message: Message) -> Bool {
        guard let metadata = try? DatabaseService.shared.getMessageMetadata(messageId: message.id) else {
            return false
        }
        return metadata.user_starred == 1
    }

    // MARK: - Token Estimation

    /// Estimate token count for messages: uses the recorded count when present,
    /// otherwise the rough heuristic of 1 token per 4 characters.
    private func estimateTokens(_ messages: [Message]) -> Int {
        var total = 0
        for message in messages {
            if let tokens = message.tokens {
                total += tokens
            } else {
                // Rough estimate: 1 token ≈ 4 characters
                total += message.content.count / 4
            }
        }
        return total
    }
}
|
||||
@@ -67,6 +67,49 @@ struct EmailLogRecord: Codable, FetchableRecord, PersistableRecord, Sendable {
|
||||
var modelId: String?
|
||||
}
|
||||
|
||||
/// Row of the `message_metadata` table: per-message data backing smart
/// context selection. Snake_case property names map 1:1 onto column names.
struct MessageMetadataRecord: Codable, FetchableRecord, PersistableRecord, Sendable {
    static let databaseTableName = "message_metadata"

    var message_id: String          // primary key; message UUID as string
    var importance_score: Double    // 0.0–1.0 relevance score
    var user_starred: Int           // SQLite boolean: 1 = starred, 0 = not
    var summary: String?            // optional per-message summary
    var chunk_index: Int            // chunk position when a message is split
}
|
||||
|
||||
/// Row of the `message_embeddings` table: one embedding vector per message,
/// stored as a raw blob alongside the model that produced it.
struct MessageEmbeddingRecord: Codable, FetchableRecord, PersistableRecord, Sendable {
    static let databaseTableName = "message_embeddings"

    var message_id: String          // primary key; message UUID as string
    var embedding: Data             // serialized vector (little-endian Float32s)
    var embedding_model: String     // model id that generated the vector
    var embedding_dimension: Int    // vector length
    var created_at: String          // ISO-8601 timestamp
}
|
||||
|
||||
/// Row of the `conversation_embeddings` table: an aggregate embedding for a
/// whole conversation, used for semantic search over conversations.
struct ConversationEmbeddingRecord: Codable, FetchableRecord, PersistableRecord, Sendable {
    static let databaseTableName = "conversation_embeddings"

    var conversation_id: String     // primary key; conversation UUID as string
    var embedding: Data             // serialized vector (little-endian Float32s)
    var embedding_model: String     // model id that generated the vector
    var embedding_dimension: Int    // vector length
    var created_at: String          // ISO-8601 timestamp
}
|
||||
|
||||
/// Row of the `conversation_summaries` table: a summary covering a contiguous
/// range of messages, used for progressive summarization of long histories.
struct ConversationSummaryRecord: Codable, FetchableRecord, PersistableRecord, Sendable {
    static let databaseTableName = "conversation_summaries"

    var id: String                   // primary key; summary UUID as string
    var conversation_id: String      // FK to conversations
    var start_message_index: Int     // first message index covered (inclusive)
    var end_message_index: Int       // last message index covered
    var summary: String              // the summary text
    var token_count: Int?            // token count of the summary, if known
    var created_at: String           // ISO-8601 timestamp
    var summary_model: String?       // model used to produce the summary
}
|
||||
|
||||
// MARK: - DatabaseService
|
||||
|
||||
final class DatabaseService: Sendable {
|
||||
@@ -182,6 +225,73 @@ final class DatabaseService: Sendable {
|
||||
)
|
||||
}
|
||||
|
||||
migrator.registerMigration("v6") { db in
|
||||
// Message metadata for smart context selection
|
||||
try db.create(table: "message_metadata") { t in
|
||||
t.primaryKey("message_id", .text)
|
||||
.references("messages", onDelete: .cascade)
|
||||
t.column("importance_score", .double).notNull().defaults(to: 0.0)
|
||||
t.column("user_starred", .integer).notNull().defaults(to: 0)
|
||||
t.column("summary", .text)
|
||||
t.column("chunk_index", .integer).notNull().defaults(to: 0)
|
||||
}
|
||||
|
||||
try db.create(
|
||||
index: "idx_message_metadata_importance",
|
||||
on: "message_metadata",
|
||||
columns: ["importance_score"]
|
||||
)
|
||||
|
||||
try db.create(
|
||||
index: "idx_message_metadata_starred",
|
||||
on: "message_metadata",
|
||||
columns: ["user_starred"]
|
||||
)
|
||||
}
|
||||
|
||||
migrator.registerMigration("v7") { db in
|
||||
// Message embeddings for semantic search
|
||||
try db.create(table: "message_embeddings") { t in
|
||||
t.primaryKey("message_id", .text)
|
||||
.references("messages", onDelete: .cascade)
|
||||
t.column("embedding", .blob).notNull()
|
||||
t.column("embedding_model", .text).notNull()
|
||||
t.column("embedding_dimension", .integer).notNull()
|
||||
t.column("created_at", .text).notNull()
|
||||
}
|
||||
|
||||
// Conversation embeddings (aggregate of all messages)
|
||||
try db.create(table: "conversation_embeddings") { t in
|
||||
t.primaryKey("conversation_id", .text)
|
||||
.references("conversations", onDelete: .cascade)
|
||||
t.column("embedding", .blob).notNull()
|
||||
t.column("embedding_model", .text).notNull()
|
||||
t.column("embedding_dimension", .integer).notNull()
|
||||
t.column("created_at", .text).notNull()
|
||||
}
|
||||
}
|
||||
|
||||
migrator.registerMigration("v8") { db in
|
||||
// Conversation summaries for progressive summarization
|
||||
try db.create(table: "conversation_summaries") { t in
|
||||
t.primaryKey("id", .text)
|
||||
t.column("conversation_id", .text).notNull()
|
||||
.references("conversations", onDelete: .cascade)
|
||||
t.column("start_message_index", .integer).notNull()
|
||||
t.column("end_message_index", .integer).notNull()
|
||||
t.column("summary", .text).notNull()
|
||||
t.column("token_count", .integer)
|
||||
t.column("created_at", .text).notNull()
|
||||
t.column("summary_model", .text)
|
||||
}
|
||||
|
||||
try db.create(
|
||||
index: "idx_conversation_summaries_conv",
|
||||
on: "conversation_summaries",
|
||||
columns: ["conversation_id"]
|
||||
)
|
||||
}
|
||||
|
||||
return migrator
|
||||
}
|
||||
|
||||
@@ -574,4 +684,202 @@ final class DatabaseService: Sendable {
|
||||
try EmailLogRecord.fetchCount(db)
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Message Metadata Operations
|
||||
|
||||
/// Fetch the metadata row for a message, or nil when none has been written yet.
nonisolated func getMessageMetadata(messageId: UUID) throws -> MessageMetadataRecord? {
    try dbQueue.read { db in
        // Primary key is the message UUID stored as a string.
        try MessageMetadataRecord.fetchOne(db, key: messageId.uuidString)
    }
}
|
||||
|
||||
/// Mark or unmark a message as user-starred, creating its metadata row on
/// first use (other metadata fields get their defaults).
nonisolated func setMessageStarred(messageId: UUID, starred: Bool) throws {
    let flag = starred ? 1 : 0
    try dbQueue.write { db in
        // Update in place when the row already exists…
        if var metadata = try MessageMetadataRecord.fetchOne(db, key: messageId.uuidString) {
            metadata.user_starred = flag
            try metadata.update(db)
            return
        }
        // …otherwise insert a fresh row carrying only the starred flag.
        try MessageMetadataRecord(
            message_id: messageId.uuidString,
            importance_score: 0.0,
            user_starred: flag,
            summary: nil,
            chunk_index: 0
        ).insert(db)
    }
}
|
||||
|
||||
/// Persist a computed importance score for a message, creating its metadata
/// row on first use (other metadata fields get their defaults).
nonisolated func setMessageImportance(messageId: UUID, score: Double) throws {
    try dbQueue.write { db in
        // Update in place when the row already exists…
        if var metadata = try MessageMetadataRecord.fetchOne(db, key: messageId.uuidString) {
            metadata.importance_score = score
            try metadata.update(db)
            return
        }
        // …otherwise insert a fresh row carrying only the score.
        try MessageMetadataRecord(
            message_id: messageId.uuidString,
            importance_score: score,
            user_starred: 0,
            summary: nil,
            chunk_index: 0
        ).insert(db)
    }
}
|
||||
|
||||
/// IDs (as strings) of all starred messages in a conversation, in display order.
/// NOTE(review): the `messages` table columns are camelCase (`conversationId`,
/// `sortOrder`) while the metadata tables use snake_case — confirm against the
/// messages-table migration, which is not visible here.
nonisolated func getStarredMessages(conversationId: UUID) throws -> [String] {
    try dbQueue.read { db in
        let sql = """
        SELECT mm.message_id
        FROM message_metadata mm
        JOIN messages m ON m.id = mm.message_id
        WHERE m.conversationId = ? AND mm.user_starred = 1
        ORDER BY m.sortOrder
        """
        // Parameterized query: the conversation id is bound, not interpolated.
        return try String.fetchAll(db, sql: sql, arguments: [conversationId.uuidString])
    }
}
|
||||
|
||||
// MARK: - Embedding Operations
|
||||
|
||||
/// Store (insert or replace) the embedding vector for a message.
nonisolated func saveMessageEmbedding(messageId: UUID, embedding: Data, model: String, dimension: Int) throws {
    let timestamp = isoFormatter.string(from: Date())
    try dbQueue.write { db in
        // `save` performs an upsert keyed on message_id.
        try MessageEmbeddingRecord(
            message_id: messageId.uuidString,
            embedding: embedding,
            embedding_model: model,
            embedding_dimension: dimension,
            created_at: timestamp
        ).save(db)
    }
}
|
||||
|
||||
/// Fetch the stored embedding blob for a message, or nil if it was never embedded.
nonisolated func getMessageEmbedding(messageId: UUID) throws -> Data? {
    try dbQueue.read { db in
        try MessageEmbeddingRecord.fetchOne(db, key: messageId.uuidString)?.embedding
    }
}
|
||||
|
||||
/// Store (insert or replace) the aggregate embedding for a conversation.
nonisolated func saveConversationEmbedding(conversationId: UUID, embedding: Data, model: String, dimension: Int) throws {
    let timestamp = isoFormatter.string(from: Date())
    try dbQueue.write { db in
        // `save` performs an upsert keyed on conversation_id.
        try ConversationEmbeddingRecord(
            conversation_id: conversationId.uuidString,
            embedding: embedding,
            embedding_model: model,
            embedding_dimension: dimension,
            created_at: timestamp
        ).save(db)
    }
}
|
||||
|
||||
/// Fetch the stored aggregate embedding blob for a conversation, or nil if absent.
nonisolated func getConversationEmbedding(conversationId: UUID) throws -> Data? {
    try dbQueue.read { db in
        try ConversationEmbeddingRecord.fetchOne(db, key: conversationId.uuidString)?.embedding
    }
}
|
||||
|
||||
/// All stored conversation embeddings as (conversation id, raw blob) pairs.
/// Rows whose stored id is not a valid UUID are silently dropped.
nonisolated func getAllConversationEmbeddings() throws -> [(UUID, Data)] {
    try dbQueue.read { db in
        try ConversationEmbeddingRecord.fetchAll(db).compactMap { record in
            UUID(uuidString: record.conversation_id).map { ($0, record.embedding) }
        }
    }
}
|
||||
|
||||
/// Search conversations by semantic similarity to a query embedding.
///
/// Loads every conversation's stored embedding, computes cosine similarity
/// in memory, and returns the best matches. Conversations with no stored
/// embedding are skipped.
///
/// - Parameters:
///   - queryEmbedding: Embedding vector of the search query.
///   - limit: Maximum number of results to return (default 10).
/// - Returns: (conversation, similarity) pairs, highest similarity first.
nonisolated func searchConversationsBySemantic(queryEmbedding: [Float], limit: Int = 10) throws -> [(Conversation, Float)] {
    // Get all conversations
    let allConversations = try listConversations()

    // Map conversation id -> raw embedding blob. Named distinctly so the
    // per-conversation blob below does not shadow it.
    let embeddingsByID = Dictionary(uniqueKeysWithValues: try getAllConversationEmbeddings())

    // Calculate similarity scores for every conversation we have a vector for.
    var scored: [(Conversation, Float)] = []
    for conversation in allConversations {
        guard let blob = embeddingsByID[conversation.id] else { continue }

        // Deserialize embedding and compare against the query.
        let embedding = deserializeEmbedding(blob)
        let similarity = EmbeddingService.shared.cosineSimilarity(queryEmbedding, embedding)
        scored.append((conversation, similarity))
    }

    // Sort by similarity (highest first) and take top N.
    scored.sort { $0.1 > $1.1 }
    return Array(scored.prefix(limit))
}
|
||||
|
||||
/// Decode a blob of little-endian Float32 values into an embedding vector.
///
/// Iterates over complete 4-byte groups only; any trailing partial float is
/// ignored. (The previous `stride(from:to:by:)` loop read `offset..<offset+4`
/// past the end when `data.count` was not a multiple of 4, crashing in
/// `subdata(in:)`.)
private func deserializeEmbedding(_ data: Data) -> [Float] {
    let floatCount = data.count / 4
    var embedding: [Float] = []
    embedding.reserveCapacity(floatCount)

    for index in 0..<floatCount {
        let offset = index * 4
        // Copy 4 bytes out, then reinterpret as a little-endian Float32.
        let bytes = data.subdata(in: offset..<(offset + 4))
        let bitPattern = bytes.withUnsafeBytes { $0.load(as: UInt32.self) }
        embedding.append(Float(bitPattern: UInt32(littleEndian: bitPattern)))
    }

    return embedding
}
|
||||
|
||||
// MARK: - Conversation Summary Operations
|
||||
|
||||
/// Insert a new summary row covering messages [startIndex, endIndex] of a
/// conversation. A fresh UUID is generated for the row; existing rows are
/// never overwritten.
nonisolated func saveConversationSummary(
    conversationId: UUID,
    startIndex: Int,
    endIndex: Int,
    summary: String,
    model: String?,
    tokenCount: Int?
) throws {
    let timestamp = isoFormatter.string(from: Date())
    try dbQueue.write { db in
        try ConversationSummaryRecord(
            id: UUID().uuidString,
            conversation_id: conversationId.uuidString,
            start_message_index: startIndex,
            end_message_index: endIndex,
            summary: summary,
            token_count: tokenCount,
            created_at: timestamp,
            summary_model: model
        ).insert(db)
    }
}
|
||||
|
||||
/// All stored summaries for a conversation, ordered by starting message index.
nonisolated func getConversationSummaries(conversationId: UUID) throws -> [ConversationSummaryRecord] {
    try dbQueue.read { db in
        try ConversationSummaryRecord
            .filter(Column("conversation_id") == conversationId.uuidString)
            .order(Column("start_message_index"))
            .fetchAll(db)
    }
}
|
||||
|
||||
/// Whether a summary already exists for the exact message range
/// [startIndex, endIndex] of the given conversation.
nonisolated func hasSummaryForRange(conversationId: UUID, startIndex: Int, endIndex: Int) throws -> Bool {
    try dbQueue.read { db in
        let matches = try ConversationSummaryRecord
            .filter(Column("conversation_id") == conversationId.uuidString)
            .filter(Column("start_message_index") == startIndex)
            .filter(Column("end_message_index") == endIndex)
            .fetchCount(db)
        return matches != 0
    }
}
|
||||
}
|
||||
|
||||
@@ -180,15 +180,32 @@ final class EmailHandlerService {
|
||||
let response = try await provider.chat(request: request)
|
||||
|
||||
let fullResponse = response.content
|
||||
let promptTokens = response.usage?.promptTokens
|
||||
let completionTokens = response.usage?.completionTokens
|
||||
let totalTokens = response.usage.map { $0.promptTokens + $0.completionTokens }
|
||||
let totalCost: Double? = nil // Calculate if provider supports it
|
||||
|
||||
// Calculate cost if we have pricing info
|
||||
var totalCost: Double? = nil
|
||||
if let usage = response.usage,
|
||||
let models = try? await provider.listModels(),
|
||||
let modelInfo = models.first(where: { $0.id == settings.emailHandlerModel }) {
|
||||
totalCost = (Double(usage.promptTokens) * modelInfo.pricing.prompt / 1_000_000) +
|
||||
(Double(usage.completionTokens) * modelInfo.pricing.completion / 1_000_000)
|
||||
}
|
||||
|
||||
let responseTime = Date().timeIntervalSince(startTime)
|
||||
|
||||
log.info("AI response generated in \(String(format: "%.2f", responseTime))s")
|
||||
|
||||
// Generate HTML email
|
||||
let htmlBody = generateHTMLEmail(aiResponse: fullResponse, originalEmail: email)
|
||||
// Generate HTML email with stats
|
||||
let htmlBody = generateHTMLEmail(
|
||||
aiResponse: fullResponse,
|
||||
originalEmail: email,
|
||||
responseTime: responseTime,
|
||||
promptTokens: promptTokens,
|
||||
completionTokens: completionTokens,
|
||||
cost: totalCost
|
||||
)
|
||||
|
||||
// Send response email
|
||||
let replySubject = email.subject.hasPrefix("Re:") ? email.subject : "Re: \(email.subject)"
|
||||
@@ -275,10 +292,22 @@ final class EmailHandlerService {
|
||||
|
||||
// MARK: - HTML Email Generation
|
||||
|
||||
private func generateHTMLEmail(aiResponse: String, originalEmail: IncomingEmail) -> String {
|
||||
private func generateHTMLEmail(
|
||||
aiResponse: String,
|
||||
originalEmail: IncomingEmail,
|
||||
responseTime: TimeInterval,
|
||||
promptTokens: Int?,
|
||||
completionTokens: Int?,
|
||||
cost: Double?
|
||||
) -> String {
|
||||
// Convert markdown to HTML (basic implementation)
|
||||
let htmlContent = markdownToHTML(aiResponse)
|
||||
|
||||
// Format stats
|
||||
let timeFormatted = String(format: "%.2f", responseTime)
|
||||
let totalTokens = (promptTokens ?? 0) + (completionTokens ?? 0)
|
||||
let costFormatted = cost.map { String(format: "$%.4f", $0) } ?? "N/A"
|
||||
|
||||
return """
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
@@ -298,12 +327,34 @@ final class EmailHandlerService {
|
||||
padding: 20px;
|
||||
border-radius: 8px;
|
||||
}
|
||||
.footer {
|
||||
.stats {
|
||||
margin-top: 30px;
|
||||
padding-top: 20px;
|
||||
padding: 15px;
|
||||
background: #f8f9fa;
|
||||
border-radius: 6px;
|
||||
font-size: 11px;
|
||||
color: #666;
|
||||
}
|
||||
.stats-grid {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr;
|
||||
gap: 8px 12px;
|
||||
margin-top: 8px;
|
||||
}
|
||||
.stats-label {
|
||||
font-weight: 600;
|
||||
color: #555;
|
||||
}
|
||||
.stats-value {
|
||||
color: #666;
|
||||
}
|
||||
.footer {
|
||||
margin-top: 20px;
|
||||
padding-top: 15px;
|
||||
border-top: 1px solid #e0e0e0;
|
||||
font-size: 12px;
|
||||
color: #666;
|
||||
text-align: center;
|
||||
}
|
||||
code {
|
||||
background: #f5f5f5;
|
||||
@@ -323,6 +374,17 @@ final class EmailHandlerService {
|
||||
<div class="content">
|
||||
\(htmlContent)
|
||||
</div>
|
||||
<div class="stats">
|
||||
<div style="font-weight: 600; margin-bottom: 6px; color: #555;">📊 Processing Stats</div>
|
||||
<div class="stats-grid">
|
||||
<span class="stats-label">Response Time:</span>
|
||||
<span class="stats-value">\(timeFormatted)s</span>
|
||||
<span class="stats-label">Tokens Used:</span>
|
||||
<span class="stats-value">\(totalTokens.formatted()) (\(promptTokens ?? 0) prompt + \(completionTokens ?? 0) completion)</span>
|
||||
<span class="stats-label">Cost:</span>
|
||||
<span class="stats-value">\(costFormatted)</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="footer">
|
||||
<p>🤖 This response was generated by AI using oAI Email Handler</p>
|
||||
</div>
|
||||
|
||||
408
oAI/Services/EmbeddingService.swift
Normal file
408
oAI/Services/EmbeddingService.swift
Normal file
@@ -0,0 +1,408 @@
|
||||
//
|
||||
// EmbeddingService.swift
|
||||
// oAI
|
||||
//
|
||||
// Embedding generation and semantic search
|
||||
// Supports multiple providers: OpenAI, OpenRouter, Google
|
||||
//
|
||||
|
||||
import Foundation
|
||||
import os
|
||||
|
||||
// MARK: - Embedding Provider
|
||||
|
||||
/// Supported embedding back-ends, each carrying the concrete model id to use.
enum EmbeddingProvider {
    case openai(model: String)
    case openrouter(model: String)
    case google(model: String)

    /// Default model id for each provider.
    var defaultModel: String {
        switch self {
        case .openai: return "text-embedding-3-small"
        case .openrouter: return "openai/text-embedding-3-small"
        case .google: return "text-embedding-004"
        }
    }

    /// Output vector dimension for the selected model.
    var dimension: Int {
        switch self {
        case .openai(let model):
            // text-embedding-3-large: 3072; -3-small (and others): 1536
            return model == "text-embedding-3-large" ? 3072 : 1536
        case .openrouter(let model):
            if model.contains("text-embedding-3-large") {
                return 3072
            } else if model.contains("qwen3-embedding-8b") {
                // Qwen3-Embedding-8B emits 4096-dimensional vectors
                // (previous value 8192 was wrong; see model card).
                return 4096
            } else {
                return 1536 // Default for most OpenAI-compatible models
            }
        case .google:
            return 768 // text-embedding-004
        }
    }

    /// Human-readable provider name for UI display.
    var displayName: String {
        switch self {
        case .openai: return "OpenAI"
        case .openrouter: return "OpenRouter"
        case .google: return "Google"
        }
    }
}
|
||||
|
||||
// MARK: - Embedding Service
|
||||
|
||||
final class EmbeddingService {
|
||||
static let shared = EmbeddingService()
|
||||
|
||||
private let settings = SettingsService.shared
|
||||
|
||||
private init() {}
|
||||
|
||||
// MARK: - Provider Detection
|
||||
|
||||
/// Get the embedding provider based on user's selection in settings.
/// Returns nil when the selected provider's API key is missing; an
/// unrecognized selection falls back to the best available provider.
func getSelectedProvider() -> EmbeddingProvider? {
    // A provider only counts when its API key is present and non-empty.
    func hasKey(_ key: String?) -> Bool {
        guard let key = key else { return false }
        return !key.isEmpty
    }

    switch settings.embeddingProvider {
    case "openai-small":
        return hasKey(settings.openaiAPIKey) ? .openai(model: "text-embedding-3-small") : nil
    case "openai-large":
        return hasKey(settings.openaiAPIKey) ? .openai(model: "text-embedding-3-large") : nil
    case "openrouter-openai-small":
        return hasKey(settings.openrouterAPIKey) ? .openrouter(model: "openai/text-embedding-3-small") : nil
    case "openrouter-openai-large":
        return hasKey(settings.openrouterAPIKey) ? .openrouter(model: "openai/text-embedding-3-large") : nil
    case "openrouter-qwen":
        return hasKey(settings.openrouterAPIKey) ? .openrouter(model: "qwen/qwen3-embedding-8b") : nil
    case "google-gemini":
        return hasKey(settings.googleAPIKey) ? .google(model: "text-embedding-004") : nil
    default:
        // Unknown selection: fall back to best available provider.
        return getBestAvailableProvider()
    }
}
|
||||
|
||||
/// Get the best available embedding provider based on user's API keys
/// (priority: OpenAI → OpenRouter → Google); nil when no key is configured.
func getBestAvailableProvider() -> EmbeddingProvider? {
    // Candidates in priority order, paired with the key that unlocks each.
    let candidates: [(key: String?, provider: EmbeddingProvider)] = [
        (settings.openaiAPIKey, .openai(model: "text-embedding-3-small")),
        (settings.openrouterAPIKey, .openrouter(model: "openai/text-embedding-3-small")),
        (settings.googleAPIKey, .google(model: "text-embedding-004")),
    ]
    return candidates.first(where: { $0.key?.isEmpty == false })?.provider
}
|
||||
|
||||
/// Check if embeddings are available (user has at least one compatible provider).
var isAvailable: Bool {
    // True when any of the OpenAI / OpenRouter / Google API keys is set.
    return getBestAvailableProvider() != nil
}
|
||||
|
||||
// MARK: - Embedding Generation
|
||||
|
||||
/// Generate embedding for text using the configured provider.
///
/// - Parameters:
///   - text: The text to embed.
///   - provider: Which back-end (and model) to call.
/// - Returns: The embedding vector.
/// - Throws: `EmbeddingError` on missing key, non-200 status, or bad payload.
func generateEmbedding(text: String, provider: EmbeddingProvider) async throws -> [Float] {
    switch provider {
    case .openai(let model):
        return try await generateOpenAIEmbedding(text: text, model: model)
    case .openrouter(let model):
        return try await generateOpenRouterEmbedding(text: text, model: model)
    case .google(let model):
        return try await generateGoogleEmbedding(text: text, model: model)
    }
}
|
||||
|
||||
/// Generate an embedding via OpenAI's `/v1/embeddings` endpoint.
/// - Throws: `EmbeddingError.missingAPIKey` when no key is configured,
///   `.apiError` on non-200 responses, `.invalidResponse` on a bad payload.
private func generateOpenAIEmbedding(text: String, model: String) async throws -> [Float] {
    guard let apiKey = settings.openaiAPIKey, !apiKey.isEmpty else {
        throw EmbeddingError.missingAPIKey("OpenAI")
    }

    // Build the POST request with bearer auth and a JSON body.
    var urlRequest = URLRequest(url: URL(string: "https://api.openai.com/v1/embeddings")!)
    urlRequest.httpMethod = "POST"
    urlRequest.setValue("Bearer \(apiKey)", forHTTPHeaderField: "Authorization")
    urlRequest.setValue("application/json", forHTTPHeaderField: "Content-Type")
    urlRequest.httpBody = try JSONSerialization.data(withJSONObject: ["input": text, "model": model])

    let (payload, rawResponse) = try await URLSession.shared.data(for: urlRequest)

    guard let http = rawResponse as? HTTPURLResponse else {
        throw EmbeddingError.invalidResponse
    }
    if http.statusCode != 200 {
        let errorMessage = String(data: payload, encoding: .utf8) ?? "Unknown error"
        Log.api.error("OpenAI embedding error (\(http.statusCode)): \(errorMessage)")
        throw EmbeddingError.apiError(http.statusCode, errorMessage)
    }

    // Response shape: { "data": [ { "embedding": [Double, ...] } ] }
    let json = try JSONSerialization.jsonObject(with: payload) as? [String: Any]
    guard let vector = ((json?["data"] as? [[String: Any]])?.first?["embedding"]) as? [Double] else {
        throw EmbeddingError.invalidResponse
    }
    return vector.map { Float($0) }
}
|
||||
|
||||
/// Generate OpenRouter embedding (OpenAI-compatible API).
/// NOTE(review): confirm OpenRouter actually exposes an `/embeddings` endpoint
/// for these models — this mirrors the OpenAI wire format but is untested here.
/// - Throws: `EmbeddingError.missingAPIKey` / `.apiError` / `.invalidResponse`.
private func generateOpenRouterEmbedding(text: String, model: String) async throws -> [Float] {
    guard let apiKey = settings.openrouterAPIKey, !apiKey.isEmpty else {
        throw EmbeddingError.missingAPIKey("OpenRouter")
    }

    let url = URL(string: "https://openrouter.ai/api/v1/embeddings")!
    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("Bearer \(apiKey)", forHTTPHeaderField: "Authorization")
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")
    // NOTE(review): placeholder referer ("yourusername") — replace with the real repo URL.
    request.setValue("https://github.com/yourusername/oAI", forHTTPHeaderField: "HTTP-Referer")

    let body: [String: Any] = [
        "input": text,
        "model": model
    ]
    request.httpBody = try JSONSerialization.data(withJSONObject: body)

    let (data, response) = try await URLSession.shared.data(for: request)

    guard let httpResponse = response as? HTTPURLResponse else {
        throw EmbeddingError.invalidResponse
    }

    guard httpResponse.statusCode == 200 else {
        let errorMessage = String(data: data, encoding: .utf8) ?? "Unknown error"
        Log.api.error("OpenRouter embedding error (\(httpResponse.statusCode)): \(errorMessage)")
        throw EmbeddingError.apiError(httpResponse.statusCode, errorMessage)
    }

    // Response shape (OpenAI-compatible): { "data": [ { "embedding": [Double] } ] }
    let json = try JSONSerialization.jsonObject(with: data) as? [String: Any]
    guard let dataArray = json?["data"] as? [[String: Any]],
          let first = dataArray.first,
          let embedding = first["embedding"] as? [Double] else {
        throw EmbeddingError.invalidResponse
    }

    return embedding.map { Float($0) }
}
|
||||
|
||||
/// Generate Google embedding.
///
/// POSTs `text` to the Generative Language `embedContent` endpoint for the
/// given model and decodes the returned `embedding.values` array.
/// - Parameters:
///   - text: The text to embed.
///   - model: Google embedding model name (interpolated into the URL path).
/// - Returns: The embedding vector, converted from `Double` to `Float`.
/// - Throws: `EmbeddingError.missingAPIKey` when no Google key is configured,
///   `EmbeddingError.apiError` on a non-2xx HTTP status, and
///   `EmbeddingError.invalidResponse` when the URL cannot be built or the
///   payload cannot be parsed.
private func generateGoogleEmbedding(text: String, model: String) async throws -> [Float] {
    guard let apiKey = settings.googleAPIKey, !apiKey.isEmpty else {
        throw EmbeddingError.missingAPIKey("Google")
    }

    // `model` and `apiKey` are interpolated into the URL; guard instead of
    // force-unwrapping so an unexpected character can't crash the app.
    guard let url = URL(string: "https://generativelanguage.googleapis.com/v1beta/models/\(model):embedContent?key=\(apiKey)") else {
        throw EmbeddingError.invalidResponse
    }
    var request = URLRequest(url: url)
    request.httpMethod = "POST"
    request.setValue("application/json", forHTTPHeaderField: "Content-Type")

    let body: [String: Any] = [
        "content": [
            "parts": [
                ["text": text]
            ]
        ]
    ]
    request.httpBody = try JSONSerialization.data(withJSONObject: body)

    let (data, response) = try await URLSession.shared.data(for: request)

    guard let httpResponse = response as? HTTPURLResponse else {
        throw EmbeddingError.invalidResponse
    }

    // Accept any 2xx success status rather than exactly 200.
    guard (200...299).contains(httpResponse.statusCode) else {
        let errorMessage = String(data: data, encoding: .utf8) ?? "Unknown error"
        Log.api.error("Google embedding error (\(httpResponse.statusCode)): \(errorMessage)")
        throw EmbeddingError.apiError(httpResponse.statusCode, errorMessage)
    }

    let json = try JSONSerialization.jsonObject(with: data) as? [String: Any]
    guard let embedding = json?["embedding"] as? [String: Any],
          let values = embedding["values"] as? [Double] else {
        throw EmbeddingError.invalidResponse
    }

    return values.map { Float($0) }
}
|
||||
|
||||
// MARK: - Similarity Calculation
|
||||
|
||||
/// Calculate cosine similarity between two embeddings.
///
/// - Parameters:
///   - a: First embedding vector.
///   - b: Second embedding vector (must match `a` in length).
/// - Returns: The cosine of the angle between the vectors, or 0.0 when the
///   dimensions differ or either vector has zero magnitude.
func cosineSimilarity(_ a: [Float], _ b: [Float]) -> Float {
    // Vectors from different models have different dimensions; treat as unrelated.
    guard a.count == b.count else {
        Log.api.error("Embedding dimension mismatch: \(a.count) vs \(b.count)")
        return 0.0
    }

    var dot: Float = 0.0
    var sumSqA: Float = 0.0
    var sumSqB: Float = 0.0
    for (x, y) in zip(a, b) {
        dot += x * y
        sumSqA += x * x
        sumSqB += y * y
    }

    let normA = sqrt(sumSqA)
    let normB = sqrt(sumSqB)

    // A zero-magnitude vector has no direction; similarity is undefined — return 0.
    guard normA > 0 && normB > 0 else {
        return 0.0
    }

    return dot / (normA * normB)
}
|
||||
|
||||
// MARK: - Database Operations
|
||||
|
||||
/// Save message embedding to database.
///
/// Serializes the vector to its binary form and persists it via
/// `DatabaseService`, recording the generating model and dimension.
func saveMessageEmbedding(messageId: UUID, embedding: [Float], model: String) throws {
    try DatabaseService.shared.saveMessageEmbedding(
        messageId: messageId,
        embedding: serializeEmbedding(embedding),
        model: model,
        dimension: embedding.count
    )
}
|
||||
|
||||
/// Get message embedding from database.
///
/// - Returns: The decoded vector, or `nil` when no embedding is stored for the message.
func getMessageEmbedding(messageId: UUID) throws -> [Float]? {
    // Map the optional binary blob straight through the deserializer.
    return try DatabaseService.shared
        .getMessageEmbedding(messageId: messageId)
        .map(deserializeEmbedding)
}
|
||||
|
||||
/// Save conversation embedding to database.
///
/// Serializes the vector to its binary form and persists it via
/// `DatabaseService`, recording the generating model and dimension.
func saveConversationEmbedding(conversationId: UUID, embedding: [Float], model: String) throws {
    try DatabaseService.shared.saveConversationEmbedding(
        conversationId: conversationId,
        embedding: serializeEmbedding(embedding),
        model: model,
        dimension: embedding.count
    )
}
|
||||
|
||||
/// Get conversation embedding from database.
///
/// - Returns: The decoded vector, or `nil` when no embedding is stored for the conversation.
func getConversationEmbedding(conversationId: UUID) throws -> [Float]? {
    // Map the optional binary blob straight through the deserializer.
    return try DatabaseService.shared
        .getConversationEmbedding(conversationId: conversationId)
        .map(deserializeEmbedding)
}
|
||||
|
||||
// MARK: - Serialization
|
||||
|
||||
/// Serialize embedding to binary data (4 bytes per float, little-endian).
///
/// - Parameter embedding: The vector to encode.
/// - Returns: `embedding.count * 4` bytes; each float's IEEE-754 bit pattern
///   is written in little-endian order regardless of host endianness.
private func serializeEmbedding(_ embedding: [Float]) -> Data {
    return embedding.reduce(into: Data(capacity: embedding.count * 4)) { buffer, value in
        // Encode the bit pattern explicitly as little-endian for portability.
        withUnsafeBytes(of: value.bitPattern.littleEndian) { buffer.append(contentsOf: $0) }
    }
}
|
||||
|
||||
/// Deserialize embedding from binary data.
///
/// Reads 4-byte little-endian IEEE-754 floats. Trailing bytes that do not
/// form a complete float (truncated or corrupt blobs) are ignored — the
/// previous implementation trapped on `subdata` when `data.count % 4 != 0`.
/// Reading through `withUnsafeBytes` once also avoids allocating a fresh
/// `Data` per element.
/// - Parameter data: Binary blob produced by `serializeEmbedding(_:)`.
/// - Returns: The decoded embedding vector.
private func deserializeEmbedding(_ data: Data) -> [Float] {
    let count = data.count / 4  // integer division drops any partial trailing chunk
    var embedding: [Float] = []
    embedding.reserveCapacity(count)

    data.withUnsafeBytes { (raw: UnsafeRawBufferPointer) in
        for i in 0..<count {
            let base = i * 4
            // Assemble the 32-bit pattern byte-by-byte: alignment-safe and
            // explicitly little-endian regardless of host endianness.
            let bits = UInt32(raw[base])
                | (UInt32(raw[base + 1]) << 8)
                | (UInt32(raw[base + 2]) << 16)
                | (UInt32(raw[base + 3]) << 24)
            embedding.append(Float(bitPattern: bits))
        }
    }

    return embedding
}
|
||||
|
||||
// MARK: - Conversation Embedding Generation
|
||||
|
||||
/// Generate embedding for an entire conversation (aggregate of messages).
///
/// Concatenates the conversation's user/assistant message content, truncates
/// it to fit typical embedding-model input limits, generates an embedding via
/// the selected provider, and persists the result.
/// - Parameter conversationId: The conversation to embed.
/// - Throws: `EmbeddingError.noProvidersAvailable` when no provider is
///   configured, `EmbeddingError.conversationNotFound` when the conversation
///   cannot be loaded, or any error from the provider request.
func generateConversationEmbedding(conversationId: UUID) async throws {
    // Use user's selected provider, or fall back to best available.
    guard let provider = getSelectedProvider() else {
        throw EmbeddingError.noProvidersAvailable
    }

    // Load conversation messages from the database.
    guard let (_, messages) = try? DatabaseService.shared.loadConversation(id: conversationId) else {
        throw EmbeddingError.conversationNotFound
    }

    // Keep only user/assistant content and join it into one transcript.
    let transcript = messages
        .filter { $0.role == .user || $0.role == .assistant }
        .map(\.content)
        .joined(separator: "\n\n")

    // Truncate if too long (8191 tokens max for most embedding models); 30k chars ≈ 7500 tokens.
    let payload = String(transcript.prefix(30000))

    let vector = try await generateEmbedding(text: payload, provider: provider)

    try saveConversationEmbedding(conversationId: conversationId, embedding: vector, model: provider.defaultModel)

    Log.api.info("Generated conversation embedding for \(conversationId) using \(provider.displayName) (\(vector.count) dimensions)")
}
|
||||
}
|
||||
|
||||
// MARK: - Errors
|
||||
|
||||
/// Failures that can occur while configuring providers, calling embedding
/// APIs, or locating the data to embed.
enum EmbeddingError: LocalizedError {
    case missingAPIKey(String)
    case invalidResponse
    case apiError(Int, String)
    case providerNotImplemented(String)
    case conversationNotFound
    case noProvidersAvailable

    /// Human-readable message surfaced through `LocalizedError`.
    var errorDescription: String? {
        switch self {
        case let .missingAPIKey(provider):
            return "\(provider) API key not configured"
        case .invalidResponse:
            return "Invalid response from embedding API"
        case let .apiError(code, message):
            return "Embedding API error (\(code)): \(message)"
        case let .providerNotImplemented(message):
            return message
        case .conversationNotFound:
            return "Conversation not found"
        case .noProvidersAvailable:
            return "No embedding providers available. Please configure an API key for OpenAI, OpenRouter, or Google."
        }
    }
}
|
||||
@@ -363,6 +363,36 @@ class GitSyncService {
|
||||
|
||||
// MARK: - Auto-Sync
|
||||
|
||||
/// Sync on app startup (pull + import only, no push).
///
/// Runs silently in the background to fetch changes made on other devices.
/// Errors are logged but never propagated, so startup is never blocked.
func syncOnStartup() async {
    // Only meaningful once sync is configured and the repo has been cloned.
    guard settings.syncConfigured, syncStatus.isCloned else {
        log.debug("Skipping startup sync (not configured or not cloned)")
        return
    }

    log.info("Running startup sync (pull + import)...")

    do {
        // Fetch the latest remote state, then import anything new or updated.
        try await pull()
        let result = try await importAllConversations()

        if result.imported > 0 {
            log.info("Startup sync: imported \(result.imported) conversations")
        } else {
            log.debug("Startup sync: no new conversations to import")
        }
    } catch {
        // Don't block app startup on sync errors
        log.warning("Startup sync failed (non-fatal): \(error.localizedDescription)")
    }
}
|
||||
|
||||
/// Perform auto-sync with debouncing (export + push)
|
||||
/// Debounces multiple rapid sync requests to avoid spamming git
|
||||
func autoSync() async {
|
||||
|
||||
@@ -146,6 +146,54 @@ class SettingsService {
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether smart context selection is enabled (defaults to false when unset).
var contextSelectionEnabled: Bool {
    get { cache["contextSelectionEnabled"] == "true" }
    set {
        let stored = newValue ? "true" : "false"
        cache["contextSelectionEnabled"] = stored
        DatabaseService.shared.setSetting(key: "contextSelectionEnabled", value: stored)
    }
}

/// Token budget for context selection (defaults to 100,000 when unset or unparsable).
var contextMaxTokens: Int {
    get { cache["contextMaxTokens"].flatMap(Int.init) ?? 100_000 }
    set {
        let stored = String(newValue)
        cache["contextMaxTokens"] = stored
        DatabaseService.shared.setSetting(key: "contextMaxTokens", value: stored)
    }
}

/// Whether embedding generation is enabled (defaults to false when unset).
var embeddingsEnabled: Bool {
    get { cache["embeddingsEnabled"] == "true" }
    set {
        let stored = newValue ? "true" : "false"
        cache["embeddingsEnabled"] = stored
        DatabaseService.shared.setSetting(key: "embeddingsEnabled", value: stored)
    }
}

/// Selected embedding provider identifier (defaults to "openai-small" when unset).
var embeddingProvider: String {
    get { cache["embeddingProvider"] ?? "openai-small" }
    set {
        cache["embeddingProvider"] = newValue
        DatabaseService.shared.setSetting(key: "embeddingProvider", value: newValue)
    }
}

/// Whether progressive summarization is enabled (defaults to false when unset).
var progressiveSummarizationEnabled: Bool {
    get { cache["progressiveSummarizationEnabled"] == "true" }
    set {
        let stored = newValue ? "true" : "false"
        cache["progressiveSummarizationEnabled"] = stored
        DatabaseService.shared.setSetting(key: "progressiveSummarizationEnabled", value: stored)
    }
}

/// Message-count threshold that triggers summarization (defaults to 50 when unset or unparsable).
var summarizationThreshold: Int {
    get { cache["summarizationThreshold"].flatMap(Int.init) ?? 50 }
    set {
        let stored = String(newValue)
        cache["summarizationThreshold"] = stored
        DatabaseService.shared.setSetting(key: "summarizationThreshold", value: stored)
    }
}
|
||||
|
||||
var mcpEnabled: Bool {
|
||||
get { cache["mcpEnabled"] == "true" }
|
||||
set {
|
||||
|
||||
Reference in New Issue
Block a user