From 079eccbc4e7e45a5c9b4ea434d66817c3c96e224 Mon Sep 17 00:00:00 2001 From: Rune Olsen Date: Mon, 23 Feb 2026 07:54:16 +0100 Subject: [PATCH] Fix pricing fallback for unknown models, expose web_search as a tool, and sync provider on model selection --- oAI.xcodeproj/project.pbxproj | 4 +- oAI/Providers/AnthropicProvider.swift | 64 ++++++++++++++++++++++++++- oAI/Services/MCPService.swift | 27 ++++++++++- oAI/ViewModels/ChatViewModel.swift | 34 +++++++------- oAI/Views/Main/ContentView.swift | 3 +- 5 files changed, 110 insertions(+), 22 deletions(-) diff --git a/oAI.xcodeproj/project.pbxproj b/oAI.xcodeproj/project.pbxproj index bd660f1..ef69f70 100644 --- a/oAI.xcodeproj/project.pbxproj +++ b/oAI.xcodeproj/project.pbxproj @@ -279,7 +279,7 @@ LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 26.2; - MARKETING_VERSION = 2.3.2; + MARKETING_VERSION = 2.3.3; PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI; PRODUCT_NAME = "$(TARGET_NAME)"; REGISTER_APP_GROUPS = YES; @@ -323,7 +323,7 @@ LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; MACOSX_DEPLOYMENT_TARGET = 26.2; - MARKETING_VERSION = 2.3.2; + MARKETING_VERSION = 2.3.3; PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI; PRODUCT_NAME = "$(TARGET_NAME)"; REGISTER_APP_GROUPS = YES; diff --git a/oAI/Providers/AnthropicProvider.swift b/oAI/Providers/AnthropicProvider.swift index f633d9a..e982526 100644 --- a/oAI/Providers/AnthropicProvider.swift +++ b/oAI/Providers/AnthropicProvider.swift @@ -73,7 +73,11 @@ class AnthropicProvider: AIProvider { // MARK: - Models /// Local metadata used to enrich API results (pricing, context length) and as offline fallback. + /// Entries are matched by exact ID first; if no exact match is found, the enrichment step + /// falls back to prefix matching so newly-released model variants (e.g. "claude-sonnet-4-6-20260301") + /// still inherit the correct pricing tier. 
private static let knownModels: [ModelInfo] = [ + // Claude 4.x series ModelInfo( id: "claude-opus-4-6", name: "Claude Opus 4.6", @@ -82,6 +86,31 @@ class AnthropicProvider: AIProvider { pricing: .init(prompt: 15.0, completion: 75.0), capabilities: .init(vision: true, tools: true, online: true) ), + ModelInfo( + id: "claude-sonnet-4-6", + name: "Claude Sonnet 4.6", + description: "Best balance of speed and capability", + contextLength: 200_000, + pricing: .init(prompt: 3.0, completion: 15.0), + capabilities: .init(vision: true, tools: true, online: true) + ), + ModelInfo( + id: "claude-haiku-4-6", + name: "Claude Haiku 4.6", + description: "Fastest and most affordable", + contextLength: 200_000, + pricing: .init(prompt: 0.80, completion: 4.0), + capabilities: .init(vision: true, tools: true, online: true) + ), + // Claude 4.5 series + ModelInfo( + id: "claude-opus-4-5", + name: "Claude Opus 4.5", + description: "Previous generation Opus", + contextLength: 200_000, + pricing: .init(prompt: 15.0, completion: 75.0), + capabilities: .init(vision: true, tools: true, online: true) + ), ModelInfo( id: "claude-opus-4-5-20251101", name: "Claude Opus 4.5", @@ -90,6 +119,14 @@ class AnthropicProvider: AIProvider { pricing: .init(prompt: 15.0, completion: 75.0), capabilities: .init(vision: true, tools: true, online: true) ), + ModelInfo( + id: "claude-sonnet-4-5", + name: "Claude Sonnet 4.5", + description: "Best balance of speed and capability", + contextLength: 200_000, + pricing: .init(prompt: 3.0, completion: 15.0), + capabilities: .init(vision: true, tools: true, online: true) + ), ModelInfo( id: "claude-sonnet-4-5-20250929", name: "Claude Sonnet 4.5", @@ -98,6 +135,14 @@ class AnthropicProvider: AIProvider { pricing: .init(prompt: 3.0, completion: 15.0), capabilities: .init(vision: true, tools: true, online: true) ), + ModelInfo( + id: "claude-haiku-4-5", + name: "Claude Haiku 4.5", + description: "Fastest and most affordable", + contextLength: 200_000, + pricing: 
.init(prompt: 0.80, completion: 4.0), + capabilities: .init(vision: true, tools: true, online: true) + ), ModelInfo( id: "claude-haiku-4-5-20251001", name: "Claude Haiku 4.5", @@ -106,6 +151,7 @@ class AnthropicProvider: AIProvider { pricing: .init(prompt: 0.80, completion: 4.0), capabilities: .init(vision: true, tools: true, online: true) ), + // Claude 3.x series ModelInfo( id: "claude-3-7-sonnet-20250219", name: "Claude 3.7 Sonnet", @@ -124,6 +170,14 @@ class AnthropicProvider: AIProvider { ), ] + /// Pricing tiers used for fuzzy fallback matching on unknown model IDs. + /// Keyed by model name prefix (longest match wins). + private static let pricingFallback: [(prefix: String, prompt: Double, completion: Double)] = [ + ("claude-opus", 15.0, 75.0), + ("claude-sonnet", 3.0, 15.0), + ("claude-haiku", 0.80, 4.0), + ] + /// Fetch live model list from GET /v1/models, enriched with local pricing/context metadata. /// Falls back to knownModels if the request fails (no key, offline, etc.). func listModels() async throws -> [ModelInfo] { @@ -158,14 +212,20 @@ class AnthropicProvider: AIProvider { guard let id = item["id"] as? String, id.hasPrefix("claude-") else { return nil } let displayName = item["display_name"] as? String ?? id + // Exact match first if let known = enrichment[id] { return known } - // Unknown new model — use display name and sensible defaults + // Fuzzy fallback: find the longest prefix that matches + let fallback = Self.pricingFallback + .filter { id.hasPrefix($0.prefix) } + .max(by: { $0.prefix.count < $1.prefix.count }) + let pricing = fallback.map { ModelInfo.Pricing(prompt: $0.prompt, completion: $0.completion) } + ?? ModelInfo.Pricing(prompt: 0, completion: 0) return ModelInfo( id: id, name: displayName, description: item["description"] as? String ?? 
"", contextLength: 200_000, - pricing: .init(prompt: 0, completion: 0), + pricing: pricing, capabilities: .init(vision: true, tools: true, online: false) ) } diff --git a/oAI/Services/MCPService.swift b/oAI/Services/MCPService.swift index f696476..de4ea95 100644 --- a/oAI/Services/MCPService.swift +++ b/oAI/Services/MCPService.swift @@ -114,7 +114,7 @@ class MCPService { // MARK: - Tool Schema Generation - func getToolSchemas() -> [Tool] { + func getToolSchemas(onlineMode: Bool = false) -> [Tool] { var tools: [Tool] = [ makeTool( name: "read_file", @@ -220,6 +220,19 @@ class MCPService { tools.append(contentsOf: paperlessService.getToolSchemas()) } + // Add web_search tool when online mode is active + // (OpenRouter handles search natively via :online model suffix, so excluded here) + if onlineMode { + tools.append(makeTool( + name: "web_search", + description: "Search the web for current information using DuckDuckGo. Use this when you need up-to-date information, news, or facts not in your training data. Formulate a concise, focused search query.", + properties: [ + "query": prop("string", "The search query to look up") + ], + required: ["query"] + )) + } + return tools } @@ -333,6 +346,18 @@ class MCPService { } return copyFile(source: source, destination: destination) + case "web_search": + let query = args["query"] as? String ?? 
"" + guard !query.isEmpty else { + return ["error": "Missing required parameter: query"] + } + let results = await WebSearchService.shared.search(query: query) + if results.isEmpty { + return ["results": [], "message": "No results found for: \(query)"] + } + let mapped = results.map { ["title": $0.title, "url": $0.url, "snippet": $0.snippet] } + return ["results": mapped] + default: // Route anytype_* tools to AnytypeMCPService if name.hasPrefix("anytype_") { diff --git a/oAI/ViewModels/ChatViewModel.swift b/oAI/ViewModels/ChatViewModel.swift index b9c8e01..1d05eff 100644 --- a/oAI/ViewModels/ChatViewModel.swift +++ b/oAI/ViewModels/ChatViewModel.swift @@ -164,6 +164,13 @@ Don't narrate future actions ("Let me...") - just use the tools. // Otherwise, build the prompt: default + conditional sections + custom (if append mode) var prompt = defaultSystemPrompt + // Prepend model identity to prevent models trained on Claude data from misidentifying themselves. + // Skip for direct Anthropic/OpenAI providers — those models know who they are. + if let model = selectedModel, + currentProvider != .anthropic && currentProvider != .openai { + prompt = "You are \(model.name).\n\n" + prompt + } + // Add tool-specific guidelines if MCP is enabled (tools are available) if mcpEnabled { prompt += toolUsageGuidelines @@ -435,6 +442,16 @@ Don't narrate future actions ("Let me...") - just use the tools. } /// Infer which provider owns a given model ID based on naming conventions. + /// Update the selected model and keep currentProvider + settings in sync. + /// Call this whenever the user picks a model in the model selector. + func selectModel(_ model: ModelInfo) { + let newProvider = inferProvider(from: model.id) ?? currentProvider + selectedModel = model + currentProvider = newProvider + settings.defaultModel = model.id + settings.defaultProvider = newProvider + } + private func inferProvider(from modelId: String) -> Settings.Provider? 
{ // OpenRouter models always contain a "/" (e.g. "anthropic/claude-3-5-sonnet") if modelId.contains("/") { return .openrouter } @@ -1220,7 +1237,8 @@ Don't narrate future actions ("Let me...") - just use the tools. let startTime = Date() var wasCancelled = false do { - let tools = mcp.getToolSchemas() + // Include web_search tool when online mode is on (not needed for OpenRouter — it handles search via :online suffix) + let tools = mcp.getToolSchemas(onlineMode: onlineMode && currentProvider != .openrouter) // Apply :online suffix for OpenRouter when online mode is active var effectiveModelId = modelId @@ -1259,20 +1277,6 @@ Don't narrate future actions ("Let me...") - just use the tools. ? messages.filter { $0.role != .system } : [messages.last(where: { $0.role == .user })].compactMap { $0 } - // Web search via our WebSearchService - // Append results to last user message content (matching Python oAI approach) - if onlineMode && currentProvider != .openrouter { - if let lastUserIdx = messagesToSend.lastIndex(where: { $0.role == .user }) { - Log.search.info("Running web search for tool-aware path (\(currentProvider.displayName))") - let results = await WebSearchService.shared.search(query: messagesToSend[lastUserIdx].content) - if !results.isEmpty { - let searchContext = "\n\n\(WebSearchService.shared.formatResults(results))\n\nPlease use the above web search results to help answer the user's question." 
- messagesToSend[lastUserIdx].content += searchContext - Log.search.info("Injected \(results.count) search results into user message") - } - } - } - let systemPrompt: [String: Any] = [ "role": "system", "content": systemContent diff --git a/oAI/Views/Main/ContentView.swift b/oAI/Views/Main/ContentView.swift index 6cb1ef8..145e256 100644 --- a/oAI/Views/Main/ContentView.swift +++ b/oAI/Views/Main/ContentView.swift @@ -63,8 +63,7 @@ struct ContentView: View { selectedModel: chatViewModel.selectedModel, onSelect: { model in let oldModel = chatViewModel.selectedModel - chatViewModel.selectedModel = model - SettingsService.shared.defaultModel = model.id + chatViewModel.selectModel(model) chatViewModel.showModelSelector = false // Trigger auto-save on model switch Task {