Several bugs fixed

This commit is contained in:
2026-02-23 07:54:16 +01:00
parent 56f79a690e
commit 079eccbc4e
5 changed files with 110 additions and 22 deletions

View File

@@ -279,7 +279,7 @@
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 26.2; MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 2.3.2; MARKETING_VERSION = "2.3.2-bugfix";
PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI; PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES; REGISTER_APP_GROUPS = YES;
@@ -323,7 +323,7 @@
LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks";
"LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks"; "LD_RUNPATH_SEARCH_PATHS[sdk=macosx*]" = "@executable_path/../Frameworks";
MACOSX_DEPLOYMENT_TARGET = 26.2; MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 2.3.2; MARKETING_VERSION = "2.3.2-bugfix";
PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI; PRODUCT_BUNDLE_IDENTIFIER = com.oai.oAI;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES; REGISTER_APP_GROUPS = YES;

View File

@@ -73,7 +73,11 @@ class AnthropicProvider: AIProvider {
// MARK: - Models // MARK: - Models
/// Local metadata used to enrich API results (pricing, context length) and as offline fallback. /// Local metadata used to enrich API results (pricing, context length) and as offline fallback.
/// Entries are matched by exact ID first; if no exact match is found, the enrichment step
/// falls back to prefix matching so newly-released model variants (e.g. "claude-sonnet-4-6-20260301")
/// still inherit the correct pricing tier.
private static let knownModels: [ModelInfo] = [ private static let knownModels: [ModelInfo] = [
// Claude 4.x series
ModelInfo( ModelInfo(
id: "claude-opus-4-6", id: "claude-opus-4-6",
name: "Claude Opus 4.6", name: "Claude Opus 4.6",
@@ -82,6 +86,31 @@ class AnthropicProvider: AIProvider {
pricing: .init(prompt: 15.0, completion: 75.0), pricing: .init(prompt: 15.0, completion: 75.0),
capabilities: .init(vision: true, tools: true, online: true) capabilities: .init(vision: true, tools: true, online: true)
), ),
ModelInfo(
id: "claude-sonnet-4-6",
name: "Claude Sonnet 4.6",
description: "Best balance of speed and capability",
contextLength: 200_000,
pricing: .init(prompt: 3.0, completion: 15.0),
capabilities: .init(vision: true, tools: true, online: true)
),
ModelInfo(
id: "claude-haiku-4-6",
name: "Claude Haiku 4.6",
description: "Fastest and most affordable",
contextLength: 200_000,
pricing: .init(prompt: 0.80, completion: 4.0),
capabilities: .init(vision: true, tools: true, online: true)
),
// Claude 4.5 series
ModelInfo(
id: "claude-opus-4-5",
name: "Claude Opus 4.5",
description: "Previous generation Opus",
contextLength: 200_000,
pricing: .init(prompt: 15.0, completion: 75.0),
capabilities: .init(vision: true, tools: true, online: true)
),
ModelInfo( ModelInfo(
id: "claude-opus-4-5-20251101", id: "claude-opus-4-5-20251101",
name: "Claude Opus 4.5", name: "Claude Opus 4.5",
@@ -90,6 +119,14 @@ class AnthropicProvider: AIProvider {
pricing: .init(prompt: 15.0, completion: 75.0), pricing: .init(prompt: 15.0, completion: 75.0),
capabilities: .init(vision: true, tools: true, online: true) capabilities: .init(vision: true, tools: true, online: true)
), ),
ModelInfo(
id: "claude-sonnet-4-5",
name: "Claude Sonnet 4.5",
description: "Best balance of speed and capability",
contextLength: 200_000,
pricing: .init(prompt: 3.0, completion: 15.0),
capabilities: .init(vision: true, tools: true, online: true)
),
ModelInfo( ModelInfo(
id: "claude-sonnet-4-5-20250929", id: "claude-sonnet-4-5-20250929",
name: "Claude Sonnet 4.5", name: "Claude Sonnet 4.5",
@@ -98,6 +135,14 @@ class AnthropicProvider: AIProvider {
pricing: .init(prompt: 3.0, completion: 15.0), pricing: .init(prompt: 3.0, completion: 15.0),
capabilities: .init(vision: true, tools: true, online: true) capabilities: .init(vision: true, tools: true, online: true)
), ),
ModelInfo(
id: "claude-haiku-4-5",
name: "Claude Haiku 4.5",
description: "Fastest and most affordable",
contextLength: 200_000,
pricing: .init(prompt: 0.80, completion: 4.0),
capabilities: .init(vision: true, tools: true, online: true)
),
ModelInfo( ModelInfo(
id: "claude-haiku-4-5-20251001", id: "claude-haiku-4-5-20251001",
name: "Claude Haiku 4.5", name: "Claude Haiku 4.5",
@@ -106,6 +151,7 @@ class AnthropicProvider: AIProvider {
pricing: .init(prompt: 0.80, completion: 4.0), pricing: .init(prompt: 0.80, completion: 4.0),
capabilities: .init(vision: true, tools: true, online: true) capabilities: .init(vision: true, tools: true, online: true)
), ),
// Claude 3.x series
ModelInfo( ModelInfo(
id: "claude-3-7-sonnet-20250219", id: "claude-3-7-sonnet-20250219",
name: "Claude 3.7 Sonnet", name: "Claude 3.7 Sonnet",
@@ -124,6 +170,14 @@ class AnthropicProvider: AIProvider {
), ),
] ]
/// Pricing tiers used for fuzzy fallback matching on unknown model IDs.
/// Keyed by model name prefix (longest match wins).
private static let pricingFallback: [(prefix: String, prompt: Double, completion: Double)] = [
("claude-opus", 15.0, 75.0),
("claude-sonnet", 3.0, 15.0),
("claude-haiku", 0.80, 4.0),
]
/// Fetch live model list from GET /v1/models, enriched with local pricing/context metadata. /// Fetch live model list from GET /v1/models, enriched with local pricing/context metadata.
/// Falls back to knownModels if the request fails (no key, offline, etc.). /// Falls back to knownModels if the request fails (no key, offline, etc.).
func listModels() async throws -> [ModelInfo] { func listModels() async throws -> [ModelInfo] {
@@ -158,14 +212,20 @@ class AnthropicProvider: AIProvider {
guard let id = item["id"] as? String, guard let id = item["id"] as? String,
id.hasPrefix("claude-") else { return nil } id.hasPrefix("claude-") else { return nil }
let displayName = item["display_name"] as? String ?? id let displayName = item["display_name"] as? String ?? id
// Exact match first
if let known = enrichment[id] { return known } if let known = enrichment[id] { return known }
// Unknown new model use display name and sensible defaults // Fuzzy fallback: find the longest prefix that matches
let fallback = Self.pricingFallback
.filter { id.hasPrefix($0.prefix) }
.max(by: { $0.prefix.count < $1.prefix.count })
let pricing = fallback.map { ModelInfo.Pricing(prompt: $0.prompt, completion: $0.completion) }
?? ModelInfo.Pricing(prompt: 0, completion: 0)
return ModelInfo( return ModelInfo(
id: id, id: id,
name: displayName, name: displayName,
description: item["description"] as? String ?? "", description: item["description"] as? String ?? "",
contextLength: 200_000, contextLength: 200_000,
pricing: .init(prompt: 0, completion: 0), pricing: pricing,
capabilities: .init(vision: true, tools: true, online: false) capabilities: .init(vision: true, tools: true, online: false)
) )
} }

View File

@@ -114,7 +114,7 @@ class MCPService {
// MARK: - Tool Schema Generation // MARK: - Tool Schema Generation
func getToolSchemas() -> [Tool] { func getToolSchemas(onlineMode: Bool = false) -> [Tool] {
var tools: [Tool] = [ var tools: [Tool] = [
makeTool( makeTool(
name: "read_file", name: "read_file",
@@ -220,6 +220,19 @@ class MCPService {
tools.append(contentsOf: paperlessService.getToolSchemas()) tools.append(contentsOf: paperlessService.getToolSchemas())
} }
// Add web_search tool when online mode is active
// (OpenRouter handles search natively via :online model suffix, so excluded here)
if onlineMode {
tools.append(makeTool(
name: "web_search",
description: "Search the web for current information using DuckDuckGo. Use this when you need up-to-date information, news, or facts not in your training data. Formulate a concise, focused search query.",
properties: [
"query": prop("string", "The search query to look up")
],
required: ["query"]
))
}
return tools return tools
} }
@@ -333,6 +346,18 @@ class MCPService {
} }
return copyFile(source: source, destination: destination) return copyFile(source: source, destination: destination)
case "web_search":
let query = args["query"] as? String ?? ""
guard !query.isEmpty else {
return ["error": "Missing required parameter: query"]
}
let results = await WebSearchService.shared.search(query: query)
if results.isEmpty {
return ["results": [], "message": "No results found for: \(query)"]
}
let mapped = results.map { ["title": $0.title, "url": $0.url, "snippet": $0.snippet] }
return ["results": mapped]
default: default:
// Route anytype_* tools to AnytypeMCPService // Route anytype_* tools to AnytypeMCPService
if name.hasPrefix("anytype_") { if name.hasPrefix("anytype_") {

View File

@@ -164,6 +164,13 @@ Don't narrate future actions ("Let me...") - just use the tools.
// Otherwise, build the prompt: default + conditional sections + custom (if append mode) // Otherwise, build the prompt: default + conditional sections + custom (if append mode)
var prompt = defaultSystemPrompt var prompt = defaultSystemPrompt
// Prepend model identity to prevent models trained on Claude data from misidentifying themselves.
// Skip for direct Anthropic/OpenAI providers — those models know who they are.
if let model = selectedModel,
currentProvider != .anthropic && currentProvider != .openai {
prompt = "You are \(model.name).\n\n" + prompt
}
// Add tool-specific guidelines if MCP is enabled (tools are available) // Add tool-specific guidelines if MCP is enabled (tools are available)
if mcpEnabled { if mcpEnabled {
prompt += toolUsageGuidelines prompt += toolUsageGuidelines
@@ -435,6 +442,16 @@ Don't narrate future actions ("Let me...") - just use the tools.
} }
/// Infer which provider owns a given model ID based on naming conventions. /// Infer which provider owns a given model ID based on naming conventions.
/// Update the selected model and keep currentProvider + settings in sync.
/// Call this whenever the user picks a model in the model selector.
func selectModel(_ model: ModelInfo) {
let newProvider = inferProvider(from: model.id) ?? currentProvider
selectedModel = model
currentProvider = newProvider
settings.defaultModel = model.id
settings.defaultProvider = newProvider
}
private func inferProvider(from modelId: String) -> Settings.Provider? { private func inferProvider(from modelId: String) -> Settings.Provider? {
// OpenRouter models always contain a "/" (e.g. "anthropic/claude-3-5-sonnet") // OpenRouter models always contain a "/" (e.g. "anthropic/claude-3-5-sonnet")
if modelId.contains("/") { return .openrouter } if modelId.contains("/") { return .openrouter }
@@ -1220,7 +1237,8 @@ Don't narrate future actions ("Let me...") - just use the tools.
let startTime = Date() let startTime = Date()
var wasCancelled = false var wasCancelled = false
do { do {
let tools = mcp.getToolSchemas() // Include web_search tool when online mode is on (not needed for OpenRouter — it handles search via :online suffix)
let tools = mcp.getToolSchemas(onlineMode: onlineMode && currentProvider != .openrouter)
// Apply :online suffix for OpenRouter when online mode is active // Apply :online suffix for OpenRouter when online mode is active
var effectiveModelId = modelId var effectiveModelId = modelId
@@ -1259,20 +1277,6 @@ Don't narrate future actions ("Let me...") - just use the tools.
? messages.filter { $0.role != .system } ? messages.filter { $0.role != .system }
: [messages.last(where: { $0.role == .user })].compactMap { $0 } : [messages.last(where: { $0.role == .user })].compactMap { $0 }
// Web search via our WebSearchService
// Append results to last user message content (matching Python oAI approach)
if onlineMode && currentProvider != .openrouter {
if let lastUserIdx = messagesToSend.lastIndex(where: { $0.role == .user }) {
Log.search.info("Running web search for tool-aware path (\(currentProvider.displayName))")
let results = await WebSearchService.shared.search(query: messagesToSend[lastUserIdx].content)
if !results.isEmpty {
let searchContext = "\n\n\(WebSearchService.shared.formatResults(results))\n\nPlease use the above web search results to help answer the user's question."
messagesToSend[lastUserIdx].content += searchContext
Log.search.info("Injected \(results.count) search results into user message")
}
}
}
let systemPrompt: [String: Any] = [ let systemPrompt: [String: Any] = [
"role": "system", "role": "system",
"content": systemContent "content": systemContent

View File

@@ -63,8 +63,7 @@ struct ContentView: View {
selectedModel: chatViewModel.selectedModel, selectedModel: chatViewModel.selectedModel,
onSelect: { model in onSelect: { model in
let oldModel = chatViewModel.selectedModel let oldModel = chatViewModel.selectedModel
chatViewModel.selectedModel = model chatViewModel.selectModel(model)
SettingsService.shared.defaultModel = model.id
chatViewModel.showModelSelector = false chatViewModel.showModelSelector = false
// Trigger auto-save on model switch // Trigger auto-save on model switch
Task { Task {