Initial commit

This commit is contained in:
2026-02-11 22:22:55 +01:00
commit 42f54954c1
58 changed files with 10639 additions and 0 deletions

View File

@@ -0,0 +1,38 @@
//
// Conversation.swift
// oAI
//
// Model for saved conversations
//
import Foundation
/// A persisted chat conversation: an ordered message transcript plus a
/// user-visible name and creation/update timestamps.
struct Conversation: Identifiable, Codable {
    let id: UUID
    var name: String
    var messages: [Message]
    let createdAt: Date
    var updatedAt: Date

    init(
        id: UUID = UUID(),
        name: String,
        messages: [Message] = [],
        createdAt: Date = Date(),
        updatedAt: Date = Date()
    ) {
        self.id = id
        self.name = name
        self.messages = messages
        self.createdAt = createdAt
        self.updatedAt = updatedAt
    }

    /// Number of messages currently in the transcript.
    var messageCount: Int { messages.count }

    /// Timestamp of the newest message, or `updatedAt` when the
    /// conversation has no messages yet.
    var lastMessageDate: Date {
        guard let newest = messages.last else { return updatedAt }
        return newest.timestamp
    }
}

125
oAI/Models/Message.swift Normal file
View File

@@ -0,0 +1,125 @@
//
// Message.swift
// oAI
//
// Core message model for chat conversations
//
import Foundation
/// Author of a chat message. Raw values ("user" / "assistant" / "system")
/// are what get encoded and decoded via `Codable`.
enum MessageRole: String, Codable {
    case user, assistant, system
}
/// A single chat message, persisted as part of a `Conversation`.
///
/// `isStreaming` and `generatedImages` are runtime-only state: they are
/// deliberately left out of `CodingKeys`, so they are never encoded.
struct Message: Identifiable, Codable, Equatable {
    let id: UUID
    let role: MessageRole
    var content: String
    var tokens: Int?
    var cost: Double?
    let timestamp: Date
    let attachments: [FileAttachment]?

    // Streaming state (not persisted)
    var isStreaming: Bool = false

    // Generated images from image-output models (base64-decoded PNG/JPEG data)
    var generatedImages: [Data]? = nil

    init(
        id: UUID = UUID(),
        role: MessageRole,
        content: String,
        tokens: Int? = nil,
        cost: Double? = nil,
        timestamp: Date = Date(),
        attachments: [FileAttachment]? = nil,
        isStreaming: Bool = false,
        generatedImages: [Data]? = nil
    ) {
        self.id = id
        self.role = role
        self.content = content
        self.tokens = tokens
        self.cost = cost
        self.timestamp = timestamp
        self.attachments = attachments
        self.isStreaming = isStreaming
        self.generatedImages = generatedImages
    }

    // Only the fields listed here are persisted.
    enum CodingKeys: String, CodingKey {
        case id, role, content, tokens, cost, timestamp, attachments
    }

    /// Equality compares `id` plus every mutable (`var`) field so views
    /// refresh as content/tokens/cost/streaming/images change. The `let`
    /// fields (`role`, `timestamp`, `attachments`) are intentionally
    /// omitted — they cannot change after init.
    static func == (lhs: Message, rhs: Message) -> Bool {
        (lhs.id, lhs.content, lhs.tokens, lhs.isStreaming)
            == (rhs.id, rhs.content, rhs.tokens, rhs.isStreaming)
            && lhs.cost == rhs.cost
            && lhs.generatedImages == rhs.generatedImages
    }
}
/// A file attached to a message. `data` holds the file contents:
/// raw bytes for images/PDFs, UTF-8 bytes for text files.
struct FileAttachment: Codable, Equatable {
    let path: String
    let type: AttachmentType
    let data: Data? // file contents: raw bytes for images/PDFs, UTF-8 for text

    enum AttachmentType: String, Codable {
        case image
        case pdf
        case text
    }

    /// Lowercased file extension of `path` — single source of truth for the
    /// extension-driven lookups below (previously duplicated in both).
    private static func normalizedExtension(of path: String) -> String {
        (path as NSString).pathExtension.lowercased()
    }

    /// Detect attachment type from file extension (case-insensitive).
    /// Unrecognized extensions fall back to `.text`.
    static func typeFromExtension(_ path: String) -> AttachmentType {
        switch normalizedExtension(of: path) {
        case "png", "jpg", "jpeg", "gif", "webp", "bmp", "svg":
            return .image
        case "pdf":
            return .pdf
        default:
            return .text
        }
    }

    /// MIME type string for the file (used in base64 data URLs).
    /// Unrecognized extensions fall back to "text/plain".
    var mimeType: String {
        switch Self.normalizedExtension(of: path) {
        case "png": return "image/png"
        case "jpg", "jpeg": return "image/jpeg"
        case "gif": return "image/gif"
        case "webp": return "image/webp"
        case "bmp": return "image/bmp"
        case "svg": return "image/svg+xml"
        case "pdf": return "application/pdf"
        default: return "text/plain"
        }
    }
}
// MARK: - Display Helpers

/// UI-facing presentation of each message role.
extension MessageRole {
    /// Human-readable label shown alongside a message.
    var displayName: String {
        switch self {
        case .user:
            return "You"
        case .assistant:
            return "Assistant"
        case .system:
            return "System"
        }
    }

    /// SF Symbols icon name representing the role.
    var iconName: String {
        switch self {
        case .user:
            return "person.circle.fill"
        case .assistant:
            return "cpu"
        case .system:
            return "info.circle.fill"
        }
    }
}

240
oAI/Models/MockData.swift Normal file
View File

@@ -0,0 +1,240 @@
//
// MockData.swift
// oAI
//
// Mock data for Phase 1 testing
//
import Foundation
// MARK: - Mock messages (Phase 1 testing)
extension Message {
// Canned user question. NOTE(review): token/cost figures throughout this
// extension are illustrative sample values, not computed from the content.
static let mockUser1 = Message(
role: .user,
content: "What is SwiftUI and how does it compare to UIKit?",
tokens: 12,
cost: 0.00002
)
// Canned assistant reply with markdown headings and Swift code fences —
// useful for exercising markdown/code rendering in the chat view.
static let mockAssistant1 = Message(
role: .assistant,
content: """
SwiftUI is Apple's modern declarative framework for building user interfaces across all Apple platforms. Here's how it compares to UIKit:
## Key Differences
**SwiftUI (Declarative)**
- Describe WHAT the UI should look like
- State-driven updates
- Less boilerplate code
- Cross-platform by default
**UIKit (Imperative)**
- Describe HOW to build the UI step-by-step
- Manual state management
- More verbose
- iOS/tvOS focused
## Example
```swift
// SwiftUI
struct ContentView: View {
@State private var count = 0
var body: some View {
VStack {
Text("Count: \\(count)")
Button("Increment") {
count += 1
}
}
}
}
```
```swift
// UIKit equivalent
class ViewController: UIViewController {
var count = 0
let label = UILabel()
let button = UIButton()
override func viewDidLoad() {
super.viewDidLoad()
setupUI()
}
@objc func incrementTapped() {
count += 1
label.text = "Count: \\(count)"
}
func setupUI() {
// ... lots more setup code
}
}
```
SwiftUI is recommended for new projects, especially if targeting iOS 14+ and multiple platforms.
""",
tokens: 245,
cost: 0.00049
)
// Second canned user question.
static let mockUser2 = Message(
role: .user,
content: "Can you explain async/await in Swift?",
tokens: 8,
cost: 0.00001
)
// Longer assistant reply covering async/await, again with code fences.
static let mockAssistant2 = Message(
role: .assistant,
content: """
Async/await is Swift's modern concurrency model introduced in Swift 5.5. It makes asynchronous code look and behave like synchronous code.
## Basic Example
```swift
// Old way with completion handlers
func fetchData(completion: @escaping (Result<Data, Error>) -> Void) {
URLSession.shared.dataTask(with: url) { data, response, error in
if let error = error {
completion(.failure(error))
} else if let data = data {
completion(.success(data))
}
}.resume()
}
// New way with async/await
func fetchData() async throws -> Data {
let (data, _) = try await URLSession.shared.data(from: url)
return data
}
```
## Key Benefits
1. **Readability**: No callback pyramids
2. **Error handling**: Standard try/catch
3. **Sequential logic**: Reads top-to-bottom
4. **Cancellation**: Built-in with Task
## Usage
```swift
Task {
do {
let data = try await fetchData()
await MainActor.run {
// Update UI
}
} catch {
print("Error: \\(error)")
}
}
```
Much cleaner than completion handlers!
""",
tokens: 189,
cost: 0.00038
)
// System notice (e.g. after clearing a conversation); carries no token/cost data.
static let mockSystem = Message(
role: .system,
content: "Conversation cleared. Starting fresh.",
tokens: nil,
cost: nil
)
// A full mock transcript in display order (mockSystem intentionally excluded).
static let mockMessages = [mockUser1, mockAssistant1, mockUser2, mockAssistant2]
}
// MARK: - Mock model catalog (Phase 1 testing)
extension ModelInfo {
// Hand-written sample catalog spanning several providers. Pricing values
// are USD per 1M tokens (see `ModelInfo.Pricing`); capability flags vary
// per entry so filtering UI can be exercised.
static let mockModels = [
ModelInfo(
id: "anthropic/claude-sonnet-4",
name: "Claude Sonnet 4",
description: "Balanced intelligence and speed for most tasks",
contextLength: 200_000,
pricing: Pricing(prompt: 3.0, completion: 15.0),
capabilities: ModelCapabilities(vision: true, tools: true, online: true)
),
ModelInfo(
id: "anthropic/claude-opus-4",
name: "Claude Opus 4",
description: "Most capable model for complex tasks",
contextLength: 200_000,
pricing: Pricing(prompt: 15.0, completion: 75.0),
capabilities: ModelCapabilities(vision: true, tools: true, online: true)
),
ModelInfo(
id: "anthropic/claude-haiku-4",
name: "Claude Haiku 4",
description: "Fast and efficient for simple tasks",
contextLength: 200_000,
pricing: Pricing(prompt: 0.8, completion: 4.0),
capabilities: ModelCapabilities(vision: true, tools: true, online: true)
),
ModelInfo(
id: "openai/gpt-4o",
name: "GPT-4o",
description: "OpenAI's flagship multimodal model",
contextLength: 128_000,
pricing: Pricing(prompt: 2.5, completion: 10.0),
capabilities: ModelCapabilities(vision: true, tools: true, online: false)
),
ModelInfo(
id: "openai/gpt-4o-mini",
name: "GPT-4o Mini",
description: "Faster and cheaper GPT-4o variant",
contextLength: 128_000,
pricing: Pricing(prompt: 0.15, completion: 0.6),
capabilities: ModelCapabilities(vision: true, tools: true, online: false)
),
// Reasoning model: no vision/tools, exercises the "no capabilities" path.
ModelInfo(
id: "openai/o1",
name: "o1",
description: "Advanced reasoning model for complex problems",
contextLength: 200_000,
pricing: Pricing(prompt: 15.0, completion: 60.0),
capabilities: ModelCapabilities(vision: false, tools: false, online: false)
),
// 2M context exercises the "M" branch of contextLengthDisplay.
ModelInfo(
id: "google/gemini-pro-1.5",
name: "Gemini Pro 1.5",
description: "Google's advanced multimodal model",
contextLength: 2_000_000,
pricing: Pricing(prompt: 1.25, completion: 5.0),
capabilities: ModelCapabilities(vision: true, tools: true, online: false)
),
ModelInfo(
id: "meta-llama/llama-3.1-405b",
name: "Llama 3.1 405B",
description: "Meta's largest open source model",
contextLength: 128_000,
pricing: Pricing(prompt: 2.7, completion: 2.7),
capabilities: ModelCapabilities(vision: false, tools: true, online: false)
)
]
}
/// Mock conversations for previews and Phase 1 testing.
extension Conversation {
    /// Created a day ago, last updated an hour ago.
    static let mockConversation1 = Conversation(
        name: "SwiftUI Discussion",
        messages: [Message.mockUser1, Message.mockAssistant1],
        createdAt: Date(timeIntervalSinceNow: -86400),
        updatedAt: Date(timeIntervalSinceNow: -3600)
    )

    /// Created two days ago, last updated two hours ago.
    static let mockConversation2 = Conversation(
        name: "Async/Await Tutorial",
        messages: [Message.mockUser2, Message.mockAssistant2],
        createdAt: Date(timeIntervalSinceNow: -172800),
        updatedAt: Date(timeIntervalSinceNow: -7200)
    )

    /// All mock conversations, most recently created first.
    static let mockConversations = [mockConversation1, mockConversation2]
}

View File

@@ -0,0 +1,56 @@
//
// ModelInfo.swift
// oAI
//
// Model information and capabilities
//
import Foundation
/// Metadata for a selectable LLM: identity, context window, pricing,
/// and capability flags.
struct ModelInfo: Identifiable, Codable, Hashable {
    let id: String
    let name: String
    let description: String?
    let contextLength: Int
    let pricing: Pricing
    let capabilities: ModelCapabilities
    var architecture: Architecture? = nil
    var topProvider: String? = nil

    /// Token prices in USD per 1M tokens.
    struct Pricing: Codable, Hashable {
        let prompt: Double // per 1M tokens
        let completion: Double
    }

    struct ModelCapabilities: Codable, Hashable {
        let vision: Bool // Images/PDFs
        let tools: Bool // Function calling
        let online: Bool // Web search
        var imageGeneration: Bool = false // Image output

        /// Explicit memberwise init (custom `init(from:)` below would
        /// otherwise suppress the synthesized one existing callers use).
        init(vision: Bool, tools: Bool, online: Bool, imageGeneration: Bool = false) {
            self.vision = vision
            self.tools = tools
            self.online = online
            self.imageGeneration = imageGeneration
        }

        // Fix: synthesized Decodable treats `imageGeneration` as a required
        // key despite its `= false` default, so payloads without the key
        // failed to decode. Decode it as optional and fall back to `false`.
        init(from decoder: Decoder) throws {
            let container = try decoder.container(keyedBy: CodingKeys.self)
            vision = try container.decode(Bool.self, forKey: .vision)
            tools = try container.decode(Bool.self, forKey: .tools)
            online = try container.decode(Bool.self, forKey: .online)
            imageGeneration = try container.decodeIfPresent(Bool.self, forKey: .imageGeneration) ?? false
        }

        private enum CodingKeys: String, CodingKey {
            case vision, tools, online, imageGeneration
        }
    }

    struct Architecture: Codable, Hashable {
        let tokenizer: String?
        let instructType: String?
        let modality: String?
    }

    // MARK: - Computed display properties

    /// Context window rendered compactly: "2M", "200K", or the plain
    /// number below 1000. Integer division truncates (e.g. 1_500_000 → "1M").
    var contextLengthDisplay: String {
        if contextLength >= 1_000_000 {
            return "\(contextLength / 1_000_000)M"
        } else if contextLength >= 1000 {
            return "\(contextLength / 1000)K"
        } else {
            return "\(contextLength)"
        }
    }

    /// Prompt price formatted as dollars (per 1M tokens), e.g. "$2.50".
    var promptPriceDisplay: String {
        String(format: "$%.2f", pricing.prompt)
    }

    /// Completion price formatted as dollars (per 1M tokens).
    var completionPriceDisplay: String {
        String(format: "$%.2f", pricing.completion)
    }
}

View File

@@ -0,0 +1,58 @@
//
// SessionStats.swift
// oAI
//
// Session statistics tracking
//
import Foundation
/// Running totals for the current chat session: tokens, dollar cost,
/// and message count.
struct SessionStats {
    var totalInputTokens: Int = 0
    var totalOutputTokens: Int = 0
    var totalCost: Double = 0.0
    var messageCount: Int = 0

    /// Combined prompt + completion tokens.
    var totalTokens: Int { totalInputTokens + totalOutputTokens }

    /// Compact token count for display: "1.2M", "3.4K", or the plain number.
    var totalTokensDisplay: String {
        let count = totalTokens
        switch count {
        case 1_000_000...:
            return String(format: "%.1fM", Double(count) / 1_000_000)
        case 1000...:
            return String(format: "%.1fK", Double(count) / 1000)
        default:
            return String(count)
        }
    }

    /// Session cost rendered to four decimal places, e.g. "$0.0123".
    var totalCostDisplay: String {
        String(format: "$%.4f", totalCost)
    }

    /// Mean cost per message; zero when nothing has been recorded yet.
    var averageCostPerMessage: Double {
        messageCount > 0 ? totalCost / Double(messageCount) : 0.0
    }

    /// Fold one exchange into the totals; nil fields count as zero,
    /// but the message is always counted.
    mutating func addMessage(inputTokens: Int?, outputTokens: Int?, cost: Double?) {
        totalInputTokens += inputTokens ?? 0
        totalOutputTokens += outputTokens ?? 0
        totalCost += cost ?? 0
        messageCount += 1
    }

    /// Restore every counter to its initial zero state.
    mutating func reset() {
        self = SessionStats()
    }
}

90
oAI/Models/Settings.swift Normal file
View File

@@ -0,0 +1,90 @@
//
// Settings.swift
// oAI
//
// Application settings and configuration
//
import Foundation
/// User-configurable application settings, persisted via Codable.
struct Settings: Codable {
    // MARK: Provider settings
    var defaultProvider: Provider
    var openrouterAPIKey: String?
    var anthropicAPIKey: String?
    var openaiAPIKey: String?
    var ollamaBaseURL: String

    // MARK: Model settings
    var defaultModel: String?
    var streamEnabled: Bool
    var maxTokens: Int
    var systemPrompt: String?

    // MARK: Feature flags
    var onlineMode: Bool
    var memoryEnabled: Bool
    var mcpEnabled: Bool

    // MARK: Web search
    var searchProvider: SearchProvider
    var googleAPIKey: String?
    var googleSearchEngineID: String?

    // MARK: UI
    /// Cost (USD) above which the UI should warn the user.
    var costWarningThreshold: Double

    /// Supported chat backends.
    enum Provider: String, Codable, CaseIterable {
        case openrouter
        case anthropic
        case openai
        case ollama

        /// Properly-branded display name.
        /// Fix: was `rawValue.capitalized`, which produced the mis-branded
        /// "Openrouter" and "Openai" in the UI.
        var displayName: String {
            switch self {
            case .openrouter: return "OpenRouter"
            case .anthropic: return "Anthropic"
            case .openai: return "OpenAI"
            case .ollama: return "Ollama"
            }
        }

        /// SF Symbols icon for the provider picker.
        var iconName: String {
            switch self {
            case .openrouter: return "network"
            case .anthropic: return "brain"
            case .openai: return "sparkles"
            case .ollama: return "server.rack"
            }
        }
    }

    /// Supported web-search backends.
    enum SearchProvider: String, Codable, CaseIterable {
        case anthropicNative = "anthropic_native"
        case duckduckgo
        case google

        var displayName: String {
            switch self {
            case .anthropicNative: return "Anthropic Native"
            case .duckduckgo: return "DuckDuckGo"
            case .google: return "Google"
            }
        }
    }

    /// Factory defaults used on first launch (no API keys configured).
    static let `default` = Settings(
        defaultProvider: .openrouter,
        openrouterAPIKey: nil,
        anthropicAPIKey: nil,
        openaiAPIKey: nil,
        ollamaBaseURL: "http://localhost:11434",
        defaultModel: nil,
        streamEnabled: true,
        maxTokens: 4096,
        systemPrompt: nil,
        onlineMode: false,
        memoryEnabled: true,
        mcpEnabled: false,
        searchProvider: .duckduckgo,
        googleAPIKey: nil,
        googleSearchEngineID: nil,
        costWarningThreshold: 1.0
    )
}