Fixed default model setting. Added Ctrl+Y (and F3) to copy the latest reply as Markdown.

2026-02-04 15:16:26 +01:00
parent 1191fa6d19
commit ecc2489eef
9 changed files with 96 additions and 12 deletions

View File

@@ -9,7 +9,7 @@ Author: Rune
License: MIT
"""
__version__ = "2.1.0"
__version__ = "3.0.0-b2"
__author__ = "Rune"
__license__ = "MIT"

View File

@@ -863,7 +863,8 @@ class ConfigCommand(Command):
# Show model selector with search term, same as /model
return CommandResult.success(data={"show_model_selector": True, "search": value, "set_as_default": True})
else:
pass # Show current model, silently ignore
# Show model selector without search filter
return CommandResult.success(data={"show_model_selector": True, "search": "", "set_as_default": True})
elif setting == "system":
from oai.constants import DEFAULT_SYSTEM_PROMPT
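Both branches of /config model now return the same selector payload; only the search filter differs. A sketch of the data contract the TUI consumes (not part of the diff; the "claude" search term is purely illustrative):

# /config model <value>  -> open the selector pre-filtered by the value
with_value = {"show_model_selector": True, "search": "claude", "set_as_default": True}

# /config model          -> open the selector unfiltered
without_value = {"show_model_selector": True, "search": "", "set_as_default": True}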

View File

@@ -10,12 +10,15 @@ from pathlib import Path
from typing import Set, Dict, Any
import logging
# Import version from single source of truth
from oai import __version__
# =============================================================================
# APPLICATION METADATA
# =============================================================================
APP_NAME = "oAI"
APP_VERSION = "3.0.0"
APP_VERSION = __version__ # Single source of truth in oai/__init__.py
APP_URL = "https://iurl.no/oai"
APP_DESCRIPTION = "OpenRouter AI Chat Client with MCP Integration"
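With APP_VERSION aliased to oai.__version__, the two strings can no longer drift apart. A sketch of a guard one might keep in the test suite (not part of the diff; adjust the import path if the metadata constants live in a different module than oai.config.settings):

from oai import __version__
from oai.config.settings import APP_VERSION  # assumed module path

# APP_VERSION is now just an alias for the single source of truth.
assert APP_VERSION == __version__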

View File

@@ -1,12 +1,15 @@
"""Main Textual TUI application for oAI."""
import asyncio
import platform
from pathlib import Path
from typing import Optional
import pyperclip
from textual.app import App, ComposeResult
from textual.widgets import Input
from oai import __version__
from oai.commands.registry import CommandStatus, registry
from oai.config.settings import Settings
from oai.core.client import AIClient
@@ -66,7 +69,7 @@ class oAIChatApp(App):
"""Compose the TUI layout."""
model_name = self.session.selected_model.get("name", "") if self.session.selected_model else ""
model_info = self.session.selected_model if self.session.selected_model else None
yield Header(version="3.0.0", model=model_name, model_info=model_info)
yield Header(version=__version__, model=model_name, model_info=model_info)
yield ChatDisplay()
yield InputBar()
yield CommandDropdown()
@@ -200,6 +203,10 @@ class oAIChatApp(App):
elif event.key == "ctrl+n":
event.prevent_default()
self.call_later(self._handle_next_command)
elif event.key in ("f3", "ctrl+y"):
# F3 or Ctrl+Y to copy last AI response
event.prevent_default()
self.action_copy_last_response()
def on_input_changed(self, event: Input.Changed) -> None:
"""Handle input value changes to show/hide command dropdown."""
@@ -817,11 +824,17 @@ class oAIChatApp(App):
info_widget = UserMessageWidget(result.message)
await chat_display.add_message(info_widget)
# Handle special command data (e.g., show_model_selector)
if result and result.data:
await self._handle_command_data(result.data)
async def _handle_command_data(self, data: dict) -> None:
"""Handle special command result data."""
# Model selection
if "show_model_selector" in data:
self._show_model_selector(data.get("search", ""))
search = data.get("search", "")
set_as_default = data.get("set_as_default", False)
self._show_model_selector(search, set_as_default)
# Retry prompt
elif "retry_prompt" in data:
@@ -831,7 +844,7 @@ class oAIChatApp(App):
elif "paste_prompt" in data:
await self.handle_message(data["paste_prompt"])
def _show_model_selector(self, search: str = "") -> None:
def _show_model_selector(self, search: str = "", set_as_default: bool = False) -> None:
"""Show the model selector screen."""
def handle_model_selection(selected: Optional[dict]) -> None:
"""Handle the model selection result."""
@@ -840,9 +853,16 @@ class oAIChatApp(App):
header = self.query_one(Header)
header.update_model(selected.get("name", ""), selected)
# Save as default if requested
if set_as_default:
self.settings.set_default_model(selected["id"])
# Show confirmation in chat
async def add_confirmation():
chat_display = self.query_one(ChatDisplay)
if set_as_default:
info_widget = UserMessageWidget(f"✓ Default model set to: {selected['id']}")
else:
info_widget = UserMessageWidget(f"✓ Model changed to: {selected['id']}")
await chat_display.add_message(info_widget)
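Settings.set_default_model itself is not shown in this commit; only the one-argument call above is. A hypothetical sketch of the Settings side (assumption: a dict-backed config with a save() helper; the real implementation may differ):

class Settings:
    ...  # existing fields such as self._config and self.save() are assumed

    def set_default_model(self, model_id: str) -> None:
        """Persist the chosen model id so the next session starts with it."""
        self._config["default_model"] = model_id  # assumed backing store
        self.save()                               # assumed persistence helper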
@@ -1000,3 +1020,36 @@ class oAIChatApp(App):
async def _handle_next_command(self) -> None:
"""Handle Ctrl+N to show next message."""
await self.handle_command("/next")
def action_copy_last_response(self) -> None:
"""Copy the last AI response to clipboard."""
try:
chat_display = self.query_one(ChatDisplay)
# Find the last AssistantMessageWidget
assistant_widgets = [
child for child in chat_display.children
if isinstance(child, AssistantMessageWidget)
]
if not assistant_widgets:
self.notify("No AI responses to copy", severity="warning")
return
# Get the last assistant message
last_assistant = assistant_widgets[-1]
text = last_assistant.full_text
if not text:
self.notify("Last response is empty", severity="warning")
return
# Copy to clipboard
pyperclip.copy(text)
# Show success notification
preview = text[:50] + "..." if len(text) > 50 else text
self.notify(f"✓ Copied: {preview}", severity="information")
except Exception as e:
self.notify(f"Copy failed: {e}", severity="error")

View File

@@ -63,10 +63,12 @@ class HelpScreen(ModalScreen[None]):
[bold cyan]═══ KEYBOARD SHORTCUTS ═══[/]
[bold]F1[/] Show this help (Ctrl+H may not work)
[bold]F2[/] Open model selector (Ctrl+M may not work)
[bold]F3[/] Copy last AI response to clipboard
[bold]Ctrl+S[/] Show session statistics
[bold]Ctrl+L[/] Clear chat display
[bold]Ctrl+P[/] Show previous message
[bold]Ctrl+N[/] Show next message
[bold]Ctrl+Y[/] Copy last AI response (alternative to F3)
[bold]Ctrl+Q[/] Quit application
[bold]Up/Down[/] Navigate input history
[bold]ESC[/] Close dialogs

View File

@@ -56,7 +56,9 @@ AssistantMessageWidget {
#assistant-content {
height: auto;
max-height: 100%;
color: $text;
color: #cccccc;
link-color: #888888;
link-style: none;
}
InputBar {

View File

@@ -8,7 +8,7 @@ from typing import Optional, Dict, Any
class Header(Static):
"""Header displaying app title, version, current model, and capabilities."""
def __init__(self, version: str = "3.0.0", model: str = "", model_info: Optional[Dict[str, Any]] = None):
def __init__(self, version: str = "3.0.1", model: str = "", model_info: Optional[Dict[str, Any]] = None):
super().__init__()
self.version = version
self.model = model
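Since app.py now passes __version__ explicitly, this hardcoded default only matters when Header is constructed without arguments; note it is bumped to "3.0.1" here while the package itself is at "3.0.0-b2". A sketch of deriving the fallback from the same single source of truth (not part of the diff):

from typing import Any, Dict, Optional

from textual.widgets import Static

from oai import __version__

class Header(Static):
    def __init__(self, version: str = __version__, model: str = "",
                 model_info: Optional[Dict[str, Any]] = None):
        super().__init__()
        self.version = version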

View File

@@ -2,10 +2,29 @@
from typing import Any, AsyncIterator, Tuple
from rich.console import Console
from rich.markdown import Markdown
from rich.style import Style
from rich.theme import Theme
from textual.app import ComposeResult
from textual.widgets import RichLog, Static
# Custom theme for Markdown rendering - neutral colors matching the dark theme
MARKDOWN_THEME = Theme({
"markdown.text": Style(color="#cccccc"),
"markdown.paragraph": Style(color="#cccccc"),
"markdown.code": Style(color="#e0e0e0", bgcolor="#2a2a2a"),
"markdown.code_block": Style(color="#e0e0e0", bgcolor="#2a2a2a"),
"markdown.heading": Style(color="#ffffff", bold=True),
"markdown.h1": Style(color="#ffffff", bold=True),
"markdown.h2": Style(color="#eeeeee", bold=True),
"markdown.h3": Style(color="#dddddd", bold=True),
"markdown.link": Style(color="#aaaaaa", underline=False),
"markdown.link_url": Style(color="#888888"),
"markdown.emphasis": Style(color="#cccccc", italic=True),
"markdown.strong": Style(color="#ffffff", bold=True),
})
class UserMessageWidget(Static):
"""Widget for displaying user messages."""
@@ -54,7 +73,9 @@ class AssistantMessageWidget(Static):
if hasattr(chunk, "delta_content") and chunk.delta_content:
self.full_text += chunk.delta_content
log.clear()
log.write(Markdown(self.full_text))
# Use neutral code theme for syntax highlighting
md = Markdown(self.full_text, code_theme="github-dark", inline_code_theme="github-dark")
log.write(md)
if hasattr(chunk, "usage") and chunk.usage:
usage = chunk.usage
@@ -66,4 +87,6 @@ class AssistantMessageWidget(Static):
self.full_text = content
log = self.query_one("#assistant-content", RichLog)
log.clear()
log.write(Markdown(content))
# Use neutral code theme for syntax highlighting
md = Markdown(content, code_theme="github-dark", inline_code_theme="github-dark")
log.write(md)
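MARKDOWN_THEME is defined at the top of the file, but the hunks shown here only pass code_theme and inline_code_theme to Markdown; the markdown.* styles only take effect on a Console that actually carries the theme. A sketch of applying it directly (an illustration under that assumption, not code from this commit):

from rich.console import Console
from rich.markdown import Markdown

# Render a reply with the neutral theme outside the TUI, e.g. when exporting
# to a plain terminal. MARKDOWN_THEME is the Theme defined above.
console = Console(theme=MARKDOWN_THEME)
console.print(Markdown("**bold**, a [link](https://iurl.no/oai) and `inline code`",
                       code_theme="github-dark", inline_code_theme="github-dark"))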

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "oai"
version = "3.0.0"
version = "3.0.0-b2" # MUST match oai/__init__.py __version__
description = "OpenRouter AI Chat Client - A feature-rich terminal-based chat application"
readme = "README.md"
license = {text = "MIT"}