Added default model option++

2025-12-08 13:09:52 +01:00
parent 254d92174c
commit 93cd60b271
2 changed files with 70 additions and 22 deletions


@@ -43,10 +43,10 @@ A command-line interface (CLI) chat application for interacting with AI language
```bash
# Using pip
pip install typer rich openrouter pyperclip requests prompt_toolkit
# Using uv
uv add typer rich openrouter pyperclip requests prompt_toolkit
# Using pip and requirements.txt (recommended)
pip3 install -r requirements.txt
```
@@ -74,17 +74,18 @@ python oai_chat.py chat
### Available Commands
| Command | Description | Example |
|---------------------|-----------------------------------------------------------------------------|:------------------------------------------------------------------------|
| `/help` | Display a table with examples for all commands and features, including new ones like `/clear`. | `/help` |
| `/model [search]` | Select or switch models; optional search for substring matching. | `/model gpt` (Filters to GPT models)<br>`/model` (Shows all models) |
| `/config api` | Update OpenRouter API key and reinitialize the client. | `/config api`<br>Enter key: sk-...<br>[Output: API key updated!] |
| `/config url` | Change the base URL for custom OpenRouter endpoints. | `/config url`<br>Enter URL: https://custom.openrouter.ai/api/v1 |
| `/config model [search]` | Set a default model that loads on startup; displays models for selection (persisted in DB; does not change the current session model). | `/config model gpt`<br>(Shows GPT models; user selects one to set as default) |
| `/config stream on/off` | Toggle response streaming (default: on for real-time chat). | `/config stream off` (For full responses at once)<br>`/config stream on` |
| `/config` | View current configurations. | `/config` (Displays table: API Key, Base URL, Streaming, Database path) |
| `/clear` | Clear the terminal screen for a clean interface, preserving chat state. | `/clear`<br>[Output: Screen cleared. Ready for your next input!] |
| `exit`, `quit`, `bye` | Exit the app cleanly. | `exit` |
| Chatting with Files | Attach files using `@path` (e.g., images for vision models, text for context). | `Explain this @~/demo.png`<br>(Attaches image if model supports it) |
| Arrow Key Navigation | Use ↑/↓ arrow keys to scroll through and edit previous user inputs. | While at the "You>" prompt, press ↑ for the last message, ↓ for the next. |
### Example Session

oai.py

@@ -25,6 +25,7 @@ homefilepath = Path.home()
filepath = homefilepath.joinpath('.config/oai')
database = filepath.joinpath('oai_config.db')
DB_FILE = database
version = '1.0'
def create_table_if_not_exists():
"""Ensure the config table exists and directories are created."""
@@ -55,6 +56,7 @@ def set_config(key: str, value: str):
API_KEY = get_config('api_key')
OPENROUTER_BASE_URL = get_config('base_url') or "https://openrouter.ai/api/v1" # Default if not set
STREAM_ENABLED = get_config('stream_enabled') or "on" # Default to streaming on
DEFAULT_MODEL_ID = get_config('default_model') # Load default model ID from DB
# Fetch models once at module level (with loaded BASE_URL)
models_data = []
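The `get_config`/`set_config` helpers referenced above are not part of this diff; they back a simple key/value store in the SQLite database at `~/.config/oai/oai_config.db`. A minimal sketch of that pattern follows, with the table name and schema assumed for illustration:

```python
# Hedged sketch of the key/value config helpers (table name and schema are assumptions).
import sqlite3
from pathlib import Path

DB_FILE = Path.home() / ".config" / "oai" / "oai_config.db"


def set_config(key: str, value: str) -> None:
    """Insert or update a single configuration value."""
    DB_FILE.parent.mkdir(parents=True, exist_ok=True)
    with sqlite3.connect(str(DB_FILE)) as conn:
        conn.execute("CREATE TABLE IF NOT EXISTS config (key TEXT PRIMARY KEY, value TEXT)")
        conn.execute("INSERT OR REPLACE INTO config (key, value) VALUES (?, ?)", (key, value))


def get_config(key: str) -> str | None:
    """Return the stored value for key, or None if it has never been set."""
    if not DB_FILE.exists():
        return None
    with sqlite3.connect(str(DB_FILE)) as conn:
        row = conn.execute("SELECT value FROM config WHERE key = ?", (key,)).fetchone()
    return row[0] if row else None


# Usage mirroring the new default-model feature:
# set_config('default_model', 'some/model-id')   # saved by '/config model'
# get_config('default_model')                    # loaded as DEFAULT_MODEL_ID at startup
```

The real helpers may differ in detail; only the key names (`api_key`, `base_url`, `stream_enabled`, `default_model`) are taken from this diff.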
@@ -66,6 +68,12 @@ try:
    models_data = response.json()["data"]
    # Filter: Exclude "video" models, but allow "image" for attachments
    text_models = [m for m in models_data if "modalities" not in m or "video" not in (m.get("modalities") or [])]
    # After fetching models, load default model if set and available
    selected_model_default = None
    if DEFAULT_MODEL_ID:
        selected_model_default = next((m for m in text_models if m["id"] == DEFAULT_MODEL_ID), None)
        if not selected_model_default:
            console.print(f"[bold yellow]Warning: Saved default model '{DEFAULT_MODEL_ID}' is not available. Use '/config model' to set a new one.[/]")
except Exception as e:
    models_data = []
    text_models = []
@@ -135,7 +143,9 @@ def chat():
console.print("[bold red]No suitable models available or error fetching models (check API key and base URL).[/]")
raise typer.Exit()
selected_model = None
# Initialize selected_model with default if available, else None (session-specific changes via /model)
selected_model = selected_model_default # Set from DB load, or None
client = OpenRouter(api_key=API_KEY)
console.print("[bold blue]Welcome to oAI! Type your message, '/model [search]' to select/change model, '/config api/url/stream' to configure, '/help' for examples, or 'exit'/'quit' to end.[/]")
@@ -152,7 +162,7 @@ def chat():
console.print("[bold yellow]Goodbye![/]")
return
# Handle /model command (unchanged)
# Handle /model command (unchanged - only changes current session model)
if user_input.startswith("/model"):
args = user_input[7:].strip() # Get everything after "/model" as search term
search_term = args if args else ""
@@ -183,7 +193,7 @@ def chat():
console.print("[bold red]Invalid input. Enter a number.[/]")
continue
# Handle /config command (**UPDATED:** Now includes credit info)
# Handle /config command
if user_input.startswith("/config"):
args = user_input[8:].strip().lower() # Get args after "/config"
if args == "api":
@@ -217,15 +227,47 @@ def chat():
console.print(f"[bold green]Streaming {'enabled' if sub_args == 'on' else 'disabled'}.[/]")
else:
console.print("[bold yellow]Usage: /config stream on|off[/]")
# **UPDATED:** Handle /config model for setting default only
elif args.startswith("model"):
sub_args = args[6:].strip() # After "model" (optional search term)
search_term = sub_args if sub_args else ""
filtered_models = text_models
if search_term:
# Substring filter (case-insensitive) on name or id
filtered_models = [m for m in text_models if search_term.lower() in m["name"].lower() or search_term.lower() in m["id"].lower()]
if not filtered_models:
console.print(f"[bold red]No models match '{search_term}'. Try '/config model' without search.[/]")
continue
# Display filtered models
table = Table("No.", "Name", "ID", show_header=True, header_style="bold magenta")
for i, model in enumerate(filtered_models, 1):
table.add_row(str(i), model["name"], model["id"])
console.print(Panel(table, title=f"[bold green]Available Models for Default ({'All' if not search_term else f'Search: {search_term}'})[/]", title_align="left"))
# Prompt selection and save as default (DOES NOT change current selected_model)
while True:
try:
choice = int(typer.prompt("Enter model number (or 0 to cancel)"))
if choice == 0:
break
if 1 <= choice <= len(filtered_models):
default_model = filtered_models[choice - 1]
set_config('default_model', default_model["id"]) # Save default model ID to DB
current_name = selected_model['name'] if selected_model else "None"
console.print(f"[bold cyan]Default model set to: {default_model['name']} ({default_model['id']}). Current model unchanged: {current_name}[/]")
break
console.print("[bold red]Invalid choice. Try again.[/]")
except ValueError:
console.print("[bold red]Invalid input. Enter a number.[/]")
else:
# /config with no args: Display current configs
# /config with no args: Display current configs.
DEFAULT_MODEL_ID = get_config('default_model') # Load default model ID from DB
table = Table("Setting", "Value", show_header=True, header_style="bold magenta")
table.add_row("API Key", API_KEY or "[Not set]")
table.add_row("Base URL", OPENROUTER_BASE_URL or "[Not set]")
table.add_row("Streaming", "Enabled" if STREAM_ENABLED == "on" else "Disabled")
table.add_row("Database", str(database) or "[Not set]")
table.add_row("Default Model", DEFAULT_MODEL_ID or "[Not set]")
table.add_row("Current Model", "[Not set]" if selected_model is None else str(selected_model["name"]))
# Fetch and display credit info
credits = get_credits(API_KEY, OPENROUTER_BASE_URL)
if credits:
@@ -236,7 +278,7 @@ def chat():
table.add_row("Total Credits", "[Unavailable - Check API key]")
table.add_row("Used Credits", "[Unavailable - Check API key]")
table.add_row("Credits Left", "[Unavailable - Check API key]")
console.print(Panel(table, title="[bold green]Current Configurations[/]", title_align="left"))
continue
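`get_credits` is referenced here but not included in this diff. Below is a hedged sketch of what such a helper might look like, assuming OpenRouter's `/credits` endpoint and its `total_credits`/`total_usage` fields; the endpoint path, response shape, and return keys are assumptions, not taken from this commit.

```python
# Hypothetical sketch of get_credits; the real implementation is not part of this diff.
import requests


def get_credits(api_key: str, base_url: str) -> dict | None:
    """Return a dict with total/used/left credits, or None on any failure."""
    try:
        response = requests.get(
            f"{base_url.rstrip('/')}/credits",
            headers={"Authorization": f"Bearer {api_key}"},
            timeout=10,
        )
        response.raise_for_status()
        data = response.json()["data"]  # assumed shape: {"total_credits": ..., "total_usage": ...}
        total = data["total_credits"]
        used = data["total_usage"]
        return {"total": total, "used": used, "left": total - used}
    except Exception:
        return None
```

The surrounding table code only requires that a falsy return value means "credits unavailable", which this sketch preserves.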
@@ -255,13 +297,18 @@ def chat():
console.print("[bold cyan]Screen cleared. Ready for your next input![/]")
continue
        # Handle /help command.
        if user_input.lower() == "/help":
            help_table = Table("Command", "Description", "Example", show_header=True, header_style="bold cyan")
            help_table.add_row(
                "/model [search]",
                "Select or change the current model for the session. Supports searching by name or ID (not persisted).",
                "/model gpt\nYou: 1\n(Selects first matching model for this chat only)"
            )
            help_table.add_row(
                "/config model [search]",
                "Set a default model that loads on startup; displays models for selection (persisted in DB, doesn't change current session model).",
                "/config model gpt\n(Shows GPT models; user selects to set as default)"
            )
            help_table.add_row(
                "/config api",
@@ -274,7 +321,7 @@ def chat():
"/config url\nEnter new base URL: https://api.example.com/v1\n[bold green]Base URL updated![/bold green]"
)
help_table.add_row(
"/config stream on|off",
"/config stream on/off",
"Enable or disable response streaming.",
"/config stream off\n[bold green]Streaming disabled.[/bold green]"
)
@@ -308,7 +355,7 @@ def chat():
"Quit the chat app with either 'exit', 'quit' or 'bye'",
"exit\n[bold yellow]Goodbye![/bold yellow]"
)
console.print(Panel(help_table, title="[bold cyan]oAI Chat Help - Command Examples[/]", title_align="center"))
console.print(Panel(help_table, title="[bold cyan]oAI (Version %s) Chat Help - Command Examples[/]" %(version), title_align="center", subtitle="oAI can be found at https://iurl.no/oai", subtitle_align="center"))
continue
if not selected_model:
@@ -321,7 +368,7 @@ def chat():
        file_attachments = []
        # Regex to find @path (e.g., @/Users/user/file.jpg or @c:\folder\file.txt)
        file_pattern = r'@([^\s]+)'  # '@' followed by a run of non-space characters
        for match in re.finditer(file_pattern, user_input):
            file_path = match.group(1)
            expanded_path = os.path.abspath(os.path.expanduser(file_path))
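For clarity, here is a standalone illustration of how the `@path` pattern above extracts attachment paths from a message; the input string and paths are hypothetical:

```python
# Standalone illustration of the @path attachment pattern; example input is hypothetical.
import os
import re

file_pattern = r'@([^\s]+)'  # '@' followed by a run of non-space characters
user_input = "Explain this @~/demo.png and compare it with @/tmp/notes.txt"

for match in re.finditer(file_pattern, user_input):
    file_path = match.group(1)
    expanded_path = os.path.abspath(os.path.expanduser(file_path))
    print(f"{file_path} -> {expanded_path}")

# Expected output (home directory will differ):
#   ~/demo.png -> /home/<user>/demo.png
#   /tmp/notes.txt -> /tmp/notes.txt
```

Note that `[^\s]+` captures everything up to the next whitespace, so trailing punctuation stuck to a path (e.g. `@file.txt,`) would be included; the pattern assumes paths are whitespace-delimited.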