feat(cli): add native /model picker modal for provider → model selection

When /model is called with no arguments in the interactive CLI, open a
two-step curses-based modal (launched from the prompt_toolkit session) instead of the previous text-only listing:

1. Provider selection — curses_single_select with all authenticated providers
2. Model selection — live API fetch with curated fallback

Also fixes:
- OpenAI Codex model normalization (openai/gpt-5.4 → gpt-5.4)
- Dedicated Codex validation path using provider_model_ids()

Preserves curses_radiolist (used by setup, tools, plugins) alongside the
new curses_single_select. Retains tool elapsed timer in spinner.

Cherry-picked from PR #7438 by MestreY0d4-Uninter.
This commit is contained in:
Mateus Scheuer Macedo
2026-04-11 16:59:41 -07:00
committed by Teknium
parent 39cd57083a
commit 06f862fa1b
5 changed files with 831 additions and 58 deletions

View File

@@ -287,6 +287,129 @@ def _radio_numbered_fallback(
return cancel_returns
def curses_single_select(
title: str,
items: List[str],
default_index: int = 0,
*,
cancel_label: str = "Cancel",
) -> int | None:
"""Curses single-select menu. Returns selected index or None on cancel.
Works inside prompt_toolkit because curses.wrapper() restores the terminal
safely, unlike simple_term_menu which conflicts with /dev/tty.
"""
if not sys.stdin.isatty():
return None
try:
import curses
result_holder: list = [None]
all_items = list(items) + [cancel_label]
cancel_idx = len(items)
def _draw(stdscr):
curses.curs_set(0)
if curses.has_colors():
curses.start_color()
curses.use_default_colors()
curses.init_pair(1, curses.COLOR_GREEN, -1)
curses.init_pair(2, curses.COLOR_YELLOW, -1)
cursor = min(default_index, len(all_items) - 1)
scroll_offset = 0
while True:
stdscr.clear()
max_y, max_x = stdscr.getmaxyx()
try:
hattr = curses.A_BOLD
if curses.has_colors():
hattr |= curses.color_pair(2)
stdscr.addnstr(0, 0, title, max_x - 1, hattr)
stdscr.addnstr(
1, 0,
" ↑↓ navigate ENTER confirm ESC/q cancel",
max_x - 1, curses.A_DIM,
)
except curses.error:
pass
visible_rows = max_y - 3
if cursor < scroll_offset:
scroll_offset = cursor
elif cursor >= scroll_offset + visible_rows:
scroll_offset = cursor - visible_rows + 1
for draw_i, i in enumerate(
range(scroll_offset, min(len(all_items), scroll_offset + visible_rows))
):
y = draw_i + 3
if y >= max_y - 1:
break
arrow = "" if i == cursor else " "
line = f" {arrow} {all_items[i]}"
attr = curses.A_NORMAL
if i == cursor:
attr = curses.A_BOLD
if curses.has_colors():
attr |= curses.color_pair(1)
try:
stdscr.addnstr(y, 0, line, max_x - 1, attr)
except curses.error:
pass
stdscr.refresh()
key = stdscr.getch()
if key in (curses.KEY_UP, ord("k")):
cursor = (cursor - 1) % len(all_items)
elif key in (curses.KEY_DOWN, ord("j")):
cursor = (cursor + 1) % len(all_items)
elif key in (curses.KEY_ENTER, 10, 13):
result_holder[0] = cursor
return
elif key in (27, ord("q")):
result_holder[0] = None
return
curses.wrapper(_draw)
flush_stdin()
if result_holder[0] is not None and result_holder[0] >= cancel_idx:
return None
return result_holder[0]
except Exception:
all_items = list(items) + [cancel_label]
cancel_idx = len(items)
return _numbered_single_fallback(title, all_items, cancel_idx)
def _numbered_single_fallback(
title: str,
items: List[str],
cancel_idx: int,
) -> int | None:
"""Text-based numbered fallback for single-select."""
print(f"\n {title}\n")
for i, label in enumerate(items, 1):
print(f" {i}. {label}")
print()
try:
val = input(f" Choice [1-{len(items)}]: ").strip()
if not val:
return None
idx = int(val) - 1
if 0 <= idx < len(items) and idx < cancel_idx:
return idx
if idx == cancel_idx:
return None
except (ValueError, KeyboardInterrupt, EOFError):
pass
return None
def _numbered_fallback(
title: str,
items: List[str],

View File

@@ -74,13 +74,13 @@ _DOT_TO_HYPHEN_PROVIDERS: frozenset[str] = frozenset({
# Providers whose models keep their dotted names but drop a matching
# vendor prefix (see normalize_model_for_provider).
_STRIP_VENDOR_ONLY_PROVIDERS: frozenset[str] = frozenset({
"copilot",
"copilot-acp",
"openai-codex",
})
# Providers whose native naming is authoritative -- pass through unchanged.
# NOTE(review): "openai-codex" appears in BOTH sets in this diff view; the
# commit message says it moved to the strip-vendor set — confirm the final
# membership of each set.
_AUTHORITATIVE_NATIVE_PROVIDERS: frozenset[str] = frozenset({
"gemini",
"huggingface",
"openai-codex",
})
# Direct providers that accept bare native names but should repair a matching
@@ -360,7 +360,11 @@ def normalize_model_for_provider(model_input: str, target_provider: str) -> str:
# --- Copilot: strip matching provider prefix, keep dots ---
if provider in _STRIP_VENDOR_ONLY_PROVIDERS:
return _strip_matching_provider_prefix(name, provider)
stripped = _strip_matching_provider_prefix(name, provider)
if stripped == name and name.startswith("openai/"):
# openai-codex maps openai/gpt-5.4 -> gpt-5.4
return name.split("/", 1)[1]
return stripped
# --- DeepSeek: map to one of two canonical names ---
if provider == "deepseek":

View File

@@ -1809,6 +1809,35 @@ def validate_requested_model(
"message": message,
}
# OpenAI Codex has its own catalog path; /v1/models probing is not the right validation path.
if normalized == "openai-codex":
try:
codex_models = provider_model_ids("openai-codex")
except Exception:
codex_models = []
if codex_models:
if requested_for_lookup in set(codex_models):
return {
"accepted": True,
"persist": True,
"recognized": True,
"message": None,
}
suggestions = get_close_matches(requested_for_lookup, codex_models, n=3, cutoff=0.5)
suggestion_text = ""
if suggestions:
suggestion_text = "\n Similar models: " + ", ".join(f"`{s}`" for s in suggestions)
return {
"accepted": True,
"persist": True,
"recognized": False,
"message": (
f"Note: `{requested}` was not found in the OpenAI Codex model listing. "
f"It may still work if your account has access to it."
f"{suggestion_text}"
),
}
# Probe the live API to check if the model actually exists
api_models = fetch_api_models(api_key, base_url)