Skip to content

Commit aedfb14

Browse files
committed
Default CLI to Groq free model with NIM backup options
1 parent 7307c5a commit aedfb14

File tree

1 file changed

+82
-10
lines changed

1 file changed

+82
-10
lines changed

roast/cli.py

Lines changed: 82 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,11 @@
1515

1616
from roast.analyzer import analyze
1717
from roast.reporter import export_html_report, render_terminal_report
18-
from roast.roaster import generate_roast
18+
from roast.roaster import (
19+
DEFAULT_GROQ_MODEL,
20+
DEFAULT_NIM_MODEL,
21+
generate_roast,
22+
)
1923
from roast.scanner import scan_repo
2024

2125
app = typer.Typer(
@@ -25,6 +29,7 @@
2529
)
2630
console = Console()
2731
LOGGER = logging.getLogger(__name__)
32+
VALID_PROVIDERS = {"auto", "groq", "nim", "openai", "none"}
2833

2934

3035
def _is_github_url(value: str) -> bool:
@@ -57,6 +62,34 @@ def _parse_extensions(raw_extensions: str) -> list[str]:
5762
return parsed or ["py", "js", "ts", "jsx", "tsx"]
5863

5964

65+
def _provider_has_key(provider: str) -> bool:
66+
if provider == "groq":
67+
return bool(os.getenv("GROQ_API_KEY"))
68+
if provider == "nim":
69+
return bool(os.getenv("NVIDIA_NIM_API_KEY") or os.getenv("NIM_API_KEY"))
70+
if provider == "openai":
71+
return bool(os.getenv("OPENAI_API_KEY"))
72+
return False
73+
74+
75+
def _validate_provider(value: str, option_name: str) -> str:
    """Normalize a provider CLI value and reject unknown providers.

    Returns the lower-cased, whitespace-stripped provider name.
    Raises RuntimeError when the value is not in VALID_PROVIDERS; the
    CLI catches this and renders it as a configuration-error panel.
    """
    cleaned = value.strip().lower()
    if cleaned in VALID_PROVIDERS:
        return cleaned
    raise RuntimeError(
        f"Invalid {option_name}: {value}. Use one of: auto, groq, nim, openai, none."
    )
82+
83+
84+
def _has_any_configured_llm_key(provider: str, backup_provider: str) -> bool:
    """Return True when any provider relevant to this run has an API key set.

    With provider="auto", every known LLM provider counts. Otherwise only
    the chosen primary counts, plus the backup unless it is "none".
    """
    if provider == "auto":
        candidates = ("groq", "nim", "openai")
    elif backup_provider == "none":
        candidates = (provider,)
    else:
        candidates = (provider, backup_provider)
    return any(_provider_has_key(name) for name in candidates)
91+
92+
6093
@app.command()
6194
def roast(
6295
path_or_url: str = typer.Argument(..., metavar="PATH_OR_URL"),
@@ -66,7 +99,26 @@ def roast(
6699
"-o",
67100
help="Save HTML report to this path.",
68101
),
69-
model: str = typer.Option("gpt-4o-mini", "--model", help="LLM model to use."),
102+
model: str = typer.Option(
103+
DEFAULT_GROQ_MODEL,
104+
"--model",
105+
help="Primary LLM model (default tuned for Groq free tier).",
106+
),
107+
provider: str = typer.Option(
108+
"auto",
109+
"--provider",
110+
help="Primary provider: auto, groq, nim, openai.",
111+
),
112+
backup_provider: str = typer.Option(
113+
"nim",
114+
"--backup-provider",
115+
help="Backup provider: none, nim, groq, openai.",
116+
),
117+
backup_model: str = typer.Option(
118+
DEFAULT_NIM_MODEL,
119+
"--backup-model",
120+
help="Backup provider model.",
121+
),
70122
no_llm: bool = typer.Option(False, "--no-llm", help="Run static analysis only, skip LLM roast."),
71123
extensions: str = typer.Option(
72124
"py,js,ts,jsx,tsx",
@@ -79,12 +131,24 @@ def roast(
79131
logging.basicConfig(level=logging.WARNING, format="%(levelname)s: %(message)s")
80132
ext_list = _parse_extensions(extensions)
81133

82-
if not no_llm and not os.getenv("OPENAI_API_KEY"):
134+
try:
135+
provider = _validate_provider(provider, "provider")
136+
backup_provider = _validate_provider(backup_provider, "backup_provider")
137+
except RuntimeError as exc:
138+
console.print(Panel(str(exc), title="Configuration Error", border_style="red"))
139+
raise typer.Exit(code=1)
140+
141+
if provider == "none":
142+
provider = "auto"
143+
144+
if not no_llm and not _has_any_configured_llm_key(provider, backup_provider):
83145
console.print(
84146
Panel(
85-
"[bold red]OPENAI_API_KEY is not set.[/]\n"
86-
"Set it first, for example:\n"
87-
"[cyan]export OPENAI_API_KEY='your-key-here'[/]\n"
147+
"[bold red]No LLM API keys found.[/]\n"
148+
"Set at least one:\n"
149+
"[cyan]export GROQ_API_KEY='...[/cyan]' (recommended free primary)\n"
150+
"[cyan]export NVIDIA_NIM_API_KEY='...[/cyan]' (recommended backup)\n"
151+
"[cyan]export OPENAI_API_KEY='...[/cyan]' (optional)\n"
88152
"Or run with [cyan]--no-llm[/] to skip AI roast generation.",
89153
title="Configuration Error",
90154
border_style="red",
@@ -111,22 +175,30 @@ def roast(
111175
console.print("[yellow]No matching readable files were found. Report will be mostly empty.[/yellow]")
112176

113177
if no_llm:
114-
roast_result = generate_roast(report, files, model=model, no_llm=True)
178+
roast_result = generate_roast(report, files, no_llm=True)
115179
else:
116180
with Progress(SpinnerColumn(), TextColumn("[bold magenta]{task.description}"), transient=True) as progress:
117181
progress.add_task("Calling LLM for roast generation...", total=None)
118182
try:
119-
roast_result = generate_roast(report, files, model=model, no_llm=False)
183+
roast_result = generate_roast(
184+
report,
185+
files,
186+
model=model,
187+
no_llm=False,
188+
provider=provider,
189+
backup_provider=backup_provider,
190+
backup_model=backup_model,
191+
)
120192
except Exception as exc: # noqa: BLE001
121193
LOGGER.warning("LLM call failed (%s). Falling back to --no-llm mode.", exc)
122194
console.print(
123195
"[yellow]LLM roast failed. Falling back to static roast mode (--no-llm).[/yellow]"
124196
)
125-
roast_result = generate_roast(report, files, model=model, no_llm=True)
197+
roast_result = generate_roast(report, files, no_llm=True)
126198

127199
export_html_report(report, roast_result, output_path=output)
128200
render_terminal_report(report, roast_result, output_path=output, console=console)
129201

130202

131203
if __name__ == "__main__":
132-
app()
204+
app()

0 commit comments

Comments
 (0)