llm models -m option, closes #825

This commit is contained in:
Simon Willison 2025-03-10 14:18:50 -07:00
parent 31d264d9a9
commit 1d552aeacc
4 changed files with 32 additions and 13 deletions

View file

@@ -349,6 +349,7 @@ Options:
--async List async models
--schemas List models that support schemas
-q, --query TEXT Search for models matching these strings
-m, --model TEXT Specific model IDs
--help Show this message and exit.
```

View file

@@ -319,7 +319,10 @@ Add one or more `-q term` options to search for models matching all of those sea
llm models -q gpt-4o
llm models -q 4o -q mini
```
Use one or more `-m` options to indicate specific models, either by their model ID or one of their aliases:
```bash
llm models -m gpt-4o -m gemini-1.5-pro-002
```
Add `--options` to also see documentation for the options supported by each model:
```bash
llm models --options

View file

@@ -1321,7 +1321,8 @@ _type_lookup = {
multiple=True,
help="Search for models matching these strings",
)
def models_list(options, async_, schemas, query):
@click.option("model_ids", "-m", "--model", help="Specific model IDs", multiple=True)
def models_list(options, async_, schemas, query, model_ids):
"List available models"
models_that_have_shown_options = set()
for model_with_aliases in get_models_with_aliases():
@@ -1331,6 +1332,12 @@ def models_list(options, async_, schemas, query):
# Only show models where every provided query string matches
if not all(model_with_aliases.matches(q) for q in query):
continue
if model_ids:
ids_and_aliases = set(
[model_with_aliases.model.model_id] + model_with_aliases.aliases
)
if not ids_and_aliases.intersection(model_ids):
continue
if schemas and not model_with_aliases.model.supports_schema:
continue
extra = ""
@@ -1382,7 +1389,7 @@ def models_list(options, async_, schemas, query):
"\n".join(" - {}".format(feature) for feature in features)
)
click.echo(output)
if not query and not options and not schemas:
if not query and not options and not schemas and not model_ids:
click.echo(f"Default: {get_default_model()}")

View file

@@ -478,25 +478,33 @@ def test_llm_models_async(user_path):
@pytest.mark.parametrize(
"args,expected_model_id,unexpected_model_id",
"args,expected_model_ids,unexpected_model_ids",
(
(["-q", "gpt-4o"], "OpenAI Chat: gpt-4o", None),
(["-q", "mock"], "MockModel: mock", None),
(["--query", "mock"], "MockModel: mock", None),
(["-q", "gpt-4o"], ["OpenAI Chat: gpt-4o"], None),
(["-q", "mock"], ["MockModel: mock"], None),
(["--query", "mock"], ["MockModel: mock"], None),
(
["-q", "4o", "-q", "mini"],
"OpenAI Chat: gpt-4o-mini",
"OpenAI Chat: gpt-4o ",
["OpenAI Chat: gpt-4o-mini"],
["OpenAI Chat: gpt-4o "],
),
(
["-m", "gpt-4o-mini", "-m", "gpt-4.5"],
["OpenAI Chat: gpt-4o-mini", "OpenAI Chat: gpt-4.5"],
["OpenAI Chat: gpt-4o "],
),
),
)
def test_llm_models_query(user_path, args, expected_model_id, unexpected_model_id):
def test_llm_models_filter(user_path, args, expected_model_ids, unexpected_model_ids):
runner = CliRunner()
result = runner.invoke(cli, ["models"] + args, catch_exceptions=False)
assert result.exit_code == 0
assert expected_model_id in result.output
if unexpected_model_id:
assert unexpected_model_id not in result.output
if expected_model_ids:
for expected_model_id in expected_model_ids:
assert expected_model_id in result.output
if unexpected_model_ids:
for unexpected_model_id in unexpected_model_ids:
assert unexpected_model_id not in result.output
def test_llm_user_dir(tmpdir, monkeypatch):