llm models -q/--query option, closes #700

This commit is contained in:
Simon Willison 2025-01-09 11:37:33 -08:00
parent 000e984def
commit b452effa09
5 changed files with 38 additions and 9 deletions

View file

@@ -330,9 +330,10 @@ Usage: llm models list [OPTIONS]
List available models
Options:
--options Show options for each model, if available
--async List async models
--help Show this message and exit.
--options Show options for each model, if available
--async List async models
-q, --query TEXT Search for models matching this string
--help Show this message and exit.
```
(help-models-default)=

View file

@@ -246,11 +246,18 @@ llm models
```
Example output:
```
OpenAI Chat: gpt-3.5-turbo (aliases: 3.5, chatgpt)
OpenAI Chat: gpt-3.5-turbo-16k (aliases: chatgpt-16k, 3.5-16k)
OpenAI Chat: gpt-4 (aliases: 4, gpt4)
OpenAI Chat: gpt-4-32k (aliases: 4-32k)
PaLM 2: chat-bison-001 (aliases: palm, palm2)
OpenAI Chat: gpt-4o (aliases: 4o)
OpenAI Chat: gpt-4o-mini (aliases: 4o-mini)
OpenAI Chat: o1-preview
OpenAI Chat: o1-mini
GeminiPro: gemini-1.5-pro-002
GeminiPro: gemini-1.5-flash-002
...
```
Add `-q term` to search for models matching a specific search term:
```bash
llm models -q gpt-4o
```
Add `--options` to also see documentation for the options supported by each model:

View file

@@ -1076,12 +1076,15 @@ _type_lookup = {
"--options", is_flag=True, help="Show options for each model, if available"
)
@click.option("async_", "--async", is_flag=True, help="List async models")
def models_list(options, async_):
@click.option("-q", "--query", help="Search for models matching this string")
def models_list(options, async_, query):
"List available models"
models_that_have_shown_options = set()
for model_with_aliases in get_models_with_aliases():
if async_ and not model_with_aliases.async_model:
continue
if query and not model_with_aliases.matches(query):
continue
extra = ""
if model_with_aliases.aliases:
extra = " (aliases: {})".format(", ".join(model_with_aliases.aliases))

View file

@@ -757,6 +757,16 @@ class ModelWithAliases:
async_model: AsyncModel
aliases: Set[str]
def matches(self, query: str) -> bool:
    """Return True if *query* matches this model, case-insensitively.

    A match means the lowercased query is a substring of any alias,
    of ``str(self.model)`` (when a sync model is present), or of the
    async model's ``model_id`` (when an async model is present).
    """
    needle = query.lower()
    # Gather every searchable string: aliases first, then model identifiers.
    candidates = list(self.aliases)
    if self.model:
        candidates.append(str(self.model))
    if self.async_model:
        candidates.append(str(self.async_model.model_id))
    return any(needle in candidate.lower() for candidate in candidates)
@dataclass
class EmbeddingModelWithAliases:

View file

@@ -604,6 +604,14 @@ def test_llm_models_async(user_path):
assert "AsyncMockModel: mock" in result.output
@pytest.mark.parametrize("option", ("-q", "--query"))
def test_llm_models_query(user_path, option):
    """Both the short -q and long --query flags filter the model list
    down to models whose identifiers/aliases contain the search term."""
    cli_runner = CliRunner()
    response = cli_runner.invoke(
        cli, ["models", option, "mockmodel"], catch_exceptions=False
    )
    assert response.exit_code == 0
    assert response.output == "MockModel: mock\n"
def test_llm_user_dir(tmpdir, monkeypatch):
user_dir = str(tmpdir / "u")
monkeypatch.setenv("LLM_USER_PATH", user_dir)