Rename "echo" to "simple-echo", refs #1061

This commit is contained in:
Simon Willison 2025-05-20 20:55:54 -07:00
parent 4be2bbdc0e
commit e48a5b9f11
6 changed files with 21 additions and 19 deletions

View file

@@ -82,8 +82,8 @@ class MockModel(llm.Model):
)
class EchoModel(llm.Model):
model_id = "echo"
class SimpleEchoModel(llm.Model):
model_id = "simple-echo"
can_stream = True
attachment_types = {"image/png"}
@@ -234,18 +234,18 @@ def register_embed_demo_model(embed_demo, mock_model, async_mock_model):
@pytest.fixture(autouse=True)
def register_echo_model():
class EchoModelPlugin:
__name__ = "EchoModelPlugin"
class SimpleEchoModelPlugin:
__name__ = "SimpleEchoModelPlugin"
@llm.hookimpl
def register_models(self, register):
register(EchoModel())
register(SimpleEchoModel())
pm.register(EchoModelPlugin(), name="undo-EchoModelPlugin")
pm.register(SimpleEchoModelPlugin(), name="undo-SimpleEchoModelPlugin")
try:
yield
finally:
pm.unregister(name="undo-EchoModelPlugin")
pm.unregister(name="undo-SimpleEchoModelPlugin")
@pytest.fixture

View file

@@ -86,13 +86,13 @@ def test_prompt_uses_model_options(user_path):
path.write_text("{}", "utf-8")
# Prompt should not use an option
runner = CliRunner()
result = runner.invoke(cli, ["-m", "echo", "prompt"])
result = runner.invoke(cli, ["-m", "simple-echo", "prompt"])
assert result.exit_code == 0
assert result.output == "system:\n\n\nprompt:\nprompt\n"
# Now set an option
path.write_text(json.dumps({"echo": {"example_int": 1}}), "utf-8")
path.write_text(json.dumps({"simple-echo": {"example_int": 1}}), "utf-8")
result2 = runner.invoke(cli, ["-m", "echo", "prompt"])
result2 = runner.invoke(cli, ["-m", "simple-echo", "prompt"])
assert result2.exit_code == 0
assert (
result2.output
@@ -100,7 +100,9 @@ def test_prompt_uses_model_options(user_path):
)
# Option can be over-ridden
result3 = runner.invoke(cli, ["-m", "echo", "prompt", "-o", "example_int", "2"])
result3 = runner.invoke(
cli, ["-m", "simple-echo", "prompt", "-o", "example_int", "2"]
)
assert result3.exit_code == 0
assert (
result3.output
@@ -109,7 +111,7 @@ def test_prompt_uses_model_options(user_path):
# Using an alias should also pick up that option
aliases_path = user_path / "aliases.json"
aliases_path.write_text('{"e": "echo"}', "utf-8")
aliases_path.write_text('{"e": "simple-echo"}', "utf-8")
result4 = runner.invoke(cli, ["-m", "e", "prompt"])
assert result4.exit_code == 0
assert (

View file

@@ -625,7 +625,7 @@ def test_schema(mock_model, use_pydantic):
def test_model_environment_variable(monkeypatch):
monkeypatch.setenv("LLM_MODEL", "echo")
monkeypatch.setenv("LLM_MODEL", "simple-echo")
runner = CliRunner()
result = runner.invoke(
cli,

View file

@@ -849,7 +849,7 @@ def test_logs_backup(logs_db):
assert not logs_db.tables
runner = CliRunner()
with runner.isolated_filesystem():
runner.invoke(cli, ["-m", "echo", "simple prompt"])
runner.invoke(cli, ["-m", "simple-echo", "simple prompt"])
assert logs_db.tables
expected_path = pathlib.Path("backup.db")
assert not expected_path.exists()

View file

@@ -141,7 +141,7 @@ def test_register_fragment_loaders(logs_db, httpx_mock):
# Test the CLI command
runner = CliRunner()
result = runner.invoke(
cli.cli, ["-m", "echo", "-f", "three:x"], catch_exceptions=False
cli.cli, ["-m", "simple-echo", "-f", "three:x"], catch_exceptions=False
)
assert result.exit_code == 0
expected = "prompt:\n" "one:x\n" "two:x\n" "three:x\n"
@@ -163,7 +163,7 @@ def test_register_fragment_loaders(logs_db, httpx_mock):
# Test the one that includes an attachment
result3 = runner.invoke(
cli.cli, ["-m", "echo", "-f", "mixed:x"], catch_exceptions=False
cli.cli, ["-m", "simple-echo", "-f", "mixed:x"], catch_exceptions=False
)
assert result3.exit_code == 0
result3.output.strip == textwrap.dedent(
@@ -297,11 +297,11 @@ def test_plugins_command():
result = runner.invoke(cli.cli, ["plugins"])
assert result.exit_code == 0
expected = [
{"name": "EchoModelPlugin", "hooks": ["register_models"]},
{
"name": "MockModelsPlugin",
"hooks": ["register_embedding_models", "register_models"],
},
{"name": "SimpleEchoModelPlugin", "hooks": ["register_models"]},
]
actual = json.loads(result.output)
actual.sort(key=lambda p: p["name"])

View file

@@ -356,7 +356,7 @@ def test_execute_prompt_from_template_url(httpx_mock, template, expected):
runner = CliRunner()
result = runner.invoke(
cli,
["-t", "https://example.com/prompt.yaml", "-m", "echo"],
["-t", "https://example.com/prompt.yaml", "-m", "simple-echo"],
catch_exceptions=False,
)
assert result.exit_code == 0
@@ -370,7 +370,7 @@ def test_execute_prompt_from_template_path():
path.write_text("system: system\nprompt: prompt", "utf-8")
result = runner.invoke(
cli,
["-t", str(path), "-m", "echo"],
["-t", str(path), "-m", "simple-echo"],
catch_exceptions=False,
)
assert result.exit_code == 0, result.output