response.__str__ method, closes #268

This commit is contained in:
Simon Willison 2023-09-12 10:36:29 -07:00
parent 9c33d30843
commit f54f2c659d
3 changed files with 11 additions and 1 deletion

View file

@@ -19,7 +19,13 @@ print(response.text())
```
The `llm.get_model()` function accepts model names or aliases - so `chatgpt` would work here too.
Run this command to see a list of available models and their aliases:
The `__str__()` method of `response` also returns the text of the response, so you can do this instead:
```python
print(response)
```
You can run this command to see a list of available models and their aliases:
```bash
llm models

View file

@@ -105,6 +105,9 @@ class Response(ABC):
if not self._done:
list(self)
def __str__(self) -> str:
return self.text()
def text(self) -> str:
    """Force completion of the response, then return its accumulated chunks as one string."""
    # Ensure all chunks have been consumed before joining.
    self._force()
    pieces = self._chunks
    return "".join(pieces)

View file

@@ -10,6 +10,7 @@ def test_mock_model(mock_model):
model = llm.get_model("mock")
response = model.prompt(prompt="hello")
assert response.text() == "hello world"
assert str(response) == "hello world"
assert model.history[0][0].prompt == "hello"
response2 = model.prompt(prompt="hello again")
assert response2.text() == "second"