mirror of
https://github.com/Hopiu/llm.git
synced 2026-05-14 00:33:10 +00:00
response.__str__ method, closes #268
This commit is contained in:
parent
9c33d30843
commit
f54f2c659d
3 changed files with 11 additions and 1 deletions
|
|
@ -19,7 +19,13 @@ print(response.text())
|
|||
```
|
||||
The `llm.get_model()` function accepts model names or aliases - so `chatgpt` would work here too.
|
||||
|
||||
Run this command to see a list of available models and their aliases:
|
||||
The `__str__()` method of `response` also returns the text of the response, so you can do this instead:
|
||||
|
||||
```python
print(response)
```
|
||||
|
||||
You can run this command to see a list of available models and their aliases:
|
||||
|
||||
```bash
llm models
```
|
|
|
|||
|
|
@ -105,6 +105,9 @@ class Response(ABC):
|
|||
if not self._done:
|
||||
list(self)
|
||||
|
||||
def __str__(self) -> str:
    """Return the full response text, so `print(response)` works like `print(response.text())`."""
    return self.text()
|
||||
|
||||
def text(self) -> str:
    """Return the complete response as a single string.

    Calls self._force() first so any still-streaming response is fully
    consumed before the accumulated chunks are joined.
    """
    self._force()
    # _chunks is presumably the list of string fragments collected while
    # iterating the response — populated elsewhere in the class.
    return "".join(self._chunks)
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ def test_mock_model(mock_model):
|
|||
model = llm.get_model("mock")
|
||||
response = model.prompt(prompt="hello")
|
||||
assert response.text() == "hello world"
|
||||
assert str(response) == "hello world"
|
||||
assert model.history[0][0].prompt == "hello"
|
||||
response2 = model.prompt(prompt="hello again")
|
||||
assert response2.text() == "second"
|
||||
|
|
|
|||
Loading…
Reference in a new issue