mirror of
https://github.com/Hopiu/llm.git
synced 2026-04-24 23:14:45 +00:00
--no-stream option for llm chat, closes #248
This commit is contained in:
parent
2246e8f4fd
commit
5912bd47c0
1 changed files with 16 additions and 1 deletions
17
llm/cli.py
17
llm/cli.py
|
|
@@ -324,8 +324,19 @@ def prompt(
|
|||
multiple=True,
|
||||
help="key/value options for the model",
|
||||
)
|
||||
@click.option("--no-stream", is_flag=True, help="Do not stream output")
|
||||
@click.option("--key", help="API key to use")
|
||||
def chat(system, model_id, _continue, conversation_id, template, param, options, key):
|
||||
def chat(
|
||||
system,
|
||||
model_id,
|
||||
_continue,
|
||||
conversation_id,
|
||||
template,
|
||||
param,
|
||||
options,
|
||||
no_stream,
|
||||
key,
|
||||
):
|
||||
"""
|
||||
Hold an ongoing chat with a model.
|
||||
"""
|
||||
|
|
@@ -388,6 +399,10 @@ def chat(system, model_id, _continue, conversation_id, template, param, options,
|
|||
except pydantic.ValidationError as ex:
|
||||
raise click.ClickException(render_errors(ex.errors()))
|
||||
|
||||
should_stream = model.can_stream and not no_stream
|
||||
if not should_stream:
|
||||
validated_options["stream"] = False
|
||||
|
||||
click.echo("Chatting with {}".format(model.model_id))
|
||||
click.echo("Type 'exit' or 'quit' to exit")
|
||||
while True:
|
||||
|
|
|
|||
Loading…
Reference in a new issue