mirror of
https://github.com/Hopiu/llm.git
synced 2026-04-30 09:54:46 +00:00
Initial prototype of llm chat, refs #231
This commit is contained in:
parent
45b380c3f1
commit
969a5d3364
2 changed files with 116 additions and 0 deletions
19
docs/help.md
19
docs/help.md
|
|
@ -61,6 +61,7 @@ Options:
|
|||
Commands:
|
||||
prompt* Execute a prompt
|
||||
aliases Manage model aliases
|
||||
chat Hold an ongoing chat with a model.
|
||||
embed Embed text and store or return the result
|
||||
embed-db Manage the embeddings database
|
||||
embed-models Manage available embedding models
|
||||
|
|
@ -101,6 +102,24 @@ Options:
|
|||
--help Show this message and exit.
|
||||
```
|
||||
|
||||
(help-chat)=
|
||||
### llm chat --help
|
||||
```
|
||||
Usage: llm chat [OPTIONS]
|
||||
|
||||
Hold an ongoing chat with a model.
|
||||
|
||||
Options:
|
||||
-s, --system TEXT System prompt to use
|
||||
-m, --model TEXT Model to use
|
||||
-c, --continue Continue the most recent conversation.
|
||||
--cid, --conversation TEXT Continue the conversation with the given ID.
|
||||
-t, --template TEXT Template to use
|
||||
-p, --param <TEXT TEXT>... Parameters for template
|
||||
--key TEXT API key to use
|
||||
--help Show this message and exit.
|
||||
```
|
||||
|
||||
(help-keys)=
|
||||
### llm keys --help
|
||||
```
|
||||
|
|
|
|||
97
llm/cli.py
97
llm/cli.py
|
|
@ -291,6 +291,103 @@ def prompt(
|
|||
response.log_to_db(db)
|
||||
|
||||
|
||||
@cli.command()
@click.option("-s", "--system", help="System prompt to use")
@click.option("model_id", "-m", "--model", help="Model to use")
@click.option(
    "_continue",
    "-c",
    "--continue",
    is_flag=True,
    flag_value=-1,
    help="Continue the most recent conversation.",
)
@click.option(
    "conversation_id",
    "--cid",
    "--conversation",
    help="Continue the conversation with the given ID.",
)
@click.option("-t", "--template", help="Template to use")
@click.option(
    "-p",
    "--param",
    multiple=True,
    type=(str, str),
    help="Parameters for template",
)
@click.option("--key", help="API key to use")
def chat(system, model_id, _continue, conversation_id, template, param, key):
    """
    Hold an ongoing chat with a model.

    Runs an interactive REPL: each line typed is sent as a prompt to the
    resolved model within a single Conversation, and every response is
    streamed to stdout and logged to the logs database. Typing "exit" or
    "quit" ends the session.
    """
    # Make sure the logs database exists and its schema is up to date
    # before any responses need to be recorded.
    log_path = logs_db_path()
    (log_path.parent).mkdir(parents=True, exist_ok=True)
    db = sqlite_utils.Database(log_path)
    migrate(db)

    conversation = None
    if conversation_id or _continue:
        # Load the conversation - loads most recent if no ID provided
        try:
            conversation = load_conversation(conversation_id)
        except UnknownModelError as ex:
            raise click.ClickException(str(ex))

    template_obj = None
    if template:
        params = dict(param)
        # A template supplies its own system prompt, so combining it with
        # an explicit -s/--system would be ambiguous.
        if system:
            raise click.ClickException("Cannot use -t/--template and --system together")
        template_obj = load_template(template)
        if model_id is None and template_obj.model:
            model_id = template_obj.model

    # Figure out which model we are using: explicit flag wins, then the
    # continued conversation's model, then the configured default.
    if model_id is None:
        if conversation:
            model_id = conversation.model.model_id
        else:
            model_id = get_default_model()

    # Now resolve the model
    try:
        model = get_model(model_id)
    except KeyError:
        raise click.ClickException("'{}' is not a known model".format(model_id))

    # Provide the API key, if one is needed and has been provided
    if model.needs_key:
        model.key = get_key(key, model.needs_key, model.key_env_var)

    if conversation is None:
        # Start a fresh conversation for this chat
        conversation = Conversation(
            model=model, name="Chat with {}".format(model.model_id)
        )

    click.echo("Chatting with {}".format(model.model_id))
    click.echo("Type 'exit' or 'quit' to exit")
    while True:
        prompt = click.prompt("", prompt_suffix="> ")
        # BUGFIX: check for exit/quit BEFORE applying the template.
        # Previously the template rewrote the input first, so with
        # -t/--template active, typing "exit" or "quit" could never
        # match and the user was unable to leave the chat.
        if prompt.strip() in ("exit", "quit"):
            break
        if template_obj:
            try:
                prompt, system = template_obj.evaluate(prompt, params)
            except Template.MissingVariables as ex:
                raise click.ClickException(str(ex))
        response = conversation.prompt(prompt, system)
        # System prompt only sent for the first message:
        system = None
        for chunk in response:
            print(chunk, end="")
            sys.stdout.flush()
        response.log_to_db(db)
        print("")
|
||||
|
||||
|
||||
def load_conversation(conversation_id: Optional[str]) -> Optional[Conversation]:
|
||||
db = sqlite_utils.Database(logs_db_path())
|
||||
migrate(db)
|
||||
|
|
|
|||
Loading…
Reference in a new issue