mirror of
https://github.com/Hopiu/llm.git
synced 2026-05-04 03:44:44 +00:00
-c/--continue and --chat options plus documentation, refs #6
This commit is contained in:
parent
d1db866d8e
commit
ca92c60835
2 changed files with 32 additions and 4 deletions
18
README.md
18
README.md
|
|
@@ -39,6 +39,24 @@ You can also send a prompt to standard input, for example:
|
|||
|
||||
echo 'Ten names for cheesecakes' | llm
|
||||
|
||||
### Continuing a conversation
|
||||
|
||||
By default, the tool will start a new conversation each time you run it.
|
||||
|
||||
You can opt to continue the previous conversation by passing the `-c/--continue` option:
|
||||
|
||||
llm 'More names' --continue
|
||||
|
||||
This will re-send the prompts and responses for the previous conversation. Note that this can add up quickly in terms of tokens, especially if you are using more expensive models.
|
||||
|
||||
To continue a conversation that is not the most recent one, use the `--chat <id>` option:
|
||||
|
||||
llm 'More names' --chat 2
|
||||
|
||||
You can find these chat IDs using the `llm logs` command.
|
||||
|
||||
Note that this feature only works if you have been logging your previous conversations to a database, having run the `llm init-db` command described below.
|
||||
|
||||
### Using with a shell
|
||||
|
||||
To generate a description of changes made to a Git repository since the last commit:
|
||||
|
|
|
|||
18
llm/cli.py
18
llm/cli.py
|
|
@@ -37,14 +37,18 @@ def cli():
|
|||
"_continue",
|
||||
"-c",
|
||||
"--continue",
|
||||
is_flag=False,
|
||||
is_flag=True,
|
||||
flag_value=-1,
|
||||
help="Continue the last conversation. Optionally takes a chat ID of a specific conversation.",
|
||||
default=None,
|
||||
help="Continue the most recent conversation.",
|
||||
)
|
||||
@click.option(
|
||||
"chat_id",
|
||||
"--chat",
|
||||
help="Continue the conversation with the given chat ID.",
|
||||
type=int,
|
||||
)
|
||||
@click.option("--code", is_flag=True, help="System prompt to optimize for code output")
|
||||
def chatgpt(prompt, system, gpt4, model, stream, no_log, code, _continue):
|
||||
def chatgpt(prompt, system, gpt4, model, stream, no_log, code, _continue, chat_id):
|
||||
"Execute prompt against ChatGPT"
|
||||
if prompt is None:
|
||||
# Read from stdin instead
|
||||
|
|
@@ -59,6 +63,12 @@ def chatgpt(prompt, system, gpt4, model, stream, no_log, code, _continue):
|
|||
if code:
|
||||
system = CODE_SYSTEM_PROMPT
|
||||
messages = []
|
||||
if _continue:
|
||||
_continue = -1
|
||||
if chat_id:
|
||||
raise click.ClickException("Cannot use --continue and --chat together")
|
||||
else:
|
||||
_continue = chat_id
|
||||
chat_id, history = get_history(_continue)
|
||||
if history:
|
||||
for entry in history:
|
||||
|
|
|
|||
Loading…
Reference in a new issue