Combine piped and argument prompts, closes #153

This commit is contained in:
Simon Willison 2023-08-20 22:57:29 -07:00
parent 8edd438f74
commit c8e9565f47
3 changed files with 38 additions and 17 deletions

View file

@@ -24,6 +24,16 @@ You can also send a prompt to standard input, for example:
```bash
echo 'Ten names for cheesecakes' | llm
```
If you send text to standard input and provide arguments, the resulting prompt will consist of the piped content followed by the arguments:
```bash
cat myscript.py | llm 'explain this code'
```
This will run a prompt of:
```
<contents of myscript.py> explain this code
```
For models that support them, {ref}`system prompts <system-prompts>` are a better tool for this kind of prompting.
Some models support options. You can pass these using `-o/--option name value` - for example, to set the temperature to 1.5 run this:
```bash
@@ -57,6 +67,7 @@ llm "Describe these changes: $(git diff)"
```
This pattern of using `$(command)` inside a double quoted string is a useful way to quickly assemble prompts.
(system-prompts)=
## System prompts
You can use `-s/--system '...'` to set a system prompt.

View file

@@ -126,14 +126,21 @@ def prompt(
def read_prompt():
nonlocal prompt
if prompt is None:
if template:
# If running a template only consume from stdin if it has data
if not sys.stdin.isatty():
prompt = sys.stdin.read()
elif not save:
# Hang waiting for input to stdin (unless --save)
prompt = sys.stdin.read()
# Is there extra prompt available on stdin?
stdin_prompt = None
if not sys.stdin.isatty():
stdin_prompt = sys.stdin.read()
if stdin_prompt:
bits = [stdin_prompt]
if prompt:
bits.append(prompt)
prompt = " ".join(bits)
if prompt is None and not save and sys.stdin.isatty():
# Hang waiting for input to stdin (unless --save)
prompt = sys.stdin.read()
return prompt
if save:

View file

@@ -168,7 +168,7 @@ def test_logs_search(user_path, query, expected):
@mock.patch.dict(os.environ, {"OPENAI_API_KEY": "X"})
@pytest.mark.parametrize("use_stdin", (True, False))
@pytest.mark.parametrize("use_stdin", (True, False, "split"))
@pytest.mark.parametrize(
"logs_off,logs_args,should_log",
(
@@ -201,10 +201,13 @@ def test_llm_default_prompt(
# Run the prompt
runner = CliRunner()
prompt = "three names\nfor a pet pelican"
prompt = "three names \nfor a pet pelican"
input = None
args = ["--no-stream"]
if use_stdin:
if use_stdin == "split":
input = "three names"
args.append("\nfor a pet pelican")
elif use_stdin:
input = prompt
else:
args.append(prompt)
@@ -224,7 +227,7 @@ def test_llm_default_prompt(
assert len(rows) == 1
expected = {
"model": "gpt-3.5-turbo",
"prompt": "three names\nfor a pet pelican",
"prompt": "three names \nfor a pet pelican",
"system": None,
"options_json": "{}",
"response": "Bob, Alice, Eve",
@@ -234,7 +237,7 @@ def test_llm_default_prompt(
assert isinstance(row["duration_ms"], int)
assert isinstance(row["datetime_utc"], str)
assert json.loads(row["prompt_json"]) == {
"messages": [{"role": "user", "content": "three names\nfor a pet pelican"}]
"messages": [{"role": "user", "content": "three names \nfor a pet pelican"}]
}
assert json.loads(row["response_json"]) == {
"model": "gpt-3.5-turbo",
@@ -253,11 +256,11 @@ def test_llm_default_prompt(
log_json[0].items()
>= {
"model": "gpt-3.5-turbo",
"prompt": "three names\nfor a pet pelican",
"prompt": "three names \nfor a pet pelican",
"system": None,
"prompt_json": {
"messages": [
{"role": "user", "content": "three names\nfor a pet pelican"}
{"role": "user", "content": "three names \nfor a pet pelican"}
]
},
"options_json": {},
@@ -289,13 +292,13 @@ def test_openai_localai_configuration(mocked_localai, user_path):
config_path.write_text(EXTRA_MODELS_YAML, "utf-8")
# Run the prompt
runner = CliRunner()
prompt = "three names\nfor a pet pelican"
prompt = "three names \nfor a pet pelican"
result = runner.invoke(cli, ["--no-stream", "--model", "orca", prompt])
assert result.exit_code == 0
assert result.output == "Bob, Alice, Eve\n"
assert json.loads(mocked_localai.last_request.text) == {
"model": "orca-mini-3b",
"messages": [{"role": "user", "content": "three names\nfor a pet pelican"}],
"messages": [{"role": "user", "content": "three names \nfor a pet pelican"}],
"stream": False,
}