NotImplementedError for system prompts with OpenAI completion models, refs #284

Signed-off-by: Simon Willison <swillison@gmail.com>
This commit is contained in:
Simon Willison 2023-09-18 22:51:22 -07:00
parent f76b2120e4
commit b4ec54ef19
2 changed files with 27 additions and 0 deletions

View file

@@ -334,6 +334,10 @@ class Completion(Chat):
return "OpenAI Completion: {}".format(self.model_id)
def execute(self, prompt, stream, response, conversation=None):
if prompt.system:
raise NotImplementedError(
"System prompts are not supported for OpenAI completion models"
)
messages = []
if conversation is not None:
for prev_response in conversation.responses:

View file

@@ -344,6 +344,29 @@ def test_openai_completion(mocked_openai_completion, user_path):
assert expected.items() <= row.items()
def test_openai_completion_system_prompt_error():
    """Passing --system to an OpenAI completion model should fail cleanly.

    Completion models do not accept system prompts, so the CLI is expected
    to exit with code 1 and print the NotImplementedError message.
    """
    cli_runner = CliRunner()
    cli_args = [
        "-m",
        "gpt-3.5-turbo-instruct",
        "Say this is a test",
        "--no-stream",
        "--key",
        "x",
        "--system",
        "system prompts not allowed",
    ]
    # catch_exceptions=False so an unexpected traceback surfaces in pytest
    # instead of being swallowed by Click's runner.
    outcome = cli_runner.invoke(cli, cli_args, catch_exceptions=False)
    assert outcome.exit_code == 1
    expected_message = (
        "Error: System prompts are not supported for OpenAI completion models\n"
    )
    assert outcome.output == expected_message
def test_openai_completion_logprobs_stream(
mocked_openai_completion_logprobs_stream, user_path
):