From 6cdbc7a27030f3235bda88cde122231f79ecf946 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Mon, 10 Jul 2023 08:34:36 -0700
Subject: [PATCH] Show error for --continue mode, remove deleted code

---
 llm/cli.py | 39 +++------------------------------------
 1 file changed, 3 insertions(+), 36 deletions(-)

diff --git a/llm/cli.py b/llm/cli.py
index e0e4d1a..e8c8842 100644
--- a/llm/cli.py
+++ b/llm/cli.py
@@ -178,6 +178,9 @@ def prompt(
         model_id = template_obj.model
 
     history_model = None
+
+    if _continue or chat_id:
+        raise click.ClickException("--continue mode is not yet implemented")
     # TODO: Re-introduce --continue mode
     # messages = []
     # if _continue:
@@ -254,42 +257,6 @@ def prompt(
 
     # TODO: Figure out OpenAI exception handling
 
-    return
-    # Original code:
-    # try:
-    #     debug = {}
-    #     if no_stream:
-    #         start = time.time()
-    #         response = openai.ChatCompletion.create(
-    #             model=model,
-    #             messages=messages,
-    #         )
-    #         debug["model"] = response.model
-    #         debug["usage"] = response.usage
-    #         content = response.choices[0].message.content
-    #         log(no_log, system, prompt, content, model, chat_id, debug, start)
-    #         print(content)
-    #     else:
-    #         start = time.time()
-    #         response = []
-    #         for chunk in openai.ChatCompletion.create(
-    #             model=model,
-    #             messages=messages,
-    #             stream=True,
-    #         ):
-    #             debug["model"] = chunk.model
-    #             content = chunk["choices"][0].get("delta", {}).get("content")
-    #             if content is not None:
-    #                 response.append(content)
-    #                 print(content, end="")
-    #                 sys.stdout.flush()
-    #         print("")
-    #         log(no_log, system, prompt, "".join(response), model, chat_id, debug, start)
-    # except openai.error.AuthenticationError as ex:
-    #     raise click.ClickException("{}: {}".format(ex.error.type, ex.error.code))
-    # except openai.error.OpenAIError as ex:
-    #     raise click.ClickException(str(ex))
-
 
 @cli.command()
 def init_db():