Switch default model to gpt-4o-mini (from gpt-3.5-turbo), refs #536

This commit is contained in:
Simon Willison 2024-07-18 11:57:19 -07:00
parent 963a5ba467
commit a83421607a
4 changed files with 9 additions and 9 deletions

View file

@@ -41,7 +41,7 @@ import yaml
warnings.simplefilter("ignore", ResourceWarning)
-DEFAULT_MODEL = "gpt-3.5-turbo"
+DEFAULT_MODEL = "gpt-4o-mini"
DEFAULT_EMBEDDING_MODEL = "ada-002"
DEFAULT_TEMPLATE = "prompt: "

View file

@@ -145,7 +145,7 @@ def mocked_openai_chat(httpx_mock):
method="POST",
url="https://api.openai.com/v1/chat/completions",
json={
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o-mini",
"usage": {},
"choices": [{"message": {"content": "Bob, Alice, Eve"}}],
},

View file

@@ -260,7 +260,7 @@ def test_llm_default_prompt(
assert len(rows) == 1
expected = {
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o-mini",
"prompt": "three names \nfor a pet pelican",
"system": None,
"options_json": "{}",
@@ -274,7 +274,7 @@ def test_llm_default_prompt(
"messages": [{"role": "user", "content": "three names \nfor a pet pelican"}]
}
assert json.loads(row["response_json"]) == {
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o-mini",
"choices": [{"message": {"content": "Bob, Alice, Eve"}}],
}
@@ -288,7 +288,7 @@ def test_llm_default_prompt(
assert (
log_json[0].items()
>= {
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o-mini",
"prompt": "three names \nfor a pet pelican",
"system": None,
"prompt_json": {
@@ -299,12 +299,12 @@ def test_llm_default_prompt(
"options_json": {},
"response": "Bob, Alice, Eve",
"response_json": {
-"model": "gpt-3.5-turbo",
+"model": "gpt-4o-mini",
"choices": [{"message": {"content": "Bob, Alice, Eve"}}],
},
# This doesn't have the \n after three names:
"conversation_name": "three names for a pet pelican",
-"conversation_model": "gpt-3.5-turbo",
+"conversation_model": "gpt-4o-mini",
}.items()
)

View file

@@ -115,7 +115,7 @@ def test_templates_prompt_save(templates_path, args, expected_prompt, expected_e
(
"'Summarize this: $input'",
[],
-"gpt-3.5-turbo",
+"gpt-4o-mini",
"Summarize this: Input text",
None,
),
@@ -150,7 +150,7 @@ def test_templates_prompt_save(templates_path, args, expected_prompt, expected_e
(
"prompt: 'Say $hello'",
["-p", "hello", "Blah"],
"gpt-3.5-turbo",
"gpt-4o-mini",
"Say Blah",
None,
),