diff --git a/docs/openai-models.md b/docs/openai-models.md
index 2a1aadb..67c0dfc 100644
--- a/docs/openai-models.md
+++ b/docs/openai-models.md
@@ -57,6 +57,7 @@
 OpenAI Chat: o1-2024-12-17
 OpenAI Chat: o1-preview
 OpenAI Chat: o1-mini
 OpenAI Chat: o3-mini
+OpenAI Chat: o3
 OpenAI Completion: gpt-3.5-turbo-instruct (aliases: 3.5-instruct, chatgpt-instruct)
 ```
diff --git a/docs/usage.md b/docs/usage.md
index 89e9b06..eac79fe 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -916,6 +916,25 @@ OpenAI Chat: o3-mini
   Keys:
     key: openai
     env_var: OPENAI_API_KEY
+OpenAI Chat: o3
+  Options:
+    temperature: float
+    max_tokens: int
+    top_p: float
+    frequency_penalty: float
+    presence_penalty: float
+    stop: str
+    logit_bias: dict, str
+    seed: int
+    json_object: boolean
+    reasoning_effort: str
+  Features:
+  - streaming
+  - schemas
+  - async
+  Keys:
+    key: openai
+    env_var: OPENAI_API_KEY
 OpenAI Completion: gpt-3.5-turbo-instruct (aliases: 3.5-instruct, chatgpt-instruct)
   Options:
     temperature: float
diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py
index 26d01e8..79a6755 100644
--- a/llm/default_plugins/openai_models.py
+++ b/llm/default_plugins/openai_models.py
@@ -117,6 +117,10 @@ def register_models(register):
         Chat("o3-mini", reasoning=True, supports_schema=True),
         AsyncChat("o3-mini", reasoning=True, supports_schema=True),
     )
+    register(
+        Chat("o3", reasoning=True, supports_schema=True),
+        AsyncChat("o3", reasoning=True, supports_schema=True),
+    )
     # The -instruct completion model
     register(
         Completion("gpt-3.5-turbo-instruct", default_max_tokens=256),