diff --git a/docs/other-models.md b/docs/other-models.md index 4e51168..c5d68d0 100644 --- a/docs/other-models.md +++ b/docs/other-models.md @@ -47,6 +47,8 @@ Add `completion: true` if the model is a completion model that uses a `/completi If a model does not support streaming, add `can_stream: false` to disable the streaming option. +If a model supports structured output via JSON schemas, you can add `supports_schema: true` to enable this feature. + Having configured the model like this, run `llm models` to check that it installed correctly. You can then run prompts against it like so: ```bash @@ -69,4 +71,4 @@ Some providers such as [openrouter.ai](https://openrouter.ai/docs) may require t headers: HTTP-Referer: "https://llm.datasette.io/" X-Title: LLM -``` \ No newline at end of file +``` diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py index 9bc37e6..e60d3bb 100644 --- a/llm/default_plugins/openai_models.py +++ b/llm/default_plugins/openai_models.py @@ -135,6 +135,8 @@ def register_models(register): kwargs = {} if extra_model.get("can_stream") is False: kwargs["can_stream"] = False + if extra_model.get("supports_schema") is True: + kwargs["supports_schema"] = True if extra_model.get("completion"): klass = Completion else: