From 84ab4cd409f1251cd7fa880e1a6f76404b645962 Mon Sep 17 00:00:00 2001 From: Simon Willison Date: Sat, 10 May 2025 10:29:18 -0700 Subject: [PATCH] supports_tools Model property, Tool.function(..., name=) option Refs https://github.com/simonw/llm/issues/935#issuecomment-2869042481 --- llm/__init__.py | 2 ++ llm/default_plugins/openai_models.py | 38 ++++++++++++++++++++-------- llm/models.py | 17 ++++++++++--- 3 files changed, 43 insertions(+), 14 deletions(-) diff --git a/llm/__init__.py b/llm/__init__.py index 289f603..5199292 100644 --- a/llm/__init__.py +++ b/llm/__init__.py @@ -18,6 +18,7 @@ from .models import ( Options, Prompt, Response, + Tool, ) from .utils import schema_dsl, Fragment from .embeddings import Collection @@ -50,6 +51,7 @@ __all__ = [ "Prompt", "Response", "Template", + "Tool", "user_dir", "schema_dsl", ] diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py index 3850e71..0929d15 100644 --- a/llm/default_plugins/openai_models.py +++ b/llm/default_plugins/openai_models.py @@ -24,8 +24,8 @@ import yaml def register_models(register): # GPT-4o register( - Chat("gpt-4o", vision=True, supports_schema=True), - AsyncChat("gpt-4o", vision=True, supports_schema=True), + Chat("gpt-4o", vision=True, supports_schema=True, supports_tools=True), + AsyncChat("gpt-4o", vision=True, supports_schema=True, supports_tools=True), aliases=("4o",), ) register( @@ -34,8 +34,10 @@ def register_models(register): aliases=("chatgpt-4o",), ) register( - Chat("gpt-4o-mini", vision=True, supports_schema=True), - AsyncChat("gpt-4o-mini", vision=True, supports_schema=True), + Chat("gpt-4o-mini", vision=True, supports_schema=True, supports_tools=True), + AsyncChat( + "gpt-4o-mini", vision=True, supports_schema=True, supports_tools=True + ), aliases=("4o-mini",), ) for audio_model_id in ( @@ -52,8 +54,8 @@ def register_models(register): # GPT-4.1 for model_id in ("gpt-4.1", "gpt-4.1-mini", "gpt-4.1-nano"): register( - Chat(model_id, 
vision=True, supports_schema=True), - AsyncChat(model_id, vision=True, supports_schema=True), + Chat(model_id, vision=True, supports_schema=True, supports_tools=True), + AsyncChat(model_id, vision=True, supports_schema=True, supports_tools=True), aliases=(model_id.replace("gpt-", ""),), ) # 3.5 and 4 @@ -78,12 +80,24 @@ def register_models(register): ) # GPT-4.5 register( - Chat("gpt-4.5-preview-2025-02-27", vision=True, supports_schema=True), - AsyncChat("gpt-4.5-preview-2025-02-27", vision=True, supports_schema=True), + Chat( + "gpt-4.5-preview-2025-02-27", + vision=True, + supports_schema=True, + supports_tools=True, + ), + AsyncChat( + "gpt-4.5-preview-2025-02-27", + vision=True, + supports_schema=True, + supports_tools=True, + ), ) register( - Chat("gpt-4.5-preview", vision=True, supports_schema=True), - AsyncChat("gpt-4.5-preview", vision=True, supports_schema=True), + Chat("gpt-4.5-preview", vision=True, supports_schema=True, supports_tools=True), + AsyncChat( + "gpt-4.5-preview", vision=True, supports_schema=True, supports_tools=True + ), aliases=("gpt-4.5",), ) # o1 @@ -95,6 +109,7 @@ def register_models(register): can_stream=False, reasoning=True, supports_schema=True, + supports_tools=True, ), AsyncChat( model_id, @@ -102,6 +117,7 @@ def register_models(register): can_stream=False, reasoning=True, supports_schema=True, + supports_tools=True, ), ) @@ -438,11 +454,13 @@ class _Shared: audio=False, reasoning=False, supports_schema=False, + supports_tools=False, allows_system_prompt=True, ): self.model_id = model_id self.key = key self.supports_schema = supports_schema + self.supports_tools = supports_tools self.model_name = model_name self.api_base = api_base self.api_type = api_type diff --git a/llm/models.py b/llm/models.py index 79ac917..74d60ce 100644 --- a/llm/models.py +++ b/llm/models.py @@ -129,7 +129,7 @@ class Tool: return schema @classmethod - def for_function(cls, function): + def function(cls, function, name=None): """ Turn a Python function 
into a Tool object by: - Extracting the function name @@ -140,10 +140,15 @@ class Tool: signature = inspect.signature(function) type_hints = get_type_hints(function) + if not name and function.__name__ == "<lambda>": + raise ValueError( + "Cannot create a Tool from a lambda function without providing name=" + ) + fields = {} for param_name, param in signature.parameters.items(): - # Determine the type annotation (default to Any if missing) - annotated_type = type_hints.get(param_name, Any) + # Determine the type annotation (default to string if missing) + annotated_type = type_hints.get(param_name, str) # Handle default value if present; if there's no default, use '...' if param.default is inspect.Parameter.empty: @@ -165,7 +170,7 @@ class Tool: ) return cls( - name=function.__name__, + name=name or function.__name__, description=function.__doc__ or None, input_schema=input_schema, output_schema=output_schema, @@ -396,6 +401,9 @@ class _BaseResponse: if self.prompt.schema and not self.model.supports_schema: raise ValueError(f"{self.model} does not support schemas") + if self.prompt.tools and not self.model.supports_tools: + raise ValueError(f"{self.model} does not support tools") + def set_usage( self, *, @@ -864,6 +872,7 @@ class _BaseModel(ABC, _get_key_mixin): attachment_types: Set = set() supports_schema = False + supports_tools = False class Options(_Options): pass