# llm/setup.py — setuptools packaging script for the "llm" CLI utility and library.
from setuptools import setup, find_packages
2023-04-01 21:28:24 +00:00
import os
VERSION = "0.23"
def get_long_description():
    """Return the contents of README.md (sitting next to this file) as a string.

    Used as the PyPI long_description; resolved relative to this file so the
    build works regardless of the current working directory.
    """
    readme_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "README.md"
    )
    with open(readme_path, encoding="utf8") as fp:
        return fp.read()
# Package metadata and build configuration for the "llm" distribution.
setup(
    name="llm",
    description=(
        "CLI utility and Python library for interacting with Large Language Models from "
        "organizations like OpenAI, Anthropic and Gemini plus local models installed on your own machine."
    ),
    long_description=get_long_description(),
    long_description_content_type="text/markdown",
    author="Simon Willison",
    url="https://github.com/simonw/llm",
    project_urls={
        "Documentation": "https://llm.datasette.io/",
        "Issues": "https://github.com/simonw/llm/issues",
        "CI": "https://github.com/simonw/llm/actions",
        "Changelog": "https://github.com/simonw/llm/releases",
    },
    license="Apache License, Version 2.0",
    version=VERSION,
    packages=find_packages(),
    # Installs the "llm" console command, dispatching to the Click CLI.
    entry_points="""
        [console_scripts]
        llm=llm.cli:cli
    """,
    install_requires=[
        "click",
        "openai>=1.55.3",
        "click-default-group>=1.2.3",
        "sqlite-utils>=3.37",
        "sqlite-migrate>=0.1a2",
        "pydantic>=2.0.0",
        "PyYAML",
        "pluggy",
        "python-ulid",
        "setuptools",
        "pip",
        # Windows-only readline replacement for interactive prompts.
        "pyreadline3; sys_platform == 'win32'",
        "puremagic",
    ],
    # Development/test-only dependencies: pip install llm[test]
    extras_require={
        "test": [
            "pytest",
            "numpy",
            "pytest-httpx>=0.33.0",
            "pytest-asyncio",
            "cogapp",
            "mypy>=1.10.0",
            "black>=25.1.0",
            "ruff",
            "types-click",
            "types-PyYAML",
            "types-setuptools",
        ]
    },
    python_requires=">=3.9",
)