diff --git a/docs/changelog.md b/docs/changelog.md
index 4440bd5..298a7b3 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,13 @@
 # Changelog
 
+(v0_19a0)=
+## 0.19a0 (2024-11-19)
+
+- Tokens used by a response are now logged to new `input_tokens` and `output_tokens` integer columns and a `token_details` JSON string column, for the default OpenAI models and models from other plugins that {ref}`implement this feature <advanced-model-plugins-usage>`. [#610](https://github.com/simonw/llm/issues/610)
+- `llm prompt` now takes a `-u/--usage` flag to display token usage at the end of the response.
+- `llm logs -u/--usage` shows token usage information for logged responses.
+- `llm prompt ... --async` responses are now logged to the database. [#641](https://github.com/simonw/llm/issues/641)
+
 (v0_18)=
 ## 0.18 (2024-11-17)
 
diff --git a/setup.py b/setup.py
index 63bfc1e..2ec9897 100644
--- a/setup.py
+++ b/setup.py
@@ -1,7 +1,7 @@
 from setuptools import setup, find_packages
 import os
 
-VERSION = "0.18"
+VERSION = "0.19a0"
 
 
 def get_long_description():