From 02852fe1a53b5039f1c6e4b2c02c490978d87d08 Mon Sep 17 00:00:00 2001 From: Simon Willison <swillison@gmail.com> Date: Tue, 19 Nov 2024 20:23:54 -0800 Subject: [PATCH] Release 0.19a0 Refs #610, #641 --- docs/changelog.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/changelog.md b/docs/changelog.md index 4440bd5..298a7b3 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,13 @@ # Changelog +(v0_19a0)= +## 0.19a0 (2024-11-19) + +- Tokens used by a response are now logged to new `input_tokens` and `output_tokens` integer columns and a `token_details` JSON string column, for the default OpenAI models and models from other plugins that {ref}`implement this feature <advanced-model-plugins-usage>`. [#610](https://github.com/simonw/llm/issues/610) +- `llm prompt` now takes a `-u/--usage` flag to display token usage at the end of the response. +- `llm logs -u/--usage` shows token usage information for logged responses. +- `llm prompt ... --async` responses are now logged to the database. [#641](https://github.com/simonw/llm/issues/641) + (v0_18)= ## 0.18 (2024-11-17) diff --git a/setup.py b/setup.py index 63bfc1e..2ec9897 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup, find_packages import os -VERSION = "0.18" +VERSION = "0.19a0" def get_long_description():