Rough initial version of new logging, to log2 table

This commit is contained in:
Simon Willison 2023-07-02 16:35:36 -07:00
parent 4a9f7f4908
commit 52add96ec1
4 changed files with 75 additions and 10 deletions

View file

@@ -1,5 +1,13 @@
from .hookspecs import hookimpl
from .models import Model, Prompt, Response, OptionsError
from .models import LogMessage, Model, Prompt, Response, OptionsError
from .templates import Template
__all__ = ["Template", "Model", "Prompt", "Response", "OptionsError", "hookimpl"]
__all__ = [
"Template",
"Model",
"Prompt",
"Response",
"OptionsError",
"LogMessage",
"hookimpl",
]

View file

@@ -1,5 +1,6 @@
import click
from click_default_group import DefaultGroup
from dataclasses import asdict
import datetime
import json
from llm import Template
@@ -212,8 +213,22 @@ def prompt(
else:
print(response.text())
# Log to the database
if no_log:
return
log_path = logs_db_path()
if not log_path.exists():
return
db = sqlite_utils.Database(log_path)
migrate(db)
log_message = response.to_log()
log_dict = asdict(log_message)
log_dict["duration_ms"] = response.duration_ms()
log_dict["timestamp_utc"] = response.timestamp_utc()
db["log2"].insert(log_dict, pk="id")
# TODO: Figure out OpenAI exception handling
# TODO: Log to database
return
# Original code:

View file

@@ -1,7 +1,8 @@
from llm import Model, Prompt, Response, hookimpl
from llm import LogMessage, Model, Prompt, Response, hookimpl
from llm.errors import NeedsKeyException
from llm.utils import dicts_to_table_string
import click
from dataclasses import asdict
import datetime
import openai
import requests
@@ -80,7 +81,6 @@ class ChatResponse(Response):
content = chunk["choices"][0].get("delta", {}).get("content")
if content is not None:
yield content
self._done = True
else:
response = openai.ChatCompletion.create(
model=self.prompt.model.model_id,
@@ -89,9 +89,20 @@ class ChatResponse(Response):
)
self._debug["model"] = response.model
self._debug["usage"] = response.usage
content = response.choices[0].message.content
self._done = True
yield content
yield response.choices[0].message.content
def to_log(self) -> LogMessage:
    """Build a LogMessage snapshot of this response for database logging.

    Returns a LogMessage whose fields mirror the log2 table columns.
    """
    # BUG FIX: this module's top-level imports (click, dataclasses.asdict,
    # datetime, openai, requests) do not include ``json``, so json.dumps
    # below would raise NameError at runtime. Import it locally.
    import json

    return LogMessage(
        model=self.prompt.model.model_id,
        prompt=self.prompt.prompt,
        system=self.prompt.system,
        options=dict(self.prompt.options),
        # default=repr keeps non-JSON-serializable option values loggable
        prompt_json=json.dumps(asdict(self.prompt), default=repr),
        response=self.text(),
        response_json={},  # TODO: capture the raw API response here
        chat_id=None,  # TODO: populate once chat threading exists
        debug_json=self._debug,
    )
class Chat(Model):

View file

@@ -1,4 +1,6 @@
from dataclasses import dataclass
import datetime
import time
from typing import Any, Dict, Iterator, List, Optional, Set
from abc import ABC, abstractmethod
import os
@@ -25,6 +27,19 @@ class OptionsError(Exception):
pass
@dataclass
class LogMessage:
    """Flat record of one prompt/response exchange, shaped to match the
    columns of the ``log2`` database table it is inserted into."""

    model: str  # Actually the model.model_id string
    prompt: str  # Simplified string version of prompt
    system: Optional[str]  # Simplified string of system prompt
    options: Dict[str, Any]  # Any options e.g. temperature
    prompt_json: str  # Detailed JSON of prompt
    response: str  # Simplified string version of response
    response_json: Dict[str, Any]  # Detailed JSON of response
    chat_id: Optional[int]  # ID of chat, if this is part of one
    debug_json: Dict[str, Any]  # Any debug info returned by the model
class Response(ABC):
def __init__(self, prompt: Prompt, model: "Model", stream: bool):
self.prompt = prompt
@@ -46,12 +61,15 @@ class Response(ABC):
stream=self.stream,
)
def __iter__(self):
def __iter__(self) -> Iterator[str]:
    """Yield response chunks as strings, caching them and recording timing.

    The first iteration streams from iter_prompt() and records start/end
    times; later iterations replay the cached chunks.
    """
    self._start = time.monotonic()
    self._start_utcnow = datetime.datetime.utcnow()
    if self._done:
        # BUG FIX: a plain ``return self._chunks`` inside a generator
        # discards the value entirely, so iterating a finished response
        # yielded nothing. ``yield from`` actually re-emits the cache.
        yield from self._chunks
        return
    for chunk in self.iter_prompt():
        yield chunk
        self._chunks.append(chunk)
    self._end = time.monotonic()
    self._done = True
@abstractmethod
@@ -63,10 +81,23 @@ class Response(ABC):
if not self._done:
list(self)
def text(self):
def text(self) -> str:
    """Return the complete response as one string, forcing full
    evaluation of the underlying stream if it has not finished yet."""
    self._force()
    return "".join(self._chunks)
def duration_ms(self) -> int:
    """How long the response took to generate, in whole milliseconds."""
    self._force()  # make sure the stream has fully completed first
    elapsed_seconds = self._end - self._start
    return int(elapsed_seconds * 1000)
def timestamp_utc(self) -> str:
    """ISO-8601 timestamp string for when the response started (UTC)."""
    self._force()  # guarantees _start_utcnow has been recorded
    started_at = self._start_utcnow
    return started_at.isoformat()
@abstractmethod
def to_log(self) -> LogMessage:
    """Return a LogMessage of data to log.

    Subclasses must populate every LogMessage field from the completed
    response; called by the CLI before inserting into the database.
    """
    pass
class Model(ABC):
model_id: str