Fix test failure caused by version bump, refs #1218

This commit is contained in:
Simon Willison 2025-08-11 14:27:16 -07:00
parent 2a54939951
commit e4c1a46d90
4 changed files with 12 additions and 16 deletions

View file

@@ -55,7 +55,7 @@ interactions:
code: 200
message: OK
- request:
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.26"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.fixed-version"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
the installed version of llm","parameters":{"properties":{},"type":"object"}}}]}'
headers:
accept:
@@ -121,7 +121,7 @@ interactions:
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"26"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"fixed-version"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"**."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}

View file

@@ -52,7 +52,7 @@ interactions:
code: 200
message: OK
- request:
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.26"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.fixed-version"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
the installed version of llm","parameters":{"properties":{},"type":"object"}}}]}'
headers:
accept:
@@ -118,7 +118,7 @@ interactions:
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"26"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"fixed-version"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}
data: {"id":"gen-1753242300-j60LWi6MpN4lMZw1zTHK","provider":"Moonshot AI","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753242300,"choices":[{"index":0,"delta":{"role":"assistant","content":"**."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}],"system_fingerprint":"fpv0_170758dd"}

View file

@@ -56,7 +56,7 @@ interactions:
code: 200
message: OK
- request:
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.26"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
body: '{"messages":[{"role":"user","content":"What is the current llm version?"},{"role":"assistant","content":""},{"role":"assistant","tool_calls":[{"type":"function","id":"0","function":{"name":"llm_version","arguments":"{}"}}]},{"role":"tool","tool_call_id":"0","content":"0.fixed-version"}],"model":"gpt-4.1-mini","stream":true,"stream_options":{"include_usage":true},"tools":[{"type":"function","function":{"name":"llm_version","description":"Return
the installed version of llm","parameters":{"properties":{},"type":"object"}}}]}'
headers:
accept:
@@ -116,7 +116,7 @@ interactions:
data: {"id":"gen-1753248104-uf1xqJDBrAUCJ4g8apK8","provider":"Fireworks","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753248104,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}
data: {"id":"gen-1753248104-uf1xqJDBrAUCJ4g8apK8","provider":"Fireworks","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753248104,"choices":[{"index":0,"delta":{"role":"assistant","content":"26"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}
data: {"id":"gen-1753248104-uf1xqJDBrAUCJ4g8apK8","provider":"Fireworks","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753248104,"choices":[{"index":0,"delta":{"role":"assistant","content":"fixed-version"},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}
data: {"id":"gen-1753248104-uf1xqJDBrAUCJ4g8apK8","provider":"Fireworks","model":"moonshotai/kimi-k2","object":"chat.completion.chunk","created":1753248104,"choices":[{"index":0,"delta":{"role":"assistant","content":"."},"finish_reason":null,"native_finish_reason":null,"logprobs":null}]}

View file

@@ -1,4 +1,3 @@
from importlib.metadata import version
import llm
from llm.tools import llm_version
import os
@@ -15,9 +14,7 @@ def test_tools_streaming_variant_a():
chain = model.chain(
"What is the current llm version?", tools=[llm_version], key=API_KEY
)
assert "".join(chain) == "The current version of *llm* is **{}**.".format(
version("llm")
)
assert "".join(chain) == "The current version of *llm* is **0.fixed-version**."
# This response contains streaming variant "b" where arguments="{}" is the first partial stream received.
@@ -27,9 +24,7 @@ def test_tools_streaming_variant_b():
chain = model.chain(
"What is the current llm version?", tools=[llm_version], key=API_KEY
)
assert "".join(chain) == "The current version of *llm* is **{}**.".format(
version("llm")
)
assert "".join(chain) == "The current version of *llm* is **0.fixed-version**."
# This response contains streaming variant "c".
@@ -39,6 +34,7 @@ def test_tools_streaming_variant_c():
chain = model.chain(
"What is the current llm version?", tools=[llm_version], key=API_KEY
)
assert "".join(
chain
) == "The installed version of LLM on this system is {}.".format(version("llm"))
assert (
"".join(chain)
== "The installed version of LLM on this system is 0.fixed-version."
)