
Commit

fixer yapf
Yehoshua Cohen committed Jul 23, 2024
1 parent 9f475a9 commit bb76e61
1 changed file with 24 additions and 22 deletions: tests/entrypoints/openai/test_chat.py
@@ -295,15 +295,19 @@ async def test_chat_completion_stream_options(client: openai.AsyncOpenAI,
     async for chunk in stream:
         assert chunk.usage is None
 
-    # Test stream=True, stream_options={"include_usage": True,
-    # "continuous_usage_stats":False}
-    stream = await client.chat.completions.create(
-        model=model_name,
-        messages=messages,
-        max_tokens=10,
-        temperature=0.0,
-        stream=True,
-        stream_options={"include_usage": True, "continuous_usage_stats": False})
+    # Test stream=True, stream_options={"include_usage": True,
+    # "continuous_usage_stats": False}}
+    stream = await client.chat.completions.create(model=model_name,
+                                                   messages=messages,
+                                                   max_tokens=10,
+                                                   temperature=0.0,
+                                                   stream=True,
+                                                   stream_options={
+                                                       "include_usage":
+                                                       True,
+                                                       "continuous_usage_stats":
+                                                       False
+                                                   })
 
     async for chunk in stream:
         if chunk.choices[0].finish_reason is None:
@@ -341,26 +345,24 @@ async def test_chat_completion_stream_options(client: openai.AsyncOpenAI,
 
     # Test stream=True, stream_options={"include_usage": True,
     # "continuous_usage_stats": True}
-    stream = await client.chat.completions.create(model=model_name,
-                                                   messages=messages,
-                                                   max_tokens=10,
-                                                   temperature=0.0,
-                                                   stream=True,
-                                                   stream_options={
-                                                       "include_usage":
-                                                       True,
-                                                       "continuous_usage_stats":
-                                                       False
-                                                   })
+    stream = await client.chat.completions.create(
+        model=model_name,
+        messages=messages,
+        max_tokens=10,
+        temperature=0.0,
+        stream=True,
+        stream_options={
+            "include_usage": True,
+            "continuous_usage_stats": True
+        },
+    )
     async for chunk in stream:
         assert chunk.usage.prompt_tokens >= 0
         assert chunk.usage.completion_tokens >= 0
         assert chunk.usage.total_tokens == (chunk.usage.prompt_tokens +
                                             chunk.usage.completion_tokens)
-
-
 
 
 # NOTE: Not sure why, but when I place this after `test_guided_regex_chat`
 # (i.e. using the same ordering as in the Completions API tests), the test
 # will fail on the second `guided_decoding_backend` even when I swap their order
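Below is a minimal standalone sketch (not part of the commit) of how a client might consume the usage statistics that this test exercises. It assumes a locally running OpenAI-compatible vLLM server at the hypothetical base_url below and a placeholder model name; "continuous_usage_stats" is treated as a server-side extension to stream_options, as the test suggests, rather than a documented field of the openai client.

# Illustrative sketch only; the server URL, api_key value, and model name are
# placeholders, and it is assumed the server honours "continuous_usage_stats".
import asyncio

import openai


async def show_continuous_usage():
    client = openai.AsyncOpenAI(base_url="http://localhost:8000/v1",
                                api_key="EMPTY")
    stream = await client.chat.completions.create(
        model="my-model",
        messages=[{"role": "user", "content": "Say hello."}],
        max_tokens=10,
        stream=True,
        stream_options={
            "include_usage": True,
            # With continuous_usage_stats enabled, the test above expects a
            # usage block on every streamed chunk, not just the final one.
            "continuous_usage_stats": True
        },
    )
    async for chunk in stream:
        if chunk.usage is not None:
            print("total tokens so far:", chunk.usage.total_tokens)


asyncio.run(show_continuous_usage())

With "continuous_usage_stats": False and "include_usage": True, usage would be expected only on the final chunk, per the usual include_usage semantics.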
