Skip to content

Commit

Permalink
Fix lint
Browse files — browse the repository at this point in the history
  • Loading branch information
maykcaldas committed Dec 6, 2024
1 parent 6fbf2f2 commit 5d3a3c9
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 4 deletions.
2 changes: 1 addition & 1 deletion llmclient/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -861,7 +861,7 @@ async def call_multiple(
tool_choice: Tool | str | None = TOOL_CHOICE_REQUIRED,
**chat_kwargs,
) -> list[LLMResult]:
if chat_kwargs.get("n", 1) == 1 or self.config.get("n", 1) == 1:
if 1 in {chat_kwargs.get("n", 1), self.config.get("n", 1)}:
if (
chat_kwargs.get("n")
and self.config.get("n")
Expand Down
5 changes: 3 additions & 2 deletions tests/test_llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,8 @@ async def test_single_completion(self, model_name: str) -> None:
result = await model.call(messages)
assert isinstance(result, LLMResult)

result = await model.call(messages, n=1)
result = await model.call(messages, n=1) # noqa: FURB120

assert isinstance(result, LLMResult)
assert result.messages
assert len(result.messages) == 1
Expand All @@ -358,7 +359,7 @@ async def test_multiple_completion(self, model_name: str, request) -> None:
with pytest.raises(litellm.BadRequestError, match="anthropic"):
await model.call(messages)
else:
results = await model.call(messages, n=None)
results = await model.call(messages, n=None) # noqa: FURB120
assert len(results) == self.NUM_COMPLETIONS

results = await model.call(messages, n=self.NUM_COMPLETIONS)
Expand Down
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 5d3a3c9

Please sign in to comment.