core: Add ruff rules for PIE (langchain-ai#26939)
All auto-fixes.
cbornet authored and Sheepsta300 committed Oct 1, 2024
1 parent 2182e57 commit 83eab96
Showing 21 changed files with 16 additions and 52 deletions.
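For context: the new "PIE" selector turns on ruff's port of the flake8-pie plugin, and every hunk below appears to be one of its auto-fixable findings — placeholder pass statements removed, **{...} spreads flattened into the enclosing dict literal, chained startswith calls merged into one, redundant range starts dropped, and **{...} argument dicts rewritten as plain keyword arguments. A minimal before/after sketch of the most frequent fix in this commit, the redundant pass under a docstring (likely rule PIE790), mirroring the mustache.py change further down:

# Before: the class body contains a docstring plus a redundant `pass`.
class ChevronError(SyntaxError):
    """Custom exception for Chevron errors."""

    pass


# After the auto-fix: the docstring alone is a valid (and sufficient) body.
class ChevronError(SyntaxError):
    """Custom exception for Chevron errors."""
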
1 change: 0 additions & 1 deletion libs/core/langchain_core/callbacks/streaming_stdout.py
@@ -112,7 +112,6 @@ def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
action (AgentAction): The agent action.
**kwargs (Any): Additional keyword arguments.
"""
pass

def on_tool_end(self, output: Any, **kwargs: Any) -> None:
"""Run when tool ends running.
6 changes: 3 additions & 3 deletions libs/core/langchain_core/language_models/chat_models.py
@@ -357,7 +357,7 @@ def stream(
stop: Optional[list[str]] = None,
**kwargs: Any,
) -> Iterator[BaseMessageChunk]:
if not self._should_stream(async_api=False, **{**kwargs, **{"stream": True}}):
if not self._should_stream(async_api=False, **{**kwargs, "stream": True}):
# model doesn't implement streaming, so use default implementation
yield cast(
BaseMessageChunk, self.invoke(input, config=config, stop=stop, **kwargs)
@@ -427,7 +427,7 @@ async def astream(
stop: Optional[list[str]] = None,
**kwargs: Any,
) -> AsyncIterator[BaseMessageChunk]:
if not self._should_stream(async_api=True, **{**kwargs, **{"stream": True}}):
if not self._should_stream(async_api=True, **{**kwargs, "stream": True}):
# No async or sync stream is implemented, so fall back to ainvoke
yield cast(
BaseMessageChunk,
@@ -550,7 +550,7 @@ def _get_ls_params(

def _get_llm_string(self, stop: Optional[list[str]] = None, **kwargs: Any) -> str:
if self.is_lc_serializable():
params = {**kwargs, **{"stop": stop}}
params = {**kwargs, "stop": stop}
param_string = str(sorted(params.items()))
# This code is not super efficient as it goes back and forth between
# json and dict.
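The three chat_models.py hunks above share one pattern: a single-key dict spread inside another dict literal, which ruff flattens (this looks like its unnecessary-spread rule, PIE800). A self-contained sketch with placeholder values rather than the real call sites:

kwargs = {"temperature": 0.2}

# Before: an inner dict literal is built only to be unpacked again.
params_before = {**kwargs, **{"stream": True}}

# After the auto-fix: the key is written directly in the outer literal.
params_after = {**kwargs, "stream": True}

assert params_before == params_after == {"temperature": 0.2, "stream": True}
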
8 changes: 3 additions & 5 deletions libs/core/langchain_core/prompts/chat.py
@@ -1007,11 +1007,9 @@ def __init__(
input_vars.update(_message.input_variables)

kwargs = {
**{
"input_variables": sorted(input_vars),
"optional_variables": sorted(optional_variables),
"partial_variables": partial_vars,
},
"input_variables": sorted(input_vars),
"optional_variables": sorted(optional_variables),
"partial_variables": partial_vars,
**kwargs,
}
cast(type[ChatPromptTemplate], super()).__init__(messages=_messages, **kwargs)
2 changes: 0 additions & 2 deletions libs/core/langchain_core/runnables/config.py
@@ -36,8 +36,6 @@
class EmptyDict(TypedDict, total=False):
"""Empty dict type."""

pass


class RunnableConfig(TypedDict, total=False):
"""Configuration for a Runnable."""
2 changes: 0 additions & 2 deletions libs/core/langchain_core/runnables/configurable.py
@@ -457,8 +457,6 @@ def _prepare(
class StrEnum(str, enum.Enum):
"""String enum."""

pass


_enums_for_spec: WeakValueDictionary[
Union[
3 changes: 2 additions & 1 deletion libs/core/langchain_core/runnables/fallbacks.py
@@ -619,7 +619,8 @@ def wrapped(*args: Any, **kwargs: Any) -> Any:
return self.__class__(
**{
**self.model_dump(),
**{"runnable": new_runnable, "fallbacks": new_fallbacks},
"runnable": new_runnable,
"fallbacks": new_fallbacks,
}
)

2 changes: 0 additions & 2 deletions libs/core/langchain_core/tools/base.py
@@ -320,8 +320,6 @@ class ToolException(Exception):  # noqa: N818
to the agent as observation, and printed in red on the console.
"""

pass


class BaseTool(RunnableSerializable[Union[str, dict, ToolCall], Any]):
"""Interface LangChain tools must implement."""
1 change: 0 additions & 1 deletion libs/core/langchain_core/tracers/base.py
@@ -850,7 +850,6 @@ async def on_retriever_end(

async def _on_run_create(self, run: Run) -> None:
"""Process a run upon creation."""
pass

async def _on_run_update(self, run: Run) -> None:
"""Process a run upon update."""
1 change: 0 additions & 1 deletion libs/core/langchain_core/tracers/event_stream.py
@@ -829,7 +829,6 @@ async def _astream_events_implementation_v1(
inputs = log_entry["inputs"]
if inputs is not None:
data["input"] = inputs
pass

if event_type == "end":
inputs = log_entry["inputs"]
2 changes: 1 addition & 1 deletion libs/core/langchain_core/utils/function_calling.py
@@ -561,7 +561,7 @@ def _parse_google_docstring(
if block.startswith("Args:"):
args_block = block
break
elif block.startswith("Returns:") or block.startswith("Example:"):
elif block.startswith(("Returns:", "Example:")):
# Don't break in case Args come after
past_descriptors = True
elif not past_descriptors:
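The function_calling.py hunk merges two startswith checks into one call with a tuple of prefixes, which matches ruff's multiple-starts-ends-with rule (PIE810); str.startswith natively accepts a tuple. A small sketch with an invented block value:

block = "Returns: the parsed value"
past_descriptors = False

# Before: two prefix checks joined with `or`.
if block.startswith("Returns:") or block.startswith("Example:"):
    past_descriptors = True

# After the auto-fix: a single call with a tuple of prefixes, same result.
if block.startswith(("Returns:", "Example:")):
    past_descriptors = True
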
2 changes: 0 additions & 2 deletions libs/core/langchain_core/utils/mustache.py
@@ -32,8 +32,6 @@
class ChevronError(SyntaxError):
"""Custom exception for Chevron errors."""

pass


#
# Helper functions
2 changes: 0 additions & 2 deletions libs/core/langchain_core/utils/utils.py
@@ -268,8 +268,6 @@ def convert_to_secret_str(value: Union[SecretStr, str]) -> SecretStr:
class _NoDefaultType:
"""Type to indicate no default value is provided."""

pass


_NoDefault = _NoDefaultType()

2 changes: 1 addition & 1 deletion libs/core/pyproject.toml
@@ -44,7 +44,7 @@ python = ">=3.12.4"
[tool.poetry.extras]

[tool.ruff.lint]
select = [ "B", "C4", "E", "F", "I", "N", "T201", "UP",]
select = [ "B", "C4", "E", "F", "I", "N", "PIE", "T201", "UP",]
ignore = [ "UP007",]

[tool.coverage.run]
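The pyproject.toml hunk above is where the rule set is actually enabled: adding "PIE" to ruff's select list turns on the flake8-pie checks for libs/core. Assuming a standard ruff installation, the rest of the commit could then be regenerated with something like `ruff check --fix libs/core`, which applies only the auto-fixable findings, consistent with the "All auto-fixes." commit message.
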
1 change: 0 additions & 1 deletion libs/core/tests/integration_tests/test_compile.py
@@ -4,4 +4,3 @@
@pytest.mark.compile
def test_placeholder() -> None:
"""Used for compiling integration tests without running any real tests."""
pass
2 changes: 0 additions & 2 deletions libs/core/tests/unit_tests/_api/test_beta_decorator.py
@@ -68,7 +68,6 @@ async def beta_async_function() -> str:
class ClassWithBetaMethods:
def __init__(self) -> None:
"""original doc"""
pass

@beta()
def beta_method(self) -> str:
@@ -244,7 +243,6 @@ def test_whole_class_beta() -> None:
class BetaClass:
def __init__(self) -> None:
"""original doc"""
pass

@beta()
def beta_method(self) -> str:
4 changes: 0 additions & 4 deletions libs/core/tests/unit_tests/_api/test_deprecation.py
@@ -88,7 +88,6 @@ async def deprecated_async_function() -> str:
class ClassWithDeprecatedMethods:
def __init__(self) -> None:
"""original doc"""
pass

@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -268,7 +267,6 @@ def test_whole_class_deprecation() -> None:
class DeprecatedClass:
def __init__(self) -> None:
"""original doc"""
pass

@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -311,7 +309,6 @@ def test_whole_class_inherited_deprecation() -> None:
class DeprecatedClass:
def __init__(self) -> None:
"""original doc"""
pass

@deprecated(since="2.0.0", removal="3.0.0")
def deprecated_method(self) -> str:
@@ -324,7 +321,6 @@ class InheritedDeprecatedClass(DeprecatedClass):

def __init__(self) -> None:
"""original doc"""
pass

@deprecated(since="2.2.0", removal="3.2.0")
def deprecated_method(self) -> str:
@@ -107,7 +107,7 @@ def eval_response(callback: BaseFakeCallbackHandler, i: int) -> None:
else:
assert llm_result.generations[0][0].text == message[:i]

for i in range(0, 2):
for i in range(2):
llm = FakeListChatModel(
responses=[message],
error_on_chunk_number=i,
@@ -105,7 +105,7 @@ def eval_response(callback: BaseFakeCallbackHandler, i: int) -> None:
else:
assert llm_result.generations[0][0].text == message[:i]

for i in range(0, 2):
for i in range(2):
llm = FakeStreamingListLLM(
responses=[message],
error_on_chunk_number=i,
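The two test hunks above drop the explicit 0 start from range, ruff's unnecessary-range-start rule (likely PIE808); range(0, n) and range(n) iterate over the same values. A tiny sketch loosely modeled on the surrounding tests:

message = "Hello, world!"

# Before: the explicit start of 0 is redundant.
prefixes_before = [message[:i] for i in range(0, 2)]

# After the auto-fix: a single stop argument is equivalent.
prefixes_after = [message[:i] for i in range(2)]

assert prefixes_before == prefixes_after == ["", "H"]
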
12 changes: 4 additions & 8 deletions libs/core/tests/unit_tests/test_messages.py
@@ -429,16 +429,12 @@ def test_message_chunk_to_message() -> None:
expected = AIMessage(
content="I am",
tool_calls=[
create_tool_call(**{"name": "tool1", "args": {"a": 1}, "id": "1"}), # type: ignore[arg-type]
create_tool_call(**{"name": "tool2", "args": {}, "id": "2"}), # type: ignore[arg-type]
create_tool_call(name="tool1", args={"a": 1}, id="1"), # type: ignore[arg-type]
create_tool_call(name="tool2", args={}, id="2"), # type: ignore[arg-type]
],
invalid_tool_calls=[
create_invalid_tool_call(
**{"name": "tool3", "args": None, "id": "3", "error": None}
),
create_invalid_tool_call(
**{"name": "tool4", "args": "abc", "id": "4", "error": None}
),
create_invalid_tool_call(name="tool3", args=None, id="3", error=None),
create_invalid_tool_call(name="tool4", args="abc", id="4", error=None),
],
)
assert message_chunk_to_message(chunk) == expected
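The test_messages.py hunk replaces **{...} argument dicts with plain keyword arguments, which matches ruff's unnecessary-dict-kwargs rule (likely PIE804). A self-contained sketch using a stand-in with the same shape as the helper called in the test:

def create_tool_call(name: str, args: dict, id: str) -> dict:
    # Stand-in for the langchain_core helper, defined here only for illustration.
    return {"name": name, "args": args, "id": id}

# Before: keyword arguments packed into a dict literal and unpacked again.
call_before = create_tool_call(**{"name": "tool1", "args": {"a": 1}, "id": "1"})

# After the auto-fix: ordinary keyword arguments.
call_after = create_tool_call(name="tool1", args={"a": 1}, id="1")

assert call_before == call_after
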
1 change: 0 additions & 1 deletion libs/core/tests/unit_tests/test_tools.py
@@ -357,7 +357,6 @@ def structured_tool(
some_base_model: SomeBaseModel, another_base_model: AnotherBaseModel
) -> None:
"""Return the arguments directly."""
pass


def test_base_tool_inheritance_base_schema() -> None:
10 changes: 0 additions & 10 deletions libs/core/tests/unit_tests/utils/test_function_calling.py
@@ -54,7 +54,6 @@ def dummy_function(
arg2: ExtensionsAnnotated[Literal["bar", "baz"], "one of 'bar', 'baz'"],
) -> None:
"""dummy function"""
pass

return dummy_function

@@ -68,7 +67,6 @@ def dummy_function(arg1: int, arg2: Literal["bar", "baz"]) -> None:
arg1: foo
arg2: one of 'bar', 'baz'
"""
pass

return dummy_function

@@ -220,7 +218,6 @@ def dummy_function(self, arg1: int, arg2: Literal["bar", "baz"]) -> None:
arg1: foo
arg2: one of 'bar', 'baz'
"""
pass


class DummyWithClassMethod:
@@ -232,7 +229,6 @@ def dummy_function(cls, arg1: int, arg2: Literal["bar", "baz"]) -> None:
arg1: foo
arg2: one of 'bar', 'baz'
"""
pass


def test_convert_to_openai_function(
@@ -334,7 +330,6 @@ class NestedV2(BaseModelV2Maybe):

def my_function(arg1: NestedV2) -> None:
"""dummy function"""
pass

convert_to_openai_function(my_function)

@@ -348,7 +343,6 @@ class Nested(BaseModel):

def my_function(arg1: Nested) -> None:
"""dummy function"""
pass

expected = {
"name": "my_function",
@@ -386,7 +380,6 @@ class Nested(BaseModel):

def my_function(arg1: Nested) -> None:
"""dummy function"""
pass

expected = {
"name": "my_function",
@@ -429,7 +422,6 @@ def func5(
c: Optional[list[Optional[str]]],
) -> None:
"""A test function"""
pass

func = convert_to_openai_function(func5)
req = func["parameters"]["required"]
@@ -439,7 +431,6 @@ def test_function_no_params() -> None:
def test_function_no_params() -> None:
def nullary_function() -> None:
"""nullary function"""
pass

func = convert_to_openai_function(nullary_function)
req = func["parameters"].get("required")
@@ -781,7 +772,6 @@ def test_convert_union_type_py_39() -> None:
@tool
def magic_function(input: int | float) -> str:
"""Compute a magic function."""
pass

result = convert_to_openai_function(magic_function)
assert result["parameters"]["properties"]["input"] == {
