🎨 style: use ruff format #84

Merged: 1 commit, Dec 4, 2023
.pre-commit-config.yaml: 2 additions & 17 deletions
@@ -10,27 +10,12 @@ repos:
       - id: end-of-file-fixer
       - id: mixed-line-ending
       - id: trailing-whitespace
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.9.1
-    hooks:
-      - id: black
-  - repo: https://github.com/PyCQA/autoflake
-    rev: v2.0.1
-    hooks:
-      - id: autoflake
-        args:
-          - "--in-place"
-          - "--jobs"
-          - "10"
-          - "--expand-star-imports"
-          - "--remove-duplicate-keys"
-          # - "--remove-unused-variables"
-          - "--remove-all-unused-imports"
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: v0.0.292
+    rev: v0.1.6
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix, --no-cache]
+      - id: ruff-format
   - repo: https://github.com/asottile/yesqa
     rev: v1.4.0
     hooks:
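Note (not part of the diff): the hooks above replace black and autoflake with ruff's own fixer and formatter. As a rough illustration only, the sketch below shows approximately what the two ruff hooks run when invoked by hand; it assumes ruff v0.1.6 or newer is installed and on PATH, and is not code from this PR.

```python
# Roughly what the `ruff` and `ruff-format` hooks above execute,
# run directly instead of through pre-commit. Illustrative only.
import subprocess

# Lint with autofix; covers the unused-import cleanup that the removed
# autoflake hook used to perform.
subprocess.run(
    ["ruff", "check", "--fix", "--exit-non-zero-on-fix", "--no-cache", "."],
    check=False,  # non-zero exit means fixes were applied or issues remain
)

# Format the tree; this is the role the removed black hook used to fill.
subprocess.run(["ruff", "format", "."], check=True)
```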
examples/fx_profiling.py: 4 additions & 4 deletions
@@ -93,10 +93,10 @@ def summary(self, should_sort: bool = False) -> str:
         # Use the ``tabulate`` library to create a well-formatted table
         # presenting our summary information
         headers: list[str] = [
-            'Op type',
-            'Op',
-            'Average runtime (s)',
-            'Pct total runtime',
+            "Op type",
+            "Op",
+            "Average runtime (s)",
+            "Pct total runtime",
         ]
         return tabulate.tabulate(node_summaries, headers=headers)

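For context (not part of the diff): the headers list above is passed straight to tabulate. A self-contained sketch of that call, with made-up row data, looks like this:

```python
# Standalone illustration of the tabulate call in summary();
# the rows here are invented sample values.
import tabulate

headers = ["Op type", "Op", "Average runtime (s)", "Pct total runtime"]
node_summaries = [
    ["call_function", "add", 0.00012, "35.0%"],
    ["output", "output", 0.00002, "5.8%"],
]
print(tabulate.tabulate(node_summaries, headers=headers))
```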
examples/graph_editing.py: 1 addition & 1 deletion
@@ -16,7 +16,7 @@ def net(x, y):
 print(traced_layer.get_source())

 for node in graph.nodes:
-    if node.op == 'call_function':
+    if node.op == "call_function":
         with graph.inserting_after(node):
             new_node = graph.create_node(
                 node.op, paddle.add, args=(node.args[0], node.args[0]), kwargs={}
examples/native_interpreter/use_interpreter.py: 5 additions & 5 deletions
@@ -38,24 +38,24 @@ def lower_to_native_interpreter(orig_net):
         target, args, out_name = n.target, n.args, n.name
         assert len(n.kwargs) == 0, "kwargs currently not supported"

-        if n.op == 'placeholder':
+        if n.op == "placeholder":
             input_names.append(target)
-        elif n.op == 'call_function':
+        elif n.op == "call_function":
             assert target in target_to_name, "Unsupported call target " + target
             arg_names = []
             for arg in args:
                 if not isinstance(arg, paddlefx.Node):
-                    raise RuntimeError('Unsupported arg' + arg)
+                    raise RuntimeError("Unsupported arg" + arg)
                 else:
                     arg_names.append(arg.name)
             instructions.append(
                 [target_to_name[target], arg_names[0], arg_names[1], out_name]
             )
-        elif n.op == 'output':
+        elif n.op == "output":
             # not handled output node for now
             pass
         else:
-            raise RuntimeError('Unsupported opcode ' + n.op)
+            raise RuntimeError("Unsupported opcode " + n.op)


 lower_to_native_interpreter(net)
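As background (not part of the diff): lower_to_native_interpreter flattens the traced graph into [op_name, lhs, rhs, out] instructions. A hypothetical, dependency-free sketch of executing such a list is shown below; the op table and helper name are assumptions for illustration, not paddlefx code.

```python
# Hypothetical executor for the [op_name, lhs, rhs, out] instruction
# lists produced above. Purely illustrative; not part of this repo.
import operator

OPS = {"add": operator.add, "mul": operator.mul}  # assumed op table


def run_instructions(instructions, inputs):
    env = dict(inputs)  # maps value names to concrete values
    for op_name, lhs, rhs, out in instructions:
        env[out] = OPS[op_name](env[lhs], env[rhs])
    return env


print(run_instructions([["add", "a", "b", "c"]], {"a": 1.0, "b": 2.0}))
# -> {'a': 1.0, 'b': 2.0, 'c': 3.0}
```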
examples/targets/target_3_add_paddle.py: 2 additions & 2 deletions
@@ -20,7 +20,7 @@ def net(a, b):
     return c


-in_a = paddle.ones([1], dtype='float32')
-in_b = paddle.ones([1], dtype='float32')
+in_a = paddle.ones([1], dtype="float32")
+in_b = paddle.ones([1], dtype="float32")
 res = net(in_a, in_b)
 print("res = ", res)
pyproject.toml: 15 additions & 3 deletions
@@ -17,22 +17,34 @@ workers = 4
 # https://beta.ruff.rs/docs/configuration/
 [tool.ruff]
 exclude = [".cmake-format.py"]
+target-version = "py38"
+
+[tool.ruff.lint]
 select = [
     "UP",
     "F",
-    "I"
+    "I",
+    # "B",
+    # "C4",
+    # "PGH",
+    # "RUF",
+    "W",
+    "YTT"
 ]
 ignore = [
     "UP015",
     "F405"
 ]
-target-version = "py38"

-[tool.ruff.isort]
+[tool.ruff.lint.isort]
 lines-between-types = 1
 known-first-party = ["paddlefx"]
 required-imports = ["from __future__ import annotations"]

+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401", "I002"]
+"setup.py" = ["F401", "I002"]
+
 [tool.pytest.ini_options]
 minversion = "7.0.0"
 pythonpath = "tests"
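Background for this hunk: recent ruff releases namespace lint settings under [tool.ruff.lint], which is why [tool.ruff.isort] becomes [tool.ruff.lint.isort], and the new per-file-ignores entries (F401, I002) are what let the # noqa comments be dropped from __init__.py and setup.py below. A small sketch for sanity-checking the resulting table follows; it assumes Python 3.11+ for the stdlib tomllib and is not part of the PR.

```python
# Quick sanity check of the new lint configuration. Requires
# Python 3.11+ for tomllib (use the third-party tomli otherwise).
import tomllib

with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

lint = config["tool"]["ruff"]["lint"]
print(lint["select"])            # ['UP', 'F', 'I', 'W', 'YTT']
print(lint["per-file-ignores"])  # {'__init__.py': [...], 'setup.py': [...]}
```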
setup.py: 30 additions & 30 deletions
@@ -21,7 +21,7 @@ def _get_version():


 def _get_install_requires():
-    with open('requirements.txt', 'r') as f:
+    with open("requirements.txt", "r") as f:
         install_requires = f.readlines()
     return install_requires

@@ -31,14 +31,14 @@ def _run_cmd(cmd: str, cwd=None):
         cmd,
         stdout=subprocess.PIPE,
         shell=True,
-        executable='/bin/bash',
+        executable="/bin/bash",
         cwd=cwd,
     )
     stdout, _ = p.communicate()
     out = stdout.decode().strip()
     print(out)
     if p.returncode != 0:
-        raise Exception(f'CMD {cmd} failed')
+        raise Exception(f"CMD {cmd} failed")
     else:
         return 0

@@ -48,36 +48,36 @@ def build_extensions(self):
         os.makedirs(self.build_temp, exist_ok=True)
         os.makedirs(self.build_lib, exist_ok=True)

-        debug = int(os.environ.get('DEBUG', 0)) if self.debug is None else self.debug
-        build_type = 'Debug' if debug else 'Release'
+        debug = int(os.environ.get("DEBUG", 0)) if self.debug is None else self.debug
+        build_type = "Debug" if debug else "Release"

         cmake_args = (
-            f'-DCMAKE_BUILD_TYPE={build_type} -DPython3_EXECUTABLE={sys.executable} '
+            f"-DCMAKE_BUILD_TYPE={build_type} -DPython3_EXECUTABLE={sys.executable} "
         )
-        cmake_args += f'-DCMAKE_INSTALL_PREFIX={HERE}/src/paddlefx '
-        if 'CMAKE_ARGS' in os.environ:
-            cmake_args += os.environ.get('CMAKE_ARGS', '')
-            cmake_args += ' '
+        cmake_args += f"-DCMAKE_INSTALL_PREFIX={HERE}/src/paddlefx "
+        if "CMAKE_ARGS" in os.environ:
+            cmake_args += os.environ.get("CMAKE_ARGS", "")
+            cmake_args += " "

         try:
             import ninja

-            ninja_executable_path = _osp.join(ninja.BIN_DIR, 'ninja')
-            cmake_args += f'-GNinja -DCMAKE_MAKE_PROGRAM={ninja_executable_path} '
+            ninja_executable_path = _osp.join(ninja.BIN_DIR, "ninja")
+            cmake_args += f"-GNinja -DCMAKE_MAKE_PROGRAM={ninja_executable_path} "
         except ImportError:
-            raise Exception('please install ninja first.')
+            raise Exception("please install ninja first.")

-        cmd = f'cmake {cmake_args} -S{HERE} -B{self.build_temp};'
-        cmd += f'cmake --build {self.build_temp} --target install'
+        cmd = f"cmake {cmake_args} -S{HERE} -B{self.build_temp};"
+        cmd += f"cmake --build {self.build_temp} --target install"
         _run_cmd(cmd)

         try:
-            import mypy  # noqa
+            import mypy

-            cmd = 'stubgen -m _eval_frame -o .'
-            _run_cmd(cmd, cwd=f'{HERE}/src/paddlefx')
+            cmd = "stubgen -m _eval_frame -o ."
+            _run_cmd(cmd, cwd=f"{HERE}/src/paddlefx")
         except ImportError:
-            warnings.warn('No mypy package is found for stub generating')
+            warnings.warn("No mypy package is found for stub generating")

         # copy extensions
         for ext in self.extensions:
@@ -89,21 +89,21 @@ def build_extensions(self):
             self.copy_file(src, dst)


-if __name__ == '__main__':
-    ext_modules = [Extension('paddlefx._eval_frame', [])]
-    cmdclass = {'build_ext': CMakeBuildExt}
+if __name__ == "__main__":
+    ext_modules = [Extension("paddlefx._eval_frame", [])]
+    cmdclass = {"build_ext": CMakeBuildExt}
     # TODO: add more info
     setup(
-        name='paddlefx',
-        description='paddlefx is an experimental project of paddle python IR.',
-        license='Apache 2.0',
-        license_files=('LICENSE',),
-        python_requires='>=3.7',
+        name="paddlefx",
+        description="paddlefx is an experimental project of paddle python IR.",
+        license="Apache 2.0",
+        license_files=("LICENSE",),
+        python_requires=">=3.7",
         install_requires=_get_install_requires(),
-        package_dir={'': 'src'},
-        packages=find_packages(where='src'),
+        package_dir={"": "src"},
+        packages=find_packages(where="src"),
         package_data={
-            'paddlefx': ['py.typed', '*.pyi'],
+            "paddlefx": ["py.typed", "*.pyi"],
         },
         ext_modules=ext_modules,
         cmdclass=cmdclass,
src/paddlefx/__init__.py: 8 additions & 10 deletions
@@ -1,16 +1,14 @@
 from __future__ import annotations

-from .eval_frame import optimize  # noqa
-from .graph import Graph  # noqa
-from .graph_layer import GraphLayer  # noqa
-from .graph_viewer import FxGraphViewer  # noqa
-from .interpreter import Interpreter  # noqa
-from .node import Node  # noqa
-from .symbolic_trace import Tracer, symbolic_trace  # noqa
+from .eval_frame import optimize
+from .graph import Graph
+from .graph_layer import GraphLayer
+from .graph_viewer import FxGraphViewer
+from .interpreter import Interpreter
+from .node import Node
+from .symbolic_trace import Tracer, symbolic_trace

 try:
     from ._version import version as __version__
     from ._version import version_tuple
 except ImportError:
-    __version__ = version = '0.0.0.unknown'
+    __version__ = version = "0.0.0.unknown"
     __version_tuple__ = version_tuple = (0, 0, 0, "unknown")
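The dropped # noqa markers are covered by the new per-file-ignores entry for __init__.py in pyproject.toml (F401 tolerates re-exports that would otherwise be flagged as unused imports). The public surface is unchanged; assuming an installed paddlefx build, the same names still resolve:

```python
# The re-exported API is untouched by this PR; only noqa comments
# were removed. Assumes paddlefx is installed.
from paddlefx import (
    FxGraphViewer,
    Graph,
    GraphLayer,
    Interpreter,
    Node,
    Tracer,
    optimize,
    symbolic_trace,
)

print(symbolic_trace)  # resolves exactly as before
```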
src/paddlefx/codegen.py: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 from functools import lru_cache
 from typing import TYPE_CHECKING

-from .bytecode_transformation import *  # noqa
+from .bytecode_transformation import *  # noqa: F403
 from .source import LocalSource
 from .variable_stack import VariableStack
 from .variables.base import TensorVariable
src/paddlefx/compiler/__init__.py: 2 additions & 4 deletions
@@ -1,8 +1,6 @@
 from __future__ import annotations

-from .base import CompilerBase, DummyCompiler  # noqa: F401
+from .base import CompilerBase, DummyCompiler

 try:
-    from .tvm import TVMCompiler  # noqa: F401
+    from .tvm import TVMCompiler
 except ImportError:
     pass
src/paddlefx/compiler/tvm.py: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ def __init__(
         self.tune_mode = tune_mode

     def compile(self, gl: paddlefx.GraphLayer, example_inputs: list) -> Callable:
-        cache_path = user_cache_dir('paddlefx')
+        cache_path = user_cache_dir("paddlefx")

         shape_dict = {}
         for node in gl.graph.nodes:
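For reference (not part of the diff): user_cache_dir here presumably comes from the platformdirs package (or its predecessor appdirs); the sketch below shows the call in isolation, and the returned path is platform-dependent.

```python
# Illustration of the cache-directory helper used above; assumes
# user_cache_dir is provided by the platformdirs package.
from platformdirs import user_cache_dir

cache_path = user_cache_dir("paddlefx")
print(cache_path)  # e.g. ~/.cache/paddlefx on Linux
```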