Convert directory fbcode/torchx to use the Ruff Formatter
Summary:
Converts the specified directory (fbcode/torchx) to use the Ruff formatter in pyfmt.


If this diff causes merge conflicts when rebasing, please run
`hg status -n -0 --change . -I '**/*.{py,pyi}' | xargs -0 arc pyfmt`
on your diff and amend any changes before rebasing onto the latest revision.
That should reduce or eliminate any merge conflicts.
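
For checkouts outside Meta's internal tooling, where `arc pyfmt` is not available, a rough equivalent (assuming Ruff is installed and picks up the repository's formatter settings) is to run `ruff format` over the affected directories, for example `ruff format docs scripts tools torchx`, and then amend the result before continuing the rebase.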

allow-large-files

Reviewed By: amyreese

Differential Revision: D66248087
Thomas Polasek authored and facebook-github-bot committed Nov 25, 2024
1 parent 5ad30fe commit b62e648
Showing 23 changed files with 43 additions and 29 deletions.
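
Most of the hunks below fall into a few recurring patterns: a blank line inserted between a module docstring and the first import, a trailing comma added to a lone parameter that is already split across lines, and calls that fit within the line limit collapsed back onto a single line. As a rough illustration only (a hypothetical module, not taken from this commit, assuming Ruff's default 88-character line length), the formatted output tends to look like this:

# Hypothetical module, for illustration only (not part of this commit).
"""Example docstring; a blank line is enforced before the first import."""

from typing import Dict, List


def resources_from_requirements(
    requirements: List[Dict[str, str]],  # a lone parameter keeps its trailing comma
) -> Dict[str, int]:
    # Expressions that fit within the line limit are collapsed back onto one line.
    return {r["type"]: int(r["value"]) for r in requirements}
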
4 changes: 1 addition & 3 deletions docs/source/conf.py
@@ -191,9 +191,7 @@ def setup(app):

# In Sphinx 1.8 it was renamed to `add_css_file`, 1.7 and prior it is
# `add_stylesheet` (deprecated in 1.8).
add_css = getattr(
app, "add_css_file", getattr(app, "add_stylesheet", None)
) # noqa B009
add_css = getattr(app, "add_css_file", getattr(app, "add_stylesheet", None)) # noqa B009
for css_file in html_css_files:
add_css(css_file)

5 changes: 3 additions & 2 deletions scripts/collect_env.py
@@ -33,8 +33,9 @@ def run(
args=command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
)
raw_output, raw_err = p.communicate()
raw_output, raw_err = raw_output.strip().decode("utf-8"), raw_err.strip().decode(
"utf-8"
raw_output, raw_err = (
raw_output.strip().decode("utf-8"),
raw_err.strip().decode("utf-8"),
)
rc = p.returncode
if rc != 0:
1 change: 1 addition & 0 deletions scripts/component_integration_tests.py
@@ -10,6 +10,7 @@
"""
Kubernetes integration tests.
"""

import argparse
import logging
import os
4 changes: 3 additions & 1 deletion tools/linter/adapters/pyre_linter.py
@@ -109,7 +109,9 @@ def main() -> None:
level=(
logging.NOTSET
if args.verbose
else logging.DEBUG if len(args.filenames) < 1000 else logging.INFO
else logging.DEBUG
if len(args.filenames) < 1000
else logging.INFO
),
stream=sys.stderr,
)
4 changes: 3 additions & 1 deletion tools/linter/adapters/ufmt_linter.py
@@ -119,7 +119,9 @@ def main() -> None:
level=(
logging.NOTSET
if args.verbose
else logging.DEBUG if len(args.filenames) < 1000 else logging.INFO
else logging.DEBUG
if len(args.filenames) < 1000
else logging.INFO
),
stream=sys.stderr,
)
2 changes: 0 additions & 2 deletions torchx/cli/test/cmd_run_test.py
@@ -51,14 +51,12 @@ def tearDown(self) -> None:
torchxconfig.called_args = set()

def test_run_with_multiple_scheduler_args(self) -> None:

args = ["--scheduler_args", "first_args", "--scheduler_args", "second_args"]
with self.assertRaises(SystemExit) as cm:
self.parser.parse_args(args)
self.assertEqual(cm.exception.code, 1)

def test_run_with_multiple_schedule_args(self) -> None:

args = [
"--scheduler",
"local_cwd",
1 change: 1 addition & 0 deletions torchx/components/dist.py
@@ -56,6 +56,7 @@
Components APIs
-----------------
"""

import os
import re
import shlex
1 change: 1 addition & 0 deletions torchx/components/structured_arg.py
@@ -25,6 +25,7 @@
with an experiment tracker. The ``/`` delimiter is a natural way to group runs within experiments.
"""

import warnings
from dataclasses import dataclass
from pathlib import Path
1 change: 1 addition & 0 deletions torchx/runner/config.py
@@ -157,6 +157,7 @@ def my_component(a: int) -> specs.AppDef:
you want to keep personal config overrides on top of a project defined default.
"""

import configparser as configparser
import logging
import os
3 changes: 2 additions & 1 deletion torchx/schedulers/aws_batch_scheduler.py
@@ -36,6 +36,7 @@
https://docs.aws.amazon.com/AmazonECR/latest/userguide/getting-started-cli.html#cli-create-repository
for how to create a image repository.
"""

import getpass
import re
import threading
@@ -155,7 +156,7 @@ def resource_requirements_from_resource(resource: Resource) -> List[Dict[str, st


def resource_from_resource_requirements(
resource_requirements: List[Dict[str, str]]
resource_requirements: List[Dict[str, str]],
) -> Resource:
resrc_req = {
ResourceType.from_str(r["type"]): int(r["value"]) for r in resource_requirements
8 changes: 5 additions & 3 deletions torchx/schedulers/kubernetes_mcad_scheduler.py
@@ -17,8 +17,8 @@
TorchX Kubernetes_MCAD scheduler depends on AppWrapper + MCAD.
Install MCAD:
See deploying Multi-Cluster-Application-Dispatcher guide
Install MCAD:
See deploying Multi-Cluster-Application-Dispatcher guide
https://github.com/project-codeflare/multi-cluster-app-dispatcher/blob/main/doc/deploy/deployment.md
This implementation requires MCAD v1.34.1 or higher.
@@ -478,8 +478,10 @@ def get_unique_truncated_appid(app: AppDef) -> str:
unique_id_size = default_size if size > default_size else size

if unique_id_size <= 3:
msg = "Name size has too many characters for some Kubernetes objects. Truncating \
msg = (
"Name size has too many characters for some Kubernetes objects. Truncating \
application name."
)
warnings.warn(msg)
end = 63 - uid_chars - pg_chars
substring = app.name[0:end]
1 change: 1 addition & 0 deletions torchx/schedulers/lsf_scheduler.py
@@ -23,6 +23,7 @@
See the LSF documentation for more details:
https://www.ibm.com/docs/en/cloud-private/3.2.x?topic=paks-spectrum-lsf-community-edition
"""

import os.path
import re
import subprocess
13 changes: 7 additions & 6 deletions torchx/schedulers/ray/ray_driver.py
@@ -18,6 +18,7 @@
_step function, this give more flexibility to us if we want to bette handle the
node failures.
"""

import json
import logging
import os
@@ -148,12 +149,12 @@ def __init__(self, replicas: List[RayActor]) -> None:
else:
self.min_replicas = replicas[0].min_replicas # pyre-ignore[8]

self.placement_groups: List[PlacementGroup] = (
[]
) # all the placement groups, shall never change
self.actor_info_of_id: Dict[str, ActorInfo] = (
{}
) # store the info used to recover an actor
self.placement_groups: List[
PlacementGroup
] = [] # all the placement groups, shall never change
self.actor_info_of_id: Dict[
str, ActorInfo
] = {} # store the info used to recover an actor
self.active_tasks: List["ray.ObjectRef"] = [] # list of active tasks

self.terminating: bool = False # if the job has finished and being terminated
1 change: 1 addition & 0 deletions torchx/schedulers/slurm_scheduler.py
@@ -11,6 +11,7 @@
This contains the TorchX Slurm scheduler which can be used to run TorchX
components on a Slurm cluster.
"""

import csv
import json
import logging
4 changes: 1 addition & 3 deletions torchx/schedulers/test/kubernetes_scheduler_test.py
@@ -112,9 +112,7 @@ def test_app_to_resource_resolved_macros(self) -> None:
resource = app_to_resource(app, "test_queue", service_account=None)
actual_cmd = (
# pyre-ignore [16]
resource["spec"]["tasks"][0]["template"]
.spec.containers[0]
.command
resource["spec"]["tasks"][0]["template"].spec.containers[0].command
)
expected_cmd = [
"main",
4 changes: 1 addition & 3 deletions torchx/schedulers/test/ray_scheduler_test.py
@@ -584,9 +584,7 @@ def test_ray_driver_elasticity(self) -> None:
self.assertIsNotNone(driver.rank_0_port)

# 3-2
terminal = (
driver._step()
) # actor 1 finished, actor 2 has been scheduled yet, usually, the driver stops here
terminal = driver._step() # actor 1 finished, actor 2 has been scheduled yet, usually, the driver stops here
self.assertEqual(terminal, True)
self.assertEqual(driver.command_actors_count, 0)
self.assertEqual(len(driver.active_tasks), 1) # actor schedule task
1 change: 1 addition & 0 deletions torchx/specs/__init__.py
@@ -12,6 +12,7 @@
used by components to define the apps which can then be launched via a TorchX
scheduler or pipeline adapter.
"""

import difflib
from typing import Callable, Dict, Optional

1 change: 1 addition & 0 deletions torchx/specs/named_resources_generic.py
@@ -37,6 +37,7 @@
have 8 cpus.
"""

from typing import Callable, Mapping

from torchx.specs.api import Resource
1 change: 0 additions & 1 deletion torchx/specs/test/api_test.py
@@ -301,7 +301,6 @@ async def update(value: str, time_seconds: int) -> str:
self.assertEqual("nentry", default.entrypoint)

def test_concurrent_override_role(self) -> None:

def delay(value: Tuple[str, str], time_seconds: int) -> Tuple[str, str]:
time.sleep(time_seconds)
return value
1 change: 1 addition & 0 deletions torchx/test/fixtures.py
@@ -9,6 +9,7 @@
"""
Useful test fixtures (classes that you can subclass your python ``unittest.TestCase``)
"""

import os
import shutil
import sys
2 changes: 1 addition & 1 deletion torchx/tracker/api.py
@@ -179,7 +179,7 @@ def _extract_tracker_name_and_config_from_environ() -> Mapping[str, Optional[str


def build_trackers(
factory_and_config: Mapping[str, Optional[str]]
factory_and_config: Mapping[str, Optional[str]],
) -> Iterable[TrackerBase]:
trackers = []

4 changes: 3 additions & 1 deletion torchx/tracker/backend/test/fsspec_test.py
@@ -171,5 +171,7 @@ def test_create(self) -> None:

tracker = create(f"file://{str(configfile)}")
self.assertEqual(
tracker._path_builder.root_dir, tracker_root_path # pyre-ignore
# pyre-fixme[16]: `TrackerBase` has no attribute `_path_builder`.
tracker._path_builder.root_dir,
tracker_root_path,
)
5 changes: 4 additions & 1 deletion torchx/tracker/mlflow.py
@@ -254,7 +254,10 @@ def run_ids(self, **kwargs: str) -> Iterable[str]:
return [r.info.run_name for r in runs]

def log_params_flat(
self, run_name: str, cfg: Any, key: str = "" # pyre-ignore[2]
self,
run_name: str,
cfg: Any,
key: str = "", # pyre-ignore[2]
) -> None:
"""
Designed to be primarily used with hydra-style config objects (e.g. dataclasses),