Merge pull request #315 from argonne-lcf/20
Prep for prerelease; lint fixes
tomuram authored Feb 6, 2023
2 parents 1e2180e + 4ea2b3a commit 657959e
Showing 21 changed files with 8 additions and 31 deletions.
2 changes: 1 addition & 1 deletion balsam/__init__.py
@@ -1,4 +1,4 @@
from balsam.util import config_root_logger

__version__ = "0.7.0.a20"
__version__ = "0.7.0.a21"
config_root_logger()
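
Aside (not part of the diff): the bumped version string is a PEP 440 alpha pre-release, which is what "prep for prerelease" refers to. A minimal sketch of checking that, assuming the third-party "packaging" library is available:

    # Hypothetical check, not from the repository: confirm the new version
    # string parses as a pre-release under PEP 440.
    from packaging.version import Version

    v = Version("0.7.0.a21")       # normalizes to "0.7.0a21"
    print(v.is_prerelease, v.pre)  # True ('a', 21)
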
2 changes: 0 additions & 2 deletions balsam/_api/manager.py
@@ -161,7 +161,6 @@ def _unpack_list_response(self, response_data: Dict[str, Any]) -> Tuple[int, Lis
def _fetch_pages(
self, filters: Dict[str, Any], ordering: Optional[str], limit: Optional[int], offset: Optional[int]
) -> Tuple[int, List[Dict[str, Any]]]:
-
base_offset = 0 if offset is None else offset
page_size = MAX_PAGE_SIZE if limit is None else min(limit, MAX_PAGE_SIZE)

@@ -191,7 +190,6 @@ def _get_list(
limit: Optional[int],
offset: Optional[int],
) -> Tuple[List[T], int]:
-
filter_chunks = self._chunk_filters(filters)
full_count: int = 0
full_results: List[Dict[str, Any]] = []
8 changes: 4 additions & 4 deletions balsam/_api/models.py
@@ -1,5 +1,5 @@
# This file was auto-generated via /Users/turam/opt/miniconda3/bin/python balsam/schemas/api_generator.py
-# [git rev 3fcc4a5]
+# [git rev ce4bdce]
# Do *not* make changes to the API by changing this file!

import datetime
@@ -765,7 +765,7 @@ class BatchJob(balsam._api.bases.BatchJobBase):
job_mode = Field[balsam.schemas.batchjob.JobMode]()
optional_params = Field[typing.Dict[str, str]]()
filter_tags = Field[typing.Dict[str, str]]()
-partitions = Field[Optional[typing.Union[typing.List[balsam.schemas.batchjob.BatchJobPartition], None]]]()
+partitions = Field[typing.Optional[typing.List[balsam.schemas.batchjob.BatchJobPartition]]]()
site_id = Field[int]()
project = Field[str]()
queue = Field[str]()
@@ -786,7 +786,7 @@ def __init__(
queue: str,
optional_params: Optional[typing.Dict[str, str]] = None,
filter_tags: Optional[typing.Dict[str, str]] = None,
-partitions: Optional[typing.Union[typing.List[balsam.schemas.batchjob.BatchJobPartition], None]] = None,
+partitions: Optional[typing.Optional[typing.List[balsam.schemas.batchjob.BatchJobPartition]]] = None,
**kwargs: Any,
) -> None:
"""
@@ -918,7 +918,7 @@ def create(
queue: str,
optional_params: Optional[typing.Dict[str, str]] = None,
filter_tags: Optional[typing.Dict[str, str]] = None,
-partitions: Optional[typing.Union[typing.List[balsam.schemas.batchjob.BatchJobPartition], None]] = None,
+partitions: Optional[typing.Optional[typing.List[balsam.schemas.batchjob.BatchJobPartition]]] = None,
) -> BatchJob:
"""
Create a new BatchJob object and save it to the API in one step.
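
Aside (not part of the commit): the regenerated annotation typing.Optional[List[...]] is equivalent to the old Optional[Union[List[...], None]], because typing flattens nested unions and collapses the duplicate None. A quick check, using int as a stand-in element type:

    # Hypothetical equivalence check (int stands in for BatchJobPartition):
    # typing flattens Union nesting, so both spellings build Union[List[int], None].
    from typing import List, Optional, Union

    old_style = Optional[Union[List[int], None]]
    new_style = Optional[List[int]]
    assert old_style == new_style
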
1 change: 0 additions & 1 deletion balsam/client/requests_client.py
@@ -21,7 +21,6 @@ class NotAuthenticatedError(Exception):


class RequestsClient(RESTClient):
-
_client_classes: "Dict[str, Type[RequestsClient]]" = {}

@staticmethod
2 changes: 1 addition & 1 deletion balsam/cmdline/_launcher.py
@@ -157,7 +157,7 @@ def launcher(
for proc in launcher_procs:
try:
proc.wait(timeout=1)
-except (subprocess.TimeoutExpired):
+except subprocess.TimeoutExpired:
pass
else:
done_procs.append(proc)
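
Aside: the parentheses around a single exception class are redundant (the linted form behaves identically), but a tuple is still required when catching several exception types. A small illustrative sketch with a hypothetical helper, not taken from the repository:

    import subprocess

    def wait_briefly(proc: subprocess.Popen) -> bool:
        """Return True if the process exits within one second (illustrative only)."""
        try:
            proc.wait(timeout=1)
        except subprocess.TimeoutExpired:   # single type: no parentheses needed
            return False
        except (OSError, ValueError):       # several types: tuple syntax is required
            return False
        return True
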
1 change: 0 additions & 1 deletion balsam/platform/compute_node/alcf_cooley_node.py
@@ -5,7 +5,6 @@


class CooleyNode(ComputeNode):
-
cpu_ids = list(range(12))
gpu_ids = list(range(2))

1 change: 0 additions & 1 deletion balsam/platform/compute_node/alcf_polaris_node.py
@@ -10,7 +10,6 @@


class PolarisNode(ComputeNode):
-
# turam: confirm number of cpus
cpu_ids = list(range(64))
gpu_ids: List[IntStr] = list(range(4))
1 change: 0 additions & 1 deletion balsam/platform/compute_node/alcf_sunspot_node.py
@@ -9,7 +9,6 @@


class SunspotNode(ComputeNode):
-
cpu_ids = list(range(104))
gpu_ids: List[IntStr]

1 change: 0 additions & 1 deletion balsam/platform/compute_node/alcf_thetagpu_node.py
@@ -10,7 +10,6 @@


class ThetaGPUNode(ComputeNode):
-
cpu_ids = list(range(128))
gpu_ids: List[IntStr] = list(range(8))

1 change: 0 additions & 1 deletion balsam/platform/compute_node/alcf_thetaknl_node.py
@@ -5,7 +5,6 @@


class ThetaKNLNode(ComputeNode):
-
cpu_ids = list(range(64))
gpu_ids: List[Union[int, str]] = []

1 change: 0 additions & 1 deletion balsam/platform/compute_node/compute_node.py
@@ -6,7 +6,6 @@


class ComputeNode:
-
cpu_ids: List[IntStr] = []
gpu_ids: List[IntStr] = []

1 change: 0 additions & 1 deletion balsam/platform/compute_node/default.py
@@ -10,7 +10,6 @@


class DefaultNode(ComputeNode):
-
cpu_ids = list(range(psutil.cpu_count() or 4))
gpu_ids: List[Union[int, str]] = []

1 change: 0 additions & 1 deletion balsam/platform/compute_node/nersc_corihas_node.py
@@ -5,7 +5,6 @@


class CoriHaswellNode(ComputeNode):
-
cpu_ids = list(range(32))
gpu_ids: List[Union[int, str]] = []

1 change: 0 additions & 1 deletion balsam/platform/compute_node/nersc_coriknl_node.py
@@ -7,7 +7,6 @@


class CoriKNLNode(ComputeNode):
-
cpu_ids: List[IntStr] = list(range(68))
gpu_ids: List[IntStr] = []

1 change: 0 additions & 1 deletion balsam/platform/compute_node/nersc_perlmutter_gpu.py
@@ -5,7 +5,6 @@


class PerlmutterGPUNode(ComputeNode):
-
cpu_ids = list(range(64))
gpu_ids: List[Union[int, str]] = list(range(4))

1 change: 0 additions & 1 deletion balsam/platform/compute_node/summit_node.py
@@ -5,7 +5,6 @@


class SummitNode(ComputeNode):
-
cpu_ids = list(range(42))
gpu_ids = list(range(6))

4 changes: 0 additions & 4 deletions balsam/platform/tests/test_mpirun.py
@@ -110,7 +110,6 @@ def setUp(self):

@staticmethod
def parse_output(output_fn):
-
ranks = []
sizes = []
with open(output_fn) as file:
@@ -164,7 +163,6 @@ def setUp(self):

@staticmethod
def parse_output(output_fn):
-
ranks = []
sizes = []
with open(output_fn) as file:
@@ -218,7 +216,6 @@ def setUp(self):

@staticmethod
def parse_output(output_fn):
-
ranks = []
sizes = []
with open(output_fn) as file:
@@ -280,7 +277,6 @@ def setUp(self):

@staticmethod
def parse_output(output_fn):
-
ranks = []
sizes = []
with open(output_fn) as file:
4 changes: 0 additions & 4 deletions balsam/platform/tests/test_scheduler.py
@@ -14,7 +14,6 @@ def assertInPath(self, exe):
self.assertTrue(which_exe is not None, f"'{exe}' not in PATH")

def test_submit(self):
-
# verify script exists
self.assertTrue(os.path.exists(self.script_path))
# verify submit command is in path
@@ -127,7 +126,6 @@ def tearDown(self):


class CobaltTest(SchedulerTestMixin, unittest.TestCase):
-
submit_script = """!#/usr/bin/env bash
echo [$SECONDS] Running test submit script
echo [$SECONDS] COBALT_JOBID = $COBALT_JOBID
@@ -171,7 +169,6 @@ def tearDown(self):


class SlurmTest(SchedulerTestMixin, unittest.TestCase):
-
submit_script = """#!/usr/bin/env bash -l
echo [$SECONDS] Running test submit script
echo [$SECONDS] SLURM_JOB_ID = SLURM_JOB_ID
@@ -221,7 +218,6 @@ def test_get_backfill_windows(self):


class LsfTest(SchedulerTestMixin, unittest.TestCase):
-
submit_script = """#!/usr/bin/env bash
echo [$SECONDS] Running test submit script
echo [$SECONDS] LSB_JOBID = $LSB_JOBID
1 change: 0 additions & 1 deletion balsam/server/models/crud/sessions.py
@@ -149,7 +149,6 @@ def _footprint_func() -> Any:
def acquire(
db: Session, owner: schemas.UserOut, session_id: int, spec: schemas.SessionAcquire
) -> List[Dict[str, Any]]:
-
session = (owned_session_query(db, owner).filter(models.Session.id == session_id)).one()
session.heartbeat = datetime.utcnow()

2 changes: 1 addition & 1 deletion balsam/site/launcher/_serial_mode_master.py
@@ -82,7 +82,7 @@ def update_job_states(
for id in done_ids:
self.status_updater.put(id, JobState.run_done, state_timestamp=now)

-for (id, retcode, tail) in error_logs:
+for id, retcode, tail in error_logs:
self.status_updater.put(
id, JobState.run_error, state_timestamp=now, state_data={"returncode": retcode, "error": tail}
)
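
Aside: the parentheses around the loop target are likewise redundant; tuple unpacking in a for statement works the same with or without them. A short sketch with hypothetical data:

    # Both loop headers unpack identically; the bare form is the lint-clean one.
    error_logs = [(101, 1, "traceback..."), (102, 137, "oom-killed")]  # made-up sample

    for (job_id, retcode, tail) in error_logs:   # parenthesized target (old style)
        pass

    for job_id, retcode, tail in error_logs:     # equivalent, preferred form
        print(job_id, retcode, tail)
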
2 changes: 1 addition & 1 deletion tests/benchmark/locustfile.py
@@ -28,6 +28,7 @@
)
TEST_BALSAM_SERVER = os.environ.get("BALSAM_TESET_SERVER", "http://0.0.0.0:8000")

+
# overload client using username/password token authentication
class LocustBalsamClientA(BasicAuthRequestsClient):
def __init__(self, api_root: str, request_event: EventHook) -> None:
@@ -236,7 +237,6 @@ def bulk_job_submission(self) -> None:
# simulate runnings jobs in batches
steps = int(len(jobs) / simulated_nodes) + 1
for step in range(steps):
-
# indices of jobs to operate on
start = simulated_nodes * step
end = min(simulated_nodes * (step + 1), len(jobs))
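
Aside: the added blank line appears to satisfy PEP 8's "two blank lines before a top-level definition" rule (pycodestyle E302), which also applies when a comment block sits directly above the class. A minimal sketch of the compliant layout, with hypothetical names:

    TEST_SERVER = "http://0.0.0.0:8000"   # module-level constant


    # comment attached to the class below; two blank lines precede it
    class DemoClient:
        pass
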
