Apply pyupgrade to builtin repos #823

Merged (1 commit) on Jan 14, 2025
.pre-commit-config.yaml (2 changes: 1 addition & 1 deletion)

@@ -17,5 +17,5 @@ repos:
 - id: pyupgrade
   name: python-old-syntax-check
   args: [--py36-plus]
-  files: lib/ramble/ramble/.*\.py$
+  files: (lib/ramble/ramble/|var/ramble/repos/).*\.py$

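For context, the changes in the rest of this diff are two of the mechanical rewrites pyupgrade applies with --py36-plus: dropping the redundant "r" mode from open() and converting str.format() calls to f-strings. A minimal illustrative sketch (the file name and message below are hypothetical, not taken from any repo file):

# Before pyupgrade: explicit "r" mode and str.format()
path = "example_output.txt"
with open(path, "w") as f:  # create a throwaway file so the sketch runs
    f.write("demo line\n")

with open(path, "r") as f:
    first = f.readline().strip()
message = "first line: {}".format(first)

# After pyupgrade --py36-plus: "r" is the default mode, and f-strings replace str.format()
with open(path) as f:
    first = f.readline().strip()
message = f"first line: {first}"
print(message)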

var/ramble/repos/builtin/applications/minixyce/application.py (4 changes: 2 additions & 2 deletions)

@@ -262,7 +262,7 @@ def _analyze_experiments(self, workspace, app_inst=None):

 if os.path.isfile(output_file):
     names = []
-    with open(output_file, "r") as f:
+    with open(output_file) as f:
         names = f.readline().split()
         for line in f.readlines()[-1:]:
             values = line.split()
@@ -271,6 +271,6 @@ def _analyze_experiments(self, workspace, app_inst=None):
         for i, (name, value) in enumerate(
             zip(names[1:-2], values[1:-2])
         ):
-            f.write("{}: {} = {}\n".format((i + 1), name, value))
+            f.write(f"{(i + 1)}: {name} = {value}\n")

 super()._analyze_experiments(workspace)

var/ramble/repos/builtin/applications/namd/application.py (2 changes: 1 addition & 1 deletion)

@@ -388,7 +388,7 @@ def _analyze_experiments(self, workspace, app_inst=None):
 dpns = None

 if os.path.isfile(log_path):
-    with open(log_path, "r") as f:
+    with open(log_path) as f:
         for line in f.readlines():
             match = ns_regex.match(line)
             if match:

(file name not shown)

@@ -252,7 +252,7 @@ def _prepare_analysis(self, workspace, app_inst=None):
 fom_type = None
 log_file = self.expander.expand_var_name("log_file")
 if os.path.isfile(log_file):
-    with open(log_file, "r") as f:
+    with open(log_file) as f:
         for line in f.readlines():
             for test_fom_type in self.fom_types:
                 if self.fom_regex_headers[test_fom_type].match(line):

(file name not shown)

@@ -615,7 +615,7 @@ def _preprocess_log(self, workspace, app_inst):
 final_regex = re.compile(self.final_epoch_regex)

 if os.path.exists(log_file):
-    with open(log_file, "r", encoding="ISO-8859-1") as f:
+    with open(log_file, encoding="ISO-8859-1") as f:
         data = f.read()

     processed_log = self.expander.expand_var(

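The hunk above is the only one in this diff where open() also passes an explicit encoding. A small sketch (hypothetical file name and content, not from the PR) showing that dropping the positional "r" leaves keyword arguments such as encoding untouched, because "r" is the default mode:

# Hypothetical example: write and re-read a Latin-1 encoded file.
log_path = "latin1_example.log"
with open(log_path, "w", encoding="ISO-8859-1") as f:
    f.write("café\n")

# Equivalent to open(log_path, "r", encoding="ISO-8859-1"); "r" is implied.
with open(log_path, encoding="ISO-8859-1") as f:
    print(f.read())
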
(file name not shown)

@@ -220,7 +220,7 @@ def evaluate_success(self):
 if not os.path.isfile(spack_file):
     return False

-with open(spack_file, "r") as f:
+with open(spack_file) as f:
     spack_data = syaml.load_config(f)

 tty.debug(f"Spack data: {spack_data}")

var/ramble/repos/builtin/applications/wrfv3/application.py (2 changes: 1 addition & 1 deletion)

@@ -183,7 +183,7 @@ def _analyze_experiments(self, workspace, app_inst=None):
 sum_time = 0.0
 count = 0
 for out_file in file_list:
-    with open(out_file, "r") as f:
+    with open(out_file) as f:
         for line in f.readlines():
             m = timing_regex.match(line)
             if m:

var/ramble/repos/builtin/applications/wrfv4/application.py (2 changes: 1 addition & 1 deletion)

@@ -195,7 +195,7 @@ def _analyze_experiments(self, workspace, app_inst=None):
 sum_time = 0.0
 count = 0
 for out_file in file_list:
-    with open(out_file, "r") as f:
+    with open(out_file) as f:
         for line in f.readlines():
             m = timing_regex.match(line)
             if m:

(file name not shown)

@@ -308,7 +308,7 @@ def artifact_inventory(self, workspace, app_inst=None):
 )

 if os.path.exists(hash_file_path):
-    with open(hash_file_path, "r") as f:
+    with open(hash_file_path) as f:
         container_hash = f.read()

 else:

(file name not shown)

@@ -362,7 +362,7 @@ def generate_requirement_file(self):
 existing_req_mtime = os.path.getmtime(req_file)
 existing_lock_mtime = os.path.getmtime(lock_file)
 if existing_lock_mtime >= existing_req_mtime:
-    with open(req_file, "r") as f:
+    with open(req_file) as f:
         if f.read() == contents:
             self.installed = True
             logger.debug("requirement file already up-to-date")
@@ -428,7 +428,7 @@ def define_path_vars(self, app_inst, cache):
 if not lock_file:
     raise RunnerError(f"Lock file {lock_file} is missing")
 pkgs = []
-with open(lock_file, "r") as f:
+with open(lock_file) as f:
     for line in f.readlines():
         # pip freeze generates such a comment, which serves as a divider
         # for packages that are added as deps of the ones defined directly.

(file name not shown)

@@ -61,7 +61,7 @@ def _software_install_requested_compilers(self, workspace, app_inst=None):

 cache_tupl = ("spack-compilers", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already in cache.".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already in cache.")
     return
 else:
     workspace.add_to_cache(cache_tupl)
@@ -114,7 +114,7 @@ def _software_create_env(self, workspace, app_inst=None):

 cache_tupl = ("spack-env", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already in cache.".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already in cache.")
     return
 else:
     workspace.add_to_cache(cache_tupl)
@@ -222,7 +222,7 @@ def _software_configure(self, workspace, app_inst=None):

 cache_tupl = ("concretize-env", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already in cache.".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already in cache.")
     return
 else:
     workspace.add_to_cache(cache_tupl)
@@ -273,7 +273,7 @@ def _mirror_software(self, workspace, app_inst=None):

 cache_tupl = ("spack-mirror", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already in cache.".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already in cache.")
     return
 else:
     workspace.add_to_cache(cache_tupl)
@@ -334,7 +334,7 @@ def _push_to_spack_cache(self, workspace, app_inst=None):
 env_path = self.app_inst.expander.env_path
 cache_tupl = ("push-to-cache", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already pushed, skipping".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already pushed, skipping")
     return
 else:
     workspace.add_to_cache(cache_tupl)
@@ -1039,7 +1039,7 @@ def generate_env_file(self):
 env_data = syaml.load_config(
     syaml.dump_config(env_file, default_flow_style=False)
 )
-with open(spack_env_file, "r") as f:
+with open(spack_env_file) as f:
     existing_data = syaml.load_config(f)
 gen_env_hash = ramble.util.hashing.hash_json(env_data)
 existing_env_hash = ramble.util.hashing.hash_json(

(file name not shown)

@@ -42,7 +42,7 @@ def _software_install(self, workspace, app_inst=None):

 cache_tupl = ("spack-install", env_path)
 if workspace.check_cache(cache_tupl):
-    logger.debug("{} already in cache.".format(cache_tupl))
+    logger.debug(f"{cache_tupl} already in cache.")
     return
 else:
     workspace.add_to_cache(cache_tupl)

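As a quick sanity check on the .format() to f-string conversions above: formatting a value with "{}" and with an f-string both go through format(value, ""), so the debug messages are unchanged, including for tuples like cache_tupl. A minimal sketch with made-up values, not taken from the PR:

# Verifies the rewrite is behavior-preserving for a tuple argument.
cache_tupl = ("spack-env", "/workspace/envs/demo")
old_msg = "{} already in cache.".format(cache_tupl)
new_msg = f"{cache_tupl} already in cache."
assert old_msg == new_msg
print(new_msg)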