diff --git a/src/somd2/config/_config.py b/src/somd2/config/_config.py index 53204e0..a597263 100644 --- a/src/somd2/config/_config.py +++ b/src/somd2/config/_config.py @@ -310,13 +310,13 @@ def as_dict(self, sire_compatible=False): this simply converts any options with a value of None to a boolean with the value False. """ - from pathlib import PosixPath as _PosixPath + from pathlib import Path as _Path from sire.cas import LambdaSchedule as _LambdaSchedule d = {} for attr, value in self.__dict__.items(): attr_l = attr[1:] - if isinstance(value, _PosixPath): + if isinstance(value, _Path): d[attr_l] = str(value) else: try: diff --git a/src/somd2/runner/_dynamics.py b/src/somd2/runner/_dynamics.py index a29cb75..213d43e 100644 --- a/src/somd2/runner/_dynamics.py +++ b/src/somd2/runner/_dynamics.py @@ -20,7 +20,7 @@ ##################################################################### __all__ = ["Dynamics"] - +import sys as _sys from pathlib import Path as _Path from ..config import Config as _Config @@ -29,6 +29,11 @@ from somd2 import _logger +if _sys.platform == "win32": + lam_sym = "lambda" +else: + lam_sym = "λ" + class Dynamics: """ @@ -191,7 +196,7 @@ def _minimisation(self, lambda_min=None): lambda_val. 
""" if lambda_min is None: - _logger.info(f"Minimising at λ = {self._lambda_val}") + _logger.info(f"Minimising at {lam_sym} = {self._lambda_val}") try: m = self._system.minimisation( cutoff_type=self._config.cutoff_type, @@ -206,7 +211,7 @@ except: raise else: - _logger.info(f"Minimising at λ = {lambda_min}") + _logger.info(f"Minimising at {lam_sym} = {lambda_min}") try: m = self._system.minimisation( cutoff_type=self._config.cutoff_type, @@ -229,7 +234,7 @@ Currently just runs dynamics without any saving """ - _logger.info(f"Equilibrating at λ = {self._lambda_val}") + _logger.info(f"Equilibrating at {lam_sym} = {self._lambda_val}") self._setup_dynamics(equilibration=True) self._dyn.run( self._config.equilibration_time, @@ -282,7 +287,7 @@ def generate_lam_vals(lambda_base, increment): else: lam_arr = self._lambda_array + self._lambda_grad - _logger.info(f"Running dynamics at λ = {self._lambda_val}") + _logger.info(f"Running dynamics at {lam_sym} = {self._lambda_val}") if self._config.checkpoint_frequency.value() > 0.0: ### Calc number of blocks and remainder (surely there's a better way?)### @@ -343,7 +348,7 @@ def generate_lam_vals(lambda_base, increment): df.iloc[-int(energy_per_block) :], ) _logger.info( - f"Finished block {x+1} of {num_blocks} for λ = {self._lambda_val}" + f"Finished block {x+1} of {num_blocks} for {lam_sym} = {self._lambda_val}" ) except: raise diff --git a/src/somd2/runner/_runner.py b/src/somd2/runner/_runner.py index f3dcb54..216be04 100644 --- a/src/somd2/runner/_runner.py +++ b/src/somd2/runner/_runner.py @@ -21,7 +21,7 @@ __all__ = ["Runner"] - +import sys as _sys from sire import stream as _stream from sire.system import System as _System @@ -31,6 +31,11 @@ from somd2 import _logger +if _sys.platform == "win32": + lam_sym = "lambda" +else: + lam_sym = "λ" + class Runner: """ @@ -456,7 +461,7 @@ def _initialise_simulation(self, system, lambda_value, device=None): 
has_space=self._has_space, ) except: - _logger.warning(f"System creation at λ = {lambda_value} failed") + _logger.warning(f"System creation at {lam_sym} = {lambda_value} failed") raise def _cleanup_simulation(self): @@ -521,8 +526,9 @@ def run(self): result = job.result() except Exception as e: result = False + _logger.error( - f"Exception raised for λ = {lambda_value}: {e}" + f"Exception raised for {lam_sym} = {lambda_value}: {e}" ) with self._lock: results.append(result) @@ -583,8 +589,8 @@ def _run(sim): return df, lambda_grad, speed except Exception as e: _logger.warning( - f"Minimisation/dynamics at λ = {lambda_value} failed with the " - f"following exception {e}, trying again with minimsation at λ = 0." + f"Minimisation/dynamics at {lam_sym} = {lambda_value} failed with the " + f"following exception {e}, trying again with minimisation at {lam_sym} = 0." ) try: df = sim._run(lambda_minimisation=0.0) @@ -593,8 +599,8 @@ def _run(sim): return df, lambda_grad, speed except Exception as e: _logger.error( - f"Minimisation/dynamics at λ = {lambda_value} failed, even after " - f"minimisation at λ = 0. The following warning was raised: {e}." + f"Minimisation/dynamics at {lam_sym} = {lambda_value} failed, even after " + f"minimisation at {lam_sym} = 0. The following warning was raised: {e}." ) raise else: @@ -605,7 +611,7 @@ def _run(sim): return df, lambda_grad, speed except Exception as e: _logger.error( - f"Dynamics at λ = {lambda_value} failed. The following warning was " + f"Dynamics at {lam_sym} = {lambda_value} failed. The following warning was " f"raised: {e}. This may be due to a lack of minimisation." ) @@ -619,18 +625,20 @@ def _run(sim): ).clone() except: _logger.warning( - f"Unable to load checkpoint file for λ={lambda_value}, starting from scratch." + f"Unable to load checkpoint file for {lam_sym}={lambda_value}, starting from scratch." 
) else: system = self._system.clone() if self._config.restart: acc_time = system.time() if acc_time > self._config.runtime - self._config.timestep: - _logger.success(f"λ = {lambda_value} already complete. Skipping.") + _logger.success( + f"{lam_sym} = {lambda_value} already complete. Skipping." + ) return True else: _logger.debug( - f"Restarting λ = {lambda_value} at time {acc_time}, time remaining = {self._config.runtime - acc_time}" + f"Restarting {lam_sym} = {lambda_value} at time {acc_time}, time remaining = {self._config.runtime - acc_time}" ) # GPU platform. if self._is_gpu: @@ -639,11 +647,13 @@ def _run(sim): gpu_num = self._gpu_pool[0] self._remove_gpu_from_pool(gpu_num) if lambda_value is not None: - _logger.info(f"Running λ = {lambda_value} on GPU {gpu_num}") + _logger.info( + f"Running {lam_sym} = {lambda_value} on GPU {gpu_num}" + ) # Assumes that device for non-parallel GPU jobs is 0 else: gpu_num = 0 - _logger.info("Running λ = {lambda_value} on GPU 0") + _logger.info(f"Running {lam_sym} = {lambda_value} on GPU 0") self._initialise_simulation(system, lambda_value, device=gpu_num) try: df, lambda_grad, speed = _run(self._sim) @@ -660,7 +670,7 @@ def _run(sim): # All other platforms. 
else: - _logger.info(f"Running λ = {lambda_value}") + _logger.info(f"Running {lam_sym} = {lambda_value}") self._initialise_simulation(system, lambda_value) try: @@ -685,5 +695,5 @@ def _run(sim): filename=self._fnames[lambda_value]["energy_traj"], ) del system - _logger.success(f"λ = {lambda_value} complete") + _logger.success(f"{lam_sym} = {lambda_value} complete") return True diff --git a/tests/runner/test_config.py b/tests/runner/test_config.py index 8365992..05ec6b3 100644 --- a/tests/runner/test_config.py +++ b/tests/runner/test_config.py @@ -82,10 +82,9 @@ def test_logfile_creation(): config = Config(output_directory=tmpdir, log_file="test.log") assert config.log_file is not None assert Path.exists(config.output_directory / config.log_file) - Path.unlink(config.output_directory / config.log_file) # Instantiate a runner using the default config. # (All default options, other than platform="cpu".) - runner = Runner(mols, Config(output_directory=tmpdir, log_file="test.log")) + runner = Runner(mols, Config(output_directory=tmpdir, log_file="test1.log")) assert runner._config.log_file is not None assert Path.exists(runner._config.output_directory / runner._config.log_file)