diff --git a/arc/job/adapter.py b/arc/job/adapter.py index abf07d8dcb..428abfee1c 100644 --- a/arc/job/adapter.py +++ b/arc/job/adapter.py @@ -85,6 +85,7 @@ class JobEnum(str, Enum): # ESS cfour = 'cfour' gaussian = 'gaussian' + mockter = 'mockter' molpro = 'molpro' orca = 'orca' psi4 = 'psi4' @@ -553,7 +554,8 @@ def set_file_paths(self): self.local_path = os.path.join(self.project_directory, 'calcs', folder_name, self.species_label, self.job_name) else: self.local_path = os.path.join(self.project_directory, 'calcs', folder_name, self.species[0].multi_species, self.job_name) - self.local_path_to_output_file = os.path.join(self.local_path, 'output.out') + self.local_path_to_output_file = os.path.join(self.local_path, + settings['output_filenames'].get(self.job_adapter, 'output.out')) self.local_path_to_orbitals_file = os.path.join(self.local_path, 'orbitals.fchk') self.local_path_to_check_file = os.path.join(self.local_path, 'check.chk') self.local_path_to_hess_file = os.path.join(self.local_path, 'input.hess') diff --git a/arc/job/adapters/__init__.py b/arc/job/adapters/__init__.py index 66d051101d..62896bc6fe 100644 --- a/arc/job/adapters/__init__.py +++ b/arc/job/adapters/__init__.py @@ -1,5 +1,6 @@ import arc.job.adapters.common import arc.job.adapters.gaussian +import arc.job.adapters.mockter import arc.job.adapters.molpro import arc.job.adapters.orca import arc.job.adapters.obabel diff --git a/arc/job/adapters/mockter.py b/arc/job/adapters/mockter.py new file mode 100644 index 0000000000..039b1f1cd1 --- /dev/null +++ b/arc/job/adapters/mockter.py @@ -0,0 +1,267 @@ +""" +An adapter for dummy jobs, meant for testing and debugging only. 
+""" + +import datetime +import os +from typing import TYPE_CHECKING, List, Optional, Tuple, Union + +from arc.common import get_logger, read_yaml_file, save_yaml_file +from arc.imports import settings +from arc.job.adapter import JobAdapter +from arc.job.adapters.common import _initialize_adapter, update_input_dict_with_args +from arc.job.factory import register_job_adapter +from arc.level import Level +from arc.species.converter import xyz_to_str, str_to_xyz + +if TYPE_CHECKING: + from arc.reaction import ARCReaction + from arc.species import ARCSpecies + +logger = get_logger() + +default_job_settings, global_ess_settings, input_filenames, output_filenames, servers, submit_filenames = \ + settings['default_job_settings'], settings['global_ess_settings'], settings['input_filenames'], \ + settings['output_filenames'], settings['servers'], settings['submit_filenames'] + + +class MockAdapter(JobAdapter): + """ + A class for executing mock jobs. + + Args: + project (str): The project's name. Used for setting the remote path. + project_directory (str): The path to the local project directory. + job_type (list, str): The job's type, validated against ``JobTypeEnum``. If it's a list, pipe.py will be called. + args (dict, optional): Methods (including troubleshooting) to be used in input files. + Keys are either 'keyword', 'block', or 'trsh', values are dictionaries with values + to be used either as keywords or as blocks in the respective software input file. + If 'trsh' is specified, an action might be taken instead of appending a keyword or a + block to the input file (e.g., change server or change scan resolution). + bath_gas (str, optional): A bath gas. Currently only used in OneDMin to calculate L-J parameters. + checkfile (str, optional): The path to a previous Gaussian checkfile to be used in the current job. + conformer (int, optional): Conformer number if optimizing conformers. 
constraints (list, optional): A list of constraints to use during an optimization or scan. + cpu_cores (int, optional): The total number of cpu cores requested for a job. + dihedral_increment (float, optional): The degrees increment to use when scanning dihedrals of TS guesses. + dihedrals (List[float], optional): The dihedral angles corresponding to self.torsions. + directed_scan_type (str, optional): The type of the directed scan. + ess_settings (dict, optional): A dictionary of available ESS and a corresponding server list. + ess_trsh_methods (List[str], optional): A list of troubleshooting methods already tried out. + execution_type (str, optional): The execution type, 'incore', 'queue', or 'pipe'. + fine (bool, optional): Whether to use fine geometry optimization parameters. Default: ``False``. + initial_time (datetime.datetime or str, optional): The time at which this job was initiated. + irc_direction (str, optional): The direction of the IRC job (`forward` or `reverse`). + job_id (int, optional): The job's ID determined by the server. + job_memory_gb (int, optional): The total job allocated memory in GB (14 by default). + job_name (str, optional): The job's name (e.g., 'opt_a103'). + job_num (int, optional): Used as the entry number in the database, as well as in ``job_name``. + job_server_name (str, optional): Job's name on the server (e.g., 'a103'). + job_status (list, optional): The job's server and ESS statuses. + level (Level, optional): The level of theory to use. + max_job_time (float, optional): The maximal allowed job time on the server in hours (can be fractional). + run_multi_species (bool, optional): Whether to run a job for multiple species in the same input file. + reactions (List[ARCReaction], optional): Entries are ARCReaction instances, used for TS search methods. + rotor_index (int, optional): The 0-indexed rotor number (key) in the species.rotors_dict dictionary. + server (str): The server to run on. 
server_nodes (list, optional): The nodes this job was previously submitted to. + species (List[ARCSpecies], optional): Entries are ARCSpecies instances. + Either ``reactions`` or ``species`` must be given. + testing (bool, optional): Whether the object is generated for testing purposes, ``True`` if it is. + times_rerun (int, optional): Number of times this job was re-run with the same arguments (no trsh methods). + torsions (List[List[int]], optional): The 0-indexed atom indices of the torsion(s). + tsg (int, optional): TSGuess number if optimizing TS guesses. + xyz (dict, optional): The 3D coordinates to use. If not given, species.get_xyz() will be used. + """ + + def __init__(self, + project: str, + project_directory: str, + job_type: Union[List[str], str], + args: Optional[dict] = None, + bath_gas: Optional[str] = None, + checkfile: Optional[str] = None, + conformer: Optional[int] = None, + constraints: Optional[List[Tuple[List[int], float]]] = None, + cpu_cores: Optional[str] = None, + dihedral_increment: Optional[float] = None, + dihedrals: Optional[List[float]] = None, + directed_scan_type: Optional[str] = None, + ess_settings: Optional[dict] = None, + ess_trsh_methods: Optional[List[str]] = None, + execution_type: Optional[str] = None, + fine: bool = False, + initial_time: Optional[Union['datetime.datetime', str]] = None, + irc_direction: Optional[str] = None, + job_id: Optional[int] = None, + job_memory_gb: float = 14.0, + job_name: Optional[str] = None, + job_num: Optional[int] = None, + job_server_name: Optional[str] = None, + job_status: Optional[List[Union[dict, str]]] = None, + level: Optional[Level] = None, + max_job_time: Optional[float] = None, + run_multi_species: bool = False, + reactions: Optional[List['ARCReaction']] = None, + rotor_index: Optional[int] = None, + server: Optional[str] = None, + server_nodes: Optional[list] = None, + queue: Optional[str] = None, + attempted_queues: Optional[List[str]] = None, + species: 
Optional[List['ARCSpecies']] = None, + testing: bool = False, + times_rerun: int = 0, + torsions: Optional[List[List[int]]] = None, + tsg: Optional[int] = None, + xyz: Optional[dict] = None, + ): + + self.incore_capacity = 1 + self.job_adapter = 'mockter' + self.execution_type = 'incore' + self.command = 'mockter' + self.url = '' + + if species is None: + raise ValueError('Cannot execute Mockter without an ARCSpecies object.') + + _initialize_adapter(obj=self, + is_ts=False, + project=project, + project_directory=project_directory, + job_type=job_type, + args=args, + bath_gas=bath_gas, + checkfile=checkfile, + conformer=conformer, + constraints=constraints, + cpu_cores=cpu_cores, + dihedral_increment=dihedral_increment, + dihedrals=dihedrals, + directed_scan_type=directed_scan_type, + ess_settings=ess_settings, + ess_trsh_methods=ess_trsh_methods, + fine=fine, + initial_time=initial_time, + irc_direction=irc_direction, + job_id=job_id, + job_memory_gb=job_memory_gb, + job_name=job_name, + job_num=job_num, + job_server_name=job_server_name, + job_status=job_status, + level=level, + max_job_time=max_job_time, + run_multi_species=run_multi_species, + reactions=reactions, + rotor_index=rotor_index, + server=server, + server_nodes=server_nodes, + queue=queue, + attempted_queues=attempted_queues, + species=species, + testing=testing, + times_rerun=times_rerun, + torsions=torsions, + tsg=tsg, + xyz=xyz, + ) + + def write_input_file(self) -> None: + """ + Write the input file to execute the job on the server. 
+ """ + input_dict = dict() + input_dict['basis'] = self.level.basis or '' + input_dict['charge'] = self.charge + input_dict['label'] = self.species_label + input_dict['memory'] = self.input_file_memory + input_dict['method'] = self.level.method + input_dict['multiplicity'] = self.multiplicity + input_dict['xyz'] = xyz_to_str(self.xyz) + input_dict['job_type'] = self.job_type + input_dict['memory'] = self.input_file_memory + + input_dict = update_input_dict_with_args(args=self.args, input_dict=input_dict) + save_yaml_file(path=os.path.join(self.local_path, input_filenames[self.job_adapter]), content=input_dict) + + def set_files(self) -> None: + """ + Set files to be uploaded and downloaded. Writes the files if needed. + Modifies the self.files_to_upload and self.files_to_download attributes. + + self.files_to_download is a list of remote paths. + + self.files_to_upload is a list of dictionaries, each with the following keys: + ``'name'``, ``'source'``, ``'make_x'``, ``'local'``, and ``'remote'``. + If ``'source'`` = ``'path'``, then the value in ``'local'`` is treated as a file path. + Else if ``'source'`` = ``'input_files'``, then the value in ``'local'`` will be taken + from the respective entry in inputs.py + If ``'make_x'`` is ``True``, the file will be made executable. + """ + # 1. ** Upload ** + # 1.1. submit file + if self.execution_type != 'incore': + # we need a submit file for single or array jobs (either submitted to local or via SSH) + self.write_submit_script() + self.files_to_upload.append(self.get_file_property_dictionary( + file_name=submit_filenames[servers[self.server]['cluster_soft']])) + # 1.2. input file + if not self.iterate_by: + # if this is not a job array, we need the ESS input file + self.write_input_file() + self.files_to_upload.append(self.get_file_property_dictionary(file_name=input_filenames[self.job_adapter])) + # 1.3. 
HDF5 file + if self.iterate_by and os.path.isfile(os.path.join(self.local_path, 'data.hdf5')): + self.files_to_upload.append(self.get_file_property_dictionary(file_name='data.hdf5')) + # 1.4 job.sh + job_sh_dict = self.set_job_shell_file_to_upload() # Set optional job.sh files if relevant. + if job_sh_dict is not None: + self.files_to_upload.append(job_sh_dict) + # 2. ** Download ** + # 2.1. HDF5 file + if self.iterate_by and os.path.isfile(os.path.join(self.local_path, 'data.hdf5')): + self.files_to_download.append(self.get_file_property_dictionary(file_name='data.hdf5')) + else: + # 2.2. output file + self.files_to_download.append(self.get_file_property_dictionary(file_name=output_filenames[self.job_adapter])) + + def set_additional_file_paths(self) -> None: + """ + Set additional file paths specific for the adapter. + Called from set_file_paths() and extends it. + """ + pass + + def set_input_file_memory(self) -> None: + """ + Set the input_file_memory attribute. + """ + self.input_file_memory = self.job_memory_gb + + def execute_incore(self): + """ + Execute a job incore. + """ + input = read_yaml_file(os.path.join(self.local_path, input_filenames[self.job_adapter])) + xyz = str_to_xyz(input['xyz']) + e_elect = 0.0 if not self.species[0].is_ts else 50 + freqs = [500 + 20 * i for i in range(3 * len(xyz['symbols']) - 6)] + if self.species[0].is_ts: + freqs[0] = -500 + output = {'adapter': 'mockter', + 'xyz': input['xyz'], + 'sp': e_elect, + 'T1': 0.0002, + 'freqs': freqs, + } + save_yaml_file(path=os.path.join(self.local_path, output_filenames[self.job_adapter]), content=output) + + def execute_queue(self): + """ + Execute a job to the server's queue. 
+ """ + self.legacy_queue_execution() + + +register_job_adapter('mockter', MockAdapter) diff --git a/arc/job/adapters/mockter_test.py b/arc/job/adapters/mockter_test.py new file mode 100644 index 0000000000..ce12cdbc45 --- /dev/null +++ b/arc/job/adapters/mockter_test.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 +# encoding: utf-8 + +""" +This module contains unit tests of the arc.job.adapters.mockter module +""" + +import os +import shutil +import unittest + +from arc.common import ARC_PATH, read_yaml_file +from arc.job.adapters.mockter import MockAdapter +from arc.level import Level +from arc.settings.settings import input_filenames, output_filenames +from arc.species import ARCSpecies + + +class TestMockAdapter(unittest.TestCase): + """ + Contains unit tests for the MockAdapter class. + """ + @classmethod + def setUpClass(cls): + """ + A method that is run before all unit tests in this class. + """ + cls.job_1 = MockAdapter(execution_type='incore', + job_type='sp', + level=Level(method='CCMockSD(T)', basis='cc-pVmockZ'), + project='test', + project_directory=os.path.join(ARC_PATH, 'arc', 'testing', 'test_MockAdapter_1'), + species=[ARCSpecies(label='spc1', xyz=['O 0 0 1'])], + testing=True, + ) + cls.job_2 = MockAdapter(job_type='opt', + level=Level(method='CCMockSD(T)', basis='cc-pVmockZ'), + project='test', + project_directory=os.path.join(ARC_PATH, 'arc', 'testing', 'test_MockAdapter_2'), + species=[ARCSpecies(label='spc2', xyz=['O 0 0 1'])], + testing=True, + ) + cls.job_3 = MockAdapter(job_type='freq', + level=Level(method='CCMockSD(T)', basis='cc-pVmockZ'), + project='test', + project_directory=os.path.join(ARC_PATH, 'arc', 'testing', 'test_MockAdapter_3'), + species=[ARCSpecies(label='spc3', xyz=['O 0 0 1\nH 0 0 0\nH 1 0 0'], is_ts=True)], + testing=True, + ) + + def test_set_cpu_and_mem(self): + """Test assigning number of cpu's and memory""" + self.job_1.cpu_cores = 48 + self.job_1.input_file_memory = None + self.job_1.submit_script_memory = 14 + 
self.job_1.set_cpu_and_mem() + self.assertEqual(self.job_1.cpu_cores, 48) + + def test_set_input_file_memory(self): + """Test setting the input_file_memory argument""" + self.job_1.input_file_memory = None + self.job_1.cpu_cores = 48 + self.job_1.set_input_file_memory() + self.assertEqual(self.job_1.input_file_memory, 14) + + def test_write_input_file(self): + """Test writing Gaussian input files""" + self.job_1.cpu_cores = 48 + self.job_1.set_input_file_memory() + self.job_1.write_input_file() + content_1 = read_yaml_file(os.path.join(self.job_1.local_path, input_filenames[self.job_1.job_adapter])) + job_1_expected_input_file = {'basis': 'cc-pvmockz', + 'charge': 0, + 'job_type': 'sp', + 'label': 'spc1', + 'memory': 14.0, + 'method': 'ccmocksd(t)', + 'multiplicity': 3, + 'xyz': 'O 0.00000000 0.00000000 1.00000000'} + self.assertEqual(content_1, job_1_expected_input_file) + + self.job_2.cpu_cores = 48 + self.job_2.set_input_file_memory() + self.job_2.write_input_file() + content_2 = read_yaml_file(os.path.join(self.job_2.local_path, input_filenames[self.job_2.job_adapter])) + job_2_expected_input_file = {'basis': 'cc-pvmockz', + 'charge': 0, + 'job_type': 'opt', + 'label': 'spc2', + 'memory': 14.0, + 'method': 'ccmocksd(t)', + 'multiplicity': 3, + 'xyz': 'O 0.00000000 0.00000000 1.00000000'} + self.assertEqual(content_2, job_2_expected_input_file) + + self.job_3.cpu_cores = 48 + self.job_3.set_input_file_memory() + self.job_3.write_input_file() + content_3 = read_yaml_file(os.path.join(self.job_3.local_path, input_filenames[self.job_3.job_adapter])) + job_3_expected_input_file = {'basis': 'cc-pvmockz', + 'charge': 0, + 'job_type': 'freq', + 'label': 'spc3', + 'memory': 14.0, + 'method': 'ccmocksd(t)', + 'multiplicity': 1, + 'xyz': 'O 0.00000000 0.00000000 1.00000000\n' + 'H 0.00000000 0.00000000 0.00000000\n' + 'H 1.00000000 0.00000000 0.00000000'} + self.assertEqual(content_3, job_3_expected_input_file) + + def test_executing_mockter(self): + """Test executing 
mockter""" + self.job_1.execute() + output = read_yaml_file(os.path.join(self.job_1.local_path, output_filenames[self.job_1.job_adapter])) + self.assertEqual(output['sp'], 0.0) + self.assertEqual(output['T1'], 0.0002) + + self.job_2.execute() + output = read_yaml_file(os.path.join(self.job_2.local_path, output_filenames[self.job_2.job_adapter])) + self.assertEqual(output['xyz'], 'O 0.00000000 0.00000000 1.00000000') + + self.job_3.execute() + output = read_yaml_file(os.path.join(self.job_3.local_path, output_filenames[self.job_3.job_adapter])) + self.assertEqual(output['freqs'], [-500, 520, 540]) + self.assertEqual(output['adapter'], 'mockter') + + @classmethod + def tearDownClass(cls): + """ + A function that is run ONCE after all unit tests in this class. + Delete all project directories created during these unit tests + """ + for folder in ['test_MockAdapter_1', 'test_MockAdapter_2', 'test_MockAdapter_3']: + shutil.rmtree(os.path.join(ARC_PATH, 'arc', 'testing', folder), ignore_errors=True) + + +if __name__ == '__main__': + unittest.main(testRunner=unittest.TextTestRunner(verbosity=2)) diff --git a/arc/job/adapters/molpro_test.py b/arc/job/adapters/molpro_test.py index c73aeaa974..a69b036c62 100644 --- a/arc/job/adapters/molpro_test.py +++ b/arc/job/adapters/molpro_test.py @@ -5,7 +5,6 @@ This module contains unit tests of the arc.job.adapters.molpro module """ -import math import os import shutil import unittest diff --git a/arc/main_test.py b/arc/main_test.py index 6e39d9e227..c40adaa3a1 100644 --- a/arc/main_test.py +++ b/arc/main_test.py @@ -92,6 +92,7 @@ def test_as_dict(self): 'ess_settings': {'cfour': ['local'], 'gaussian': ['local', 'server2'], 'gcn': ['local'], + 'mockter': ['local'], 'molpro': ['local', 'server2'], 'onedmin': ['server1'], 'openbabel': ['local'], diff --git a/arc/parser.py b/arc/parser.py index d5a2a73b62..028d5e9405 100644 --- a/arc/parser.py +++ b/arc/parser.py @@ -231,11 +231,9 @@ def parse_geometry(path: str) -> Optional[Dict[str, 
tuple]]: raise InputError(f'Could not find file {path}') if path.endswith('.yml'): content = read_yaml_file(path) - if isinstance(content, dict): - if 'xyz' in content.keys(): - return content['xyz'] if isinstance(content['xyz'], dict) else str_to_xyz(content['xyz']) - elif 'opt_xyz' in content.keys(): - return content['opt_xyz'] if isinstance(content['opt_xyz'], dict) else str_to_xyz(content['opt_xyz']) + for key in ['xyz', 'opt_xyz']: + if isinstance(content, dict) and key in content.keys(): + return content[key] if isinstance(content[key], dict) else str_to_xyz(content[key]) software = identify_ess(path) xyz_str = '' if software == 'xtb': @@ -296,6 +294,10 @@ def parse_t1(path: str) -> Optional[float]: """ if not os.path.isfile(path): raise InputError(f'Could not find file {path}') + if path.endswith('.yml'): + content = read_yaml_file(path) + if isinstance(content, dict) and 'T1' in content.keys(): + return content['T1'] log = ess_factory(fullpath=path, check_for_errors=False) try: t1 = log.get_T1_diagnostic() @@ -355,6 +357,10 @@ def identify_ess(path: str) -> Optional[str]: Optional[str]: The ESS. 
""" software = None + if path.endswith('.yml'): + content = read_yaml_file(path) + if isinstance(content, dict) and 'adapter' in content.keys(): + return content['adapter'] with open(path, 'r') as f: for _ in range(25): line = f.readline() diff --git a/arc/parser_test.py b/arc/parser_test.py index 7f774d0232..2643cb2e9d 100644 --- a/arc/parser_test.py +++ b/arc/parser_test.py @@ -44,6 +44,7 @@ def test_parse_frequencies(self): ts_xtb_freqs_path = os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'TS_NH2+N2H3_xtb.out') yml_freqs_path = os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'output.yml') vibspectrum_path = os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'vibspectrum') + mock_path = os.path.join(ARC_PATH, 'arc', 'testing', 'mockter.yml') no3_freqs = parser.parse_frequencies(path=no3_path, software='QChem') c2h6_freqs = parser.parse_frequencies(path=c2h6_path, software='QChem') @@ -58,6 +59,7 @@ def test_parse_frequencies(self): ts_xtb_freqs = parser.parse_frequencies(path=ts_xtb_freqs_path) yml_freqs = parser.parse_frequencies(path=yml_freqs_path) vibspectrum_freqs = parser.parse_frequencies(path=vibspectrum_path, software='xTB') + mock_freqs = parser.parse_frequencies(path=mock_path, software='Mockter') np.testing.assert_almost_equal(no3_freqs, np.array([-390.08, -389.96, 822.75, 1113.23, 1115.24, 1195.35], np.float64)) @@ -111,6 +113,8 @@ def test_parse_frequencies(self): np.float64)) np.testing.assert_almost_equal(vibspectrum_freqs, np.array([4225.72], np.float64)) + np.testing.assert_almost_equal(mock_freqs, np.array([-500., 520., 540.], np.float64)) + def test_parse_normal_mode_displacement(self): """Test parsing frequencies and normal mode displacements""" freq_path = os.path.join(ARC_PATH, 'arc', 'testing', 'freq', 'Gaussian_neg_freq.out') @@ -373,6 +377,12 @@ def test_parse_geometry(self): (1.1691669229762556, -2.0726946137332924, -0.4703870247902347))} self.assertTrue(almost_equal_coords(xyz_3, expected_xyz_3)) + path_4 = os.path.join(ARC_PATH, 
'arc', 'testing', 'mockter.yml') + xyz_4 = parser.parse_geometry(path=path_4) + expected_xyz_4 = {'symbols': ('O', 'H', 'H'), 'isotopes': (16, 1, 1), + 'coords': ((0.0, 0.0, 1.0), (0.0, 0.0, 0.0), (1.0, 0.0, 0.0))} + self.assertTrue(almost_equal_coords(xyz_4, expected_xyz_4)) + def test_parse_trajectory(self): """Test parsing trajectories""" path = os.path.join(ARC_PATH, 'arc', 'testing', 'xyz', 'scan_optim.xyz') @@ -434,6 +444,10 @@ def test_parse_t1(self): t1 = parser.parse_t1(path) self.assertEqual(t1, 0.0086766) + path = os.path.join(ARC_PATH, 'arc', 'testing', 'mockter.yml') + t1 = parser.parse_t1(path) + self.assertEqual(t1, 0.0002) + def test_parse_e_elect(self): """Test parsing the electronic energy from a single-point job output file""" path = os.path.join(ARC_PATH, 'arc', 'testing', 'sp', 'mehylamine_CCSD(T).out') @@ -464,10 +478,16 @@ def test_parse_e_elect(self): e_elect = parser.parse_e_elect(path) self.assertAlmostEqual(e_elect, -40692.56663699465) + path = os.path.join(ARC_PATH, 'arc', 'testing', 'mockter.yml') + e_elect = parser.parse_e_elect(path) + self.assertAlmostEqual(e_elect, 50) + def test_identify_ess(self): """Test the identify_ess() function.""" ess = parser.identify_ess(os.path.join(ARC_PATH, 'arc', 'testing', 'sp', 'NCC_xTB.out')) self.assertEqual(ess, 'xtb') + ess = parser.identify_ess(os.path.join(ARC_PATH, 'arc', 'testing', 'mockter.yml')) + self.assertEqual(ess, 'mockter') def test_parse_zpe(self): """Test the parse_zpe() function for parsing zero point energies""" diff --git a/arc/settings/settings.py b/arc/settings/settings.py index 8014bb9cf1..de3da3f596 100644 --- a/arc/settings/settings.py +++ b/arc/settings/settings.py @@ -71,6 +71,7 @@ 'cfour': 'local', 'gaussian': ['local', 'server2'], 'gcn': 'local', + 'mockter': 'local', 'molpro': ['local', 'server2'], 'onedmin': 'server1', 'orca': 'local', @@ -83,7 +84,7 @@ } # Electronic structure software ARC may access (use lowercase): -supported_ess = ['cfour', 'gaussian', 'molpro', 
'orca', 'qchem', 'terachem', 'onedmin', 'xtb', 'torchani', 'openbabel'] +supported_ess = ['cfour', 'gaussian', 'mockter', 'molpro', 'orca', 'qchem', 'terachem', 'onedmin', 'xtb', 'torchani', 'openbabel'] # TS methods to try when appropriate for a reaction (other than user guesses which are always allowed): ts_adapters = ['heuristics', 'AutoTST', 'GCN', 'xtb_gsm'] @@ -107,6 +108,7 @@ levels_ess = { 'cfour': ['casscf'], 'gaussian': ['apfd', 'b3lyp', 'm062x'], + 'mockter': ['mock'], 'molpro': ['ccsd', 'cisd', 'vpz'], 'qchem': ['m06-2x'], 'orca': ['dlpno'], @@ -154,6 +156,7 @@ input_filenames = {'cfour': 'ZMAT', 'gaussian': 'input.gjf', + 'mockter': 'input.yml', 'molpro': 'input.in', 'onedmin': 'input.in', 'orca': 'input.in', @@ -165,6 +168,7 @@ output_filenames = {'cfour': 'output.out', 'gaussian': 'input.log', 'gcn': 'output.yml', + 'mockter': 'output.yml', 'molpro': 'input.out', 'onedmin': 'output.out', 'orca': 'input.log', diff --git a/arc/testing/mockter.yml b/arc/testing/mockter.yml new file mode 100644 index 0000000000..6af337287f --- /dev/null +++ b/arc/testing/mockter.yml @@ -0,0 +1,11 @@ +T1: 0.0002 +adapter: mockter +freqs: +- -500 +- 520 +- 540 +sp: 50 +xyz: |- + O 0.00000000 0.00000000 1.00000000 + H 0.00000000 0.00000000 0.00000000 + H 1.00000000 0.00000000 0.00000000