Skip to content

Commit

Permalink
Remove deprecated cleanup option for KKRimp
Browse files Browse the repository at this point in the history
  • Loading branch information
PhilippRue committed Oct 31, 2023
1 parent 40aefe2 commit 9fb8bab
Show file tree
Hide file tree
Showing 6 changed files with 9 additions and 327 deletions.
9 changes: 1 addition & 8 deletions aiida_kkr/calculations/kkrimp.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
__copyright__ = (u'Copyright (c), 2018, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
__version__ = '0.8.2'
__version__ = '0.9.0'
__contributors__ = (u'Philipp Rüßmann', u'Fabian Bertoldo')

#TODO: implement 'ilayer_center' consistency check
Expand Down Expand Up @@ -171,13 +171,6 @@ def define(cls, spec):
Note: The length of the theta, phi and fix_dir lists have to be equal to the number of atoms in the impurity cluster.
"""
)
spec.input(
'cleanup_outfiles',
valid_type=Bool,
required=False,
default=lambda: Bool(False),
help='Cleanup and compress output (works only in aiida-core<2.0 and breaks caching ability).'
)

# define outputs
spec.output('output_parameters', valid_type=Dict, required=True, help='results of the KKRimp calculation')
Expand Down
20 changes: 1 addition & 19 deletions aiida_kkr/parsers/kkr.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
__copyright__ = (u'Copyright (c), 2017, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
__version__ = '0.7.0'
__version__ = '0.8.0'
__contributors__ = ('Jens Broeder', u'Philipp Rüßmann')


Expand Down Expand Up @@ -229,21 +229,3 @@ def parse(self, debug=False, **kwargs):

if not success:
return self.exit_codes.ERROR_KKR_PARSING_FAILED
else: # cleanup after parsing (only if parsing was successful)
# cleanup only works below aiida-core v2.0
if int(aiida_core_version.split('.')[0]) < 2:
# delete completely parsed output files
self.remove_unnecessary_files()
# then (maybe) tar the output to save space
# TODO needs implementing (see kkrimp parser)

def remove_unnecessary_files(self):
    """
    Delete retrieved files whose content is fully captured in the parsed
    output dictionary of the calculation; keeping them would only
    duplicate that information in the repository.
    """
    # potential and shapefun are completely represented in the parsed output
    for name in (KkrCalculation._POTENTIAL, KkrCalculation._SHAPEFUN):
        if name in self.retrieved.list_object_names():
            self.retrieved.delete_object(name, force=True)
91 changes: 4 additions & 87 deletions aiida_kkr/parsers/kkrimp.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
__copyright__ = (u'Copyright (c), 2018, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
__version__ = '0.5.0'
__version__ = '0.6.0'
__contributors__ = ('Philipp Rüßmann')


Expand All @@ -42,7 +42,7 @@ def __init__(self, calc):

# pylint: disable=protected-access

def parse(self, debug=False, **kwargs):
def parse(self, debug=False, ignore_nan=False, **kwargs):
"""
Parse output data folder, store results in database.
Expand Down Expand Up @@ -113,7 +113,7 @@ def parse(self, debug=False, **kwargs):

# now we can parse the output files
success, msg_list, out_dict = KkrimpParserFunctions().parse_kkrimp_outputfile(
out_dict, named_file_handles, debug=debug
out_dict, named_file_handles, debug=debug, ignore_nan=ignore_nan
)

out_dict['parser_errors'] = msg_list
Expand All @@ -132,22 +132,7 @@ def parse(self, debug=False, **kwargs):
# create output node and link
self.out('output_parameters', Dict(dict=out_dict))

# cleanup after parsing (only if parsing was successful), only works below aiida-core v2.0
if success:
if int(aiida_core_version.split('.')[0]) < 2:
# check if we should do the cleanup or not
cleanup_outfiles = False
if 'cleanup_outfiles' in self.node.inputs:
cleanup_outfiles = self.node.inputs.cleanup_outfiles.value
if cleanup_outfiles:
# reduce size of timing file
self.cleanup_outfiles(files['out_timing'], ['Iteration number', 'time until scf starts'])
# reduce size of out_log file
self.cleanup_outfiles(files['out_log'], ['Iteration Number'])
# delete completely parsed output files and create a tar ball to reduce size
self.remove_unnecessary_files()
self.final_cleanup()
else:
if not success:
return self.exit_codes.ERROR_PARSING_KKRIMPCALC

def _check_file_existance(self, files, keyname, fname, icrit, file_errors):
Expand All @@ -168,71 +153,3 @@ def _check_file_existance(self, files, keyname, fname, icrit, file_errors):
raise ValueError('icrit should be either 1 or 2')
file_errors.append((icrit, crit_level + f" File '{fname}' not found."))
files[keyname] = None

def cleanup_outfiles(self, fileidentifier, keyslist):
    """
    Shrink an output file by removing everything between the first and the
    last line that contains one of the given key strings, replacing the cut
    region with a one-line placeholder.

    :param fileidentifier: name of the file in the retrieved folder
        (``None`` means nothing to do)
    :param keyslist: list of substrings that mark iteration boundaries
    """
    if fileidentifier is None:
        return
    with self.retrieved.open(fileidentifier) as handle:
        lines = handle.readlines()
    # indices of all lines containing any key (one entry per key hit)
    hits = [i for i, line in enumerate(lines) for key in keyslist if key in line]
    # cut only if more than one iteration was found
    if len(hits) > 1:
        shortened = (
            lines[:hits[0]]
            + ['# ... [removed output except for last iteration] ...\n']
            + lines[hits[-1]:]
        )
        with self.retrieved.open(fileidentifier, 'w') as handle:
            handle.writelines(shortened)

def remove_unnecessary_files(self):
    """
    Delete retrieved files that are completely contained in the parsed
    output dictionary; keeping them would just duplicate the information.
    """
    # files whose content is fully present in the parsed output
    redundant = (
        KkrimpCalculation._OUT_ENERGYSP_PER_ATOM,
        KkrimpCalculation._OUT_ENERGYTOT_PER_ATOM,
        KkrimpCalculation._SHAPEFUN,
    )
    present = self.retrieved.list_object_names()
    for name in redundant:
        if name in present:
            self.retrieved.delete_object(name, force=True)

def final_cleanup(self):
    """
    Pack the remaining retrieved files into a gzipped tarball and delete
    the files that were packed, to reduce repository size.
    """
    retrieved = self.retrieved

    # if the tarfile is already there the output was packed before — nothing to do
    if KkrimpCalculation._FILENAME_TAR in retrieved.list_object_names():
        return

    # create a dummy file first, only to learn the full filesystem path
    # the repository uses for this name (tarfile.open needs a real path)
    with retrieved.open(KkrimpCalculation._FILENAME_TAR, 'w') as dummy:
        tarpath = dummy.name

    # add the retrieved content to the tarball, remembering what was packed
    packed = []
    with tarfile.open(tarpath, 'w:gz') as tar:
        for name in retrieved.list_object_names():
            with retrieved.open(name) as handle:
                fullpath = handle.name
                size = os.stat(fullpath).st_size
                skip = (
                    name == KkrimpCalculation._FILENAME_TAR  # ignore the tar file itself
                    or size == 0  # ignore empty files
                    or name[0] == '.'  # ignore files starting with '.' like '.nfs...'
                )
                if not skip:
                    tar.add(fullpath, arcname=os.path.basename(fullpath))
                    packed.append(name)

    # finally remove the files that are now stored inside the tarball
    for name in packed:
        retrieved.delete_object(name, force=True)
30 changes: 1 addition & 29 deletions aiida_kkr/workflows/kkr_imp.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from aiida_kkr.tools import test_and_get_codenode, neworder_potential_wf, update_params_wf
from aiida_kkr.workflows.gf_writeout import kkr_flex_wc
from aiida_kkr.workflows.voro_start import kkr_startpot_wc
from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc, clean_sfd
from aiida_kkr.workflows.kkr_imp_sub import kkr_imp_sub_wc
import numpy as np
from aiida_kkr.tools.save_output_nodes import create_out_dict_node

Expand Down Expand Up @@ -339,7 +339,6 @@ def start(self):
self.ctx.hfield = wf_dict.get('hfield', self._wf_default['hfield'])
self.ctx.init_pos = wf_dict.get('init_pos', self._wf_default['init_pos'])
self.ctx.accuracy_params = wf_dict.get('accuracy_params', self._wf_default['accuracy_params'])
self.ctx.do_final_cleanup = wf_dict.get('do_final_cleanup', self._wf_default['do_final_cleanup'])
# set up new parameter dict to pass to kkrimp subworkflow later
self.ctx.kkrimp_params_dict = Dict({
'nsteps': self.ctx.nsteps,
Expand All @@ -355,15 +354,11 @@ def start(self):
'hfield': self.ctx.hfield,
'init_pos': self.ctx.init_pos,
'accuracy_params': self.ctx.accuracy_params,
'do_final_cleanup': self.ctx.do_final_cleanup
})

# retrieve option for kkrlfex files
self.ctx.retrieve_kkrflex = wf_dict.get('retrieve_kkrflex', self._wf_default['retrieve_kkrflex'])

# list of things that are cleaned if everything ran through
self.ctx.sfd_final_cleanup = []

# report the chosen parameters to the user
self.report(
'INFO: use the following parameter:\n'
Expand Down Expand Up @@ -739,8 +734,6 @@ def construct_startpot(self):

# add starting potential for kkrimp calculation to context
self.ctx.startpot_kkrimp = startpot_kkrimp
# add to list for final cleanup
self.ctx.sfd_final_cleanup.append(startpot_kkrimp)

self.report(
'INFO: created startpotential (pid: {}) for the impurity calculation '
Expand Down Expand Up @@ -856,10 +849,6 @@ def return_results(self):
self.out('converged_potential', self.ctx.kkrimp_scf_sub.outputs.host_imp_pot)
self.out('remote_data_gf', self.ctx.gf_remote)

# cleanup things that are not needed anymore
if self.ctx.do_final_cleanup:
self.final_cleanup()

# print final message before exiting
self.report('INFO: created 3 output nodes for the KKR impurity workflow.')
self.report(
Expand All @@ -872,23 +861,6 @@ def return_results(self):
self.report(self.exit_codes.ERROR_KKRIMP_SUB_WORKFLOW_FAILURE) # pylint: disable=no-member
return self.exit_codes.ERROR_KKRIMP_SUB_WORKFLOW_FAILURE # pylint: disable=no-member

def final_cleanup(self):
    """
    Remove unneeded files to save space.

    Cleans the single-file-data nodes collected during the workflow and, if
    a starting potential was created here, deletes all retrieved files of
    the last voronoi calculation except its default output files.
    """
    # clean single-file-data nodes collected for cleanup during the workflow
    for sfd in self.ctx.sfd_final_cleanup:
        clean_sfd(sfd)
    if self.ctx.create_startpot:
        # find the retrieved folder of the last voronoi calculation
        kkr_startpot = self.ctx.last_voro_calc
        vorocalc = kkr_startpot.outputs.last_voronoi_remote.get_incoming(
            link_label_filter=u'remote_folder'
        ).first().node
        ret = vorocalc.outputs.retrieved
        # delete all except the voronoi default output files
        keep = (VoronoiCalculation._OUTPUT_FILE_NAME, VoronoiCalculation._OUT_POTENTIAL_voronoi)
        for fname in ret.list_object_names():
            if fname not in keep:
                # BUGFIX: the original opened the file here (`with ret.open(fname)`)
                # without ever using the handle — a pointless open right before
                # deleting the same file; the delete alone is sufficient
                ret.delete_object(fname, force=True)

def error_handler(self):
"""Capture errors raised in validate_input"""
if self.ctx.exit_code is not None:
Expand Down
34 changes: 1 addition & 33 deletions aiida_kkr/workflows/kkr_imp_dos.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
__copyright__ = (u'Copyright (c), 2019, Forschungszentrum Jülich GmbH, '
'IAS-1/PGI-1, Germany. All rights reserved.')
__license__ = 'MIT license, see LICENSE.txt file'
__version__ = '0.6.14'
__version__ = '0.7.0'
__contributors__ = (u'Fabian Bertoldo', u'Philipp Rüßmann')

# activate verbose output, for debugging only
Expand Down Expand Up @@ -63,7 +63,6 @@ class kkr_imp_dos_wc(WorkChain):
} # execute KKR with mpi or without

_wf_default = {
'clean_impcalc_retrieved': True, # remove output of KKRimp calculation after successful parsing of DOS files
'jij_run': False, # calculate Jij's energy resolved
'lmdos': False, # calculate also (l,m) or only l-resolved DOS
'retrieve_kkrflex': True, # retrieve kkrflex files to repository or leave on remote computer only
Expand Down Expand Up @@ -254,10 +253,6 @@ def start(self):
if k not in self.ctx.dos_params_dict.keys():
self.ctx.dos_params_dict[k] = v

self.ctx.cleanup_impcalc_output = wf_dict.get(
'clean_impcalc_retrieved', self._wf_default['clean_impcalc_retrieved']
)

# set workflow parameters for the KKR impurity calculation
self.ctx.jij_run = wf_dict.get('jij_run', self._wf_default['jij_run'])

Expand Down Expand Up @@ -493,7 +488,6 @@ def run_imp_dos(self):
'dos_run': True,
'lmdos': self.ctx.lmdos,
'jij_run': self.ctx.jij_run,
'do_final_cleanup': self.ctx.cleanup_impcalc_output
})
kkrimp_params = self.ctx.kkrimp_params_dict
label_imp = 'KKRimp DOS (GF: {}, imp_pot: {}, Zimp: {}, ilayer_cent: {})'.format(
Expand Down Expand Up @@ -602,13 +596,6 @@ def return_results(self):
if self.ctx.lmdos:
self.out('dos_data_lm', dosXyDatas['dos_data_lm'])
self.out('dos_data_interpol_lm', dosXyDatas['dos_data_interpol_lm'])
# maybe cleanup retrieved folder of DOS calculation
if self.ctx.cleanup_impcalc_output:
message = 'INFO: cleanup after storing of DOS data'
print(message)
self.report(message)
pk_impcalc = self.ctx.kkrimp_dos.outputs.workflow_info['pks_all_calcs'][0]
cleanup_kkrimp_retrieved(pk_impcalc)

message = f'INFO: workflow_info node: {outputnode_t.uuid}'
print(message)
Expand Down Expand Up @@ -799,22 +786,3 @@ def parse_impdosfiles(folder, natom, nspin, ef, use_lmdos):
output = {'dos_data': dosnode, 'dos_data_interpol': dosnode2}

return output


def cleanup_kkrimp_retrieved(pk_impcalc):
    """
    Remove output_all.tar.gz from the retrieved folder of the impurity
    calculation identified by ``pk_impcalc``.

    :param pk_impcalc: pk of the KKRimp calculation whose retrieved folder
        should be cleaned
    """
    from aiida.orm import load_node
    from aiida_kkr.calculations import KkrimpCalculation

    # retrieved folder of the impurity calculation
    retrieved = load_node(pk_impcalc).outputs.retrieved

    # delete the tarball if it is present in the retrieved directory
    tarname = KkrimpCalculation._FILENAME_TAR
    if tarname in retrieved.list_object_names():
        retrieved.delete_object(tarname, force=True)
Loading

0 comments on commit 9fb8bab

Please sign in to comment.