diff --git a/src/nomad_measurements/utils.py b/src/nomad_measurements/utils.py
index 2312d25..68c01b9 100644
--- a/src/nomad_measurements/utils.py
+++ b/src/nomad_measurements/utils.py
@@ -25,6 +25,7 @@
 )
 
 import h5py
+import copy
 import numpy as np
 import pint
 from nomad.datamodel.hdf5 import HDF5Reference
@@ -334,18 +335,17 @@ def write_file(self):
         main archive file (e.g. HDF5, NeXus).
         """
         if self.nexus:
-            self._write_nx_file()
-            # try:
-            #     self._write_nx_file()
-            # except Exception as e:
-            #     self.nexus = False
-            #     self.logger.warning(
-            #         f'Encountered "{e}" error while creating nexus file. '
-            #         'Creating h5 file instead.'
-            #     )
-            #     self._write_hdf5_file()
-            # else:
-            #     self._write_hdf5_file()
+            try:
+                self._write_nx_file()
+            except Exception as e:
+                self.nexus = False
+                self.logger.warning(
+                    f'Encountered "{e}" error while creating nexus file. '
+                    'Creating h5 file instead.'
+                )
+                self._write_hdf5_file()
+        else:
+            self._write_hdf5_file()
 
     def _write_nx_file(self):
         """
@@ -364,43 +364,52 @@ def _write_nx_file(self):
         populate_nx_dataset_and_attribute(
             archive=self.archive, attr_dict=attr_dict, dataset_dict=dataset_dict
         )
-        for nx_path, dset in list(self._hdf5_datasets.items()) + list(
+        for nx_path, dset_ori in list(self._hdf5_datasets.items()) + list(
             dataset_dict.items()
         ):
+            dset = copy.deepcopy(dset_ori)
+            if dset.internal_reference:
+                # convert to the nexus type link
+                dset.data = {'link': self._remove_nexus_annotations(dset.data)}
+
             try:
                 template[nx_path] = dset.data
             except KeyError:
                 template['optional'][nx_path] = dset.data
 
-            hdf_path = self._remove_nexus_annotations(nx_path)
+            hdf5_path = self._remove_nexus_annotations(nx_path)
             self._set_hdf5_reference(
-                self.archive,
-                dset.archive_path,
-                f'/uploads/{self.archive.m_context.upload_id}/raw'
-                f'/{self.data_file}#{hdf_path}',
-            )
+                self.archive,
+                dset.archive_path,
+                f'/uploads/{self.archive.m_context.upload_id}/raw'
+                f'/{self.data_file}#{hdf5_path}',
+            )
         for nx_path, attr_d in list(self._hdf5_attributes.items()) + list(
             attr_dict.items()
         ):
+            # hdf5_path = self._remove_nexus_annotations(nx_path)
             for attr_k, attr_v in attr_d.items():
-                if attr_k:
+                if attr_v != 'dimensionless' and attr_v:
                     try:
                         template[f'{nx_path}/@{attr_k}'] = attr_v
                     except KeyError:
                         template['optional'][f'{nx_path}/@{attr_k}'] = attr_v
 
         try:
-            nx_full_path = os.path.join(
+            nx_full_file_path = os.path.join(
                 self.archive.m_context.raw_path(), self.data_file
             )
+            if self.archive.m_context.raw_path_exists(self.data_file):
+                os.remove(nx_full_file_path)
+
             pynxtools_writer(
-                data=template, nxdl_f_path=nxdl_f_path, output_path=nx_full_path
+                data=template, nxdl_f_path=nxdl_f_path, output_path=nx_full_file_path
             ).write()
-            # entry_list = Entry.objects(
-            #     upload_id=self.archive.m_context.upload_id, mainfile=self.data_file
-            # )
-            # if not entry_list:
-            #     self.archive.m_context.process_updated_raw_file(self.data_file)
+            entry_list = Entry.objects(
+                upload_id=self.archive.m_context.upload_id, mainfile=self.data_file
+            )
+            if not entry_list:
+                self.archive.m_context.process_updated_raw_file(self.data_file)
 
         except Exception as exc:
             if os.path.exists(self.data_file):
@@ -408,13 +417,6 @@ def _write_nx_file(self):
             self.data_file = self.data_file.rsplit(os.pathsep, 1)[-1]
             raise Exception('NeXus file can not be generated.') from exc
 
-        # raise NotImplementedError('Method `write_nx_file` is not implemented.')
-        # TODO add archive data to `hdf5_data_dict` before creating the nexus file. Use
-        # `populate_hdf5_data_dict` method for each quantity that is needed in .nxs
-        # file. Create a NeXus file with the data in `hdf5_data_dict`.
-        # One issue here is as we populate the `hdf5_data_dict` with the archive data,
-        # we will always have to over write the nexus file
-
     def _write_hdf5_file(self):  # noqa: PLR0912
         """
         Method for creating an HDF5 file.
diff --git a/src/nomad_measurements/xrd/nx.py b/src/nomad_measurements/xrd/nx.py
index 8ac549a..5459f0c 100644
--- a/src/nomad_measurements/xrd/nx.py
+++ b/src/nomad_measurements/xrd/nx.py
@@ -50,10 +50,12 @@
 
 CONCEPT_MAP = {
+    '/ENTRY[entry]/@default': 'experiment_result',
+    '/ENTRY[entry]/definition': 'NXxrd_pan',
     '/ENTRY[entry]/method': 'archive.data.method',
     '/ENTRY[entry]/measurement_type': 'archive.data.diffraction_method_name',
+    '/ENTRY[entry]/experiment_result/@signal': 'intensity',
     '/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/scan_axis': 'archive.data.results[0].scan_axis',
-    '/ENTRY[entry]/experiment_config/count_time': 'archive.data.results[0].integration_time',
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_material': 'archive.data.xrd_settings.source.xray_tube_material',
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_current': 'archive.data.xrd_settings.source.xray_tube_current',
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_voltage': 'archive.data.xrd_settings.source.xray_tube_voltage',
@@ -61,9 +63,6 @@
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two': 'archive.data.xrd_settings.source.kalpha_two',
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/ratio_k_alphatwo_k_alphaone': 'archive.data.xrd_settings.source.ratio_kalphatwo_kalphaone',
     '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/kbeta': 'archive.data.xrd_settings.source.kbeta',
-    '/ENTRY[entry]/@default': 'experiment_result',
-    '/ENTRY[entry]/experiment_result/@signal': 'intensity',
-    '/ENTRY[entry]/definition': 'NXxrd_pan',
 }
 
 
@@ -116,7 +115,7 @@ def populate_nx_dataset_and_attribute(
         )
 
         if isinstance(data, pint.Quantity):
-            if str(data.units) != 'unitless' and str(data.units):
+            if str(data.units) != 'dimensionless' and str(data.units):
                 attr_tmp = {nx_path: dict(units=str(data.units))}
                 attr_dict.update(attr_tmp)
                 # attr_dict[nx_path].update({'units': str(data.units)})
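
Note on the `'unitless'` -> `'dimensionless'` change in `populate_nx_dataset_and_attribute`: pint reports a quantity that carries no unit as `dimensionless`, so that is the string the filter has to compare against before writing a `units` attribute. A minimal sketch with plain pint (independent of the NOMAD classes in this diff) showing the strings the check actually sees:

```python
import pint

ureg = pint.UnitRegistry()

angle = ureg.Quantity(10.0, 'degree')  # carries a real unit
ratio = ureg.Quantity(0.5)             # no unit supplied

# str(quantity.units) is what the condition above compares against
print(str(angle.units))  # 'degree'        -> kept, written as a 'units' attribute
print(str(ratio.units))  # 'dimensionless' -> skipped, no 'units' attribute written
```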