class DatasetModel(BaseModel):
    """
    Pydantic model describing one dataset that is (lazily) written to the
    HDF5 file by `HDF5Handler`.
    """

    data: Any = Field(description='The data to be stored in the HDF5 file.')
    archive_path: Optional[str] = Field(
        None, description='The path of the quantity in the NOMAD archive.'
    )
    internal_reference: Optional[bool] = Field(
        False,
        description='If True, an internal reference is set to an existing HDF5 '
        'dataset.',
    )


class HDF5Handler:
    """
    Class for handling the creation of auxiliary files to store big data arrays
    outside the main archive file (e.g. HDF5, NeXus).

    Datasets and attributes are collected in memory via `add_dataset` and
    `add_attribute` and only written out when `write_file` is called.
    """

    def __init__(
        self,
        filename: str,
        archive: 'EntryArchive',
        logger: 'BoundLogger',
        valid_dataset_paths: list = None,
        nexus: bool = False,
    ):
        """
        Initialize the handler.

        Args:
            filename (str): The name of the auxiliary file.
            archive (EntryArchive): The NOMAD archive.
            logger (BoundLogger): A structlog logger.
            valid_dataset_paths (list): The list of valid dataset paths.
            nexus (bool): If True, the file is created as a NeXus file.

        Raises:
            ValueError: If `filename` does not end with '.nxs' or '.h5'.
        """
        if not filename.endswith(('.nxs', '.h5')):
            raise ValueError('Only .h5 or .nxs files are supported.')

        self.data_file = filename
        self.archive = archive
        self.logger = logger
        # copy the caller's list so later mutations on either side don't leak
        self.valid_dataset_paths = (
            list(valid_dataset_paths) if valid_dataset_paths else []
        )
        self.nexus = nexus

        self._hdf5_datasets = collections.OrderedDict()
        self._hdf5_attributes = collections.OrderedDict()

    def add_dataset(
        self,
        path: str,
        params: dict,
        validate_path: bool = True,
    ):
        """
        Add a dataset to the HDF5 file. The dataset is written lazily to the file
        when the `write_file` method is called. The `path` is validated against
        `valid_dataset_paths` (if provided) before adding the data.

        `params` should be a dictionary containing `data`. Optionally,
        it can also contain `archive_path` and `internal_reference`:
        {
            'data': Any,
            'archive_path': str,
            'internal_reference': bool,
        }

        Args:
            path (str): The dataset path to be used in the HDF5 file.
            params (dict): The dataset parameters.
            validate_path (bool): If True, the dataset path is validated.
        """
        if not params:
            self.logger.warning('Dataset `params` must be provided.')
            return

        dataset = DatasetModel(
            **params,
        )
        if (
            validate_path
            and self.valid_dataset_paths
            and path not in self.valid_dataset_paths
        ):
            self.logger.warning(f'Invalid dataset path "{path}".')
            return

        # split a pint.Quantity into a raw magnitude (the dataset) plus a
        # `units` attribute, so the HDF5 file stores plain arrays
        if isinstance(dataset.data, pint.Quantity):
            self.add_attribute(
                path=path,
                params=dict(
                    units=str(dataset.data.units),
                ),
            )
            dataset.data = dataset.data.magnitude

        self._hdf5_datasets[path] = dataset

    def add_attribute(
        self,
        path: str,
        params: dict,
    ):
        """
        Add an attribute to the dataset or group at the given path. The attribute
        is written lazily to the file when the `write_file` method is called.

        Args:
            path (str): The dataset or group path in the HDF5 file.
            params (dict): The attributes to be added.
        """
        if not params:
            self.logger.warning('Attribute `params` must be provided.')
            return
        self._hdf5_attributes[path] = params

    def read_dataset(self, path: str):
        """
        Return the dataset at the given path. If the dataset has `units` as an
        attribute, tries to return a `pint.Quantity`.
        If the dataset is available in `self._hdf5_datasets` (pending, not yet
        written), it is returned directly without opening the file.

        Args:
            path (str): Reference of the form `<file path>#<dataset path>`.
        """
        if path is None:
            return
        file_path, dataset_path = path.split('#')

        # serve pending (not yet written) datasets from memory
        value = None
        if dataset_path in self._hdf5_datasets:
            value = self._hdf5_datasets[dataset_path].data
            if dataset_path in self._hdf5_attributes:
                units = self._hdf5_attributes[dataset_path].get('units')
                if units:
                    # no in-place `*=`: numpy cannot store the resulting
                    # pint.Quantity back into the original ndarray
                    value = value * ureg(units)
            return value

        file_name = file_path.rsplit('/raw/', 1)[1]
        with h5py.File(self.archive.m_context.raw_file(file_name, 'rb')) as h5:
            if dataset_path not in h5:
                self.logger.warning(f'Dataset "{dataset_path}" not found.')
            else:
                value = h5[dataset_path][...]
                try:
                    units = h5[dataset_path].attrs['units']
                    value = value * ureg(units)
                except KeyError:
                    pass
        return value

    def write_file(self):
        """
        Method for creating an auxiliary file to store big data arrays outside
        the main archive file (e.g. HDF5, NeXus). Falls back to a plain HDF5
        file when NeXus creation fails.
        """
        if self.nexus:
            try:
                self._write_nx_file()
            except Exception as e:
                self.nexus = False
                self.logger.warning(
                    f'Encountered "{e}" error while creating nexus file. '
                    'Creating h5 file instead.'
                )
                self._write_hdf5_file()
        else:
            self._write_hdf5_file()

    def _write_nx_file(self):
        """
        Method for creating a NeXus file. Additional data from the archive is
        added to the `hdf5_data_dict` before creating the nexus file. This
        provides a NeXus view of the data in addition to storing array data.
        """
        if self.data_file.endswith('.h5'):
            self.data_file = self.data_file.replace('.h5', '.nxs')
        raise NotImplementedError('Method `write_nx_file` is not implemented.')
        # TODO add archive data to `hdf5_data_dict` before creating the nexus
        # file. Use `populate_hdf5_data_dict` method for each quantity that is
        # needed in the .nxs file. Create a NeXus file with the data in
        # `hdf5_data_dict`. One issue here is as we populate the
        # `hdf5_data_dict` with the archive data, we will always have to
        # overwrite the nexus file.

    def _write_hdf5_file(self):  # noqa: PLR0912
        """
        Method for creating an HDF5 file from the collected datasets and
        attributes. Resets the pending collections afterwards.
        """
        if self.data_file.endswith('.nxs'):
            self.data_file = self.data_file.replace('.nxs', '.h5')
        if not self._hdf5_datasets and not self._hdf5_attributes:
            return
        # remove the nexus annotations from the dataset paths if any
        tmp_dict = {}
        for key, value in self._hdf5_datasets.items():
            new_key = self._remove_nexus_annotations(key)
            tmp_dict[new_key] = value
        self._hdf5_datasets = tmp_dict
        tmp_dict = {}
        for key, value in self._hdf5_attributes.items():
            tmp_dict[self._remove_nexus_annotations(key)] = value
        self._hdf5_attributes = tmp_dict

        # create the HDF5 file
        mode = 'r+b' if self.archive.m_context.raw_path_exists(self.data_file) else 'wb'
        with h5py.File(
            self.archive.m_context.raw_file(self.data_file, mode), 'a'
        ) as h5:
            for key, value in self._hdf5_datasets.items():
                if value.data is None:
                    self.logger.warning(f'No data found for "{key}". Skipping.')
                    continue
                elif value.internal_reference:
                    # resolve the internal reference
                    try:
                        data = h5[self._remove_nexus_annotations(value.data)]
                    except KeyError:
                        self.logger.warning(
                            f'Internal reference "{value.data}" not found. Skipping.'
                        )
                        continue
                else:
                    data = value.data

                group_name, dataset_name = key.rsplit('/', 1)
                group = h5.require_group(group_name)

                if key in h5:
                    # overwrite in place; assumes the shape is unchanged —
                    # TODO confirm callers never resize an existing dataset
                    group[dataset_name][...] = data
                else:
                    group.create_dataset(
                        name=dataset_name,
                        data=data,
                    )
                self._set_hdf5_reference(
                    self.archive,
                    value.archive_path,
                    f'/uploads/{self.archive.m_context.upload_id}/raw'
                    f'/{self.data_file}#{key}',
                )
            for key, value in self._hdf5_attributes.items():
                if key in h5:
                    h5[key].attrs.update(value)
                else:
                    self.logger.warning(f'Path "{key}" not found to add attribute.')

        # reset hdf5 datasets and attributes
        self._hdf5_datasets = collections.OrderedDict()
        self._hdf5_attributes = collections.OrderedDict()

    @staticmethod
    def _remove_nexus_annotations(path: str) -> str:
        """
        Remove the nexus related annotations from the dataset path.
        For e.g.,
        '/ENTRY[entry]/experiment_result/intensity' ->
        '/entry/experiment_result/intensity'

        Args:
            path (str): The dataset path with nexus annotations.

        Returns:
            str: The dataset path without nexus annotations.
        """
        if not path:
            return path

        pattern = r'.*\[.*\]'
        new_path = ''
        for part in path.split('/')[1:]:
            if re.match(pattern, part):
                # 'CLASS[name]' -> 'class'
                new_path += '/' + part.split('[')[0].strip().lower()
            else:
                new_path += '/' + part
        new_path = new_path.replace('.nxs', '.h5')
        return new_path

    @staticmethod
    def _set_hdf5_reference(
        section: 'ArchiveSection' = None, path: str = None, ref: str = None
    ):
        """
        Method for setting a HDF5Reference quantity in a section. It can handle
        nested quantities and repeatable sections, provided that the quantity
        itself is of type `HDF5Reference`.
        For example, one can set the reference for a quantity path like
        `data.results[0].intensity`.

        Args:
            section (Section): The NOMAD section containing the quantity.
            path (str): The path to the quantity.
            ref (str): The reference to the HDF5 dataset.
        """
        # TODO handle the case when section in the path is not initialized

        if not section or not path or not ref:
            return
        attr = section
        path = path.split('.')
        quantity_name = path.pop()

        for subpath in path:
            if re.match(r'.*\[.*\]', subpath):
                # repeatable subsection: 'name[i]' -> m_get('name', index=i)
                index = int(subpath.split('[')[1].split(']')[0])
                attr = attr.m_get(subpath.split('[')[0], index=index)
            else:
                attr = attr.m_get(subpath)

        if isinstance(
            attr.m_get_quantity_definition(quantity_name).type, HDF5Reference
        ):
            attr.m_set(quantity_name, ref)
- """ - expected_parts = 2 - if isinstance(attr_chain, str): - parts = attr_chain.split('.', 1) - - if len(parts) == expected_parts: - child_nm, rest_part = parts - if '[' in child_nm: - child_nm, index = child_nm.split('[') - index = int(index[:-1]) - child_obj = getattr(parent_obj, child_nm)[index] - else: - child_obj = getattr(parent_obj, child_nm) - return walk_through_object(child_obj, rest_part, default=default) - else: - return getattr(parent_obj, attr_chain, default) - - -def connect_concepts(template, archive: 'EntryArchive', scan_type: str): # noqa: PLR0912 - """ - Connect the concepts between `ELNXrayDiffraction` and `NXxrd_pan` schema. - - Args: - template (Template): The pynxtools template, a inherited class from python dict. - archive (EntryArchive): Nomad archive contains secttions, subsections and - quantities. - scan_type (str): Name of the scan type such as line and RSM. - """ - - # General concepts - # ruff: noqa: E501 - concept_map = { - '/ENTRY[entry]/method': 'archive.data.method', - '/ENTRY[entry]/measurement_type': 'archive.data.diffraction_method_name', - '/ENTRY[entry]/experiment_result/intensity': 'archive.data.results[0].intensity.magnitude', - '/ENTRY[entry]/experiment_result/two_theta': 'archive.data.results[0].two_theta.magnitude', - '/ENTRY[entry]/experiment_result/two_theta/@units': 'archive.data.results[0].two_theta.units', - '/ENTRY[entry]/experiment_result/omega': 'archive.data.results[0].omega.magnitude', - '/ENTRY[entry]/experiment_result/omega/@units': 'archive.data.results[0].omega.units', - '/ENTRY[entry]/experiment_result/chi': 'archive.data.results[0].chi.magnitude', - '/ENTRY[entry]/experiment_result/chi/@units': 'archive.data.results[0].chi.units', - '/ENTRY[entry]/experiment_result/phi': 'archive.data.results[0].phi.magnitude', - '/ENTRY[entry]/experiment_result/phi/@units': 'archive.data.results[0].phi.units', - '/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/scan_axis': 'archive.data.results[0].scan_axis', - 
'/ENTRY[entry]/experiment_config/count_time': 'archive.data.results[0].count_time.magnitude', - 'line': '', # For future implementation - 'rsm': { - '/ENTRY[entry]/experiment_result/q_parallel': 'archive.data.results[0].q_parallel', - '/ENTRY[entry]/experiment_result/q_parallel/@units': 'archive.data.results[0].q_parallel.units', - '/ENTRY[entry]/experiment_result/q_perpendicular': 'archive.data.results[0].q_perpendicular.magnitude', - '/ENTRY[entry]/experiment_result/q_perpendicular/@units': 'archive.data.results[0].q_perpendicular.units', - '/ENTRY[entry]/experiment_result/q_norm': 'archive.data.results[0].q_norm.magnitude', - '/ENTRY[entry]/experiment_result/q_norm/@units': 'archive.data.results[0].q_norm.units', - }, - # Source - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_material': 'archive.data.xrd_settings.source.xray_tube_material', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_current': 'archive.data.xrd_settings.source.xray_tube_current.magnitude', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_current/@units': 'archive.data.xrd_settings.source.xray_tube_current.units', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_voltage': 'archive.data.xrd_settings.source.xray_tube_voltage.magnitude', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_voltage/@units': 'archive.data.xrd_settings.source.xray_tube_voltage.units', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_one': 'archive.data.xrd_settings.source.kalpha_one.magnitude', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_one/@units': 'archive.data.xrd_settings.source.kalpha_one.units', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two': 'archive.data.xrd_settings.source.kalpha_two.magnitude', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two/@units': 'archive.data.xrd_settings.source.kalpha_two.units', - 
'/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/ratio_k_alphatwo_k_alphaone': 'archive.data.xrd_settings.source.ratio_kalphatwo_kalphaone', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/kbeta': 'archive.data.xrd_settings.source.kbeta.magnitude', - '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/kbeta/@units': 'archive.data.xrd_settings.source.kbeta.units', - } - - for key, archive_concept in concept_map.items(): - if isinstance(archive_concept, dict): - if key == scan_type: - for sub_key, sub_archive_concept in archive_concept.items(): - _, arch_attr = sub_archive_concept.split('.', 1) - value = None - try: - value = walk_through_object(archive, arch_attr) - except (AttributeError, IndexError, KeyError, ValueError): - pass - finally: - if value is not None: - template[sub_key] = ( - str(value) if sub_key.endswith('units') else value - ) - else: - continue - elif archive_concept: - _, arch_attr = archive_concept.split('.', 1) - value = None - try: - value = walk_through_object(archive, arch_attr) - # Use multiple excepts to avoid catching all exceptions - except (AttributeError, IndexError, KeyError, ValueError): - pass - finally: - if value is not None: - template[key] = str(value) if key.endswith('units') else value - - template['/ENTRY[entry]/definition'] = 'NXxrd_pan' - - # Links to the data and concepts - template['/ENTRY[entry]/@default'] = 'experiment_result' - template['/ENTRY[entry]/experiment_result/@signal'] = 'intensity' - template['/ENTRY[entry]/experiment_result/@axes'] = 'two_theta' - template['/ENTRY[entry]/q_data/q'] = { - 'link': '/ENTRY[entry]/experiment_result/q_norm' - } - template['/ENTRY[entry]/q_data/intensity'] = { - 'link': '/ENTRY[entry]/experiment_result/intensity' - } - template['/ENTRY[entry]/q_data/q_parallel'] = { - 'link': '/ENTRY[entry]/experiment_result/q_parallel' - } - template['/ENTRY[entry]/q_data/q_perpendicular'] = { - 'link': '/ENTRY[entry]/experiment_result/q_perpendicular' - } - - -def 
write_nx_section_and_create_file( - archive: 'EntryArchive', logger: 'BoundLogger', scan_type: str = 'line' -): - """ - Uses the archive to generate the NeXus section and .nxs file. - - Args: - archive (EntryArchive): The archive containing the section. - logger (BoundLogger): A structlog logger. - generate_nexus_file (boolean): If True, the function will generate a .nxs file. - nxs_as_entry (boolean): If True, the function will generate a .nxs file - as a nomad entry. - """ - nxdl_root, _ = dataconverter.helpers.get_nxdl_root_and_path('NXxrd_pan') - template = dataconverter.template.Template() - dataconverter.helpers.generate_template_from_nxdl(nxdl_root, template) - connect_concepts(template, archive, scan_type=scan_type) - archive_name = archive.metadata.mainfile.split('.')[0] - nexus_output = f'{archive_name}.nxs' - - populate_nexus_subsection( - template=template, - app_def='NXxrd_pan', - archive=archive, - logger=logger, - output_file_path=nexus_output, - ) +NEXUS_DATASET_PATHS = [ + '/ENTRY[entry]/experiment_result/intensity', + '/ENTRY[entry]/experiment_result/two_theta', + '/ENTRY[entry]/experiment_result/omega', + '/ENTRY[entry]/experiment_result/chi', + '/ENTRY[entry]/experiment_result/phi', + '/ENTRY[entry]/experiment_config/count_time', + '/ENTRY[entry]/experiment_result/q_norm', + '/ENTRY[entry]/experiment_result/q_parallel', + '/ENTRY[entry]/experiment_result/q_perpendicular', + '/ENTRY[entry]/method', + '/ENTRY[entry]/measurement_type', + '/ENTRY[entry]/INSTRUMENT[instrument]/DETECTOR[detector]/scan_axis', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_material', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_current', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/xray_tube_voltage', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_one', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/k_alpha_two', + '/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/ratio_k_alphatwo_k_alphaone', + 
'/ENTRY[entry]/INSTRUMENT[instrument]/SOURCE[source]/kbeta', +] diff --git a/src/nomad_measurements/xrd/schema.py b/src/nomad_measurements/xrd/schema.py index fcb91a27..37ccc65e 100644 --- a/src/nomad_measurements/xrd/schema.py +++ b/src/nomad_measurements/xrd/schema.py @@ -22,19 +22,25 @@ ) import numpy as np +import pint import plotly.express as px from fairmat_readers_xrd import ( read_bruker_brml, read_panalytical_xrdml, read_rigaku_rasx, ) +from nomad.config import config from nomad.datamodel.data import ( ArchiveSection, EntryData, ) +from nomad.datamodel.hdf5 import ( + HDF5Reference, +) from nomad.datamodel.metainfo.annotations import ( ELNAnnotation, ELNComponentEnum, + H5WebAnnotation, ) from nomad.datamodel.metainfo.basesections import ( CompositeSystemReference, @@ -42,10 +48,7 @@ MeasurementResult, ReadableIdentifiers, ) -from nomad.datamodel.metainfo.plot import ( - PlotlyFigure, - PlotSection, -) +from nomad.datamodel.metainfo.plot import PlotlyFigure from nomad.datamodel.results import ( DiffractionPattern, MeasurementMethod, @@ -67,11 +70,14 @@ from nomad_measurements.general import ( NOMADMeasurementsCategory, ) -from nomad_measurements.utils import get_bounding_range_2d, merge_sections -from nomad_measurements.xrd.nx import write_nx_section_and_create_file +from nomad_measurements.utils import ( + HDF5Handler, + get_bounding_range_2d, + merge_sections, +) +from nomad_measurements.xrd.nx import NEXUS_DATASET_PATHS if TYPE_CHECKING: - import pint from nomad.datamodel.datamodel import ( EntryArchive, ) @@ -80,18 +86,16 @@ ) -from nomad.config import config - configuration = config.get_plugin_entry_point('nomad_measurements.xrd:schema') m_package = SchemaPackage(aliases=['nomad_measurements.xrd.parser.parser']) def calculate_two_theta_or_q( - wavelength: 'pint.Quantity', - q: 'pint.Quantity' = None, - two_theta: 'pint.Quantity' = None, -) -> tuple['pint.Quantity', 'pint.Quantity']: + wavelength: pint.Quantity, + q: pint.Quantity = None, + two_theta: 
class XRDResultPlotIntensity(ArchiveSection):
    """
    Auxiliary section providing an H5Web-compatible plot of intensity over the
    scanned angle(s). Quantities are `HDF5Reference`s into the handler's file.
    """

    m_def = Section(
        a_h5web=H5WebAnnotation(
            axes=['two_theta', 'omega', 'phi', 'chi'], signal='intensity'
        )
    )
    intensity = Quantity(
        type=HDF5Reference,
        description='The count at each 2-theta value, dimensionless',
    )
    two_theta = Quantity(
        type=HDF5Reference,
        description='The 2-theta range of the diffractogram',
    )
    omega = Quantity(
        type=HDF5Reference,
        description='The omega range of the diffractogram',
    )

    def normalize(self, archive, logger):
        """
        Add internal references for the plot datasets inside the HDF5 file and
        annotate them as an NXdata group for H5Web rendering.

        Args:
            archive (EntryArchive): The NOMAD archive.
            logger (BoundLogger): A structlog logger.
        """
        super().normalize(archive, logger)
        prefix = '/ENTRY[entry]/experiment_result'
        # grandparent is expected to carry the handler
        # (results[*] -> measurement); TODO confirm for all call sites
        hdf5_handler = getattr(
            getattr(self.m_parent, 'm_parent', None), 'hdf5_handler', None
        )
        # explicit isinstance check instead of `assert`: asserts are stripped
        # under `python -O`, which would silently remove this guard
        if not isinstance(hdf5_handler, HDF5Handler):
            return

        if self.intensity is None or self.two_theta is None:
            return

        hdf5_handler.add_dataset(
            path=f'{prefix}/plot_intensity/two_theta',
            params=dict(
                data=f'{prefix}/two_theta',
                archive_path='data.results[0].plot_intensity.two_theta',
                internal_reference=True,
            ),
            validate_path=False,
        )
        hdf5_handler.add_dataset(
            path=f'{prefix}/plot_intensity/intensity',
            params=dict(
                data=f'{prefix}/intensity',
                archive_path='data.results[0].plot_intensity.intensity',
                internal_reference=True,
            ),
            validate_path=False,
        )
        hdf5_handler.add_attribute(
            path=f'{prefix}/plot_intensity',
            params=dict(
                axes='two_theta',
                signal='intensity',
                NX_class='NXdata',
            ),
        )
        # if a variable axis was scanned, prefer a 2D (axis, two_theta) plot;
        # only the first populated axis is used
        for var_axis in ['omega', 'phi', 'chi']:
            if self.get(var_axis) is not None:
                hdf5_handler.add_dataset(
                    path=f'{prefix}/plot_intensity/{var_axis}',
                    params=dict(
                        data=f'{prefix}/{var_axis}',
                        archive_path=f'data.results[0].plot_intensity.{var_axis}',
                        internal_reference=True,
                    ),
                    validate_path=False,
                )
                hdf5_handler.add_attribute(
                    path=f'{prefix}/plot_intensity',
                    params=dict(
                        axes=[var_axis, 'two_theta'],
                        signal='intensity',
                        NX_class='NXdata',
                    ),
                )
                break

        hdf5_handler.write_file()


class XRDResultPlotIntensityScatteringVector(ArchiveSection):
    """
    Auxiliary section providing an H5Web-compatible plot of intensity over the
    scattering vector(s): 1D over `q_norm`, or 2D over a regularized
    (`q_parallel`, `q_perpendicular`) grid for RSM scans.
    """

    m_def = Section(
        a_h5web=H5WebAnnotation(
            axes=['q_parallel', 'q_perpendicular', 'q_norm'], signal='intensity'
        )
    )
    intensity = Quantity(
        type=HDF5Reference,
        description="""
        The count at each q value. In case of RSM, it contains interpolated
        values of `intensity` at a regularized grid of `q` vectors.
        """,
    )
    q_norm = Quantity(
        type=HDF5Reference,
        description='The q range of the diffractogram',
    )
    q_parallel = Quantity(
        type=HDF5Reference,
        description='The regularized grid of `q_parallel` range for plotting.',
    )
    q_perpendicular = Quantity(
        type=HDF5Reference,
        description='The regularized grid of `q_perpendicular` range for plotting.',
    )

    def normalize(self, archive, logger):
        """
        Add plot datasets for the scattering-vector view. For 1D scans the
        existing `q_norm`/`intensity` are internally referenced; for RSM scans
        the irregular q grid is interpolated onto a regular grid first.

        Args:
            archive (EntryArchive): The NOMAD archive.
            logger (BoundLogger): A structlog logger.
        """
        super().normalize(archive, logger)
        prefix = '/ENTRY[entry]/experiment_result'
        # grandparent is expected to carry the handler
        # (results[*] -> measurement); TODO confirm for all call sites
        hdf5_handler = getattr(
            getattr(self.m_parent, 'm_parent', None), 'hdf5_handler', None
        )
        # explicit isinstance check instead of `assert`: asserts are stripped
        # under `python -O`, which would silently remove this guard
        if not isinstance(hdf5_handler, HDF5Handler):
            return

        if self.intensity is None:
            return

        if self.q_norm is not None:
            hdf5_handler.add_dataset(
                path=f'{prefix}/plot_intensity_scattering_vector/intensity',
                params=dict(
                    data=f'{prefix}/intensity',
                    archive_path='data.results[0].plot_intensity_scattering_vector.intensity',
                    internal_reference=True,
                ),
                validate_path=False,
            )
            hdf5_handler.add_dataset(
                path=f'{prefix}/plot_intensity_scattering_vector/q_norm',
                params=dict(
                    data=f'{prefix}/q_norm',
                    archive_path='data.results[0].plot_intensity_scattering_vector.q_norm',
                    internal_reference=True,
                ),
                validate_path=False,
            )
            hdf5_handler.add_attribute(
                path=f'{prefix}/plot_intensity_scattering_vector',
                params=dict(
                    axes='q_norm',
                    signal='intensity',
                    NX_class='NXdata',
                ),
            )
        elif self.q_parallel is not None and self.q_perpendicular is not None:
            intensity = hdf5_handler.read_dataset(self.intensity)
            q_parallel = hdf5_handler.read_dataset(self.q_parallel)
            q_perpendicular = hdf5_handler.read_dataset(self.q_perpendicular)
            # q_vectors lead to irregular grid
            # generate a regular grid using interpolation
            x = q_parallel.to('1/angstrom').magnitude.flatten()
            y = q_perpendicular.to('1/angstrom').magnitude.flatten()
            x_regular = np.linspace(x.min(), x.max(), intensity.shape[0])
            y_regular = np.linspace(y.min(), y.max(), intensity.shape[1])
            x_grid, y_grid = np.meshgrid(x_regular, y_regular)
            z_interpolated = griddata(
                points=(x, y),
                values=intensity.flatten(),
                xi=(x_grid, y_grid),
                method='linear',
                fill_value=intensity.min(),
            )
            hdf5_handler.add_dataset(
                path=f'{prefix}/plot_intensity_scattering_vector/q_parallel',
                params=dict(
                    data=x_regular,
                    archive_path='data.results[0].plot_intensity_scattering_vector.q_parallel',
                ),
                validate_path=False,
            )
            hdf5_handler.add_dataset(
                path=f'{prefix}/plot_intensity_scattering_vector/q_perpendicular',
                params=dict(
                    data=y_regular,
                    archive_path='data.results[0].plot_intensity_scattering_vector.q_perpendicular',
                ),
                validate_path=False,
            )
            hdf5_handler.add_dataset(
                path=f'{prefix}/plot_intensity_scattering_vector/intensity',
                params=dict(
                    data=z_interpolated,
                    archive_path='data.results[0].plot_intensity_scattering_vector.intensity',
                ),
                validate_path=False,
            )
            hdf5_handler.add_attribute(
                path=f'{prefix}/plot_intensity_scattering_vector',
                params=dict(
                    axes=['q_perpendicular', 'q_parallel'],
                    signal='intensity',
                    NX_class='NXdata',
                ),
            )
        hdf5_handler.write_file()
- ), - a_display={'visible': False}, - ) intensity = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='dimensionless', + type=HDF5Reference, description='The count at each 2-theta value, dimensionless', - a_plot={'x': 'array_index', 'y': 'intensity'}, ) two_theta = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='deg', + type=HDF5Reference, description='The 2-theta range of the diffractogram', - a_plot={'x': 'array_index', 'y': 'two_theta'}, ) q_norm = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='meter**(-1)', + type=HDF5Reference, description='The norm of scattering vector *Q* of the diffractogram', - a_plot={'x': 'array_index', 'y': 'q_norm'}, ) omega = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='deg', + type=HDF5Reference, description='The omega range of the diffractogram', ) phi = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='deg', + type=HDF5Reference, description='The phi range of the diffractogram', ) chi = Quantity( - type=np.dtype(np.float64), - shape=['*'], - unit='deg', + type=HDF5Reference, description='The chi range of the diffractogram', ) source_peak_wavelength = Quantity( @@ -331,11 +510,13 @@ class XRDResult(MeasurementResult): description='Axis scanned', ) integration_time = Quantity( - type=np.dtype(np.float64), - unit='s', - shape=['*'], + type=HDF5Reference, description='Integration time per channel', ) + plot_intensity = SubSection(section_def=XRDResultPlotIntensity) + plot_intensity_scattering_vector = SubSection( + section_def=XRDResultPlotIntensityScatteringVector + ) class XRDResult1D(XRDResult): @@ -343,9 +524,7 @@ class XRDResult1D(XRDResult): Section containing the result of a 1D X-ray diffraction scan. """ - m_def = Section() - - def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): + def generate_plots(self): """ Plot the 1D diffractogram. 
@@ -358,12 +537,20 @@ def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): (dict, dict): line_linear, line_log """ plots = [] - if self.two_theta is None or self.intensity is None: + + try: + hdf5_handler = self.m_parent.hdf5_handler + assert isinstance(hdf5_handler, HDF5Handler) + except (AttributeError, AssertionError): return plots - x = self.two_theta.to('degree').magnitude - y = self.intensity.magnitude + two_theta = hdf5_handler.read_dataset(self.two_theta) + intensity = hdf5_handler.read_dataset(self.intensity) + if two_theta is None or intensity is None: + return plots + x = two_theta.to('degree').magnitude + y = intensity.magnitude fig_line_linear = px.line( x=x, y=y, @@ -449,10 +636,11 @@ def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): ) ) - if self.q_norm is None: + q_norm = hdf5_handler.read_dataset(self.q_norm) + if q_norm is None: return plots - x = self.q_norm.to('1/angstrom').magnitude + x = q_norm.to('1/angstrom').magnitude fig_line_log = px.line( x=x, y=y, @@ -515,12 +703,45 @@ def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): self.name = f'{self.scan_axis} Scan Result' else: self.name = 'XRD Scan Result' + + try: + hdf5_handler = self.m_parent.hdf5_handler + assert isinstance(hdf5_handler, HDF5Handler) + except (AttributeError, AssertionError): + return + if self.source_peak_wavelength is not None: - self.q_norm, self.two_theta = calculate_two_theta_or_q( + q_norm = hdf5_handler.read_dataset(self.q_norm) + two_theta = hdf5_handler.read_dataset(self.two_theta) + q_norm, two_theta = calculate_two_theta_or_q( wavelength=self.source_peak_wavelength, - two_theta=self.two_theta, - q=self.q_norm, + two_theta=two_theta, + q=q_norm, + ) + hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/q_norm', + params=dict( + data=q_norm, + archive_path='data.results[0].q_norm', + ), + ) + hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/two_theta', + params=dict( + 
data=two_theta, + archive_path='data.results[0].two_theta', + ), ) + hdf5_handler.write_file() + self.m_setdefault('plot_intensity_scattering_vector') + self.plot_intensity_scattering_vector.intensity = self.intensity + self.plot_intensity_scattering_vector.q_norm = self.q_norm + self.plot_intensity_scattering_vector.normalize(archive, logger) + + self.m_setdefault('plot_intensity') + self.plot_intensity.intensity = self.intensity + self.plot_intensity.two_theta = self.two_theta + self.plot_intensity.normalize(archive, logger) class XRDResultRSM(XRDResult): @@ -528,27 +749,16 @@ class XRDResultRSM(XRDResult): Section containing the result of a Reciprocal Space Map (RSM) scan. """ - m_def = Section() q_parallel = Quantity( - type=np.dtype(np.float64), - shape=['*', '*'], - unit='meter**(-1)', + type=HDF5Reference, description='The scattering vector *Q_parallel* of the diffractogram', ) q_perpendicular = Quantity( - type=np.dtype(np.float64), - shape=['*', '*'], - unit='meter**(-1)', + type=HDF5Reference, description='The scattering vector *Q_perpendicular* of the diffractogram', ) - intensity = Quantity( - type=np.dtype(np.float64), - shape=['*', '*'], - unit='dimensionless', - description='The count at each position, dimensionless', - ) - def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): + def generate_plots(self): """ Plot the 2D RSM diffractogram. 
@@ -561,14 +771,24 @@ def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): (dict, dict): json_2theta_omega, json_q_vector """ plots = [] - if self.two_theta is None or self.intensity is None or self.omega is None: + + try: + hdf5_handler = self.m_parent.hdf5_handler + assert isinstance(hdf5_handler, HDF5Handler) + except (AttributeError, AssertionError): + return plots + + two_theta = hdf5_handler.read_dataset(self.two_theta) + intensity = hdf5_handler.read_dataset(self.intensity) + omega = hdf5_handler.read_dataset(self.omega) + if two_theta is None or intensity is None or omega is None: return plots # Plot for 2theta-omega RSM # Zero values in intensity become -inf in log scale and are not plotted - x = self.omega.to('degree').magnitude - y = self.two_theta.to('degree').magnitude - z = self.intensity.magnitude + x = omega.to('degree').magnitude + y = two_theta.to('degree').magnitude + z = intensity.magnitude log_z = np.log10(z) x_range, y_range = get_bounding_range_2d(x, y) @@ -636,9 +856,11 @@ def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): ) # Plot for RSM in Q-vectors - if self.q_parallel is not None and self.q_perpendicular is not None: - x = self.q_parallel.to('1/angstrom').magnitude.flatten() - y = self.q_perpendicular.to('1/angstrom').magnitude.flatten() + q_parallel = hdf5_handler.read_dataset(self.q_parallel) + q_perpendicular = hdf5_handler.read_dataset(self.q_perpendicular) + if q_parallel is not None and q_perpendicular is not None: + x = q_parallel.to('1/angstrom').magnitude.flatten() + y = q_perpendicular.to('1/angstrom').magnitude.flatten() # q_vectors lead to irregular grid # generate a regular grid using interpolation x_regular = np.linspace(x.min(), x.max(), z.shape[0]) @@ -721,21 +943,58 @@ def generate_plots(self, archive: 'EntryArchive', logger: 'BoundLogger'): def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): super().normalize(archive, logger) + if self.name is None: self.name 
= 'RSM Scan Result' - var_axis = 'omega' - if self.source_peak_wavelength is not None: - for var_axis in ['omega', 'chi', 'phi']: - if ( - self[var_axis] is not None - and len(np.unique(self[var_axis].magnitude)) > 1 - ): - self.q_parallel, self.q_perpendicular = calculate_q_vectors_RSM( - wavelength=self.source_peak_wavelength, - two_theta=self.two_theta * np.ones_like(self.intensity), - omega=self[var_axis], - ) - break + + try: + hdf5_handler = self.m_parent.hdf5_handler + assert isinstance(hdf5_handler, HDF5Handler) + except (AttributeError, AssertionError): + return + + var_axis = None + for axis in ['omega', 'chi', 'phi']: + axis_value = hdf5_handler.read_dataset(getattr(self, axis)) + if axis_value is not None and len(np.unique(axis_value.magnitude)) > 1: + var_axis = axis + break + + if self.source_peak_wavelength is not None and var_axis is not None: + two_theta = hdf5_handler.read_dataset(self.two_theta) + intensity = hdf5_handler.read_dataset(self.intensity) + q_parallel, q_perpendicular = calculate_q_vectors_rsm( + wavelength=self.source_peak_wavelength, + two_theta=two_theta * np.ones_like(intensity), + omega=hdf5_handler.read_dataset(getattr(self, var_axis)), + ) + hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/q_parallel', + params=dict( + data=q_parallel, + archive_path='data.results[0].q_parallel', + ), + ) + hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/q_perpendicular', + params=dict( + data=q_perpendicular, + archive_path='data.results[0].q_perpendicular', + ), + ) + hdf5_handler.write_file() + self.m_setdefault('plot_intensity_scattering_vector') + self.plot_intensity_scattering_vector.intensity = self.intensity + self.plot_intensity_scattering_vector.q_parallel = self.q_parallel + self.plot_intensity_scattering_vector.q_perpendicular = self.q_perpendicular + self.plot_intensity_scattering_vector.normalize(archive, logger) + + if var_axis is not None: + self.m_setdefault('plot_intensity') + 
self.plot_intensity.intensity = self.intensity + self.plot_intensity.two_theta = self.two_theta + self.plot_intensity.m_set(var_axis, getattr(self, var_axis)) + self.plot_intensity.normalize(archive, logger) class XRayDiffraction(Measurement): @@ -802,31 +1061,39 @@ def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): archive.results = Results() if not archive.results.properties: archive.results.properties = Properties() + if not archive.results.method: + archive.results.method = Method( + method_name='XRD', + measurement=MeasurementMethod( + xrd=XRDMethod(diffraction_method_name=self.diffraction_method_name) + ), + ) + + try: + hdf5_handler = self.hdf5_handler + except AttributeError: + return if not archive.results.properties.structural: diffraction_patterns = [] for result in self.results: - if len(result.intensity.shape) == 1: + intensity = hdf5_handler.read_dataset(result.intensity) + if len(intensity.shape) == 1: + two_theta = hdf5_handler.read_dataset(result.two_theta) + q_norm = hdf5_handler.read_dataset(result.q_norm) diffraction_patterns.append( DiffractionPattern( incident_beam_wavelength=result.source_peak_wavelength, - two_theta_angles=result.two_theta, - intensity=result.intensity, - q_vector=result.q_norm, + two_theta_angles=two_theta, + intensity=intensity, + q_vector=q_norm, ) ) archive.results.properties.structural = StructuralProperties( diffraction_pattern=diffraction_patterns ) - if not archive.results.method: - archive.results.method = Method( - method_name='XRD', - measurement=MeasurementMethod( - xrd=XRDMethod(diffraction_method_name=self.diffraction_method_name) - ), - ) -class ELNXRayDiffraction(XRayDiffraction, EntryData, PlotSection): +class ELNXRayDiffraction(XRayDiffraction, EntryData): """ Example section for how XRayDiffraction can be implemented with a general reader for common XRD file types. 
@@ -841,6 +1108,12 @@ class ELNXRayDiffraction(XRayDiffraction, EntryData, PlotSection): a_template={ 'measurement_identifiers': {}, }, + a_h5web=H5WebAnnotation( + paths=[ + 'results/0/plot_intensity', + 'results/0/plot_intensity_scattering_vector', + ] + ), ) data_file = Quantity( type=str, @@ -849,6 +1122,14 @@ class ELNXRayDiffraction(XRayDiffraction, EntryData, PlotSection): component=ELNComponentEnum.FileEditQuantity, ), ) + auxiliary_file = Quantity( + type=str, + description='Auxiliary file (like .h5 or .nxs) containing the entry data.', + a_eln=ELNAnnotation( + component=ELNComponentEnum.FileEditQuantity, + ), + ) + hdf5_handler = None measurement_identifiers = SubSection( section_def=ReadableIdentifiers, ) @@ -856,21 +1137,11 @@ class ELNXRayDiffraction(XRayDiffraction, EntryData, PlotSection): diffraction_method_name.m_annotations['eln'] = ELNAnnotation( component=ELNComponentEnum.EnumEditQuantity, ) - generate_nexus_file = Quantity( - type=bool, - description='Whether or not to generate a NeXus output file (if possible).', - default=True, - a_eln=ELNAnnotation( - component=ELNComponentEnum.BoolEditQuantity, - label='Generate NeXus file', - ), - ) def get_read_write_functions(self) -> tuple[Callable, Callable]: """ Method for getting the correct read and write functions for the current data file. - Returns: tuple[Callable, Callable]: The read, write functions. 
""" @@ -899,49 +1170,81 @@ def write_xrd_data( metadata_dict: dict = xrd_dict.get('metadata', {}) source_dict: dict = metadata_dict.get('source', {}) - scan_type = metadata_dict.get('scan_type', None) - if scan_type == 'line': - result = XRDResult1D( - intensity=xrd_dict.get('intensity', None), - two_theta=xrd_dict.get('2Theta', None), - omega=xrd_dict.get('Omega', None), - chi=xrd_dict.get('Chi', None), - phi=xrd_dict.get('Phi', None), - scan_axis=metadata_dict.get('scan_axis', None), - integration_time=xrd_dict.get('countTime', None), - ) - result.normalize(archive, logger) + scan_type = metadata_dict.get('scan_type') + if scan_type not in ['line', 'rsm']: + logger.error(f'Scan type `{scan_type}` is not supported.') + return + # Create a new result section + results = [] + result = None + if scan_type == 'line': + result = XRDResult1D() elif scan_type == 'rsm': - result = XRDResultRSM( - intensity=xrd_dict.get('intensity', None), - two_theta=xrd_dict.get('2Theta', None), - omega=xrd_dict.get('Omega', None), - chi=xrd_dict.get('Chi', None), - phi=xrd_dict.get('Phi', None), - scan_axis=metadata_dict.get('scan_axis', None), - integration_time=xrd_dict.get('countTime', None), + result = XRDResultRSM() + + if result is not None: + result.scan_axis = metadata_dict.get('scan_axis') + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/intensity', + params=dict( + data=xrd_dict.get('intensity'), + archive_path='data.results[0].intensity', + ), + ) + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/two_theta', + params=dict( + data=xrd_dict.get('2Theta'), + archive_path='data.results[0].two_theta', + ), + ) + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/omega', + params=dict( + data=xrd_dict.get('Omega'), + archive_path='data.results[0].omega', + ), + ) + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/chi', + params=dict( + data=xrd_dict.get('Chi'), + 
archive_path='data.results[0].chi', + ), + ) + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_result/phi', + params=dict( + data=xrd_dict.get('Phi'), + archive_path='data.results[0].phi', + ), + ) + self.hdf5_handler.add_dataset( + path='/ENTRY[entry]/experiment_config/count_time', + params=dict( + data=xrd_dict.get('countTime'), + archive_path='data.results[0].integration_time', + ), ) result.normalize(archive, logger) - else: - raise NotImplementedError(f'Scan type `{scan_type}` is not supported.') + results.append(result) source = XRayTubeSource( - xray_tube_material=source_dict.get('anode_material', None), - kalpha_one=source_dict.get('kAlpha1', None), - kalpha_two=source_dict.get('kAlpha2', None), - ratio_kalphatwo_kalphaone=source_dict.get('ratioKAlpha2KAlpha1', None), - kbeta=source_dict.get('kBeta', None), - xray_tube_voltage=source_dict.get('voltage', None), - xray_tube_current=source_dict.get('current', None), + xray_tube_material=source_dict.get('anode_material'), + kalpha_one=source_dict.get('kAlpha1'), + kalpha_two=source_dict.get('kAlpha2'), + ratio_kalphatwo_kalphaone=source_dict.get('ratioKAlpha2KAlpha1'), + kbeta=source_dict.get('kBeta'), + xray_tube_voltage=source_dict.get('voltage'), + xray_tube_current=source_dict.get('current'), ) source.normalize(archive, logger) - xrd_settings = XRDSettings(source=source) xrd_settings.normalize(archive, logger) samples = [] - if metadata_dict.get('sample_id', None) is not None: + if metadata_dict.get('sample_id') is not None: sample = CompositeSystemReference( lab_id=metadata_dict['sample_id'], ) @@ -949,12 +1252,23 @@ def write_xrd_data( samples.append(sample) xrd = ELNXRayDiffraction( - results=[result], + results=results, xrd_settings=xrd_settings, samples=samples, ) + merge_sections(self, xrd, logger) + def backward_compatibility(self): + """ + Method for backward compatibility. 
+ """ + # Migration to using HFD5References: removing exisiting results + if self.get('results'): + self.results = [] + if self.get('figures'): + self.figures = [] + def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): """ The normalize function of the `ELNXRayDiffraction` section. @@ -964,7 +1278,16 @@ def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): normalized. logger (BoundLogger): A structlog logger. """ + self.backward_compatibility() if self.data_file is not None: + self.auxiliary_file = f'{self.data_file}.nxs' + self.hdf5_handler = HDF5Handler( + filename=self.auxiliary_file, + archive=archive, + logger=logger, + valid_dataset_paths=NEXUS_DATASET_PATHS, + nexus=True, + ) read_function, write_function = self.get_read_write_functions() if read_function is None or write_function is None: logger.warn( @@ -974,15 +1297,10 @@ def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'): with archive.m_context.raw_file(self.data_file) as file: xrd_dict = read_function(file.name, logger) write_function(xrd_dict, archive, logger) + self.hdf5_handler.write_file() + if self.hdf5_handler.data_file != self.auxiliary_file: + self.auxiliary_file = self.hdf5_handler.data_file super().normalize(archive, logger) - if not self.results: - return - - scan_type = xrd_dict.get('metadata', {}).get('scan_type', None) - if self.generate_nexus_file and self.data_file is not None: - write_nx_section_and_create_file(archive, logger, scan_type=scan_type) - - self.figures = self.results[0].generate_plots(archive, logger) class RawFileXRDData(EntryData): diff --git a/tests/test_parser.py b/tests/test_parser.py index 2b236023..89c6018d 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -20,6 +20,8 @@ import pytest from nomad.client import normalize_all, parse +from nomad_measurements.xrd.schema import XRDResult1D + @pytest.fixture( name='parsed_archive', @@ -51,7 +53,7 @@ def fixture_parsed_archive(request): yield measurement_archive - 
for file_path in [measurement, measurement.replace('archive.json', 'nxs')]: + for file_path in [measurement, rel_file + '.nxs', rel_file + '.h5']: if os.path.exists(file_path): os.remove(file_path) @@ -75,7 +77,7 @@ def test_normalize_all(parsed_archive, caplog): assert parsed_archive.data.results[ 0 ].source_peak_wavelength.magnitude == pytest.approx(1.540598, 1e-2) - if len(parsed_archive.data.results[0].intensity.shape) == 1: + if isinstance(parsed_archive.data.results[0], XRDResult1D): assert parsed_archive.results.properties.structural.diffraction_pattern[ 0 ].incident_beam_wavelength.magnitude * 1e10 == pytest.approx(1.540598, 1e-2)