parking
ka-sarthak committed Sep 5, 2024
1 parent 60e8a52 commit 0bc1a1d
Showing 3 changed files with 101 additions and 33 deletions.
33 changes: 33 additions & 0 deletions .vscode/launch.json
@@ -0,0 +1,33 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "tests",
"type": "debugpy",
"request": "launch",
"cwd": "${workspaceFolder}",
"program": "${workspaceFolder}/.pyenv/bin/pytest",
"justMyCode": false,
"env": {
"_PYTEST_RAISE": "1"
},
"args": [
"-sv",
"/home/sarthak-kapoor/repositories/fairmat/nomad-measurements/tests/test_parser.py"
]
},
{
"name": "nomad parse",
"type": "debugpy",
"request": "launch",
"cwd": "${workspaceFolder}",
"program": "${workspaceFolder}/.pyenv/bin/nomad",
"justMyCode": false,
"args": [
"parse",
"--show-archive",
"/home/sarthak-kapoor/repositories/fairmat/nomad-measurements/tests/data/"
]
},
]
}
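The "tests" configuration runs pytest under debugpy and exports _PYTEST_RAISE=1. That variable does nothing on its own; it is conventionally picked up in the project's conftest.py so the debugger stops at the original exception instead of inside pytest's own error handling. A minimal sketch of that hook, assuming the repository follows the usual pattern (no conftest.py change is part of this commit):

# conftest.py (sketch): let debugpy break at the original exception site
# whenever the launch configuration sets _PYTEST_RAISE=1.
import os

import pytest

if os.getenv('_PYTEST_RAISE', '0') != '0':

    @pytest.hookimpl(tryfirst=True)
    def pytest_exception_interact(call):
        raise call.excinfo.value

    @pytest.hookimpl(tryfirst=True)
    def pytest_internal_error(excrepr, excinfo):
        raise excinfo.value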
53 changes: 30 additions & 23 deletions src/nomad_measurements/xrd/nx.py
@@ -15,10 +15,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from typing import TYPE_CHECKING

from pynxtools import dataconverter
from pynxtools.nomad.dataconverter import populate_nexus_subsection
from pynxtools.dataconverter import writer as pynxtools_writer
from pynxtools.dataconverter.helpers import (
generate_template_from_nxdl,
get_nxdl_root_and_path,
)
from pynxtools.dataconverter.template import Template

if TYPE_CHECKING:
from nomad.datamodel.datamodel import EntryArchive
@@ -153,30 +158,32 @@ def connect_concepts(template, archive: 'EntryArchive', scan_type: str): # noqa
}


def write_nx_section_and_create_file(
    archive: 'EntryArchive', logger: 'BoundLogger', scan_type: str = 'line'
def write_nx_file(
    xrd_dict: dict,
    archive: 'EntryArchive',
    logger: 'BoundLogger',
):
    """
    Uses the archive to generate the NeXus section and .nxs file.
    Args:
        archive (EntryArchive): The archive containing the section.
        xrd_dict (dict): A dictionary containing the data from the experiment file
            and the ELN data under the key 'eln_dict'.
        archive (EntryArchive): The Nomad archive containing the root section.
        logger (BoundLogger): A structlog logger.
        generate_nexus_file (boolean): If True, the function will generate a .nxs file.
        nxs_as_entry (boolean): If True, the function will generate a .nxs file
            as a nomad entry.
    """
    nxdl_root, _ = dataconverter.helpers.get_nxdl_root_and_path('NXxrd_pan')
    template = dataconverter.template.Template()
    dataconverter.helpers.generate_template_from_nxdl(nxdl_root, template)
    connect_concepts(template, archive, scan_type=scan_type)
    archive_name = archive.metadata.mainfile.split('.')[0]
    nexus_output = f'{archive_name}.nxs'

    populate_nexus_subsection(
        template=template,
        app_def='NXxrd_pan',
        archive=archive,
        logger=logger,
        output_file_path=nexus_output,
    )
    nx_file = archive.metadata.mainfile.split('.')[0] + '.nxs'
    metadata_dict: dict = xrd_dict.get('metadata', {})
    scan_type = metadata_dict.get('scan_type', 'line')

    app_def = 'NXxrd_pan'
    nxdl_root, nxdl_f_path = get_nxdl_root_and_path(app_def)
    template = Template()
    generate_template_from_nxdl(nxdl_root, template)
    connect_concepts(xrd_dict=xrd_dict, template=template, scan_type=scan_type)

    archive.data.output = os.path.join(archive.m_context.raw_path(), nx_file)
    pynxtools_writer.Writer(
        data=template, nxdl_f_path=nxdl_f_path, output_path=archive.data.output
    ).write()

    return nx_file
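The new write_nx_file above replaces populate_nexus_subsection with a direct pynxtools template-and-writer flow. For orientation, a stripped-down sketch of that flow outside of NOMAD (the template key and output path are illustrative, and the remaining NXxrd_pan fields are left unfilled, which a full converter run would normally populate):

# Sketch of the pynxtools calls used by write_nx_file, without the NOMAD archive.
from pynxtools.dataconverter import writer as pynxtools_writer
from pynxtools.dataconverter.helpers import (
    generate_template_from_nxdl,
    get_nxdl_root_and_path,
)
from pynxtools.dataconverter.template import Template

nxdl_root, nxdl_f_path = get_nxdl_root_and_path('NXxrd_pan')
template = Template()
generate_template_from_nxdl(nxdl_root, template)

# Illustrative assignment; in write_nx_file this mapping is done by connect_concepts().
template['/ENTRY[entry]/experiment_result/intensity'] = [10.0, 20.0, 15.0]

pynxtools_writer.Writer(
    data=template, nxdl_f_path=nxdl_f_path, output_path='example.nxs'
).write()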
48 changes: 38 additions & 10 deletions src/nomad_measurements/xrd/schema.py
@@ -19,6 +19,7 @@
TYPE_CHECKING,
Any,
Callable,
Optional,
)

import numpy as np
@@ -28,10 +29,12 @@
read_panalytical_xrdml,
read_rigaku_rasx,
)
from nomad.config import config
from nomad.datamodel.data import (
ArchiveSection,
EntryData,
)
from nomad.datamodel.hdf5 import HDF5Reference
from nomad.datamodel.metainfo.annotations import (
ELNAnnotation,
ELNComponentEnum,
@@ -62,6 +65,7 @@
Section,
SubSection,
)
from nomad.units import ureg
from scipy.interpolate import griddata

from nomad_measurements.general import (
@@ -73,6 +77,7 @@
merge_sections,
set_data,
)
from nomad_measurements.xrd.nx import write_nx_file

if TYPE_CHECKING:
import pint
@@ -84,17 +89,15 @@
)


from nomad.config import config

configuration = config.get_plugin_entry_point('nomad_measurements.xrd:schema')

m_package = SchemaPackage(aliases=['nomad_measurements.xrd.parser.parser'])


def calculate_two_theta_or_q(
wavelength: 'pint.Quantity',
q: 'pint.Quantity' = None,
two_theta: 'pint.Quantity' = None,
q: Optional['pint.Quantity'] = None,
two_theta: Optional['pint.Quantity'] = None,
) -> tuple['pint.Quantity', 'pint.Quantity']:
"""
Calculate the two-theta array from the scattering vector (q) or vice-versa,
@@ -103,8 +106,8 @@ def calculate_two_theta_or_q(
Args:
wavelength (pint.Quantity): Wavelength of the X-ray source.
q (pint.Quantity, optional): Array of scattering vectors. Defaults to None.
two_theta (pint.Quantity, optional): Array of two-theta angles.
Defaults to None.
two_theta (pint.Quantity, optional): Array of two-theta angles. Defaults to
None.
Returns:
tuple[pint.Quantity, pint.Quantity]: Tuple of scattering vector, two-theta
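For reference, assuming the usual elastic-scattering convention in XRD, the conversion implemented here is q = (4π/λ)·sin(2θ/2), and conversely 2θ = 2·arcsin(qλ/4π), where λ is the wavelength of the X-ray source.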
Expand Down Expand Up @@ -1038,6 +1041,35 @@ def write_xrd_data(
)
merge_sections(self, xrd, logger)

if self.state_use_nexus_file:
nx_file_path = write_nx_file(xrd_dict, archive, logger)
self.write_hdf5_references(nx_file_path, archive, logger)

def create_array_results(self, xrd_dict: dict[str, Any], archive: 'EntryArchive'):
class Results:
pass

def write_hdf5_references(
self, nx_file_path, archive: 'EntryArchive', logger: 'BoundLogger'
):
self.results[0].intensity = f'{nx_file_path}:/entry/experiment_result/intensity'
self.results[0].two_theta = f'{nx_file_path}:/entry/experiment_result/two_theta'
self.results[0].omega = f'{nx_file_path}:/entry/experiment_result/omega'
self.results[0].chi = f'{nx_file_path}:/entry/experiment_result/chi'
self.results[0].phi = f'{nx_file_path}:/entry/experiment_result/phi'

if isinstance(self.results[0], XRDResult1D_HDF5):
self.results[0].q_norm = f'{nx_file_path}:/entry/experiment_result/q_norm'
if isinstance(self.results[0], XRDResultRSM_HDF5):
self.results[
0
].q_parallel = f'{nx_file_path}:/entry/experiment_result/q_parallel'
self.results[
0
].q_perpendicular = (
f'{nx_file_path}:/entry/experiment_result/q_perpendicular'
)
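write_hdf5_references stores each dataset as a plain '<file>.nxs:/internal/path' string. On the schema side, those strings are expected to land in quantities typed as HDF5Reference (imported further up in this file); a minimal sketch of such a section, with a hypothetical class name and descriptions (the real XRDResult1D_HDF5 and XRDResultRSM_HDF5 definitions are not shown in this diff):

# Illustrative only; not part of this commit.
from nomad.datamodel.data import ArchiveSection
from nomad.datamodel.hdf5 import HDF5Reference
from nomad.metainfo import Quantity


class XRDResultHDF5Sketch(ArchiveSection):
    intensity = Quantity(
        type=HDF5Reference,
        description='Reference such as "<file>.nxs:/entry/experiment_result/intensity".',
    )
    two_theta = Quantity(
        type=HDF5Reference,
        description='Reference such as "<file>.nxs:/entry/experiment_result/two_theta".',
    )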

def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'):
"""
The normalize function of the `ELNXRayDiffraction` section.
Expand Down Expand Up @@ -1067,10 +1099,6 @@ def normalize(self, archive: 'EntryArchive', logger: 'BoundLogger'):
if not self.results:
return

scan_type = xrd_dict.get('metadata', {}).get('scan_type', None)
if self.generate_nexus_file and self.data_file is not None:
write_nx_section_and_create_file(archive, logger, scan_type=scan_type)

self.figures = self.results[0].generate_plots(archive, logger)


