Merge branch 'master' into lint-tomoutil
Zack Singer committed Jul 11, 2024
2 parents c2eb442 + 5ecf297 commit 2638c99
Showing 68 changed files with 2,586 additions and 6,491 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/container_build.sh
@@ -1,6 +1,11 @@
#!/usr/bin/env bash
cd /github/workspace/

# Use archive mirror for CentOS 7 until we are ready to migrate to CentOS 8
sed -i -e 's/mirrorlist/#mirrorlist/g' \
-e 's|#baseurl=http://mirror.centos.org|baseurl=http://vault.centos.org|g' \
/etc/yum.repos.d/CentOS-*

# Install dependencies
yum install -y wget git centos-release-scl ca-certificates

21 changes: 20 additions & 1 deletion .github/workflows/test.yml
@@ -61,8 +61,9 @@ jobs:
if: ${{ matrix.config.os == 'macos-latest'}}

- name: Install HEXRD
# Install in editable mode for codecov
run: |
pip install .
pip install -e .
working-directory: hexrd

- name: Install requirements-dev.txt
@@ -75,4 +76,22 @@ jobs:
HEXRD_EXAMPLE_REPO_PATH: ${{ github.workspace }}/examples
run: |
pytest -s tests/
if: ${{ matrix.config.os != 'ubuntu-latest'}}
working-directory: hexrd

- name: Run tests with codecov
env:
HEXRD_EXAMPLE_REPO_PATH: ${{ github.workspace }}/examples
run: |
pytest -s --cov hexrd --cov-report xml:coverage.xml tests/
if: ${{ matrix.config.os == 'ubuntu-latest'}}
working-directory: hexrd

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: coverage.xml
working-directory: hexrd
if: ${{ matrix.config.os == 'ubuntu-latest'}}

3 changes: 2 additions & 1 deletion README.md
@@ -1,4 +1,5 @@
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8033939.svg)](https://doi.org/10.5281/zenodo.8033939) ![conda-package](https://github.com/HEXRD/hexrd/workflows/conda-package/badge.svg) ![test](https://github.com/HEXRD/hexrd/workflows/test/badge.svg) ![latest version](https://anaconda.org/hexrd/hexrd/badges/version.svg) ![last updated](https://anaconda.org/hexrd/hexrd/badges/latest_release_relative_date.svg) ![downloads](https://anaconda.org/hexrd/hexrd/badges/downloads.svg)
[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8033939.svg)](https://doi.org/10.5281/zenodo.8033939) ![conda-package](https://github.com/HEXRD/hexrd/workflows/conda-package/badge.svg) ![test](https://github.com/HEXRD/hexrd/workflows/test/badge.svg) [![codecov](https://codecov.io/gh/Hexrd/hexrd/graph/badge.svg)](https://codecov.io/gh/Hexrd/hexrd) ![latest version](https://anaconda.org/hexrd/hexrd/badges/version.svg) ![last updated](https://anaconda.org/hexrd/hexrd/badges/latest_release_relative_date.svg) ![downloads](https://anaconda.org/hexrd/hexrd/badges/downloads.svg)

# HEXRD
The HEXRD project is developing a cross-platform, open-source library for the general analysis of X-ray diffraction data. This includes powder diffraction, Laue diffraction, and High Energy Diffraction Microscopy (_a.k.a._ 3DXRD, multi-grain rotation method) modalities. At its core, HEXRD provides an abstraction of a generic diffraction instrument with support for multiple detectors. This includes optimized transforms from the direct and reciprocal crystal lattices to the local detector coordinates, harnesses for interpolating image data into scattering angle coordinates, and sophisticated calibration routines.

1 change: 0 additions & 1 deletion hexrd/cli/fit_grains.py
@@ -335,7 +335,6 @@ def execute(args, parser):
gid_list = None
if args.grains is not None:
gid_list = [int(i) for i in args.grains.split(',')]
pass

fit_results = fit_grains(
cfg,
15 changes: 1 addition & 14 deletions hexrd/constants.py
@@ -251,24 +251,13 @@ def _readenv(name, ctor, default):
del warnings
return default


# 0 = do NOT use numba
# 1 = use numba (default)
USE_NUMBA = _readenv("HEXRD_USE_NUMBA", int, 1)
if USE_NUMBA:
try:
import numba
except ImportError:
print("*** Numba not available, processing may run slower ***")
USE_NUMBA = False

del _readenv


def set_numba_cache():
"""Set the numba cache only if the following are true:
1. We are using numba
1. We are using numba - assumed true now
2. We are on Windows
3. We don't have write access to this file
4. The NUMBA_CACHE_DIR environment variable is not defined
@@ -277,8 +266,6 @@ def set_numba_cache():
directory where it doesn't have permission, and cause the application to
freeze. Avoid that by setting the cache dir ourselves.
"""
if not USE_NUMBA:
return

if os.name != 'nt':
return
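For context, a minimal sketch of the checks that the set_numba_cache docstring above describes, now that the USE_NUMBA guard is gone. The helper name and the temp-directory fallback path are illustrative assumptions, not part of this commit:

import os
import tempfile


def _needs_numba_cache_override(module_file: str) -> bool:
    # hedged sketch of the docstring's conditions, not the actual implementation
    if os.name != 'nt':                      # 2. only relevant on Windows
        return False
    if os.access(module_file, os.W_OK):      # 3. numba can already write next to this file
        return False
    if 'NUMBA_CACHE_DIR' in os.environ:      # 4. the user already chose a cache dir
        return False
    return True                              # 1. numba itself is now assumed present


if _needs_numba_cache_override(__file__):
    os.environ['NUMBA_CACHE_DIR'] = os.path.join(
        tempfile.gettempdir(), 'hexrd_numba_cache'  # assumed fallback location
    )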
35 changes: 35 additions & 0 deletions hexrd/deprecation.py
@@ -0,0 +1,35 @@
import os
import functools


class DeprecatedFunctionError(Exception):
"""Custom exception for deprecated functions."""
pass


def deprecated(new_func: str = None, removal_date: str = None):
"""
Decorator to mark functions as deprecated. Raises an error if
the 'ACK_DEPRECATED' environment variable is not set. Alerts the
user to the replacement function if provided.
"""

def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if new_func is not None:
print(
f"Warning: {func.__name__} is deprecated and is marked for"
f" removal. Please use {new_func} instead."
f" Removal date: {removal_date}"
)
if os.getenv('ACK_DEPRECATED') != 'true':
raise DeprecatedFunctionError(
f"Function {func.__name__} is deprecated. Set environment "
"variable 'ACK_DEPRECATED' to 'true' to acknowledge."
)
return func(*args, **kwargs)

return wrapper

return decorator
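A minimal usage sketch of the new decorator (the function names and removal date below are hypothetical, not taken from this commit): calling a deprecated function raises DeprecatedFunctionError unless ACK_DEPRECATED=true is set in the environment.

import os
from hexrd.deprecation import deprecated, DeprecatedFunctionError


@deprecated(new_func="new_tomo_util", removal_date="a future release")
def old_tomo_util(x):
    # hypothetical deprecated function, for illustration only
    return x * 2


os.environ['ACK_DEPRECATED'] = 'true'
print(old_tomo_util(21))      # prints the deprecation warning, then 42

del os.environ['ACK_DEPRECATED']
try:
    old_tomo_util(21)         # raises now that the acknowledgement is unset
except DeprecatedFunctionError as exc:
    print(exc)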
103 changes: 37 additions & 66 deletions hexrd/distortion/dexela_2923.py
@@ -5,11 +5,9 @@
@author: Joel V. Bernier
"""
import numpy as np
import numba

from hexrd import constants
from hexrd.constants import USE_NUMBA
if USE_NUMBA:
import numba

from .distortionabc import DistortionABC
from .registry import _RegisterDistortionClass
@@ -69,71 +67,44 @@ def _find_quadrant(xy_in):
return quad_label


if USE_NUMBA:
@numba.njit(nogil=True, cache=True)
def _dexela_2923_distortion(out_, in_, params):
for el in range(len(in_)):
xi, yi = in_[el, :]
if xi < 0.:
if yi < 0.:
# 3rd quadrant
out_[el, :] = in_[el, :] + params[4:6]
else:
# 2nd quadrant
out_[el, :] = in_[el, :] + params[2:4]
@numba.njit(nogil=True, cache=True)
def _dexela_2923_distortion(out_, in_, params):
for el in range(len(in_)):
xi, yi = in_[el, :]
if xi < 0.:
if yi < 0.:
# 3rd quadrant
out_[el, :] = in_[el, :] + params[4:6]
else:
if yi < 0.:
# 4th quadrant
out_[el, :] = in_[el, :] + params[6:8]
else:
# 1st quadrant
out_[el, :] = in_[el, :] + params[0:2]

@numba.njit(nogil=True, cache=True)
def _dexela_2923_inverse_distortion(out_, in_, params):
for el in range(len(in_)):
xi, yi = in_[el, :]
if xi < 0.:
if yi < 0.:
# 3rd quadrant
out_[el, :] = in_[el, :] - params[4:6]
else:
# 2nd quadrant
out_[el, :] = in_[el, :] - params[2:4]
# 2nd quadrant
out_[el, :] = in_[el, :] + params[2:4]
else:
if yi < 0.:
# 4th quadrant
out_[el, :] = in_[el, :] + params[6:8]
else:
if yi < 0.:
# 4th quadrant
out_[el, :] = in_[el, :] - params[6:8]
else:
# 1st quadrant
out_[el, :] = in_[el, :] - params[0:2]
else:
def _dexela_2923_distortion(out_, in_, params):
# find quadrant
ql = _find_quadrant(in_)
ql1 = ql == 1
ql2 = ql == 2
ql3 = ql == 3
ql4 = ql == 4
out_[ql1, :] = in_[ql1] + np.tile(params[0:2], (sum(ql1), 1))
out_[ql2, :] = in_[ql2] + np.tile(params[2:4], (sum(ql2), 1))
out_[ql3, :] = in_[ql3] + np.tile(params[4:6], (sum(ql3), 1))
out_[ql4, :] = in_[ql4] + np.tile(params[6:8], (sum(ql4), 1))
return

def _dexela_2923_inverse_distortion(out_, in_, params):
ql = _find_quadrant(in_)
ql1 = ql == 1
ql2 = ql == 2
ql3 = ql == 3
ql4 = ql == 4
out_[ql1, :] = in_[ql1] - np.tile(params[0:2], (sum(ql1), 1))
out_[ql2, :] = in_[ql2] - np.tile(params[2:4], (sum(ql2), 1))
out_[ql3, :] = in_[ql3] - np.tile(params[4:6], (sum(ql3), 1))
out_[ql4, :] = in_[ql4] - np.tile(params[6:8], (sum(ql4), 1))
return


# 1st quadrant
out_[el, :] = in_[el, :] + params[0:2]


@numba.njit(nogil=True, cache=True)
def _dexela_2923_inverse_distortion(out_, in_, params):
for el in range(len(in_)):
xi, yi = in_[el, :]
if xi < 0.:
if yi < 0.:
# 3rd quadrant
out_[el, :] = in_[el, :] - params[4:6]
else:
# 2nd quadrant
out_[el, :] = in_[el, :] - params[2:4]
else:
if yi < 0.:
# 4th quadrant
out_[el, :] = in_[el, :] - params[6:8]
else:
# 1st quadrant
out_[el, :] = in_[el, :] - params[0:2]

def test_disortion():
pts = np.random.randn(16, 2)
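For illustration, a hedged sketch of how the quadrant-offset kernels above are driven, calling the module-private helpers directly. The sample points and offsets are made up, and the points are kept away from the axes so the round trip stays within one quadrant:

import numpy as np
from hexrd.distortion.dexela_2923 import (
    _dexela_2923_distortion, _dexela_2923_inverse_distortion
)

rng = np.random.default_rng(42)
# sample points with |x|, |y| >= 0.5 so small offsets cannot change quadrant
pts = rng.uniform(0.5, 2.0, size=(16, 2)) * rng.choice([-1.0, 1.0], size=(16, 2))
# 8 panel offsets, ordered (q1x, q1y, q2x, q2y, q3x, q3y, q4x, q4y); made-up values
params = np.arange(8, dtype=float) * 0.01

out = np.empty_like(pts)
_dexela_2923_distortion(out, pts, params)            # forward: add per-quadrant offset

back = np.empty_like(pts)
_dexela_2923_inverse_distortion(back, out, params)   # inverse: subtract it again

assert np.allclose(back, pts)                        # round trip recovers the input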
4 changes: 2 additions & 2 deletions hexrd/distortion/distortionabc.py
@@ -8,9 +8,9 @@ class DistortionABC(metaclass=abc.ABCMeta):
@abc.abstractmethod
def apply(self, xy_in):
"""Apply distortion mapping"""
pass
raise NotImplementedError

@abc.abstractmethod
def apply_inverse(self, xy_in):
"""Apply inverse distortion mapping"""
pass
raise NotImplementedError
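A hypothetical subclass sketch (not part of this commit, and assuming apply and apply_inverse are the only abstract methods): with the change above, calling the base implementation, e.g. via super(), now fails loudly with NotImplementedError instead of silently returning None.

import numpy as np
from hexrd.distortion.distortionabc import DistortionABC


class ShiftDistortion(DistortionABC):
    """Illustrative distortion that rigidly shifts all points."""

    maptype = "shift_example"  # made-up maptype, for illustration only

    def __init__(self, offset):
        self.offset = np.asarray(offset, dtype=float)

    def apply(self, xy_in):
        return np.asarray(xy_in) + self.offset

    def apply_inverse(self, xy_in):
        return np.asarray(xy_in) - self.offset


dist = ShiftDistortion(offset=(0.1, -0.2))
xy = np.array([[1.0, 2.0], [3.0, 4.0]])
assert np.allclose(dist.apply_inverse(dist.apply(xy)), xy)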
2 changes: 0 additions & 2 deletions hexrd/distortion/registry.py
@@ -20,5 +20,3 @@ def register(cls, acls):
"""Register adapter class"""
if acls.__name__ != 'DistortionBase':
cls.distortion_registry[acls.maptype] = acls

pass # end class
15 changes: 3 additions & 12 deletions hexrd/findorientations.py
@@ -131,9 +131,6 @@ def generate_orientation_fibers(cfg, eta_ome):
ome_c = eta_ome.omeEdges[0] + (0.5 + coms[i][ispot][0])*del_ome
eta_c = eta_ome.etaEdges[0] + (0.5 + coms[i][ispot][1])*del_eta
input_p.append(np.hstack([this_hkl, this_tth, eta_c, ome_c]))
pass
pass
pass

# do the mapping
start = timeit.default_timer()
@@ -155,7 +152,6 @@ def generate_orientation_fibers(cfg, eta_ome):
discretefiber_reduced, input_p, chunksize=chunksize
), total=ntotal
):
pass
print(_.shape)
'''
pool.close()
@@ -352,8 +348,6 @@ def quat_distance(x, y):
qbar[:, i] = rot.quatAverageCluster(
qfib_r[:, cl == i + 1], qsym
).flatten()
pass
pass

if algorithm in ('dbscan', 'ort-dbscan') and qbar.size/4 > 1:
logger.info("\tchecking for duplicate orientations...")
@@ -374,10 +368,7 @@ def quat_distance(x, y):
tmp[:, i] = rot.quatAverageCluster(
qbar[:, cl == i + 1].reshape(4, npts), qsym
).flatten()
pass
qbar = tmp
pass
pass

logger.info("clustering took %f seconds", timeit.default_timer() - start)
logger.info(
@@ -592,7 +583,7 @@ def generate_eta_ome_maps(cfg, hkls=None, save=True):
map_fname
)

eta_ome.save(fn)
eta_ome.save_eta_ome_maps(fn)

logger.info('saved eta/ome orientation maps to "%s"', fn)

@@ -617,7 +608,7 @@ def _filter_eta_ome_maps(eta_ome, filter_stdev=False):
"""
gl_filter = ndimage.filters.gaussian_laplace
for i, pf in enumerate(eta_ome.dataStore):
for pf in eta_ome.dataStore:
# first compute row-wise median over omega channel
ome_median = np.tile(np.nanmedian(pf, axis=0), (len(pf), 1))

@@ -894,7 +885,7 @@ def find_orientations(cfg,
logger.info("\tmean reflections per grain: %d", mean_rpg)
logger.info("\tneighborhood size: %d", min_samples)

qbar, cl = run_cluster(
qbar, _ = run_cluster(
completeness, qfib, plane_data.getQSym(), cfg,
min_samples=min_samples,
compl_thresh=compl_thresh,
8 changes: 1 addition & 7 deletions hexrd/fitgrains.py
@@ -184,8 +184,7 @@ def fit_grain_FF_reduced(grain_id):
culled_results[det_key] = [presults[i] for i in np.where(idx)[0]]
num_refl_tot += len(valid_refl_ids)
num_refl_valid += sum(valid_refl_ids)

pass # now we have culled data
# now we have culled data

# CAVEAT: completeness from pullspots only; incl saturated and overlaps
# <JVB 2015-12-15>
@@ -220,8 +219,6 @@ def fit_grain_FF_reduced(grain_id):
plane_data.latVecOps['B'], plane_data.wavelength,
ome_period,
simOnly=False, return_value_flag=2)
pass # end conditional on fit
pass # end tolerance looping

if refit is not None:
# first get calculated x, y, ome from previous solution
@@ -279,7 +276,6 @@ def fit_grain_FF_reduced(grain_id):
]

num_refl_valid += sum(idx_new)
pass

# only execute fit if left with enough reflections
if num_refl_valid > 12:
@@ -297,8 +293,6 @@ def fit_grain_FF_reduced(grain_id):
plane_data.latVecOps['B'], plane_data.wavelength,
ome_period,
simOnly=False, return_value_flag=2)
pass
pass # close refit conditional
return grain_id, completeness, chisq, grain_params

