switched from flake8 to ruff
johannesulf committed Jan 22, 2025
1 parent 9da0588 commit 45008a6
Showing 8 changed files with 26 additions and 22 deletions.
10 changes: 4 additions & 6 deletions .github/workflows/tests.yml
@@ -31,13 +31,11 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-cov
- name: Lint with flake8
python -m pip install ruff pytest pytest-cov
- name: Lint with ruff
run: |
# stop the test if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
ruff check nautilus
ruff check --exit-zero --select ALL nautilus
- name: Test with pytest
run: |
python -m pip install .[tests]
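
The new lint step keeps the old two-stage structure: "ruff check nautilus" runs ruff's default rule set (roughly pyflakes plus a subset of pycodestyle) and fails the job on any finding, while the second command enables every available rule via --select ALL but, thanks to --exit-zero, always returns success and is purely informational. Below is a hedged sketch, not part of the repository, for reproducing the same step locally; it assumes ruff is installed and that the script is run from the repository root.

import subprocess
import sys


def run_lint() -> int:
    # Hard gate: ruff's default rules, matching the first command above.
    gate = subprocess.run(["ruff", "check", "nautilus"])
    # Advisory pass: every rule enabled, but --exit-zero keeps the exit code at 0.
    subprocess.run(["ruff", "check", "--exit-zero", "--select", "ALL", "nautilus"])
    return gate.returncode


if __name__ == "__main__":
    sys.exit(run_lint())
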
3 changes: 2 additions & 1 deletion nautilus/__init__.py
@@ -1,8 +1,9 @@
"""Neural Network-Boosted Importance Sampling for Bayesian Statistics"""
"""Neural Network-Boosted Importance Sampling for Bayesian Statistics."""

from .sampler import Sampler
from .prior import Prior

__author__ = 'Johannes U. Lange'
__email__ = '[email protected]'
__version__ = '1.0.5'
__all__ = ['Prior', 'Sampler']
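
Two small cleanups here: the module docstring now ends with a period (pydocstyle rule D400, one of the rules ruff reports under --select ALL), and the new __all__ declares the re-exported names, which tells linters such as ruff (rule F401) that the imports are intentional and pins down what a star import exposes. A quick, hedged way to check the result once the package is installed:

import nautilus

# The re-exported public API and version string declared in __init__.py above.
print(nautilus.__all__)      # ['Prior', 'Sampler']
print(nautilus.__version__)  # '1.0.5' as of this commit
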
3 changes: 3 additions & 0 deletions nautilus/bounds/nautilus.py
@@ -33,6 +33,7 @@ class NautilusBound():
n_reject : int
Number of points rejected due to not falling into the neural
network-based bounds.
"""

@classmethod
@@ -267,6 +268,7 @@ def n_ell(self):
-------
n_ell : int
The number of ellipsoids.
"""
return np.sum([np.any(~bound.dim_cube) for bound in
self.outer_bound.bounds])
@@ -279,6 +281,7 @@ def n_net(self):
-------
n_net : int
The number of neural networks.
"""
if self.neural_bounds[0].emulator is not None:
return len(self.neural_bounds) * len(
1 change: 1 addition & 0 deletions nautilus/bounds/neural.py
@@ -22,6 +22,7 @@ class NeuralBound():
score_predict_min : float
Minimum score predicted by the emulator to be considered part of the
bound.
"""

@classmethod
2 changes: 2 additions & 0 deletions nautilus/bounds/periodic.py
@@ -92,6 +92,8 @@ def read(cls, group, rng=None):
----------
group : h5py.Group
HDF5 group to write to.
rng : None or numpy.random.Generator, optional
Determines random number generation. Default is None.
Returns
-------
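
The added lines document an rng argument that the hunk header shows read() already accepted. The convention is the usual NumPy one: None lets the object create its own generator, while passing a numpy.random.Generator makes draws reproducible. A minimal sketch of constructing such a generator (the read() call itself is omitted here because its body is not shown):

import numpy as np

# Seeded generator of the kind the new docstring describes; it would be
# passed as the rng argument, with None falling back to default behavior.
rng = np.random.default_rng(seed=42)
print(rng.uniform(size=3))
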
19 changes: 9 additions & 10 deletions nautilus/pool.py
@@ -4,21 +4,20 @@


def initialize_worker(likelihood):
"""
Initialize a worker for likelihood evaluations.
"""Initialize a worker for likelihood evaluations.
Parameters
----------
likelihood : function
Likelihood function that each worker will evaluate.
"""
global LIKELIHOOD
LIKELIHOOD = likelihood


def likelihood_worker(*args):
"""
Have the worker evaluate the likelihood.
"""Have the worker evaluate the likelihood.
Parameters
----------
@@ -29,13 +28,13 @@ def likelihood_worker(*args):
-------
object
Return value of the likelihood function.
"""
return LIKELIHOOD(*args)


class NautilusPool:
"""
Wrapper for avoiding implementation-specific details elsewhere.
"""Wrapper for avoiding implementation-specific details elsewhere.
Attributes
----------
@@ -45,15 +44,16 @@ class NautilusPool:
"""

def __init__(self, pool, likelihood=None):
"""
Initialize a pool.
"""Initialize a pool.
Parameters
----------
pool : object
Pool used for parallelization. If a number, initialize a pool
from the `multiprocessing` library with the specified number of
workers.
likelihood : function, optional
Likelihood function to cache. Default is None.
"""
if isinstance(pool, int):
@@ -63,8 +63,7 @@ def __init__(self, pool, likelihood=None):
self.pool = pool

def map(self, func, iterable):
"""
Loop a function over an iterable, similar to the built-in map function.
"""Loop a function over an iterable like the built-in map function.
Parameters
----------
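
Most edits in this file move the docstring summary onto the same line as the opening quotes, consistent with pydocstyle rule D212 as reported by ruff under --select ALL. The two module-level functions themselves implement the standard multiprocessing initializer pattern: the likelihood is cached once per worker process instead of being pickled with every task. The wiring below is a minimal sketch under the assumption that NautilusPool passes them to multiprocessing.Pool as initializer and initargs; only the two functions appear in the diff above.

from multiprocessing import Pool


def initialize_worker(likelihood):
    # Cache the likelihood in a module-level global of each worker process.
    global LIKELIHOOD
    LIKELIHOOD = likelihood


def likelihood_worker(*args):
    # Evaluate the cached likelihood.
    return LIKELIHOOD(*args)


def log_likelihood(x):
    # Toy likelihood used only for this example.
    return -0.5 * sum(xi**2 for xi in x)


if __name__ == '__main__':
    with Pool(2, initializer=initialize_worker, initargs=(log_likelihood,)) as pool:
        print(pool.map(likelihood_worker, [(0.0, 0.0), (1.0, 2.0)]))
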
2 changes: 2 additions & 0 deletions nautilus/prior.py
@@ -45,6 +45,7 @@ def add_parameter(self, key=None, dist=(0, 1)):
ValueError
If a new key already exists in the key list or if `dist` is a
string but does not refer to a previously defined key.
"""
if key is None:
self.keys.append('x_{}'.format(len(self.keys)))
@@ -76,6 +77,7 @@ def dimensionality(self):
-------
n_dim : int
The number of free model parameters.
"""
return sum(not isinstance(dist, (numbers.Number, str)) for dist in
self.dists)
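
The two additions here are again docstring formatting tweaks. Since the hunks show most of the add_parameter and dimensionality logic, a short, hedged usage sketch may help; the exact handling of each dist type lives in code not shown, so the comments below are an interpretation rather than the library's documentation:

from nautilus import Prior

prior = Prior()
prior.add_parameter('a', dist=(0, 1))  # free parameter, uniform between 0 and 1
prior.add_parameter('b', dist=2.0)     # fixed to a number, not a free dimension
prior.add_parameter('c', dist='a')     # tied to the previously defined key 'a'
prior.add_parameter()                  # auto-named 'x_3', as in the hunk above

# Only 'a' and the auto-named parameter count as free model parameters.
print(prior.dimensionality())  # expected: 2
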
8 changes: 3 additions & 5 deletions nautilus/sampler.py
@@ -19,8 +19,7 @@


class Sampler():
"""
A dynamic sampler built upon the framework of importance nested sampling.
"""A dynamic sampler built upon the framework of importance nested sampling.
Attributes
----------
@@ -128,8 +127,7 @@ def __init__(self, prior, likelihood, n_dim=None, n_live=2000,
n_like_new_bound=None, vectorized=False, pass_dict=None,
pool=None, seed=None, blobs_dtype=None, filepath=None,
resume=True):
r"""
Initialize the sampler.
r"""Initialize the sampler.
Parameters
----------
@@ -601,7 +599,7 @@ def posterior(self, return_as_dict=None, equal_weight=False,
points = np.concatenate([p[s:] for p, s in zip(self.points, start)])
log_v = np.repeat(self.shell_log_v -
np.log(np.maximum(self.shell_n, 1)), self.shell_n)
log_l = np.concatenate([l[s:] for l, s in zip(self.log_l, start)])
log_l = np.concatenate([ll[s:] for ll, s in zip(self.log_l, start)])
log_w = log_v + log_l
if return_blobs:
if self.blobs is None:
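
Apart from the docstring reflow, the only code change renames the throwaway loop variable l to ll. Ruff flags l, O, and I as ambiguous names (rule E741), and since rules with the E7 prefix are part of ruff's default selection, the rename appears to be what lets the new hard gate pass; under the old workflow E741 was only advisory. For context, the surrounding lines build unnormalized log posterior weights as log_w = log_v + log_l; a small, hedged sketch of the standard way such log weights are normalized (not the sampler's own code):

import numpy as np
from scipy.special import logsumexp

# Stand-in values for log_v + log_l from the hunk above.
log_w = np.array([-1.2, -0.3, -5.0, -0.7])

# Normalize in log space to avoid overflow, then exponentiate.
w = np.exp(log_w - logsumexp(log_w))
print(w, w.sum())  # the weights sum to 1
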
