Skip to content

Commit

Permalink
Fixes from Brett's initial review
Browse files Browse the repository at this point in the history
  • Loading branch information
emolter committed Feb 24, 2025
1 parent efcb467 commit 21e9014
Show file tree
Hide file tree
Showing 7 changed files with 61 additions and 82 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,7 @@ select = [

[tool.numpydoc_validation]
checks = [
"all",
"all", # Adds all rules except the ones listed below
"EX01", # No examples section found
"SA01", # See Also section not found
"ES01", # No extended summary found
Expand Down
44 changes: 9 additions & 35 deletions src/stdatamodels/fits_support.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@

def is_builtin_fits_keyword(key):
"""
Check if key is a fits builtin.
Check if key is a FITS builtin.
Builtins are those managed by ``astropy.io.fits``, and we don't
want to propagate those through the `_extra_fits` mechanism.
Expand Down Expand Up @@ -179,7 +179,7 @@ def get_hdu(hdulist, hdu_name, index=None, _cache=None):
Parameters
----------
hdulist : weakref.ReferenceType
hdulist : astropy.io.fits.hdu.hdulist.HDUList
A FITS HDUList
hdu_name : str
The name of the HDU to retrieve
Expand Down Expand Up @@ -284,7 +284,7 @@ def _fits_comment_section_handler(fits_context, validator, properties, instance,
title = schema.get("title")
if title is not None:
current_comment_stack = fits_context.comment_stack
current_comment_stack.append(ensure_ascii(title))
current_comment_stack.append(_ensure_ascii(title))

for prop, subschema in properties.items():
if prop in instance:
Expand Down Expand Up @@ -313,12 +313,12 @@ def _fits_element_writer(fits_context, validator, fits_keyword, instance, schema
hdu.header.append((" ", ""), end=True)
fits_context.comment_stack = []

comment = ensure_ascii(get_short_doc(schema))
instance = ensure_ascii(instance)
comment = _ensure_ascii(_get_short_doc(schema))
instance = _ensure_ascii(instance)

if fits_keyword in ("COMMENT", "HISTORY"):
for item in instance:
hdu.header[fits_keyword] = ensure_ascii(item)
hdu.header[fits_keyword] = _ensure_ascii(item)

Check warning on line 321 in src/stdatamodels/fits_support.py

View check run for this annotation

Codecov / codecov/patch

src/stdatamodels/fits_support.py#L321

Added line #L321 was not covered by tests
elif fits_keyword in hdu.header:
hdu.header[fits_keyword] = (instance, comment)
else:
Expand Down Expand Up @@ -813,7 +813,7 @@ def from_fits(hdulist, schema, context, skip_fits_update=None, **kwargs):

def from_fits_asdf(hdulist, ignore_unrecognized_tag=False, **kwargs):
"""
Wrap asdf call to extract optional arguments.
Open the ASDF extension from a FITS hdulist.
Parameters
----------
Expand Down Expand Up @@ -1010,20 +1010,7 @@ def fits_hash(hdulist):
return fits_hash.hexdigest()


def get_short_doc(schema):
"""
Get the short description from the schema.
Parameters
----------
schema : dict
The schema to extract the description
Returns
-------
description : str
The short description
"""
def _get_short_doc(schema):
title = schema.get("title", None)
description = schema.get("description", None)
if description is None:
Expand All @@ -1034,20 +1021,7 @@ def get_short_doc(schema):
return description.partition("\n")[0]


def ensure_ascii(s):
"""
Ensure that a string is ASCII.
Parameters
----------
s : str
Input string.
Returns
-------
str
Output string, ascii.
"""
def _ensure_ascii(s):
# TODO: This function seems to only ever receive
# string input. Also it's not checking that the
# characters in the string fall within the valid
Expand Down
43 changes: 24 additions & 19 deletions src/stdatamodels/model_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from pathlib import Path, PurePath
import sys
import warnings
import functools

import numpy as np

Expand Down Expand Up @@ -449,15 +450,15 @@ def copy(self, memo=None):
__copy__ = __deepcopy__ = copy

def validate(self):
"""Re-validate the model instance against its schema."""
"""Validate the model instance against its schema."""
validate.value_change(str(self), self._instance, self._schema, self)

def info(self, *args, **kwargs):
"""Return information about the model.""" # numpydoc ignore=RT01
@functools.wraps(asdf.AsdfFile.info)
def info(self, *args, **kwargs): # noqa: D102
return self._asdf.info(**kwargs)

def search(self, *args, **kwargs):
"""Search the asdf tree for nodes that match the given criteria.""" # numpydoc ignore=RT01
@functools.wraps(asdf.AsdfFile.search)
def search(self, *args, **kwargs): # noqa: D102
return self._asdf.search(*args, **kwargs)

try:
Expand Down Expand Up @@ -547,7 +548,6 @@ def save(self, path, dir_path=None, *args, **kwargs):
File path to save to.
If function, it takes one argument which is
model.meta.filename and returns the full path string.
dir_path : str
Directory to save to. If not None, this will override
any directory information in the `path`
Expand Down Expand Up @@ -738,8 +738,8 @@ def extend_schema(self, new_schema):
Returns
-------
schema : DataModel
The updated schema.
self : DataModel
The datamodel with its schema updated.
"""
schema = {"allOf": [self._schema, new_schema]}
self._schema = mschema.merge_property_trees(schema)
Expand All @@ -761,8 +761,8 @@ def add_schema_entry(self, position, new_schema):
Returns
-------
schema : DataModel
The updated schema.
self : DataModel
The datamodel with the schema entry added.
"""
parts = position.split(".")
schema = new_schema
Expand Down Expand Up @@ -853,7 +853,7 @@ def __setitem__(self, key, value):

def items(self):
"""
Iterate over all of the schema items in a flat way.
Iterate over all of the datamodel contents in a flat way.
Each element is a pair (`key`, `value`). Each `key` is a
dot-separated name. For example, the schema element
Expand All @@ -880,7 +880,7 @@ def recurse(tree, path=None):

def keys(self):
"""
Iterate over all of the schema keys in a flat way.
Iterate over all of the datamodel contents in a flat way.
Yields
------
Expand All @@ -895,7 +895,7 @@ def keys(self):

def values(self):
"""
Iterate over all of the schema values in a flat way.
Iterate over all of the datamodel contents in a flat way.
Yields
------
Expand Down Expand Up @@ -1030,7 +1030,7 @@ def protected_keyword(path):

def to_flat_dict(self, include_arrays=True):
"""
Return a dictionary of all of the schema items as a flat dictionary.
Return a dictionary of all of the datamodel contents as a flat dictionary.
Each dictionary key is a dot-separated name. For example, the
schema element `meta.observation.date` will end up in the
Expand All @@ -1047,7 +1047,7 @@ def to_flat_dict(self, include_arrays=True):
Returns
-------
flat_dict : dict
A dictionary of all of the schema items as a flat dictionary.
A dictionary of all of the datamodel contents as a flat dictionary.
"""

def convert_val(val):
Expand All @@ -1067,7 +1067,15 @@ def convert_val(val):
}

@property
def schema(self): # noqa: D102
def schema(self):
"""
Retrieve the schema for this model.
Returns
-------
dict
The datamodel schema.
"""
return self._schema

@property
Expand Down Expand Up @@ -1111,12 +1119,10 @@ def get_fits_wcs(self, hdu_name="SCI", hdu_ver=1, key=" "):
The name of the HDU to get the WCS from. This must use
named HDUs, not numerical order HDUs. To get the primary
HDU, pass ``'PRIMARY'``.
hdu_ver : int, optional
The extension version. Used when there is more than one
extension with the same name. The default value, 1,
is the first.
key : str, optional
The name of a particular WCS transform to use. This may
be either ``' '`` or ``'A'``-``'Z'`` and corresponds to
Expand Down Expand Up @@ -1145,7 +1151,6 @@ def set_fits_wcs(self, wcs, hdu_name="SCI"):
----------
wcs : `astropy.wcs.WCS` or `pywcs.WCS` object
The object containing FITS WCS information
hdu_name : str, optional
The name of the HDU to set the WCS from. This must use
named HDUs, not numerical order HDUs. To set the primary
HDU, pass ``'PRIMARY'``.
20 changes: 10 additions & 10 deletions src/stdatamodels/properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ def _as_fitsrec(val):

def _get_schema_type(schema):
"""
Find the type of a schema and its subschemas.
Find the type or types used by a schema.
Create a list of types used by a schema and its subschemas when
the subschemas are joined by combiners. Then return a type string
Expand All @@ -167,8 +167,8 @@ def _get_schema_type(schema):
Returns
-------
schema_type : str
The type of the schema, or 'mixed' if the schema has multiple types.
str
The type used by a schema, or 'mixed' if the schema has multiple types.
"""

def callback(subschema, path, combiner, types, recurse):
Expand Down Expand Up @@ -336,7 +336,7 @@ def _find_property(schema, attr):


class Node:
"""A schema-validated object."""
"""An object that supports validation against a schema."""

def __init__(self, attr, instance, schema, ctx, parent):
self._name = attr
Expand All @@ -354,7 +354,7 @@ def instance(self):


class ObjectNode(Node):
"""A schema-validated dictionary-like structure."""
"""A dictionary-like Node."""

def __dir__(self):
added = set(self._schema.get("properties", {}).keys())
Expand Down Expand Up @@ -436,7 +436,7 @@ def items(self): # noqa: D102


class ListNode(Node):
"""A list of schema-validated objects."""
"""A list-like Node."""

def __cast(self, other):
if isinstance(other, ListNode):
Expand Down Expand Up @@ -570,7 +570,7 @@ def put_value(path, value, tree):
The path to the element.
value : any
The value to place.
tree : JSON object tree
tree : `asdf.tags.core.AsdfObject`
The tree to place the value into.
"""
cursor = tree
Expand Down Expand Up @@ -598,14 +598,14 @@ def merge_tree(a, b):
Parameters
----------
a : JSON object tree
a : `asdf.tags.core.AsdfObject`
The tree to merge into.
b : JSON object tree
b : `asdf.tags.core.AsdfObject`
The tree to merge from.
Returns
-------
a : JSON object tree
a : `asdf.tags.core.AsdfObject`
The merged tree.
"""

Expand Down
14 changes: 7 additions & 7 deletions src/stdatamodels/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ def gentle_asarray(a, dtype, allow_extra_columns=False):
The dtype of the new array.
allow_extra_columns : bool
If True, extra columns in the input array are allowed and will be
appended to the output array. If False, extra columns will raise an
included in the output array. If False, extra columns will raise an
exception.
Returns
Expand Down Expand Up @@ -284,7 +284,7 @@ def get_model_type(init):
Parameters
----------
init : asdf.AsdfFile or astropy.io.fits.HDUList
The file object to extract the model type from.
The object to extract the model type from.
Returns
-------
Expand All @@ -311,12 +311,12 @@ def remove_none_from_tree(tree):
Parameters
----------
tree : object
tree : dict
The root node of the tree.
Returns
-------
object
dict
Modified tree.
"""

Expand All @@ -335,13 +335,13 @@ def convert_fitsrec_to_array_in_tree(tree):
Parameters
----------
tree : object
The root node of the tree.
tree : dict
A tree that may contain FITS record arrays.
Returns
-------
object
Modified tree.
A copy of the input tree with FITS record arrays converted to numpy arrays.
"""

def _convert_fitsrec(node):
Expand Down
6 changes: 3 additions & 3 deletions src/stdatamodels/validate.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@


class ValidationWarning(Warning):
"""Raise when a value fails to validate a model's schema."""
"""Warning issued when a value fails validation against a schema."""

pass

Expand Down Expand Up @@ -61,7 +61,7 @@ def value_change(path, value, schema, ctx):

def _validate_datatype(validator, schema_datatype, instance, schema):
"""
Validate that an array has the correct datatype.
Validate a datatype instance against a schema.
This extends the ASDF datatype validator to support ndim
and max_ndim within individual fields of structured arrays,
Expand Down Expand Up @@ -187,7 +187,7 @@ def _check_value(value, schema, ctx):
schema : dict
The schema to validate against.
ctx : DataModel
The datamodel that the value is being added to
The datamodel to use as context.
"""
# Do not validate None values. These are regarded as missing in DataModel,
# and will eventually be stripped out when the model is saved to FITS or ASDF.
Expand Down
Loading

0 comments on commit 21e9014

Please sign in to comment.