Merge branch 'main' of github.com:CCSDSPy/ccsdspy into main
ddasilva committed May 24, 2024
2 parents 8db942f + 31f1a33 commit e9eeb28
Showing 16 changed files with 206 additions and 32 deletions.
4 changes: 2 additions & 2 deletions README.rst
@@ -67,13 +67,13 @@ The following example shows how simple it is to read in fixed length CCSDS packe
PacketField(name='OPMODE', data_type='uint', bit_length=3),
PacketField(name='SPACER', data_type='fill', bit_length=1),
PacketField(name='VOLTAGE', data_type='int', bit_length=8),
PacketArray(
PacketArray(
name='SENSOR_GRID',
data_type='uint',
bit_length=16,
array_shape=(32, 32),
array_order='C'
),
),
])
result = pkt.load('mypackets.bin')
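# Illustrative sketch (field names from the example above): load() returns a
# dict-like object mapping each field name to a NumPy array with one entry per
# packet, so result['SENSOR_GRID'] would have shape (num_packets, 32, 32).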
56 changes: 42 additions & 14 deletions ccsdspy/packet_types.py
@@ -42,8 +42,8 @@ def from_file(cls, file):
file : str
Path to file on the local file system that defines the packet fields.
Currently only supports csv files.
See :download:`simple_csv_3col.csv <../../ccsdspy/tests/data/packet_def/simple_csv_3col.csv>` # noqa: E501
and :download:`simple_csv_4col.csv <../../ccsdspy/tests/data/packet_def/simple_csv_4col.csv>` # noqa: E501
See :download:`basic_csv_3col.csv <../../ccsdspy/tests/data/packet_def/basic_csv_3col.csv>` # noqa: E501
and :download:`extended_csv_4col.csv <../../ccsdspy/tests/data/packet_def/extended_csv_4col.csv>` # noqa: E501
Returns
-------
@@ -99,7 +99,13 @@ def add_converted_field(self, input_field_name, output_field_name, converter):

# Check that each of the input field names exists in the packet, and report
# the missing fields if not
fields_in_packet_set = {field._name for field in self._fields}
# Collect valid names of fields, which include primary header fields as well
# as fields defined in the packet.
fields_in_packet_set = set()

for field in _prepend_primary_header_fields(self._fields):
fields_in_packet_set.add(field._name)

input_field_names_set = set(input_field_names)
all_fields_present = input_field_names_set <= fields_in_packet_set # subset

@@ -524,6 +530,34 @@ def _prepend_primary_header_fields(existing_fields):
return return_fields


def _parse_csv_array_shape(data_type_str):
"""Parse a data type string from a CSV to determine the array shape.
Parameters
----------
data_type_str : str
Full string specifying the data type, e.g. `uint(1, 2)`
Returns
-------
array_shape : str, int, or tuple of int
Parsed array shape to be used when loading the CSV.
"""
array_shape_str = data_type_str[data_type_str.find("(") + 1 : data_type_str.find(")")]
if array_shape_str == "expand":
array_shape = "expand"
elif "," in array_shape_str:
try:
array_shape = tuple(map(int, array_shape_str.split(", ")))
except ValueError:
raise ValueError(
"Array shape must be `expand`, the name of another field, or a tuple of ints."
)
else: # string is either another field for reference or a single integer for a one dimensional array shape
array_shape = int(array_shape_str) if array_shape_str.isnumeric() else array_shape_str
return array_shape
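# A minimal usage sketch; these expectations mirror the parametrized cases added
# to ccsdspy/tests/test_packet_types.py in this same commit.
assert _parse_csv_array_shape("uint(4)") == 4              # single int: 1-D length
assert _parse_csv_array_shape("uint(1, 2)") == (1, 2)      # comma-separated ints: tuple
assert _parse_csv_array_shape("uint(expand)") == "expand"  # variable-length marker
assert _parse_csv_array_shape("uint(OPMODE)") == "OPMODE"  # reference to another field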


def _get_fields_csv_file(csv_file):
"""Parse a simple comma-delimited file that defines a packet.
@@ -554,19 +588,16 @@ def _get_fields_csv_file(csv_file):
raise ValueError(f"Minimum required columns are {req_columns}.")

for row in reader: # skip the header row
if "bit_offset" not in headers: # 3 col csv file
if "bit_offset" not in headers: # basic 3 col csv file
if (row["data_type"].count("(") == 1) and (row["data_type"].count(")") == 1):
data_type = row["data_type"].split("(")[0]
array_shape_str = row["data_type"][
row["data_type"].find("(") + 1 : row["data_type"].find(")")
]
array_shape = tuple(map(int, array_shape_str.split(", ")))
array_shape = _parse_csv_array_shape(row["data_type"])
fields.append(
PacketArray(
name=row["name"],
data_type=data_type,
bit_length=int(row["bit_length"]),
array_shape=(array_shape),
array_shape=array_shape,
)
)
else:
@@ -577,14 +608,11 @@
bit_length=int(row["bit_length"]),
)
)
if "bit_offset" in headers: # 4 col csv file provides bit offsets
if "bit_offset" in headers: # extended 4 col csv file provides bit offsets
# TODO: Check the consistency of bit_offsets versus previous bit_lengths
if (row["data_type"].count("(") == 1) and (row["data_type"].count(")") == 1):
data_type = row["data_type"].split("(")[0]
array_shape_str = row["data_type"][
row["data_type"].find("(") + 1 : row["data_type"].find(")")
]
array_shape = tuple(map(int, array_shape_str.split(", ")))
array_shape = _parse_csv_array_shape(row["data_type"])
fields.append(
PacketArray(
name=row["name"],
6 changes: 6 additions & 0 deletions ccsdspy/tests/data/packet_def/basic_csv_3col_with_all.csv
@@ -0,0 +1,6 @@
name, data_type, bit_length
SHCOARSE, uint, 32
SHFINE, "uint(expand)", 20
OPMODE, "uint(SHFINE)", 3
SPACER, fill, 1
VOLTAGE, "int(12, 24)", 8
6 changes: 6 additions & 0 deletions ccsdspy/tests/data/packet_def/basic_csv_3col_with_expand.csv
@@ -0,0 +1,6 @@
name, data_type, bit_length
SHCOARSE, uint, 32
SHFINE, uint, 20
OPMODE, uint, 3
SPACER, fill, 1
VOLTAGE, "uint(expand)", 8
6 changes: 6 additions & 0 deletions ccsdspy/tests/data/packet_def/basic_csv_3col_with_reference.csv
@@ -0,0 +1,6 @@
name, data_type, bit_length
SHCOARSE, uint, 32
SHFINE, uint, 20
OPMODE, uint, 3
SPACER, fill, 1
VOLTAGE, "uint(OPMODE)", 8
38 changes: 33 additions & 5 deletions ccsdspy/tests/test_packet_types.py
@@ -11,14 +11,17 @@

from .. import FixedLength, VariableLength, PacketField, PacketArray
from ..constants import BITS_PER_BYTE
from ..packet_types import _get_fields_csv_file
from ..packet_types import _get_fields_csv_file, _parse_csv_array_shape

dir_path = os.path.dirname(os.path.realpath(__file__))
packet_def_dir = os.path.join(dir_path, "data", "packet_def")
csv_file_4col = os.path.join(packet_def_dir, "simple_csv_4col.csv")
csv_file_3col = os.path.join(packet_def_dir, "simple_csv_3col.csv")
csv_file_4col_with_array = os.path.join(packet_def_dir, "simple_csv_4col_with_array.csv")
csv_file_3col_with_array = os.path.join(packet_def_dir, "simple_csv_3col_with_array.csv")
csv_file_4col = os.path.join(packet_def_dir, "extended_csv_4col.csv")
csv_file_3col = os.path.join(packet_def_dir, "basic_csv_3col.csv")
csv_file_4col_with_array = os.path.join(packet_def_dir, "extended_csv_4col_with_array.csv")
csv_file_3col_with_array = os.path.join(packet_def_dir, "basic_csv_3col_with_array.csv")
csv_file_3col_with_expand = os.path.join(packet_def_dir, "basic_csv_3col_with_expand.csv")
csv_file_3col_with_reference = os.path.join(packet_def_dir, "basic_csv_3col_with_reference.csv")
csv_file_3col_with_all = os.path.join(packet_def_dir, "basic_csv_3col_with_all.csv")

hs_packet_dir = os.path.join(dir_path, "data", "hs")
random_binary_file = os.path.join(
@@ -102,6 +105,31 @@ def test_FixedLength_from_file_not_supported(filename):
FixedLength.from_file(filename)


@pytest.mark.parametrize(
"shape_str, expected_value",
[
("uint(4)", 4),
("uint(1, 2)", (1, 2)),
("uint(expand)", "expand"),
("uint(OPMODE)", "OPMODE"),
],
)
def test_parse_csv_array_shape(shape_str, expected_value):
assert _parse_csv_array_shape(shape_str) == expected_value


def test_parse_csv_array_shape_fails_on_invalid_shape_str():
with pytest.raises(ValueError):
_parse_csv_array_shape("uint(4, FIELD)")


def test_VariableLength_from_file():
"""Test that from_file returns a VariableLength instance"""
assert isinstance(VariableLength.from_file(csv_file_3col_with_expand), VariableLength)
assert isinstance(VariableLength.from_file(csv_file_3col_with_reference), VariableLength)
assert isinstance(VariableLength.from_file(csv_file_3col_with_all), VariableLength)


@pytest.mark.parametrize(
"cls,numpy_dtype,ccsdspy_data_type,ccsdspy_bit_length,array_order,include_bit_offset",
[
10 changes: 1 addition & 9 deletions docs/user-guide/fixedlength.rst
@@ -33,16 +33,8 @@ The following code defines a simple fixed length packet
])
Note that the CCSDS header need not be included as it is included by default.
A packet need not be defined in code.
It can also be defined in a text file.
For example,

With this file, it is then possible to define the packet object with

.. code-block:: python
import ccsdspy
pkt = ccsdspy.FixedLength.from_file('packet_definition.csv')
Alternatively, fixed length packets can be :ref:`loaded from a CSV file <loadfile>`.

Parsing a file
==============
1 change: 1 addition & 0 deletions docs/user-guide/index.rst
@@ -14,5 +14,6 @@ For more details checkout the :ref:`reference`.
packetfields
fixedlength
variablelength
loadfile
converters
utils
102 changes: 102 additions & 0 deletions docs/user-guide/loadfile.rst
@@ -0,0 +1,102 @@
.. _loadfile:

******************************************
Loading Packet Definitions from a CSV File
******************************************

Overview
=========

:ref:`fixed` can be loaded from a CSV (comma separated value) file.
This is an alternative way of defining packet layouts that some users may prefer,
and support for it is still under development. The syntax for loading a `~ccsdspy.FixedLength` packet from a CSV file is:

.. code-block:: python
import ccsdspy
pkt = ccsdspy.FixedLength.from_file('packet_definition.csv')
The syntax is the same for `~ccsdspy.VariableLength` packets:

.. code-block:: python
import ccsdspy
pkt = ccsdspy.VariableLength.from_file('packet_definition.csv')
The only requirement is that the CSV is structured as either the :ref:`threecolumn`
or :ref:`fourcolumn`.

.. contents::
:depth: 2

.. _threecolumn:

Basic Layout (Three Columns)
============================

The basic CSV layout has columns for `name`, `data_type`, and `bit_length`. The first row of the CSV should be a
header line where the columns are named. Subsequent rows encode packet fields. This format is appropriate if the CSV
defines every packet field, one after another, without skipping any. The three column format automatically
calculates the bit offsets, assuming the fields are listed in order. See the :ref:`fourcolumn` format
for more flexibility.

.. csv-table:: Basic Layout CSV
:file: ../../ccsdspy/tests/data/packet_def/basic_csv_3col.csv
:widths: 30, 30, 30
:header-rows: 1

When the example above is loaded, five `~ccsdspy.PacketField` objects are defined
with varying names, data types, and bit lengths. To create a `~ccsdspy.PacketArray` instead, define the data type with
both the type and array shape.

.. csv-table:: Basic Layout CSV with `~ccsdspy.PacketArray`
:file: ../../ccsdspy/tests/data/packet_def/basic_csv_3col_with_array.csv
:widths: 30, 30, 30
:header-rows: 1

In the example above, `VOLTAGE` would instead be a `~ccsdspy.PacketArray` of type `int` with shape `(12, 24)`.
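For comparison, a sketch of the equivalent in-code definition (the first four fields are assumed to match the other three column examples on this page):

.. code-block:: python
import ccsdspy
from ccsdspy import PacketField, PacketArray

pkt = ccsdspy.FixedLength([
    PacketField(name='SHCOARSE', data_type='uint', bit_length=32),
    PacketField(name='SHFINE', data_type='uint', bit_length=20),
    PacketField(name='OPMODE', data_type='uint', bit_length=3),
    PacketField(name='SPACER', data_type='fill', bit_length=1),
    # VOLTAGE becomes a PacketArray with the shape taken from the CSV
    PacketArray(name='VOLTAGE', data_type='int', bit_length=8, array_shape=(12, 24)),
])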

For :ref:`variable`, the array shape string can be specified either as `expand` or as the name of another field.

.. csv-table:: Basic Layout CSV with `~ccsdspy.PacketArray` for Variable Length Packets
:file: ../../ccsdspy/tests/data/packet_def/basic_csv_3col_with_all.csv
:widths: 30, 30, 30
:header-rows: 1
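A definition like this is loaded as a `~ccsdspy.VariableLength` packet, for example:

.. code-block:: python
import ccsdspy
pkt = ccsdspy.VariableLength.from_file('basic_csv_3col_with_all.csv')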

.. _fourcolumn:

Extended Layout (Four Columns)
==============================

The extended CSV layout has columns for `name`, `data_type`, `bit_length`, and `bit_offset`.
The first row of the CSV should be a header line where the columns are named. Subsequent rows encode packet fields.
This format allows more flexibility than the basic layout because bit offsets are explicitly defined instead
of automatically calculated. Because of this, some packet fields can be skipped,
since the bit offset indicates exactly where each field begins.

.. csv-table:: Extended Layout CSV
:file: ../../ccsdspy/tests/data/packet_def/extended_csv_4col.csv
:widths: 30, 30, 30, 30
:header-rows: 1

When the example above is loaded, five `~ccsdspy.PacketField` objects are defined
with varying names, data types, and bit lengths. To create a `~ccsdspy.PacketArray` instead, define the data type with
both the type and array shape.

.. csv-table:: Extended Layout CSV with `~ccsdspy.PacketArray`
:file: ../../ccsdspy/tests/data/packet_def/extended_csv_4col_with_array.csv
:widths: 30, 30, 30, 30
:header-rows: 1

In the example above, `SHSCOARSE` would instead be a `~ccsdspy.PacketArray` of type `uint` with shape `(4)`.
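For comparison, explicit offsets can also be given when a packet is defined in code, via the optional `bit_offset` argument of `~ccsdspy.PacketField`; a sketch with illustrative offsets:

.. code-block:: python
import ccsdspy
from ccsdspy import PacketField

# Only the fields of interest are listed; bit_offset places each one explicitly,
# here assuming a 6 byte (48 bit) primary header followed by SHCOARSE.
pkt = ccsdspy.FixedLength([
    PacketField(name='SHCOARSE', data_type='uint', bit_length=32, bit_offset=48),
    PacketField(name='VOLTAGE', data_type='int', bit_length=8, bit_offset=104),
])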

.. note::
:ref:`variable` are not supported in the extended layout since `bit_offset` cannot be specified for variable length packets.

Limitations of the CSV format
=============================

The CSV format is in development and is currently limited. The limitations are:

* the byte order cannot be defined in the CSV.
* the array order cannot be defined in the CSV.
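Both can still be set when a packet is defined in code; a sketch (keyword values shown are assumptions based on the in-code API):

.. code-block:: python
from ccsdspy import PacketField, PacketArray

# byte order and array order are currently only configurable in code
field = PacketField(name='VOLTAGE', data_type='int', bit_length=16, byte_order='little')
array = PacketArray(name='SENSOR_GRID', data_type='uint', bit_length=16,
                    array_shape=(32, 32), array_order='F')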
3 changes: 3 additions & 0 deletions docs/user-guide/variablelength.rst
@@ -29,6 +29,9 @@ The result will be a dictionary with the names as the keys.
The values are arrays with the `~ccsdspy.PacketArray` field providing arrays with variable sizes.
It is also possible to get access to the packet primary header. See :ref:`getting-header`.

.. warning::
`bit_offset` cannot be specified for variable length packets. Instead, the packet definition must include every field, and the bit offsets are calculated automatically.

.. contents::
:depth: 2

4 changes: 2 additions & 2 deletions pyproject.toml
@@ -33,7 +33,7 @@ dependencies = [
[project.optional-dependencies]
dev = [
'coverage>=6.5.0',
'pytest>=7.1.3',
'pytest>=7.1.3, <8.1.0',
'pytest-astropy',
'pytest-cov',
'black==22.10.0',
@@ -61,7 +61,7 @@ write_to = "ccsdspy/_version.py"
minversion = "6.0"
testpaths = ["ccsdspy/tests", "docs"]
norecursedirs = ["build", "docs/_build", "docs/generated", "*.egg-info", "attic"]
doctest_plus = "enabled"
#doctest_plus = "enabled"
text_file_format = "rst"
addopts = "--doctest-modules"
collect_ignore_glob= ['_*.py']
2 changes: 2 additions & 0 deletions readthedocs.yml
@@ -10,6 +10,8 @@ build:
os: ubuntu-22.04
tools:
python: "3.12"
apt_packages:
- graphviz

# Build documentation in the docs/ directory with Sphinx
sphinx:
