Skip to content

Commit

Permalink
Bug fix and adding spectrum fits file creation (#14)
Browse files Browse the repository at this point in the history
bug fix to is_consecutive and adding draft spectrum fits file creation
  • Loading branch information
ehsteve authored Dec 4, 2024
1 parent f294bc4 commit b966244
Show file tree
Hide file tree
Showing 4 changed files with 88 additions and 17 deletions.
55 changes: 53 additions & 2 deletions padre_meddea/calibration/calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from astropy.io import fits, ascii
from astropy.time import Time
from astropy.table import Table
from astropy.timeseries import TimeSeries

from swxsoc.util.util import record_timeseries

Expand Down Expand Up @@ -56,6 +57,9 @@ def process_file(filename: Path, overwrite=False) -> list:
parsed_data = read_raw_file(file_path)
if parsed_data["photons"] is not None: # we have event list data
event_list, pkt_list = parsed_data["photons"]
log.info(
f"Found photon data, {len(event_list)} photons and {len(pkt_list)} packets."
)
primary_hdr = get_primary_header()
primary_hdr = add_process_info_to_header(primary_hdr)
primary_hdr["LEVEL"] = (0, get_std_comment("LEVEL"))
Expand Down Expand Up @@ -137,7 +141,7 @@ def process_file(filename: Path, overwrite=False) -> list:
hk_hdu = fits.BinTableHDU(data=hk_table, name="HK")
hk_hdu.add_checksum()

# add command response data if it exists
# add command response data if it exists in the same fits file
if parsed_data["cmd_resp"] is not None:
data_ts = parsed_data["cmd_resp"]
this_header = fits.Header()
Expand Down Expand Up @@ -169,6 +173,7 @@ def process_file(filename: Path, overwrite=False) -> list:
hdul = fits.HDUList([empty_primary_hdu, hk_hdu, cmd_hdu])

path = create_science_filename(
"meddea",
time=date_beg,
level="l1",
descriptor="hk",
Expand All @@ -178,8 +183,54 @@ def process_file(filename: Path, overwrite=False) -> list:
hdul.writeto(path, overwrite=overwrite)
output_files.append(path)
if parsed_data["spectra"] is not None:
spec_data = parsed_data["spectra"]
timestamps, data, spectra, ids = parsed_data["spectra"]
asic_nums = (ids & 0b11100000) >> 5
channel_nums = ids & 0b00011111
time_s = data["TIME_S"]
time_clk = data["TIME_CLOCKS"]

primary_hdr = get_primary_header()
primary_hdr = add_process_info_to_header(primary_hdr)
primary_hdr["LEVEL"] = (0, get_std_comment("LEVEL"))
primary_hdr["DATATYPE"] = ("spectrum", get_std_comment("DATATYPE"))
primary_hdr["ORIGAPID"] = (
padre_meddea.APID["spectrum"],
get_std_comment("ORIGAPID"),
)
primary_hdr["ORIGFILE"] = (file_path.name, get_std_comment("ORIGFILE"))
dates = {
"DATE-BEG": timestamps[0].fits,
"DATE-END": timestamps[-1].fits,
"DATE-AVG": timestamps[len(timestamps) // 2].fits,
}
primary_hdr["DATEREF"] = (dates["DATE-BEG"], get_std_comment("DATEREF"))
for this_keyword, value in dates.items():
primary_hdr[this_keyword] = (
value,
get_std_comment(this_keyword),
)
spec_hdu = fits.ImageHDU(data=spectra, name="SPEC")
spec_hdu.add_checksum()
data_table = Table()
data_table["pkttimes"] = time_s
data_table["pktclock"] = time_clk
data_table["asic"] = asic_nums
data_table["channel"] = channel_nums
data_table["seqcount"] = data["CCSDS_SEQUENCE_COUNT"]
record_timeseries(TimeSeries(time=timestamps, data=data_table), "spectrum")
pkt_hdu = fits.BinTableHDU(data=data_table, name="PKT")
pkt_hdu.add_checksum()
hdul = fits.HDUList([empty_primary_hdu, spec_hdu, pkt_hdu])
path = create_science_filename(
"meddea",
time=dates["DATE-BEG"],
level="l1",
descriptor="spec",
test=True,
version="0.1.0",
)
hdul.writeto(path, overwrite=overwrite)
output_files.append(path)
# add other tasks below
return output_files

Expand Down
20 changes: 10 additions & 10 deletions padre_meddea/io/file_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,12 +328,12 @@ def parse_spectrum_packets(filename: Path):
packet_definition = packet_definition_hist2()
pkt = ccsdspy.FixedLength(packet_definition)
data = pkt.load(packet_bytes, include_primary_header=True)
timestamps = calc_time(data["TIMESTAMPS"], data["TIMESTAMPCLOCK"])
num_packets = len(timestamps)
timestamps = calc_time(data["TIME_S"], data["TIME_CLOCKS"])
num_packets = len(data["TIME_S"])
h = data["HISTOGRAM_DATA"].reshape((num_packets, 24, 513))
histogram_data = h[:, :, 1:] # remove the id field
ids = h[:, :, 0]
return timestamps, histogram_data, ids
return timestamps, data, histogram_data, ids


def parse_cmd_response_packets(filename: Path):
Expand Down Expand Up @@ -374,10 +374,10 @@ def parse_cmd_response_packets(filename: Path):
packet_definition = packet_definition_cmd_response()
pkt = ccsdspy.FixedLength(packet_definition)
data = pkt.load(packet_bytes, include_primary_header=True)
timestamps = calc_time(data["TIMESTAMPS"], data["TIMESTAMPCLOCK"])
timestamps = calc_time(data["TIME_S"], data["TIME_CLOCKS"])
data = {
"time_s": data["TIMESTAMPS"],
"time_clock": data["TIMESTAMPCLOCK"],
"time_s": data["TIME_S"],
"time_clock": data["TIME_CLOCKS"],
"address": data["ADDR"],
"value": data["VALUE"],
"seqcount": data["CCSDS_SEQUENCE_COUNT"],
Expand Down Expand Up @@ -438,8 +438,8 @@ def packet_definition_hist2():

# the header
p = [
PacketField(name="TIMESTAMPS", data_type="uint", bit_length=32),
PacketField(name="TIMESTAMPCLOCK", data_type="uint", bit_length=32),
PacketField(name="TIME_S", data_type="uint", bit_length=32),
PacketField(name="TIME_CLOCKS", data_type="uint", bit_length=32),
PacketField(name="INTEGRATION_TIME", data_type="uint", bit_length=32),
PacketField(name="LIVE_TIME", data_type="uint", bit_length=32),
]
Expand Down Expand Up @@ -477,8 +477,8 @@ def packet_definition_ph():
def packet_definition_cmd_response():
"""Return the packet definiton for the register read response"""
p = [
PacketField(name="TIMESTAMPS", data_type="uint", bit_length=32),
PacketField(name="TIMESTAMPCLOCK", data_type="uint", bit_length=32),
PacketField(name="TIME_S", data_type="uint", bit_length=32),
PacketField(name="TIME_CLOCKS", data_type="uint", bit_length=32),
PacketField(name="ADDR", data_type="uint", bit_length=16),
PacketField(name="VALUE", data_type="uint", bit_length=16),
PacketField(name="CHECKSUM", data_type="uint", bit_length=16),
Expand Down
5 changes: 5 additions & 0 deletions padre_meddea/tests/test_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,14 @@ def test_is_consecutive():
assert util.is_consecutive(np.arange(10))
assert util.is_consecutive(range(100))
assert util.is_consecutive(np.arange(10, 100, 1))
# test if the array loops over
assert util.is_consecutive(
np.concatenate((np.arange(0, 2**14), np.arange(0, 2000)))
)


def test_is_not_consecutive():
"""Test if not consecutive"""
assert not util.is_consecutive([0, 2, 3, 4, 5])
assert not util.is_consecutive(
np.concatenate((np.arange(1, 10), np.arange(11, 20)))
Expand Down
25 changes: 20 additions & 5 deletions padre_meddea/util/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,14 +102,29 @@ def has_baseline(filename: Path, packet_count=10) -> bool:
return np.sum(num_hits - np.floor(num_hits)) == 0


def is_consecutive(arr: np.array) -> bool:
    """Return True if the array contains consecutive integers with no missing numbers.

    NOTE(review): this version does not account for rollover of the packet
    sequence counter — a wrap from the maximum count back to 0 is reported
    as non-consecutive.
    """
    return np.all(np.diff(arr) == 1)


def str_to_fits_keyword(keyword: str) -> str:
    """Convert a string into a FITS-compatible keyword string.

    All non-alphanumeric characters are removed, the result is converted
    to upper case, and the keyword is truncated to at most 8 characters.

    Parameters
    ----------
    keyword : str
        The keyword string to clean.

    Returns
    -------
    str
        A FITS-compatible keyword (alphanumeric, upper case, <= 8 chars).
    """
    alnum_chars = [ch for ch in keyword if ch.isalnum()]
    cleaned = "".join(alnum_chars).strip().upper()
    return cleaned[:8]


def is_consecutive(arr: np.ndarray) -> bool:
    """Return True if the packet sequence numbers are all consecutive
    integers with no missing numbers, allowing for rollover of the 14-bit
    sequence counter.

    Parameters
    ----------
    arr : array-like
        Packet sequence counter values in the order received.

    Returns
    -------
    bool
        True if every segment between counter rollovers is consecutive.
    """
    MAX_SEQCOUNT = 2**14 - 1  # 16383, largest value of the 14-bit counter
    arr = np.asarray(arr)
    # indices where the counter hits its maximum value (rollover points)
    wrap_indices = np.flatnonzero(arr == MAX_SEQCOUNT)
    if wrap_indices.size == 0:  # no rollover occurred
        return bool(np.all(np.diff(arr) == 1))
    result = True
    segment_start = 0
    for wrap_ind in wrap_indices:
        # check each segment up to and including the rollover value
        segment = arr[segment_start : wrap_ind + 1]
        result = result and bool(np.all(np.diff(segment) == 1))
        segment_start = wrap_ind + 1
    # check the remainder after the final rollover.
    # BUG FIX: the previous code sliced from segment_start + 1, skipping the
    # first post-rollover element so a gap immediately after a wrap
    # (e.g. [..., 16383, 0, 5, 6]) was not detected.
    result = result and bool(np.all(np.diff(arr[segment_start:]) == 1))
    # NOTE(review): this does not verify that the counter restarts at 0
    # after a rollover — confirm whether that should also be required.
    return result

0 comments on commit b966244

Please sign in to comment.