first working commit of fits file creation
ehsteve committed Oct 25, 2024
1 parent a826db3 commit 51bbde9
Showing 7 changed files with 261 additions and 130 deletions.
132 changes: 77 additions & 55 deletions padre_meddea/calibration/calibration.py
@@ -18,6 +18,11 @@

from padre_meddea.util.util import create_science_filename, calc_time
from padre_meddea.io.file_tools import read_raw_file
from padre_meddea.io.fits_tools import (
add_process_info_to_header,
get_primary_header,
get_std_comment,
)

__all__ = [
"process_file",
@@ -51,40 +56,28 @@ def process_file(filename: Path, overwrite=False) -> list:
parsed_data = read_raw_file(file_path)
if parsed_data["photons"] is not None: # we have event list data
event_list, pkt_list = parsed_data["photons"]
primary_hdr = get_primary_header()
primary_hdr = add_process_info_to_header(primary_hdr)
primary_hdr["LEVEL"] = (0, get_std_comment("LEVEL"))
primary_hdr["DATATYPE"] = ("event_list", get_std_comment("DATATYPE"))
primary_hdr["ORIGAPID"] = (
padre_meddea.APID["photon"],
get_std_comment("ORIGAPID"),
)
primary_hdr["ORIGFILE"] = (file_path.name, get_std_comment("ORIGFILE"))

primary_hdr = fits.Header()

# fill in metadata
primary_hdr["DATE"] = (Time.now().fits, "FITS file creation date in UTC")
for this_keyword, this_str in zip(
["DATE-BEG", "DATE-END", "DATE-AVG"],
[
"Acquisition start time",
"Acquisition end time",
"Average time of acquisition",
],
):
for this_keyword in ["DATE-BEG", "DATE-END", "DATE-AVG"]:
primary_hdr[this_keyword] = (
event_list.meta.get(this_keyword, ""),
this_str,
get_std_comment(this_keyword),
)

primary_hdr["LEVEL"] = (0, "Data level of fits file")

# add processing information
primary_hdr = add_process_info_to_header(primary_hdr)

# custom keywords
primary_hdr["DATATYPE"] = ("event_list", "Description of the data")
primary_hdr["ORIGAPID"] = (padre_meddea.APID["photon"], "APID(s) of the originating data")
primary_hdr["ORIGFILE"] = (file_path.name, "Originating file(s)")

empty_primary = fits.PrimaryHDU(header=primary_hdr)
empty_primary_hdu = fits.PrimaryHDU(header=primary_hdr)
pkt_hdu = fits.BinTableHDU(pkt_list, name="PKT")
pkt_hdu.add_checksum()
hit_hdu = fits.BinTableHDU(event_list, name="SCI")
hit_hdu.add_checksum()
hdul = fits.HDUList([empty_primary, hit_hdu, pkt_hdu])
hdul = fits.HDUList([empty_primary_hdu, hit_hdu, pkt_hdu])

path = create_science_filename(
time=primary_hdr["DATE-BEG"],
@@ -102,43 +95,77 @@

# Write the file, with the overwrite option controlled by the environment variable
hdul.writeto(path, overwrite=overwrite)

# Store the output file path in a list
output_files.append(path)
if parsed_data["housekeeping"] is not None:
hk_data = parsed_data["housekeeping"]
# send data to AWS Timestream for Grafana dashboard
record_timeseries(hk_data, "housekeeping")
hk_table = Table(hk_data)
primary_hdr = fits.Header()
# fill in metadata
primary_hdr["DATE"] = (Time.now().fits, "FITS file creation date in UTC")
primary_hdr["LEVEL"] = (0, "Data level of fits file")
primary_hdr["DATATYPE"] = ("housekeeping", "Description of the data")
primary_hdr["ORIGAPID"] = (padre_meddea.APID["housekeeping"], "APID(s) of the originating data")
primary_hdr["ORIGFILE"] = (file_path.name, "Originating file(s)")
date_beg = calc_time(hk_data['timestamp'][0])
primary_hdr["DATEREF"] = (date_beg.fits, "Reference date")

# add processing information
primary_hdr = add_process_info_to_header(primary_hdr)

# add common fits keywords
fits_meta = read_fits_keyword_file(
padre_meddea._data_directory / "fits_keywords_primaryhdu.csv"
primary_hdr = get_primary_header()
primary_hdr = add_process_info_to_header(primary_hdr)
primary_hdr["LEVEL"] = (0, get_std_comment("LEVEL"))
primary_hdr["DATATYPE"] = ("housekeeping", get_std_comment("DATATYPE"))
primary_hdr["ORIGAPID"] = (
padre_meddea.APID["housekeeping"],
get_std_comment("ORIGAPID"),
)
for row in fits_meta:
primary_hdr[row["keyword"]] = (row["value"], row["comment"])
hk_table['seqcount'] = hk_table["CCSDS_SEQUENCE_COUNT"]
colnames_to_remove = ["CCSDS_VERSION_NUMBER", "CCSDS_PACKET_TYPE", "CCSDS_SECONDARY_FLAG", "CCSDS_SEQUENCE_FLAG", "CCSDS_APID", "CCSDS_SEQUENCE_COUNT", "CCSDS_PACKET_LENGTH", "CHECKSUM", "time"]
primary_hdr["ORIGFILE"] = (file_path.name, get_std_comment("ORIGFILE"))

date_beg = calc_time(hk_data["timestamp"][0])
primary_hdr["DATEREF"] = (date_beg.fits, get_std_comment("DATEREF"))

hk_table["seqcount"] = hk_table["CCSDS_SEQUENCE_COUNT"]
colnames_to_remove = [
"CCSDS_VERSION_NUMBER",
"CCSDS_PACKET_TYPE",
"CCSDS_SECONDARY_FLAG",
"CCSDS_SEQUENCE_FLAG",
"CCSDS_APID",
"CCSDS_SEQUENCE_COUNT",
"CCSDS_PACKET_LENGTH",
"CHECKSUM",
"time",
]
for this_col in colnames_to_remove:
if this_col in hk_table.colnames:
hk_table.remove_column(this_col)

empty_primary = fits.PrimaryHDU(header=primary_hdr)
hk_hdu = fits.BinTableHDU(hk_table, name="HK")
empty_primary_hdu = fits.PrimaryHDU(header=primary_hdr)
hk_hdu = fits.BinTableHDU(data=hk_table, name="HK")
hk_hdu.add_checksum()
hdul = fits.HDUList([empty_primary, hk_hdu])

# add command response data if it exists
if parsed_data["cmd_resp"] is not None:
data_ts = parsed_data["cmd_resp"]
this_header = fits.Header()
this_header["DATEREF"] = (
data_ts.time[0].fits,
get_std_comment("DATEREF"),
)
record_timeseries(data_ts, "housekeeping")
data_table = Table(data_ts)
colnames_to_remove = [
"CCSDS_VERSION_NUMBER",
"CCSDS_PACKET_TYPE",
"CCSDS_SECONDARY_FLAG",
"CCSDS_SEQUENCE_FLAG",
"CCSDS_APID",
"CCSDS_SEQUENCE_COUNT",
"CCSDS_PACKET_LENGTH",
"CHECKSUM",
"time",
]
for this_col in colnames_to_remove:
if this_col in data_table.colnames:
data_table.remove_column(this_col)
cmd_hdu = fits.BinTableHDU(data=data_table, name="READ")
cmd_hdu.add_checksum()
else: # if None, still add an empty binary table
this_header = fits.Header()
cmd_hdu = fits.BinTableHDU(data=None, header=this_header, name="READ")
hdul = fits.HDUList([empty_primary_hdu, hk_hdu, cmd_hdu])

path = create_science_filename(
time=date_beg,
@@ -149,13 +176,8 @@
)
hdul.writeto(path, overwrite=overwrite)
output_files.append(path)

# calibrated_file = calibrate_file(data_filename)
# data_plot_files = plot_file(data_filename)
# calib_plot_files = plot_file(calibrated_file)
if parsed_data["spectra"] is not None:
spec_data = parsed_data["spectra"]

# add other tasks below
return output_files
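For reference, a minimal sketch of how the refactored pipeline above might be exercised end to end. The module path and the input filename are illustrative assumptions; only process_file, the overwrite keyword, and the HDU layout (PRIMARY plus SCI/PKT, or PRIMARY plus HK/READ) come from the diff itself:

from pathlib import Path

from astropy.io import fits

from padre_meddea.calibration.calibration import process_file

# Hypothetical raw telemetry file; any file that read_raw_file can parse would do.
output_files = process_file(Path("PADREMDA0_raw.DAT"), overwrite=True)
for this_path in output_files:
    with fits.open(this_path) as hdul:
        hdul.info()  # e.g. PRIMARY + SCI + PKT, or PRIMARY + HK + READ
        print(hdul[0].header["DATATYPE"], hdul[0].header["ORIGFILE"])
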
28 changes: 28 additions & 0 deletions padre_meddea/data/fits/fits_keywords_dict.csv
@@ -0,0 +1,28 @@
keyword,comment
AUTHOR,Who designed the observation
CREATOR,Name of software pipeline that produced the FITS file
DETECTOR,Name of the detector
INFO_URL,A human-readable web page describing the data
OBSRVTRY,Name of the observatory
TIMESYS,Time scale of the time-related keywords
TELESCOP,Telescope/Sensor name
INSTRUME,Instrument name
MISSION,Mission name
ORIGIN,File originator
DATE-BEG,Acquisition start time
DATE-END,Acquisition end time
DATE-AVG,Average time of acquisition
LEVEL,Data level of fits file
DATE,File creation date in UTC
DATATYPE,Description of the data
ORIGAPID,APID(s) of the originating data
ORIGFILE,Originating file(s)
DATEREF,Reference date
PRSTEP(?P<count>[1-9]),Processing step type
PRPROC(?P<count>[1-9]),Name of procedure performing PRSTEP<count>
PRPVER(?P<count>[1-9]),Version of procedure PRPROC<count>
PRLIB(?P<count>[1-9])A,Software library containing PRPROC<count>
PRVER(?P<count>[1-9])A,Version of PRLIB<count>A
PRHSH(?P<count>[1-9])A,GIT commit hash for PRLIB<count>A
PRBRA(?P<count>[1-9])A,GIT/SVN repository branch of PRLIB<count>A
PRVER(?P<count>[1-9])B,Date of last commit of PRLIB<count>B
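
The keyword column doubles as a regular expression, which is how one row covers a whole numbered family (PRSTEP1 through PRSTEP9, and so on). The get_std_comment helper that calibration.py imports is not among the files shown here; a plausible sketch, assuming padre_meddea._data_directory points at the package data folder holding this CSV:

import re

from astropy.io import ascii

import padre_meddea


def get_std_comment(keyword: str) -> str:
    """Return the standard comment for a FITS keyword, honoring regex rows."""
    table = ascii.read(padre_meddea._data_directory / "fits" / "fits_keywords_dict.csv")
    for row in table:
        this_match = re.fullmatch(row["keyword"], keyword)
        if this_match:
            comment = row["comment"]
            # Fill in the captured digit for comments like "... PRSTEP<count>"
            if "count" in this_match.groupdict():
                comment = comment.replace("<count>", this_match["count"])
            return comment
    return ""

Under this sketch, get_std_comment("PRPROC2") matches the PRPROC(?P<count>[1-9]) row and returns "Name of procedure performing PRSTEP2".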
11 changes: 11 additions & 0 deletions padre_meddea/data/fits/fits_keywords_primaryhdu.csv
@@ -0,0 +1,11 @@
keyword,value
AUTHOR,Steven D. Christe
CREATOR,padre_meddea
DETECTOR,meddea
INFO_URL,https://padre-meddea.readthedocs.io/en/latest/user-guide/data.html
OBSRVTRY,PADRE
TIMESYS,UTC
TELESCOP,PADRE/MeDDEA
INSTRUME,MeDDEA
MISSION,PADRE
ORIGIN,NASA GSFC
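
These defaults feed get_primary_header, also imported from padre_meddea.io.fits_tools but not shown in this view. A sketch under the same assumptions, reusing get_std_comment from the snippet above so each default keyword carries its standard comment (add_process_info_to_header is left out for brevity):

from astropy.io import ascii, fits

import padre_meddea


def get_primary_header() -> fits.Header:
    """Build a primary HDU header pre-filled with the mission-wide defaults."""
    header = fits.Header()
    defaults = ascii.read(padre_meddea._data_directory / "fits" / "fits_keywords_primaryhdu.csv")
    for row in defaults:
        header[row["keyword"]] = (row["value"], get_std_comment(row["keyword"]))
    return header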