Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fits meta #12

Merged
merged 22 commits into from
Nov 18, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
cleaning up fits tools
  • Loading branch information
ehsteve committed Oct 23, 2024
commit a826db303286ae6a7340c60af5888abee6344c3a
3 changes: 2 additions & 1 deletion docs/user-guide/level0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -73,4 +73,5 @@ Each spectrum has a total of 512 energy bins.

Level 0 housekeeping files
--------------------------
These files contain housekeeping data as described in the housekeeping packet.
These files contain housekeeping data as described in the housekeeping packet.
It also includes any register read responses that may exist during that time period.
63 changes: 1 addition & 62 deletions padre_meddea/calibration/calibration.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,10 @@
from astropy.table import Table

from swxsoc.util.util import record_timeseries
import git

import padre_meddea
from padre_meddea import log
from padre_meddea.io import file_tools
from padre_meddea.io import file_tools, fits_tools

from padre_meddea.util.util import create_science_filename, calc_time
from padre_meddea.io.file_tools import read_raw_file
Expand Down Expand Up @@ -80,13 +79,6 @@ def process_file(filename: Path, overwrite=False) -> list:
primary_hdr["ORIGAPID"] = (padre_meddea.APID["photon"], "APID(s) of the originating data")
primary_hdr["ORIGFILE"] = (file_path.name, "Originating file(s)")

# add common fits keywords
fits_meta = read_fits_keyword_file(
padre_meddea._data_directory / "fits_keywords_primaryhdu.csv"
)
for row in fits_meta:
primary_hdr[row["keyword"]] = (row["value"], row["comment"])

empty_primary = fits.PrimaryHDU(header=primary_hdr)
pkt_hdu = fits.BinTableHDU(pkt_list, name="PKT")
pkt_hdu.add_checksum()
Expand Down Expand Up @@ -176,50 +168,6 @@ def raw_to_l0(filename: Path):
data = file_tools.read_raw_file(filename)


def add_process_info_to_header(header: fits.Header) -> fits.Header:
"""Add processing info metadata to fits header.

Parameters
----------
header : fits.Header

Returns
-------
header : fits.Header
"""
header["PRSTEP1"] = ("PROCESS Raw to L1", "Processing step type")
header["PRPROC1"] = (
"padre_meddea.calibration.process",
"Name of procedure performing PRSTEP1",
)
header["PRPVER1"] = (
padre_meddea.__version__,
"Version of procedure PRPROC1",
)
header["PRLIB1A"] = (
"padre_meddea",
"Software library containing PRPROC1",
)
header["PRVER1A"] = (padre_meddea.__version__, "Version of PRLIB1A")
repo = git.Repo(search_parent_directories=True)
header["PRHSH1A"] = (
repo.head.object.hexsha,
"GIT commit hash for PRLIB1A",
)
header["PRBRA1A"] = (
repo.active_branch.name,
"GIT/SVN repository branch of PRLIB1A",
)
commits = list(repo.iter_commits("main", max_count=1))
header["PRVER1B"] = (
Time(commits[0].committed_datetime).fits,
"Date of last commit of PRLIB1B",
)
# primary_hdr["PRLOG1"] add log information, need to do this after the fact
# primary_hdr["PRENV1"] add information about processing env, need to do this after the fact
return header


def get_calibration_file(time: Time) -> Path:
"""
Given a time, return the appropriate calibration file.
Expand Down Expand Up @@ -262,12 +210,3 @@ def read_calibration_file(calib_filename: Path):
# if can't read the file

return None


def read_fits_keyword_file(csv_file: Path):
"""Read csv file with default fits metadata information."""
fits_meta_table = ascii.read(
padre_meddea._data_directory / "fits_keywords_primaryhdu.csv",
format="csv",
)
return fits_meta_table
2 changes: 2 additions & 0 deletions padre_meddea/data/README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,5 @@ Stores detector constants.
hk_channel_defs.csv
-------------------
Stores the definitions for the values provided in housekeeping packets.

fits
----
Directory containing data files related to FITS file creation (see its README).
2 changes: 1 addition & 1 deletion padre_meddea/data/calibration/README.rst
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Calbiration directory
Calibration directory
=====================

This directory contains calibration files included with the package source
Expand Down
13 changes: 13 additions & 0 deletions padre_meddea/data/fits/README.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
FITS file data directory
========================

This directory contains data related to FITS file creation.

fits_keywords_dict.csv
----------------------
Keyword names and comments.
Used to lookup standard comment based on keyword.

fits_keywords_primaryhdu.csv
----------------------------
Standard keyword values.
11 changes: 0 additions & 11 deletions padre_meddea/data/fits_keywords_primaryhdu.csv

This file was deleted.

92 changes: 92 additions & 0 deletions padre_meddea/io/fits_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
"""
This module provides a utilities to manage fits files reading and writing.
"""
import re
import git

from astropy.io import ascii
import astropy.io.fits as fits
from astropy.time import Time

import padre_meddea

FITS_HDR0 = ascii.read(padre_meddea._data_directory / "fits" / "fits_keywords_primaryhdu.csv",format="csv")
FITS_HDR0.add_index('keyword')
FITS_HDR_KEYTOCOMMENT = ascii.read(padre_meddea._data_directory / "fits" / "fits_keywords_dict.csv",format="csv")
FITS_HDR_KEYTOCOMMENT.add_index('keyword')


def get_primary_header() -> fits.Header:
    """Return a standard FITS file primary header.

    Builds a new header from the default keyword/value table
    (``FITS_HDR0``), attaching the standard comment for each keyword
    from ``FITS_HDR_KEYTOCOMMENT``.

    Returns
    -------
    header : fits.Header
    """
    hdr = fits.Header()
    for this_row in FITS_HDR0:
        this_keyword = this_row["keyword"]
        hdr[this_keyword] = (
            this_row["value"],
            FITS_HDR_KEYTOCOMMENT.loc[this_keyword]["comment"],
        )
    return hdr


def get_std_comment(keyword: str) -> str:
    """Given a keyword, return the standard comment for a header card.

    The keyword is first looked up directly (case-insensitively); if not
    found, each table entry is tried as a regular expression so that
    templated keywords (e.g. ``PRSTEP<n>``) can be matched, with any named
    groups substituted into the comment text.

    Parameters
    ----------
    keyword : str
        The FITS keyword to look up.

    Returns
    -------
    str or None
        The standard comment, or None if no match is found.
    """
    if keyword.upper() in FITS_HDR_KEYTOCOMMENT['keyword']:
        # BUGFIX: the index lookup must use the upper-cased keyword to match
        # the membership test above; the raw keyword raised KeyError for
        # lowercase input.
        return FITS_HDR_KEYTOCOMMENT.loc[keyword.upper()]['comment']
    for this_row in FITS_HDR_KEYTOCOMMENT:
        res = re.fullmatch(this_row['keyword'], keyword)
        if res:
            comment = this_row['comment']
            if len(res.groupdict()) > 0:  # substitute named groups into the comment
                for key, value in res.groupdict().items():
                    comment = comment.replace(f"<{key}>", value)
            return comment
    # no direct or pattern match found; explicit for clarity
    return None


def add_process_info_to_header(header: fits.Header, n=1) -> fits.Header:
    """Add processing info metadata to a fits header.

    It adds the following SOLARNET compatible FITS cards;
    PRSTEPn, PRPROCn, PRPVERn, PRLIBnA, PRVERnA, PRLIBnA, PRHSHnA, PRVERnB

    Parameters
    ----------
    header : fits.Header
        The fits header to add the new cards to
    n : int, default 1
        The processing step number. Must be greater than or equal to 1.

    Returns
    -------
    header : fits.Header
        The same header, modified in place.

    Raises
    ------
    ValueError
        If ``n`` is less than 1.
    """
    if n < 1:
        # BUGFIX: the exception was constructed but never raised.
        raise ValueError("Processing number, n, must be greater than or equal to 1.")
    header[f"PRSTEP{n}"] = ("PROCESS Raw to L1", get_std_comment(f'PRSTEP{n}'))
    header[f"PRPROC{n}"] = (
        "padre_meddea.calibration.process",
        get_std_comment(f'PRPROC{n}'),
    )
    header[f"PRPVER{n}"] = (
        padre_meddea.__version__,
        get_std_comment(f'PRPVER{n}'),
    )
    header[f"PRLIB{n}A"] = (
        "padre_meddea",
        get_std_comment(f'PRLIB{n}A'),
    )
    header[f"PRVER{n}A"] = (padre_meddea.__version__, get_std_comment(f'PRVER{n}A'))
    # record the state of the local git repository at processing time
    repo = git.Repo(search_parent_directories=True)
    header[f"PRHSH{n}A"] = (
        repo.head.object.hexsha,
        get_std_comment(f'PRHSH{n}A'),
    )
    header[f"PRBRA{n}A"] = (
        repo.active_branch.name,
        get_std_comment(f'PRBRA{n}A'),
    )
    commits = list(repo.iter_commits("main", max_count=1))
    header[f"PRVER{n}B"] = (
        Time(commits[0].committed_datetime).fits,
        get_std_comment(f'PRVER{n}B'),
    )
    # primary_hdr["PRLOG1"] add log information, need to do this after the fact
    # primary_hdr["PRENV1"] add information about processing env, need to do this after the fact
    return header
41 changes: 41 additions & 0 deletions padre_meddea/tests/test_fits_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
"""Test for the fits_tools module"""
import pytest

import astropy.io.fits as fits

from padre_meddea.io.fits_tools import *


def test_comment_lookup_hdr0():
    """Every keyword in fits_keywords_primaryhdu.csv must have a comment
    entry in fits_keywords_dict.csv."""
    known_comment_keywords = list(FITS_HDR_KEYTOCOMMENT['keyword'])
    for this_keyword in list(FITS_HDR0['keyword']):
        assert this_keyword in known_comment_keywords


def test_get_primary_header():
    """The primary header factory should produce a fits.Header instance."""
    header = get_primary_header()
    assert isinstance(header, fits.Header)


def test_add_process_info_to_header():
    """Processing-info cards should be appended and fully populated."""
    original = get_primary_header()
    updated = add_process_info_to_header(original.copy())
    # new cards must have been added
    assert len(updated) > len(original)
    preexisting = [card.keyword for card in original.cards]
    # each newly added card must carry a non-empty value and comment
    for this_card in updated.cards:
        if this_card.keyword not in preexisting:
            assert len(this_card.value) > 0
            assert len(this_card.comment) > 0

@pytest.mark.parametrize("test_input,expected", [("PRSTEP2", "Processing step type"),
("PRSTEP1", "Processing step type"),
("PRPROC3", "Name of procedure performing PRSTEP3"),
("PRHSH5A", "GIT commit hash for PRLIB5A")
])
def test_get_std_comment(test_input, expected):
assert get_std_comment(test_input) == expected
Loading