From 9becd73de8c3897b01656e6fb838e06eff62bb59 Mon Sep 17 00:00:00 2001
From: Steven Rieder
Date: Wed, 23 Oct 2024 14:50:27 +0200
Subject: [PATCH 01/63] update structure

---
 .gitignore                                      | 11 +++--------
 src/{ => sunbather}/RT.py                       |  0
 src/{ => sunbather}/RT_tables/...               |  0
   [the remaining ~910 RT_tables data files — *_levels_NIST.txt, *_lines_NIST.txt and,
    where present, *_levels_processed.txt for every element and ion from H through Zn,
    plus H_lines_NIST_all.txt — are each renamed unchanged from src/RT_tables/ to
    src/sunbather/RT_tables/]
 src/{ => sunbather}/RT_tables/clean_H_lines.py  |  0
 src/sunbather/__init__.py                       |  0
 src/{ => sunbather}/construct_parker.py         |  0
 src/{ => sunbather}/convergeT_parker.py         |  0
 src/{ => sunbather}/solveT.py                   |  0
 src/{ => sunbather}/species_enlim.txt           |  0
 src/{ => sunbather}/tools.py                    |  0
 919 files changed, 3 insertions(+), 8 deletions(-)

 rename src/{ => sunbather}/RT.py (100%)
   [followed by one "rename src/{ => sunbather}/RT_tables/<file> (100%)" line per
    RT_tables data file; the listing is truncated here at Cu+9_lines_NIST.txt]
rename src/{ => sunbather}/RT_tables/Cu+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Cu+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Cu+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Cu_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Cu_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Cu_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/F+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/F_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+6_levels_NIST.txt (100%) rename 
src/{ => sunbather}/RT_tables/Fe+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Fe_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Fe_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/H_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/H_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/H_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/H_lines_NIST_all.txt (100%) rename src/{ => sunbather}/RT_tables/He+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/He+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/He+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/He_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/He_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/He_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+7_lines_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/K+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/K_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/K_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Li+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Li+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Li_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Li_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg+_lines_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/Mg_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mg_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mg_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Mn_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Mn_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N+_levels_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/N+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/N_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/N_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Na_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Na_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+7_levels_NIST.txt (100%) rename 
src/{ => sunbather}/RT_tables/Ne+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ne_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ne_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ni_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ni_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+2_levels_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/O+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/O_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/O_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+9_levels_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/P+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/P_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/P_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/S_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/S_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+12_levels_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/Sc+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Sc_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Sc_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+4_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+5_lines_NIST.txt (100%) rename src/{ => 
sunbather}/RT_tables/Si+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Si_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Si_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+5_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Ti_levels_NIST.txt 
(100%) rename src/{ => sunbather}/RT_tables/Ti_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Ti_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+2_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+3_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+4_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+5_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+5_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+6_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+6_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+6_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+7_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+7_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+7_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+8_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+8_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+8_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+9_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+9_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+9_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V+_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V+_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/V_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/V_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+10_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+10_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+10_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+11_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+11_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+11_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+12_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+12_levels_processed.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+12_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+2_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+2_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+3_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+3_lines_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+4_levels_NIST.txt (100%) rename src/{ => sunbather}/RT_tables/Zn+4_lines_NIST.txt (100%) rename src/{ 
=> sunbather}/RT_tables/Zn+5_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+5_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+6_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+6_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+7_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+7_levels_processed.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+7_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+8_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+8_levels_processed.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+8_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+9_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+9_levels_processed.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+9_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn+_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn_levels_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn_levels_processed.txt (100%)
rename src/{ => sunbather}/RT_tables/Zn_lines_NIST.txt (100%)
rename src/{ => sunbather}/RT_tables/clean_H_lines.py (100%)
create mode 100644 src/sunbather/__init__.py
rename src/{ => sunbather}/construct_parker.py (100%)
rename src/{ => sunbather}/convergeT_parker.py (100%)
rename src/{ => sunbather}/solveT.py (100%)
rename src/{ => sunbather}/species_enlim.txt (100%)
rename src/{ => sunbather}/tools.py (100%)

diff --git a/.gitignore b/.gitignore
index 5020ed5..fa2cd40 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,12 +1,7 @@
 .DS_Store
-src/.DS_Store
-src/RT_tables/.DS_Store
-examples/.DS_Store
-examples/materials/.DS_Store
-tests/.DS_Store
-tests/materials/.DS_Store
-src/__pycache__/
-examples/.ipynb_checkpoints
+__pycache__/
+.ipynb_checkpoints
 examples/WASP52b_dT.csv
 examples/WASP52b_sigmaT.csv
 examples/WASP52b_nsig_fit.csv
+env/
diff --git a/src/RT.py b/src/sunbather/RT.py
similarity index 100%
rename from src/RT.py
rename to src/sunbather/RT.py
diff --git a/src/RT_tables/Al+10_levels_NIST.txt b/src/sunbather/RT_tables/Al+10_levels_NIST.txt
similarity index 100%
rename from src/RT_tables/Al+10_levels_NIST.txt
rename to src/sunbather/RT_tables/Al+10_levels_NIST.txt
diff --git a/src/RT_tables/Al+10_levels_processed.txt b/src/sunbather/RT_tables/Al+10_levels_processed.txt
similarity index 100%
rename from src/RT_tables/Al+10_levels_processed.txt
rename to src/sunbather/RT_tables/Al+10_levels_processed.txt
diff --git a/src/RT_tables/Al+10_lines_NIST.txt b/src/sunbather/RT_tables/Al+10_lines_NIST.txt
similarity index 100%
rename from src/RT_tables/Al+10_lines_NIST.txt
rename to src/sunbather/RT_tables/Al+10_lines_NIST.txt
diff --git a/src/RT_tables/Al+11_levels_NIST.txt b/src/sunbather/RT_tables/Al+11_levels_NIST.txt
similarity index 100%
rename from src/RT_tables/Al+11_levels_NIST.txt
rename to src/sunbather/RT_tables/Al+11_levels_NIST.txt
diff --git a/src/RT_tables/Al+11_levels_processed.txt b/src/sunbather/RT_tables/Al+11_levels_processed.txt
similarity index 100%
rename from src/RT_tables/Al+11_levels_processed.txt
rename to src/sunbather/RT_tables/Al+11_levels_processed.txt
diff --git a/src/RT_tables/Al+11_lines_NIST.txt b/src/sunbather/RT_tables/Al+11_lines_NIST.txt
similarity index 100%
rename from src/RT_tables/Al+11_lines_NIST.txt
rename to src/sunbather/RT_tables/Al+11_lines_NIST.txt
diff --git a/src/RT_tables/Al+12_levels_NIST.txt b/src/sunbather/RT_tables/Al+12_levels_NIST.txt
similarity index 100%
rename from src/RT_tables/Al+12_levels_NIST.txt rename to src/sunbather/RT_tables/Al+12_levels_NIST.txt diff --git a/src/RT_tables/Al+12_levels_processed.txt b/src/sunbather/RT_tables/Al+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+12_levels_processed.txt rename to src/sunbather/RT_tables/Al+12_levels_processed.txt diff --git a/src/RT_tables/Al+12_lines_NIST.txt b/src/sunbather/RT_tables/Al+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+12_lines_NIST.txt rename to src/sunbather/RT_tables/Al+12_lines_NIST.txt diff --git a/src/RT_tables/Al+2_levels_NIST.txt b/src/sunbather/RT_tables/Al+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+2_levels_NIST.txt rename to src/sunbather/RT_tables/Al+2_levels_NIST.txt diff --git a/src/RT_tables/Al+2_levels_processed.txt b/src/sunbather/RT_tables/Al+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+2_levels_processed.txt rename to src/sunbather/RT_tables/Al+2_levels_processed.txt diff --git a/src/RT_tables/Al+2_lines_NIST.txt b/src/sunbather/RT_tables/Al+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+2_lines_NIST.txt rename to src/sunbather/RT_tables/Al+2_lines_NIST.txt diff --git a/src/RT_tables/Al+3_levels_NIST.txt b/src/sunbather/RT_tables/Al+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+3_levels_NIST.txt rename to src/sunbather/RT_tables/Al+3_levels_NIST.txt diff --git a/src/RT_tables/Al+3_levels_processed.txt b/src/sunbather/RT_tables/Al+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+3_levels_processed.txt rename to src/sunbather/RT_tables/Al+3_levels_processed.txt diff --git a/src/RT_tables/Al+3_lines_NIST.txt b/src/sunbather/RT_tables/Al+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+3_lines_NIST.txt rename to src/sunbather/RT_tables/Al+3_lines_NIST.txt diff --git a/src/RT_tables/Al+4_levels_NIST.txt b/src/sunbather/RT_tables/Al+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+4_levels_NIST.txt rename to src/sunbather/RT_tables/Al+4_levels_NIST.txt diff --git a/src/RT_tables/Al+4_levels_processed.txt b/src/sunbather/RT_tables/Al+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+4_levels_processed.txt rename to src/sunbather/RT_tables/Al+4_levels_processed.txt diff --git a/src/RT_tables/Al+4_lines_NIST.txt b/src/sunbather/RT_tables/Al+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+4_lines_NIST.txt rename to src/sunbather/RT_tables/Al+4_lines_NIST.txt diff --git a/src/RT_tables/Al+5_levels_NIST.txt b/src/sunbather/RT_tables/Al+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+5_levels_NIST.txt rename to src/sunbather/RT_tables/Al+5_levels_NIST.txt diff --git a/src/RT_tables/Al+5_levels_processed.txt b/src/sunbather/RT_tables/Al+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+5_levels_processed.txt rename to src/sunbather/RT_tables/Al+5_levels_processed.txt diff --git a/src/RT_tables/Al+5_lines_NIST.txt b/src/sunbather/RT_tables/Al+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+5_lines_NIST.txt rename to src/sunbather/RT_tables/Al+5_lines_NIST.txt diff --git a/src/RT_tables/Al+6_levels_NIST.txt b/src/sunbather/RT_tables/Al+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+6_levels_NIST.txt rename to src/sunbather/RT_tables/Al+6_levels_NIST.txt diff --git a/src/RT_tables/Al+6_levels_processed.txt 
b/src/sunbather/RT_tables/Al+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+6_levels_processed.txt rename to src/sunbather/RT_tables/Al+6_levels_processed.txt diff --git a/src/RT_tables/Al+6_lines_NIST.txt b/src/sunbather/RT_tables/Al+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+6_lines_NIST.txt rename to src/sunbather/RT_tables/Al+6_lines_NIST.txt diff --git a/src/RT_tables/Al+7_levels_NIST.txt b/src/sunbather/RT_tables/Al+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+7_levels_NIST.txt rename to src/sunbather/RT_tables/Al+7_levels_NIST.txt diff --git a/src/RT_tables/Al+7_levels_processed.txt b/src/sunbather/RT_tables/Al+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+7_levels_processed.txt rename to src/sunbather/RT_tables/Al+7_levels_processed.txt diff --git a/src/RT_tables/Al+7_lines_NIST.txt b/src/sunbather/RT_tables/Al+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+7_lines_NIST.txt rename to src/sunbather/RT_tables/Al+7_lines_NIST.txt diff --git a/src/RT_tables/Al+8_levels_NIST.txt b/src/sunbather/RT_tables/Al+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+8_levels_NIST.txt rename to src/sunbather/RT_tables/Al+8_levels_NIST.txt diff --git a/src/RT_tables/Al+8_levels_processed.txt b/src/sunbather/RT_tables/Al+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+8_levels_processed.txt rename to src/sunbather/RT_tables/Al+8_levels_processed.txt diff --git a/src/RT_tables/Al+8_lines_NIST.txt b/src/sunbather/RT_tables/Al+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+8_lines_NIST.txt rename to src/sunbather/RT_tables/Al+8_lines_NIST.txt diff --git a/src/RT_tables/Al+9_levels_NIST.txt b/src/sunbather/RT_tables/Al+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+9_levels_NIST.txt rename to src/sunbather/RT_tables/Al+9_levels_NIST.txt diff --git a/src/RT_tables/Al+9_levels_processed.txt b/src/sunbather/RT_tables/Al+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+9_levels_processed.txt rename to src/sunbather/RT_tables/Al+9_levels_processed.txt diff --git a/src/RT_tables/Al+9_lines_NIST.txt b/src/sunbather/RT_tables/Al+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+9_lines_NIST.txt rename to src/sunbather/RT_tables/Al+9_lines_NIST.txt diff --git a/src/RT_tables/Al+_levels_NIST.txt b/src/sunbather/RT_tables/Al+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al+_levels_NIST.txt rename to src/sunbather/RT_tables/Al+_levels_NIST.txt diff --git a/src/RT_tables/Al+_levels_processed.txt b/src/sunbather/RT_tables/Al+_levels_processed.txt similarity index 100% rename from src/RT_tables/Al+_levels_processed.txt rename to src/sunbather/RT_tables/Al+_levels_processed.txt diff --git a/src/RT_tables/Al+_lines_NIST.txt b/src/sunbather/RT_tables/Al+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al+_lines_NIST.txt rename to src/sunbather/RT_tables/Al+_lines_NIST.txt diff --git a/src/RT_tables/Al_levels_NIST.txt b/src/sunbather/RT_tables/Al_levels_NIST.txt similarity index 100% rename from src/RT_tables/Al_levels_NIST.txt rename to src/sunbather/RT_tables/Al_levels_NIST.txt diff --git a/src/RT_tables/Al_levels_processed.txt b/src/sunbather/RT_tables/Al_levels_processed.txt similarity index 100% rename from src/RT_tables/Al_levels_processed.txt rename to src/sunbather/RT_tables/Al_levels_processed.txt diff --git 
a/src/RT_tables/Al_lines_NIST.txt b/src/sunbather/RT_tables/Al_lines_NIST.txt similarity index 100% rename from src/RT_tables/Al_lines_NIST.txt rename to src/sunbather/RT_tables/Al_lines_NIST.txt diff --git a/src/RT_tables/Ar+10_levels_NIST.txt b/src/sunbather/RT_tables/Ar+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+10_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+10_levels_NIST.txt diff --git a/src/RT_tables/Ar+10_levels_processed.txt b/src/sunbather/RT_tables/Ar+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+10_levels_processed.txt rename to src/sunbather/RT_tables/Ar+10_levels_processed.txt diff --git a/src/RT_tables/Ar+10_lines_NIST.txt b/src/sunbather/RT_tables/Ar+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+10_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+10_lines_NIST.txt diff --git a/src/RT_tables/Ar+11_levels_NIST.txt b/src/sunbather/RT_tables/Ar+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+11_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+11_levels_NIST.txt diff --git a/src/RT_tables/Ar+11_levels_processed.txt b/src/sunbather/RT_tables/Ar+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+11_levels_processed.txt rename to src/sunbather/RT_tables/Ar+11_levels_processed.txt diff --git a/src/RT_tables/Ar+11_lines_NIST.txt b/src/sunbather/RT_tables/Ar+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+11_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+11_lines_NIST.txt diff --git a/src/RT_tables/Ar+12_levels_NIST.txt b/src/sunbather/RT_tables/Ar+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+12_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+12_levels_NIST.txt diff --git a/src/RT_tables/Ar+12_levels_processed.txt b/src/sunbather/RT_tables/Ar+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+12_levels_processed.txt rename to src/sunbather/RT_tables/Ar+12_levels_processed.txt diff --git a/src/RT_tables/Ar+12_lines_NIST.txt b/src/sunbather/RT_tables/Ar+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+12_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+12_lines_NIST.txt diff --git a/src/RT_tables/Ar+2_levels_NIST.txt b/src/sunbather/RT_tables/Ar+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+2_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+2_levels_NIST.txt diff --git a/src/RT_tables/Ar+2_levels_processed.txt b/src/sunbather/RT_tables/Ar+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+2_levels_processed.txt rename to src/sunbather/RT_tables/Ar+2_levels_processed.txt diff --git a/src/RT_tables/Ar+2_lines_NIST.txt b/src/sunbather/RT_tables/Ar+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+2_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+2_lines_NIST.txt diff --git a/src/RT_tables/Ar+3_levels_NIST.txt b/src/sunbather/RT_tables/Ar+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+3_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+3_levels_NIST.txt diff --git a/src/RT_tables/Ar+3_levels_processed.txt b/src/sunbather/RT_tables/Ar+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+3_levels_processed.txt rename to src/sunbather/RT_tables/Ar+3_levels_processed.txt diff --git a/src/RT_tables/Ar+3_lines_NIST.txt b/src/sunbather/RT_tables/Ar+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+3_lines_NIST.txt rename to 
src/sunbather/RT_tables/Ar+3_lines_NIST.txt diff --git a/src/RT_tables/Ar+4_levels_NIST.txt b/src/sunbather/RT_tables/Ar+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+4_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+4_levels_NIST.txt diff --git a/src/RT_tables/Ar+4_levels_processed.txt b/src/sunbather/RT_tables/Ar+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+4_levels_processed.txt rename to src/sunbather/RT_tables/Ar+4_levels_processed.txt diff --git a/src/RT_tables/Ar+4_lines_NIST.txt b/src/sunbather/RT_tables/Ar+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+4_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+4_lines_NIST.txt diff --git a/src/RT_tables/Ar+5_levels_NIST.txt b/src/sunbather/RT_tables/Ar+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+5_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+5_levels_NIST.txt diff --git a/src/RT_tables/Ar+5_levels_processed.txt b/src/sunbather/RT_tables/Ar+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+5_levels_processed.txt rename to src/sunbather/RT_tables/Ar+5_levels_processed.txt diff --git a/src/RT_tables/Ar+5_lines_NIST.txt b/src/sunbather/RT_tables/Ar+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+5_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+5_lines_NIST.txt diff --git a/src/RT_tables/Ar+6_levels_NIST.txt b/src/sunbather/RT_tables/Ar+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+6_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+6_levels_NIST.txt diff --git a/src/RT_tables/Ar+6_levels_processed.txt b/src/sunbather/RT_tables/Ar+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+6_levels_processed.txt rename to src/sunbather/RT_tables/Ar+6_levels_processed.txt diff --git a/src/RT_tables/Ar+6_lines_NIST.txt b/src/sunbather/RT_tables/Ar+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+6_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+6_lines_NIST.txt diff --git a/src/RT_tables/Ar+7_levels_NIST.txt b/src/sunbather/RT_tables/Ar+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+7_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+7_levels_NIST.txt diff --git a/src/RT_tables/Ar+7_levels_processed.txt b/src/sunbather/RT_tables/Ar+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+7_levels_processed.txt rename to src/sunbather/RT_tables/Ar+7_levels_processed.txt diff --git a/src/RT_tables/Ar+7_lines_NIST.txt b/src/sunbather/RT_tables/Ar+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+7_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+7_lines_NIST.txt diff --git a/src/RT_tables/Ar+8_levels_NIST.txt b/src/sunbather/RT_tables/Ar+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+8_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+8_levels_NIST.txt diff --git a/src/RT_tables/Ar+8_levels_processed.txt b/src/sunbather/RT_tables/Ar+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+8_levels_processed.txt rename to src/sunbather/RT_tables/Ar+8_levels_processed.txt diff --git a/src/RT_tables/Ar+8_lines_NIST.txt b/src/sunbather/RT_tables/Ar+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+8_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+8_lines_NIST.txt diff --git a/src/RT_tables/Ar+9_levels_NIST.txt b/src/sunbather/RT_tables/Ar+9_levels_NIST.txt similarity index 100% rename from 
src/RT_tables/Ar+9_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+9_levels_NIST.txt diff --git a/src/RT_tables/Ar+9_levels_processed.txt b/src/sunbather/RT_tables/Ar+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+9_levels_processed.txt rename to src/sunbather/RT_tables/Ar+9_levels_processed.txt diff --git a/src/RT_tables/Ar+9_lines_NIST.txt b/src/sunbather/RT_tables/Ar+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+9_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+9_lines_NIST.txt diff --git a/src/RT_tables/Ar+_levels_NIST.txt b/src/sunbather/RT_tables/Ar+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar+_levels_NIST.txt rename to src/sunbather/RT_tables/Ar+_levels_NIST.txt diff --git a/src/RT_tables/Ar+_levels_processed.txt b/src/sunbather/RT_tables/Ar+_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar+_levels_processed.txt rename to src/sunbather/RT_tables/Ar+_levels_processed.txt diff --git a/src/RT_tables/Ar+_lines_NIST.txt b/src/sunbather/RT_tables/Ar+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar+_lines_NIST.txt rename to src/sunbather/RT_tables/Ar+_lines_NIST.txt diff --git a/src/RT_tables/Ar_levels_NIST.txt b/src/sunbather/RT_tables/Ar_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ar_levels_NIST.txt rename to src/sunbather/RT_tables/Ar_levels_NIST.txt diff --git a/src/RT_tables/Ar_levels_processed.txt b/src/sunbather/RT_tables/Ar_levels_processed.txt similarity index 100% rename from src/RT_tables/Ar_levels_processed.txt rename to src/sunbather/RT_tables/Ar_levels_processed.txt diff --git a/src/RT_tables/Ar_lines_NIST.txt b/src/sunbather/RT_tables/Ar_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ar_lines_NIST.txt rename to src/sunbather/RT_tables/Ar_lines_NIST.txt diff --git a/src/RT_tables/B+2_levels_NIST.txt b/src/sunbather/RT_tables/B+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/B+2_levels_NIST.txt rename to src/sunbather/RT_tables/B+2_levels_NIST.txt diff --git a/src/RT_tables/B+2_levels_processed.txt b/src/sunbather/RT_tables/B+2_levels_processed.txt similarity index 100% rename from src/RT_tables/B+2_levels_processed.txt rename to src/sunbather/RT_tables/B+2_levels_processed.txt diff --git a/src/RT_tables/B+2_lines_NIST.txt b/src/sunbather/RT_tables/B+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/B+2_lines_NIST.txt rename to src/sunbather/RT_tables/B+2_lines_NIST.txt diff --git a/src/RT_tables/B+3_levels_NIST.txt b/src/sunbather/RT_tables/B+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/B+3_levels_NIST.txt rename to src/sunbather/RT_tables/B+3_levels_NIST.txt diff --git a/src/RT_tables/B+3_levels_processed.txt b/src/sunbather/RT_tables/B+3_levels_processed.txt similarity index 100% rename from src/RT_tables/B+3_levels_processed.txt rename to src/sunbather/RT_tables/B+3_levels_processed.txt diff --git a/src/RT_tables/B+3_lines_NIST.txt b/src/sunbather/RT_tables/B+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/B+3_lines_NIST.txt rename to src/sunbather/RT_tables/B+3_lines_NIST.txt diff --git a/src/RT_tables/B+4_levels_NIST.txt b/src/sunbather/RT_tables/B+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/B+4_levels_NIST.txt rename to src/sunbather/RT_tables/B+4_levels_NIST.txt diff --git a/src/RT_tables/B+4_levels_processed.txt b/src/sunbather/RT_tables/B+4_levels_processed.txt similarity index 100% rename from 
src/RT_tables/B+4_levels_processed.txt rename to src/sunbather/RT_tables/B+4_levels_processed.txt diff --git a/src/RT_tables/B+4_lines_NIST.txt b/src/sunbather/RT_tables/B+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/B+4_lines_NIST.txt rename to src/sunbather/RT_tables/B+4_lines_NIST.txt diff --git a/src/RT_tables/B+_levels_NIST.txt b/src/sunbather/RT_tables/B+_levels_NIST.txt similarity index 100% rename from src/RT_tables/B+_levels_NIST.txt rename to src/sunbather/RT_tables/B+_levels_NIST.txt diff --git a/src/RT_tables/B+_levels_processed.txt b/src/sunbather/RT_tables/B+_levels_processed.txt similarity index 100% rename from src/RT_tables/B+_levels_processed.txt rename to src/sunbather/RT_tables/B+_levels_processed.txt diff --git a/src/RT_tables/B+_lines_NIST.txt b/src/sunbather/RT_tables/B+_lines_NIST.txt similarity index 100% rename from src/RT_tables/B+_lines_NIST.txt rename to src/sunbather/RT_tables/B+_lines_NIST.txt diff --git a/src/RT_tables/B_levels_NIST.txt b/src/sunbather/RT_tables/B_levels_NIST.txt similarity index 100% rename from src/RT_tables/B_levels_NIST.txt rename to src/sunbather/RT_tables/B_levels_NIST.txt diff --git a/src/RT_tables/B_levels_processed.txt b/src/sunbather/RT_tables/B_levels_processed.txt similarity index 100% rename from src/RT_tables/B_levels_processed.txt rename to src/sunbather/RT_tables/B_levels_processed.txt diff --git a/src/RT_tables/B_lines_NIST.txt b/src/sunbather/RT_tables/B_lines_NIST.txt similarity index 100% rename from src/RT_tables/B_lines_NIST.txt rename to src/sunbather/RT_tables/B_lines_NIST.txt diff --git a/src/RT_tables/Be+2_levels_NIST.txt b/src/sunbather/RT_tables/Be+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Be+2_levels_NIST.txt rename to src/sunbather/RT_tables/Be+2_levels_NIST.txt diff --git a/src/RT_tables/Be+2_levels_processed.txt b/src/sunbather/RT_tables/Be+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Be+2_levels_processed.txt rename to src/sunbather/RT_tables/Be+2_levels_processed.txt diff --git a/src/RT_tables/Be+2_lines_NIST.txt b/src/sunbather/RT_tables/Be+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Be+2_lines_NIST.txt rename to src/sunbather/RT_tables/Be+2_lines_NIST.txt diff --git a/src/RT_tables/Be+3_levels_NIST.txt b/src/sunbather/RT_tables/Be+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Be+3_levels_NIST.txt rename to src/sunbather/RT_tables/Be+3_levels_NIST.txt diff --git a/src/RT_tables/Be+3_levels_processed.txt b/src/sunbather/RT_tables/Be+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Be+3_levels_processed.txt rename to src/sunbather/RT_tables/Be+3_levels_processed.txt diff --git a/src/RT_tables/Be+3_lines_NIST.txt b/src/sunbather/RT_tables/Be+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Be+3_lines_NIST.txt rename to src/sunbather/RT_tables/Be+3_lines_NIST.txt diff --git a/src/RT_tables/Be+_levels_NIST.txt b/src/sunbather/RT_tables/Be+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Be+_levels_NIST.txt rename to src/sunbather/RT_tables/Be+_levels_NIST.txt diff --git a/src/RT_tables/Be+_levels_processed.txt b/src/sunbather/RT_tables/Be+_levels_processed.txt similarity index 100% rename from src/RT_tables/Be+_levels_processed.txt rename to src/sunbather/RT_tables/Be+_levels_processed.txt diff --git a/src/RT_tables/Be+_lines_NIST.txt b/src/sunbather/RT_tables/Be+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Be+_lines_NIST.txt 
rename to src/sunbather/RT_tables/Be+_lines_NIST.txt diff --git a/src/RT_tables/Be_levels_NIST.txt b/src/sunbather/RT_tables/Be_levels_NIST.txt similarity index 100% rename from src/RT_tables/Be_levels_NIST.txt rename to src/sunbather/RT_tables/Be_levels_NIST.txt diff --git a/src/RT_tables/Be_levels_processed.txt b/src/sunbather/RT_tables/Be_levels_processed.txt similarity index 100% rename from src/RT_tables/Be_levels_processed.txt rename to src/sunbather/RT_tables/Be_levels_processed.txt diff --git a/src/RT_tables/Be_lines_NIST.txt b/src/sunbather/RT_tables/Be_lines_NIST.txt similarity index 100% rename from src/RT_tables/Be_lines_NIST.txt rename to src/sunbather/RT_tables/Be_lines_NIST.txt diff --git a/src/RT_tables/C+2_levels_NIST.txt b/src/sunbather/RT_tables/C+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/C+2_levels_NIST.txt rename to src/sunbather/RT_tables/C+2_levels_NIST.txt diff --git a/src/RT_tables/C+2_levels_processed.txt b/src/sunbather/RT_tables/C+2_levels_processed.txt similarity index 100% rename from src/RT_tables/C+2_levels_processed.txt rename to src/sunbather/RT_tables/C+2_levels_processed.txt diff --git a/src/RT_tables/C+2_lines_NIST.txt b/src/sunbather/RT_tables/C+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/C+2_lines_NIST.txt rename to src/sunbather/RT_tables/C+2_lines_NIST.txt diff --git a/src/RT_tables/C+3_levels_NIST.txt b/src/sunbather/RT_tables/C+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/C+3_levels_NIST.txt rename to src/sunbather/RT_tables/C+3_levels_NIST.txt diff --git a/src/RT_tables/C+3_levels_processed.txt b/src/sunbather/RT_tables/C+3_levels_processed.txt similarity index 100% rename from src/RT_tables/C+3_levels_processed.txt rename to src/sunbather/RT_tables/C+3_levels_processed.txt diff --git a/src/RT_tables/C+3_lines_NIST.txt b/src/sunbather/RT_tables/C+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/C+3_lines_NIST.txt rename to src/sunbather/RT_tables/C+3_lines_NIST.txt diff --git a/src/RT_tables/C+4_levels_NIST.txt b/src/sunbather/RT_tables/C+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/C+4_levels_NIST.txt rename to src/sunbather/RT_tables/C+4_levels_NIST.txt diff --git a/src/RT_tables/C+4_levels_processed.txt b/src/sunbather/RT_tables/C+4_levels_processed.txt similarity index 100% rename from src/RT_tables/C+4_levels_processed.txt rename to src/sunbather/RT_tables/C+4_levels_processed.txt diff --git a/src/RT_tables/C+4_lines_NIST.txt b/src/sunbather/RT_tables/C+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/C+4_lines_NIST.txt rename to src/sunbather/RT_tables/C+4_lines_NIST.txt diff --git a/src/RT_tables/C+5_levels_NIST.txt b/src/sunbather/RT_tables/C+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/C+5_levels_NIST.txt rename to src/sunbather/RT_tables/C+5_levels_NIST.txt diff --git a/src/RT_tables/C+5_levels_processed.txt b/src/sunbather/RT_tables/C+5_levels_processed.txt similarity index 100% rename from src/RT_tables/C+5_levels_processed.txt rename to src/sunbather/RT_tables/C+5_levels_processed.txt diff --git a/src/RT_tables/C+5_lines_NIST.txt b/src/sunbather/RT_tables/C+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/C+5_lines_NIST.txt rename to src/sunbather/RT_tables/C+5_lines_NIST.txt diff --git a/src/RT_tables/C+_levels_NIST.txt b/src/sunbather/RT_tables/C+_levels_NIST.txt similarity index 100% rename from src/RT_tables/C+_levels_NIST.txt rename to 
src/sunbather/RT_tables/C+_levels_NIST.txt diff --git a/src/RT_tables/C+_levels_processed.txt b/src/sunbather/RT_tables/C+_levels_processed.txt similarity index 100% rename from src/RT_tables/C+_levels_processed.txt rename to src/sunbather/RT_tables/C+_levels_processed.txt diff --git a/src/RT_tables/C+_lines_NIST.txt b/src/sunbather/RT_tables/C+_lines_NIST.txt similarity index 100% rename from src/RT_tables/C+_lines_NIST.txt rename to src/sunbather/RT_tables/C+_lines_NIST.txt diff --git a/src/RT_tables/C_levels_NIST.txt b/src/sunbather/RT_tables/C_levels_NIST.txt similarity index 100% rename from src/RT_tables/C_levels_NIST.txt rename to src/sunbather/RT_tables/C_levels_NIST.txt diff --git a/src/RT_tables/C_levels_processed.txt b/src/sunbather/RT_tables/C_levels_processed.txt similarity index 100% rename from src/RT_tables/C_levels_processed.txt rename to src/sunbather/RT_tables/C_levels_processed.txt diff --git a/src/RT_tables/C_lines_NIST.txt b/src/sunbather/RT_tables/C_lines_NIST.txt similarity index 100% rename from src/RT_tables/C_lines_NIST.txt rename to src/sunbather/RT_tables/C_lines_NIST.txt diff --git a/src/RT_tables/Ca+10_levels_NIST.txt b/src/sunbather/RT_tables/Ca+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+10_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+10_levels_NIST.txt diff --git a/src/RT_tables/Ca+10_levels_processed.txt b/src/sunbather/RT_tables/Ca+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+10_levels_processed.txt rename to src/sunbather/RT_tables/Ca+10_levels_processed.txt diff --git a/src/RT_tables/Ca+10_lines_NIST.txt b/src/sunbather/RT_tables/Ca+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+10_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+10_lines_NIST.txt diff --git a/src/RT_tables/Ca+11_levels_NIST.txt b/src/sunbather/RT_tables/Ca+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+11_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+11_levels_NIST.txt diff --git a/src/RT_tables/Ca+11_levels_processed.txt b/src/sunbather/RT_tables/Ca+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+11_levels_processed.txt rename to src/sunbather/RT_tables/Ca+11_levels_processed.txt diff --git a/src/RT_tables/Ca+11_lines_NIST.txt b/src/sunbather/RT_tables/Ca+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+11_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+11_lines_NIST.txt diff --git a/src/RT_tables/Ca+12_levels_NIST.txt b/src/sunbather/RT_tables/Ca+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+12_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+12_levels_NIST.txt diff --git a/src/RT_tables/Ca+12_levels_processed.txt b/src/sunbather/RT_tables/Ca+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+12_levels_processed.txt rename to src/sunbather/RT_tables/Ca+12_levels_processed.txt diff --git a/src/RT_tables/Ca+12_lines_NIST.txt b/src/sunbather/RT_tables/Ca+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+12_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+12_lines_NIST.txt diff --git a/src/RT_tables/Ca+2_levels_NIST.txt b/src/sunbather/RT_tables/Ca+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+2_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+2_levels_NIST.txt diff --git a/src/RT_tables/Ca+2_levels_processed.txt b/src/sunbather/RT_tables/Ca+2_levels_processed.txt similarity index 100% rename from 
src/RT_tables/Ca+2_levels_processed.txt rename to src/sunbather/RT_tables/Ca+2_levels_processed.txt diff --git a/src/RT_tables/Ca+2_lines_NIST.txt b/src/sunbather/RT_tables/Ca+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+2_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+2_lines_NIST.txt diff --git a/src/RT_tables/Ca+3_levels_NIST.txt b/src/sunbather/RT_tables/Ca+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+3_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+3_levels_NIST.txt diff --git a/src/RT_tables/Ca+3_levels_processed.txt b/src/sunbather/RT_tables/Ca+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+3_levels_processed.txt rename to src/sunbather/RT_tables/Ca+3_levels_processed.txt diff --git a/src/RT_tables/Ca+3_lines_NIST.txt b/src/sunbather/RT_tables/Ca+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+3_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+3_lines_NIST.txt diff --git a/src/RT_tables/Ca+4_levels_NIST.txt b/src/sunbather/RT_tables/Ca+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+4_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+4_levels_NIST.txt diff --git a/src/RT_tables/Ca+4_levels_processed.txt b/src/sunbather/RT_tables/Ca+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+4_levels_processed.txt rename to src/sunbather/RT_tables/Ca+4_levels_processed.txt diff --git a/src/RT_tables/Ca+4_lines_NIST.txt b/src/sunbather/RT_tables/Ca+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+4_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+4_lines_NIST.txt diff --git a/src/RT_tables/Ca+5_levels_NIST.txt b/src/sunbather/RT_tables/Ca+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+5_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+5_levels_NIST.txt diff --git a/src/RT_tables/Ca+5_levels_processed.txt b/src/sunbather/RT_tables/Ca+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+5_levels_processed.txt rename to src/sunbather/RT_tables/Ca+5_levels_processed.txt diff --git a/src/RT_tables/Ca+5_lines_NIST.txt b/src/sunbather/RT_tables/Ca+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+5_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+5_lines_NIST.txt diff --git a/src/RT_tables/Ca+6_levels_NIST.txt b/src/sunbather/RT_tables/Ca+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+6_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+6_levels_NIST.txt diff --git a/src/RT_tables/Ca+6_levels_processed.txt b/src/sunbather/RT_tables/Ca+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+6_levels_processed.txt rename to src/sunbather/RT_tables/Ca+6_levels_processed.txt diff --git a/src/RT_tables/Ca+6_lines_NIST.txt b/src/sunbather/RT_tables/Ca+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+6_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+6_lines_NIST.txt diff --git a/src/RT_tables/Ca+7_levels_NIST.txt b/src/sunbather/RT_tables/Ca+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+7_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+7_levels_NIST.txt diff --git a/src/RT_tables/Ca+7_levels_processed.txt b/src/sunbather/RT_tables/Ca+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+7_levels_processed.txt rename to src/sunbather/RT_tables/Ca+7_levels_processed.txt diff --git a/src/RT_tables/Ca+7_lines_NIST.txt 
b/src/sunbather/RT_tables/Ca+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+7_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+7_lines_NIST.txt diff --git a/src/RT_tables/Ca+8_levels_NIST.txt b/src/sunbather/RT_tables/Ca+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+8_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+8_levels_NIST.txt diff --git a/src/RT_tables/Ca+8_levels_processed.txt b/src/sunbather/RT_tables/Ca+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+8_levels_processed.txt rename to src/sunbather/RT_tables/Ca+8_levels_processed.txt diff --git a/src/RT_tables/Ca+8_lines_NIST.txt b/src/sunbather/RT_tables/Ca+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+8_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+8_lines_NIST.txt diff --git a/src/RT_tables/Ca+9_levels_NIST.txt b/src/sunbather/RT_tables/Ca+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+9_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+9_levels_NIST.txt diff --git a/src/RT_tables/Ca+9_levels_processed.txt b/src/sunbather/RT_tables/Ca+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+9_levels_processed.txt rename to src/sunbather/RT_tables/Ca+9_levels_processed.txt diff --git a/src/RT_tables/Ca+9_lines_NIST.txt b/src/sunbather/RT_tables/Ca+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+9_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+9_lines_NIST.txt diff --git a/src/RT_tables/Ca+_levels_NIST.txt b/src/sunbather/RT_tables/Ca+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca+_levels_NIST.txt rename to src/sunbather/RT_tables/Ca+_levels_NIST.txt diff --git a/src/RT_tables/Ca+_levels_processed.txt b/src/sunbather/RT_tables/Ca+_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca+_levels_processed.txt rename to src/sunbather/RT_tables/Ca+_levels_processed.txt diff --git a/src/RT_tables/Ca+_lines_NIST.txt b/src/sunbather/RT_tables/Ca+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca+_lines_NIST.txt rename to src/sunbather/RT_tables/Ca+_lines_NIST.txt diff --git a/src/RT_tables/Ca_levels_NIST.txt b/src/sunbather/RT_tables/Ca_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ca_levels_NIST.txt rename to src/sunbather/RT_tables/Ca_levels_NIST.txt diff --git a/src/RT_tables/Ca_levels_processed.txt b/src/sunbather/RT_tables/Ca_levels_processed.txt similarity index 100% rename from src/RT_tables/Ca_levels_processed.txt rename to src/sunbather/RT_tables/Ca_levels_processed.txt diff --git a/src/RT_tables/Ca_lines_NIST.txt b/src/sunbather/RT_tables/Ca_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ca_lines_NIST.txt rename to src/sunbather/RT_tables/Ca_lines_NIST.txt diff --git a/src/RT_tables/Cl+10_levels_NIST.txt b/src/sunbather/RT_tables/Cl+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+10_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+10_levels_NIST.txt diff --git a/src/RT_tables/Cl+10_levels_processed.txt b/src/sunbather/RT_tables/Cl+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+10_levels_processed.txt rename to src/sunbather/RT_tables/Cl+10_levels_processed.txt diff --git a/src/RT_tables/Cl+10_lines_NIST.txt b/src/sunbather/RT_tables/Cl+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+10_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+10_lines_NIST.txt diff --git 
a/src/RT_tables/Cl+11_levels_NIST.txt b/src/sunbather/RT_tables/Cl+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+11_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+11_levels_NIST.txt diff --git a/src/RT_tables/Cl+11_levels_processed.txt b/src/sunbather/RT_tables/Cl+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+11_levels_processed.txt rename to src/sunbather/RT_tables/Cl+11_levels_processed.txt diff --git a/src/RT_tables/Cl+11_lines_NIST.txt b/src/sunbather/RT_tables/Cl+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+11_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+11_lines_NIST.txt diff --git a/src/RT_tables/Cl+12_levels_NIST.txt b/src/sunbather/RT_tables/Cl+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+12_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+12_levels_NIST.txt diff --git a/src/RT_tables/Cl+12_levels_processed.txt b/src/sunbather/RT_tables/Cl+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+12_levels_processed.txt rename to src/sunbather/RT_tables/Cl+12_levels_processed.txt diff --git a/src/RT_tables/Cl+12_lines_NIST.txt b/src/sunbather/RT_tables/Cl+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+12_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+12_lines_NIST.txt diff --git a/src/RT_tables/Cl+2_levels_NIST.txt b/src/sunbather/RT_tables/Cl+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+2_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+2_levels_NIST.txt diff --git a/src/RT_tables/Cl+2_levels_processed.txt b/src/sunbather/RT_tables/Cl+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+2_levels_processed.txt rename to src/sunbather/RT_tables/Cl+2_levels_processed.txt diff --git a/src/RT_tables/Cl+2_lines_NIST.txt b/src/sunbather/RT_tables/Cl+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+2_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+2_lines_NIST.txt diff --git a/src/RT_tables/Cl+3_levels_NIST.txt b/src/sunbather/RT_tables/Cl+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+3_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+3_levels_NIST.txt diff --git a/src/RT_tables/Cl+3_levels_processed.txt b/src/sunbather/RT_tables/Cl+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+3_levels_processed.txt rename to src/sunbather/RT_tables/Cl+3_levels_processed.txt diff --git a/src/RT_tables/Cl+3_lines_NIST.txt b/src/sunbather/RT_tables/Cl+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+3_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+3_lines_NIST.txt diff --git a/src/RT_tables/Cl+4_levels_NIST.txt b/src/sunbather/RT_tables/Cl+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+4_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+4_levels_NIST.txt diff --git a/src/RT_tables/Cl+4_levels_processed.txt b/src/sunbather/RT_tables/Cl+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+4_levels_processed.txt rename to src/sunbather/RT_tables/Cl+4_levels_processed.txt diff --git a/src/RT_tables/Cl+4_lines_NIST.txt b/src/sunbather/RT_tables/Cl+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+4_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+4_lines_NIST.txt diff --git a/src/RT_tables/Cl+5_levels_NIST.txt b/src/sunbather/RT_tables/Cl+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+5_levels_NIST.txt rename to 
src/sunbather/RT_tables/Cl+5_levels_NIST.txt diff --git a/src/RT_tables/Cl+5_levels_processed.txt b/src/sunbather/RT_tables/Cl+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+5_levels_processed.txt rename to src/sunbather/RT_tables/Cl+5_levels_processed.txt diff --git a/src/RT_tables/Cl+5_lines_NIST.txt b/src/sunbather/RT_tables/Cl+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+5_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+5_lines_NIST.txt diff --git a/src/RT_tables/Cl+6_levels_NIST.txt b/src/sunbather/RT_tables/Cl+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+6_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+6_levels_NIST.txt diff --git a/src/RT_tables/Cl+6_levels_processed.txt b/src/sunbather/RT_tables/Cl+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+6_levels_processed.txt rename to src/sunbather/RT_tables/Cl+6_levels_processed.txt diff --git a/src/RT_tables/Cl+6_lines_NIST.txt b/src/sunbather/RT_tables/Cl+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+6_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+6_lines_NIST.txt diff --git a/src/RT_tables/Cl+7_levels_NIST.txt b/src/sunbather/RT_tables/Cl+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+7_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+7_levels_NIST.txt diff --git a/src/RT_tables/Cl+7_levels_processed.txt b/src/sunbather/RT_tables/Cl+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+7_levels_processed.txt rename to src/sunbather/RT_tables/Cl+7_levels_processed.txt diff --git a/src/RT_tables/Cl+7_lines_NIST.txt b/src/sunbather/RT_tables/Cl+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+7_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+7_lines_NIST.txt diff --git a/src/RT_tables/Cl+8_levels_NIST.txt b/src/sunbather/RT_tables/Cl+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+8_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+8_levels_NIST.txt diff --git a/src/RT_tables/Cl+8_levels_processed.txt b/src/sunbather/RT_tables/Cl+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+8_levels_processed.txt rename to src/sunbather/RT_tables/Cl+8_levels_processed.txt diff --git a/src/RT_tables/Cl+8_lines_NIST.txt b/src/sunbather/RT_tables/Cl+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+8_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+8_lines_NIST.txt diff --git a/src/RT_tables/Cl+9_levels_NIST.txt b/src/sunbather/RT_tables/Cl+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+9_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+9_levels_NIST.txt diff --git a/src/RT_tables/Cl+9_levels_processed.txt b/src/sunbather/RT_tables/Cl+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl+9_levels_processed.txt rename to src/sunbather/RT_tables/Cl+9_levels_processed.txt diff --git a/src/RT_tables/Cl+9_lines_NIST.txt b/src/sunbather/RT_tables/Cl+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+9_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+9_lines_NIST.txt diff --git a/src/RT_tables/Cl+_levels_NIST.txt b/src/sunbather/RT_tables/Cl+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl+_levels_NIST.txt rename to src/sunbather/RT_tables/Cl+_levels_NIST.txt diff --git a/src/RT_tables/Cl+_levels_processed.txt b/src/sunbather/RT_tables/Cl+_levels_processed.txt similarity index 100% rename from 
src/RT_tables/Cl+_levels_processed.txt rename to src/sunbather/RT_tables/Cl+_levels_processed.txt diff --git a/src/RT_tables/Cl+_lines_NIST.txt b/src/sunbather/RT_tables/Cl+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl+_lines_NIST.txt rename to src/sunbather/RT_tables/Cl+_lines_NIST.txt diff --git a/src/RT_tables/Cl_levels_NIST.txt b/src/sunbather/RT_tables/Cl_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cl_levels_NIST.txt rename to src/sunbather/RT_tables/Cl_levels_NIST.txt diff --git a/src/RT_tables/Cl_levels_processed.txt b/src/sunbather/RT_tables/Cl_levels_processed.txt similarity index 100% rename from src/RT_tables/Cl_levels_processed.txt rename to src/sunbather/RT_tables/Cl_levels_processed.txt diff --git a/src/RT_tables/Cl_lines_NIST.txt b/src/sunbather/RT_tables/Cl_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cl_lines_NIST.txt rename to src/sunbather/RT_tables/Cl_lines_NIST.txt diff --git a/src/RT_tables/Co+10_levels_NIST.txt b/src/sunbather/RT_tables/Co+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+10_levels_NIST.txt rename to src/sunbather/RT_tables/Co+10_levels_NIST.txt diff --git a/src/RT_tables/Co+10_levels_processed.txt b/src/sunbather/RT_tables/Co+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+10_levels_processed.txt rename to src/sunbather/RT_tables/Co+10_levels_processed.txt diff --git a/src/RT_tables/Co+10_lines_NIST.txt b/src/sunbather/RT_tables/Co+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+10_lines_NIST.txt rename to src/sunbather/RT_tables/Co+10_lines_NIST.txt diff --git a/src/RT_tables/Co+11_levels_NIST.txt b/src/sunbather/RT_tables/Co+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+11_levels_NIST.txt rename to src/sunbather/RT_tables/Co+11_levels_NIST.txt diff --git a/src/RT_tables/Co+11_levels_processed.txt b/src/sunbather/RT_tables/Co+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+11_levels_processed.txt rename to src/sunbather/RT_tables/Co+11_levels_processed.txt diff --git a/src/RT_tables/Co+11_lines_NIST.txt b/src/sunbather/RT_tables/Co+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+11_lines_NIST.txt rename to src/sunbather/RT_tables/Co+11_lines_NIST.txt diff --git a/src/RT_tables/Co+12_levels_NIST.txt b/src/sunbather/RT_tables/Co+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+12_levels_NIST.txt rename to src/sunbather/RT_tables/Co+12_levels_NIST.txt diff --git a/src/RT_tables/Co+12_levels_processed.txt b/src/sunbather/RT_tables/Co+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+12_levels_processed.txt rename to src/sunbather/RT_tables/Co+12_levels_processed.txt diff --git a/src/RT_tables/Co+12_lines_NIST.txt b/src/sunbather/RT_tables/Co+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+12_lines_NIST.txt rename to src/sunbather/RT_tables/Co+12_lines_NIST.txt diff --git a/src/RT_tables/Co+2_levels_NIST.txt b/src/sunbather/RT_tables/Co+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+2_levels_NIST.txt rename to src/sunbather/RT_tables/Co+2_levels_NIST.txt diff --git a/src/RT_tables/Co+2_levels_processed.txt b/src/sunbather/RT_tables/Co+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+2_levels_processed.txt rename to src/sunbather/RT_tables/Co+2_levels_processed.txt diff --git a/src/RT_tables/Co+2_lines_NIST.txt 
b/src/sunbather/RT_tables/Co+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+2_lines_NIST.txt rename to src/sunbather/RT_tables/Co+2_lines_NIST.txt diff --git a/src/RT_tables/Co+3_levels_NIST.txt b/src/sunbather/RT_tables/Co+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+3_levels_NIST.txt rename to src/sunbather/RT_tables/Co+3_levels_NIST.txt diff --git a/src/RT_tables/Co+3_lines_NIST.txt b/src/sunbather/RT_tables/Co+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+3_lines_NIST.txt rename to src/sunbather/RT_tables/Co+3_lines_NIST.txt diff --git a/src/RT_tables/Co+4_levels_NIST.txt b/src/sunbather/RT_tables/Co+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+4_levels_NIST.txt rename to src/sunbather/RT_tables/Co+4_levels_NIST.txt diff --git a/src/RT_tables/Co+4_lines_NIST.txt b/src/sunbather/RT_tables/Co+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+4_lines_NIST.txt rename to src/sunbather/RT_tables/Co+4_lines_NIST.txt diff --git a/src/RT_tables/Co+5_levels_NIST.txt b/src/sunbather/RT_tables/Co+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+5_levels_NIST.txt rename to src/sunbather/RT_tables/Co+5_levels_NIST.txt diff --git a/src/RT_tables/Co+5_lines_NIST.txt b/src/sunbather/RT_tables/Co+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+5_lines_NIST.txt rename to src/sunbather/RT_tables/Co+5_lines_NIST.txt diff --git a/src/RT_tables/Co+6_levels_NIST.txt b/src/sunbather/RT_tables/Co+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+6_levels_NIST.txt rename to src/sunbather/RT_tables/Co+6_levels_NIST.txt diff --git a/src/RT_tables/Co+6_lines_NIST.txt b/src/sunbather/RT_tables/Co+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+6_lines_NIST.txt rename to src/sunbather/RT_tables/Co+6_lines_NIST.txt diff --git a/src/RT_tables/Co+7_levels_NIST.txt b/src/sunbather/RT_tables/Co+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+7_levels_NIST.txt rename to src/sunbather/RT_tables/Co+7_levels_NIST.txt diff --git a/src/RT_tables/Co+7_levels_processed.txt b/src/sunbather/RT_tables/Co+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+7_levels_processed.txt rename to src/sunbather/RT_tables/Co+7_levels_processed.txt diff --git a/src/RT_tables/Co+7_lines_NIST.txt b/src/sunbather/RT_tables/Co+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+7_lines_NIST.txt rename to src/sunbather/RT_tables/Co+7_lines_NIST.txt diff --git a/src/RT_tables/Co+8_levels_NIST.txt b/src/sunbather/RT_tables/Co+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+8_levels_NIST.txt rename to src/sunbather/RT_tables/Co+8_levels_NIST.txt diff --git a/src/RT_tables/Co+8_levels_processed.txt b/src/sunbather/RT_tables/Co+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+8_levels_processed.txt rename to src/sunbather/RT_tables/Co+8_levels_processed.txt diff --git a/src/RT_tables/Co+8_lines_NIST.txt b/src/sunbather/RT_tables/Co+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+8_lines_NIST.txt rename to src/sunbather/RT_tables/Co+8_lines_NIST.txt diff --git a/src/RT_tables/Co+9_levels_NIST.txt b/src/sunbather/RT_tables/Co+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+9_levels_NIST.txt rename to src/sunbather/RT_tables/Co+9_levels_NIST.txt diff --git a/src/RT_tables/Co+9_levels_processed.txt 
b/src/sunbather/RT_tables/Co+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+9_levels_processed.txt rename to src/sunbather/RT_tables/Co+9_levels_processed.txt diff --git a/src/RT_tables/Co+9_lines_NIST.txt b/src/sunbather/RT_tables/Co+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+9_lines_NIST.txt rename to src/sunbather/RT_tables/Co+9_lines_NIST.txt diff --git a/src/RT_tables/Co+_levels_NIST.txt b/src/sunbather/RT_tables/Co+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co+_levels_NIST.txt rename to src/sunbather/RT_tables/Co+_levels_NIST.txt diff --git a/src/RT_tables/Co+_levels_processed.txt b/src/sunbather/RT_tables/Co+_levels_processed.txt similarity index 100% rename from src/RT_tables/Co+_levels_processed.txt rename to src/sunbather/RT_tables/Co+_levels_processed.txt diff --git a/src/RT_tables/Co+_lines_NIST.txt b/src/sunbather/RT_tables/Co+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co+_lines_NIST.txt rename to src/sunbather/RT_tables/Co+_lines_NIST.txt diff --git a/src/RT_tables/Co_levels_NIST.txt b/src/sunbather/RT_tables/Co_levels_NIST.txt similarity index 100% rename from src/RT_tables/Co_levels_NIST.txt rename to src/sunbather/RT_tables/Co_levels_NIST.txt diff --git a/src/RT_tables/Co_levels_processed.txt b/src/sunbather/RT_tables/Co_levels_processed.txt similarity index 100% rename from src/RT_tables/Co_levels_processed.txt rename to src/sunbather/RT_tables/Co_levels_processed.txt diff --git a/src/RT_tables/Co_lines_NIST.txt b/src/sunbather/RT_tables/Co_lines_NIST.txt similarity index 100% rename from src/RT_tables/Co_lines_NIST.txt rename to src/sunbather/RT_tables/Co_lines_NIST.txt diff --git a/src/RT_tables/Cr+10_levels_NIST.txt b/src/sunbather/RT_tables/Cr+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+10_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+10_levels_NIST.txt diff --git a/src/RT_tables/Cr+10_levels_processed.txt b/src/sunbather/RT_tables/Cr+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+10_levels_processed.txt rename to src/sunbather/RT_tables/Cr+10_levels_processed.txt diff --git a/src/RT_tables/Cr+10_lines_NIST.txt b/src/sunbather/RT_tables/Cr+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+10_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+10_lines_NIST.txt diff --git a/src/RT_tables/Cr+11_levels_NIST.txt b/src/sunbather/RT_tables/Cr+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+11_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+11_levels_NIST.txt diff --git a/src/RT_tables/Cr+11_levels_processed.txt b/src/sunbather/RT_tables/Cr+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+11_levels_processed.txt rename to src/sunbather/RT_tables/Cr+11_levels_processed.txt diff --git a/src/RT_tables/Cr+11_lines_NIST.txt b/src/sunbather/RT_tables/Cr+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+11_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+11_lines_NIST.txt diff --git a/src/RT_tables/Cr+12_levels_NIST.txt b/src/sunbather/RT_tables/Cr+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+12_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+12_levels_NIST.txt diff --git a/src/RT_tables/Cr+12_levels_processed.txt b/src/sunbather/RT_tables/Cr+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+12_levels_processed.txt rename to src/sunbather/RT_tables/Cr+12_levels_processed.txt diff 
--git a/src/RT_tables/Cr+12_lines_NIST.txt b/src/sunbather/RT_tables/Cr+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+12_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+12_lines_NIST.txt diff --git a/src/RT_tables/Cr+2_levels_NIST.txt b/src/sunbather/RT_tables/Cr+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+2_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+2_levels_NIST.txt diff --git a/src/RT_tables/Cr+2_lines_NIST.txt b/src/sunbather/RT_tables/Cr+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+2_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+2_lines_NIST.txt diff --git a/src/RT_tables/Cr+3_levels_NIST.txt b/src/sunbather/RT_tables/Cr+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+3_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+3_levels_NIST.txt diff --git a/src/RT_tables/Cr+3_levels_processed.txt b/src/sunbather/RT_tables/Cr+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+3_levels_processed.txt rename to src/sunbather/RT_tables/Cr+3_levels_processed.txt diff --git a/src/RT_tables/Cr+3_lines_NIST.txt b/src/sunbather/RT_tables/Cr+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+3_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+3_lines_NIST.txt diff --git a/src/RT_tables/Cr+4_levels_NIST.txt b/src/sunbather/RT_tables/Cr+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+4_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+4_levels_NIST.txt diff --git a/src/RT_tables/Cr+4_levels_processed.txt b/src/sunbather/RT_tables/Cr+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+4_levels_processed.txt rename to src/sunbather/RT_tables/Cr+4_levels_processed.txt diff --git a/src/RT_tables/Cr+4_lines_NIST.txt b/src/sunbather/RT_tables/Cr+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+4_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+4_lines_NIST.txt diff --git a/src/RT_tables/Cr+5_levels_NIST.txt b/src/sunbather/RT_tables/Cr+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+5_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+5_levels_NIST.txt diff --git a/src/RT_tables/Cr+5_lines_NIST.txt b/src/sunbather/RT_tables/Cr+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+5_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+5_lines_NIST.txt diff --git a/src/RT_tables/Cr+6_levels_NIST.txt b/src/sunbather/RT_tables/Cr+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+6_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+6_levels_NIST.txt diff --git a/src/RT_tables/Cr+6_lines_NIST.txt b/src/sunbather/RT_tables/Cr+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+6_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+6_lines_NIST.txt diff --git a/src/RT_tables/Cr+7_levels_NIST.txt b/src/sunbather/RT_tables/Cr+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+7_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+7_levels_NIST.txt diff --git a/src/RT_tables/Cr+7_levels_processed.txt b/src/sunbather/RT_tables/Cr+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+7_levels_processed.txt rename to src/sunbather/RT_tables/Cr+7_levels_processed.txt diff --git a/src/RT_tables/Cr+7_lines_NIST.txt b/src/sunbather/RT_tables/Cr+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+7_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+7_lines_NIST.txt diff --git 
a/src/RT_tables/Cr+8_levels_NIST.txt b/src/sunbather/RT_tables/Cr+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+8_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+8_levels_NIST.txt diff --git a/src/RT_tables/Cr+8_levels_processed.txt b/src/sunbather/RT_tables/Cr+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+8_levels_processed.txt rename to src/sunbather/RT_tables/Cr+8_levels_processed.txt diff --git a/src/RT_tables/Cr+8_lines_NIST.txt b/src/sunbather/RT_tables/Cr+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+8_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+8_lines_NIST.txt diff --git a/src/RT_tables/Cr+9_levels_NIST.txt b/src/sunbather/RT_tables/Cr+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+9_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+9_levels_NIST.txt diff --git a/src/RT_tables/Cr+9_levels_processed.txt b/src/sunbather/RT_tables/Cr+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+9_levels_processed.txt rename to src/sunbather/RT_tables/Cr+9_levels_processed.txt diff --git a/src/RT_tables/Cr+9_lines_NIST.txt b/src/sunbather/RT_tables/Cr+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+9_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+9_lines_NIST.txt diff --git a/src/RT_tables/Cr+_levels_NIST.txt b/src/sunbather/RT_tables/Cr+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr+_levels_NIST.txt rename to src/sunbather/RT_tables/Cr+_levels_NIST.txt diff --git a/src/RT_tables/Cr+_levels_processed.txt b/src/sunbather/RT_tables/Cr+_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr+_levels_processed.txt rename to src/sunbather/RT_tables/Cr+_levels_processed.txt diff --git a/src/RT_tables/Cr+_lines_NIST.txt b/src/sunbather/RT_tables/Cr+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr+_lines_NIST.txt rename to src/sunbather/RT_tables/Cr+_lines_NIST.txt diff --git a/src/RT_tables/Cr_levels_NIST.txt b/src/sunbather/RT_tables/Cr_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cr_levels_NIST.txt rename to src/sunbather/RT_tables/Cr_levels_NIST.txt diff --git a/src/RT_tables/Cr_levels_processed.txt b/src/sunbather/RT_tables/Cr_levels_processed.txt similarity index 100% rename from src/RT_tables/Cr_levels_processed.txt rename to src/sunbather/RT_tables/Cr_levels_processed.txt diff --git a/src/RT_tables/Cr_lines_NIST.txt b/src/sunbather/RT_tables/Cr_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cr_lines_NIST.txt rename to src/sunbather/RT_tables/Cr_lines_NIST.txt diff --git a/src/RT_tables/Cu+10_levels_NIST.txt b/src/sunbather/RT_tables/Cu+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+10_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+10_levels_NIST.txt diff --git a/src/RT_tables/Cu+10_levels_processed.txt b/src/sunbather/RT_tables/Cu+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+10_levels_processed.txt rename to src/sunbather/RT_tables/Cu+10_levels_processed.txt diff --git a/src/RT_tables/Cu+10_lines_NIST.txt b/src/sunbather/RT_tables/Cu+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+10_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+10_lines_NIST.txt diff --git a/src/RT_tables/Cu+11_levels_NIST.txt b/src/sunbather/RT_tables/Cu+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+11_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+11_levels_NIST.txt 
diff --git a/src/RT_tables/Cu+11_levels_processed.txt b/src/sunbather/RT_tables/Cu+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+11_levels_processed.txt rename to src/sunbather/RT_tables/Cu+11_levels_processed.txt diff --git a/src/RT_tables/Cu+11_lines_NIST.txt b/src/sunbather/RT_tables/Cu+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+11_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+11_lines_NIST.txt diff --git a/src/RT_tables/Cu+12_levels_NIST.txt b/src/sunbather/RT_tables/Cu+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+12_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+12_levels_NIST.txt diff --git a/src/RT_tables/Cu+12_levels_processed.txt b/src/sunbather/RT_tables/Cu+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+12_levels_processed.txt rename to src/sunbather/RT_tables/Cu+12_levels_processed.txt diff --git a/src/RT_tables/Cu+12_lines_NIST.txt b/src/sunbather/RT_tables/Cu+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+12_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+12_lines_NIST.txt diff --git a/src/RT_tables/Cu+2_levels_NIST.txt b/src/sunbather/RT_tables/Cu+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+2_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+2_levels_NIST.txt diff --git a/src/RT_tables/Cu+2_lines_NIST.txt b/src/sunbather/RT_tables/Cu+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+2_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+2_lines_NIST.txt diff --git a/src/RT_tables/Cu+3_levels_NIST.txt b/src/sunbather/RT_tables/Cu+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+3_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+3_levels_NIST.txt diff --git a/src/RT_tables/Cu+3_lines_NIST.txt b/src/sunbather/RT_tables/Cu+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+3_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+3_lines_NIST.txt diff --git a/src/RT_tables/Cu+4_levels_NIST.txt b/src/sunbather/RT_tables/Cu+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+4_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+4_levels_NIST.txt diff --git a/src/RT_tables/Cu+4_lines_NIST.txt b/src/sunbather/RT_tables/Cu+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+4_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+4_lines_NIST.txt diff --git a/src/RT_tables/Cu+5_levels_NIST.txt b/src/sunbather/RT_tables/Cu+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+5_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+5_levels_NIST.txt diff --git a/src/RT_tables/Cu+5_lines_NIST.txt b/src/sunbather/RT_tables/Cu+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+5_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+5_lines_NIST.txt diff --git a/src/RT_tables/Cu+6_levels_NIST.txt b/src/sunbather/RT_tables/Cu+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+6_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+6_levels_NIST.txt diff --git a/src/RT_tables/Cu+6_lines_NIST.txt b/src/sunbather/RT_tables/Cu+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+6_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+6_lines_NIST.txt diff --git a/src/RT_tables/Cu+7_levels_NIST.txt b/src/sunbather/RT_tables/Cu+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+7_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+7_levels_NIST.txt diff --git 
a/src/RT_tables/Cu+7_levels_processed.txt b/src/sunbather/RT_tables/Cu+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+7_levels_processed.txt rename to src/sunbather/RT_tables/Cu+7_levels_processed.txt diff --git a/src/RT_tables/Cu+7_lines_NIST.txt b/src/sunbather/RT_tables/Cu+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+7_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+7_lines_NIST.txt diff --git a/src/RT_tables/Cu+8_levels_NIST.txt b/src/sunbather/RT_tables/Cu+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+8_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+8_levels_NIST.txt diff --git a/src/RT_tables/Cu+8_levels_processed.txt b/src/sunbather/RT_tables/Cu+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+8_levels_processed.txt rename to src/sunbather/RT_tables/Cu+8_levels_processed.txt diff --git a/src/RT_tables/Cu+8_lines_NIST.txt b/src/sunbather/RT_tables/Cu+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+8_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+8_lines_NIST.txt diff --git a/src/RT_tables/Cu+9_levels_NIST.txt b/src/sunbather/RT_tables/Cu+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+9_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+9_levels_NIST.txt diff --git a/src/RT_tables/Cu+9_levels_processed.txt b/src/sunbather/RT_tables/Cu+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+9_levels_processed.txt rename to src/sunbather/RT_tables/Cu+9_levels_processed.txt diff --git a/src/RT_tables/Cu+9_lines_NIST.txt b/src/sunbather/RT_tables/Cu+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+9_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+9_lines_NIST.txt diff --git a/src/RT_tables/Cu+_levels_NIST.txt b/src/sunbather/RT_tables/Cu+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu+_levels_NIST.txt rename to src/sunbather/RT_tables/Cu+_levels_NIST.txt diff --git a/src/RT_tables/Cu+_levels_processed.txt b/src/sunbather/RT_tables/Cu+_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu+_levels_processed.txt rename to src/sunbather/RT_tables/Cu+_levels_processed.txt diff --git a/src/RT_tables/Cu+_lines_NIST.txt b/src/sunbather/RT_tables/Cu+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu+_lines_NIST.txt rename to src/sunbather/RT_tables/Cu+_lines_NIST.txt diff --git a/src/RT_tables/Cu_levels_NIST.txt b/src/sunbather/RT_tables/Cu_levels_NIST.txt similarity index 100% rename from src/RT_tables/Cu_levels_NIST.txt rename to src/sunbather/RT_tables/Cu_levels_NIST.txt diff --git a/src/RT_tables/Cu_levels_processed.txt b/src/sunbather/RT_tables/Cu_levels_processed.txt similarity index 100% rename from src/RT_tables/Cu_levels_processed.txt rename to src/sunbather/RT_tables/Cu_levels_processed.txt diff --git a/src/RT_tables/Cu_lines_NIST.txt b/src/sunbather/RT_tables/Cu_lines_NIST.txt similarity index 100% rename from src/RT_tables/Cu_lines_NIST.txt rename to src/sunbather/RT_tables/Cu_lines_NIST.txt diff --git a/src/RT_tables/F+2_levels_NIST.txt b/src/sunbather/RT_tables/F+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+2_levels_NIST.txt rename to src/sunbather/RT_tables/F+2_levels_NIST.txt diff --git a/src/RT_tables/F+2_levels_processed.txt b/src/sunbather/RT_tables/F+2_levels_processed.txt similarity index 100% rename from src/RT_tables/F+2_levels_processed.txt rename to src/sunbather/RT_tables/F+2_levels_processed.txt diff 
--git a/src/RT_tables/F+2_lines_NIST.txt b/src/sunbather/RT_tables/F+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+2_lines_NIST.txt rename to src/sunbather/RT_tables/F+2_lines_NIST.txt diff --git a/src/RT_tables/F+3_levels_NIST.txt b/src/sunbather/RT_tables/F+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+3_levels_NIST.txt rename to src/sunbather/RT_tables/F+3_levels_NIST.txt diff --git a/src/RT_tables/F+3_levels_processed.txt b/src/sunbather/RT_tables/F+3_levels_processed.txt similarity index 100% rename from src/RT_tables/F+3_levels_processed.txt rename to src/sunbather/RT_tables/F+3_levels_processed.txt diff --git a/src/RT_tables/F+3_lines_NIST.txt b/src/sunbather/RT_tables/F+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+3_lines_NIST.txt rename to src/sunbather/RT_tables/F+3_lines_NIST.txt diff --git a/src/RT_tables/F+4_levels_NIST.txt b/src/sunbather/RT_tables/F+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+4_levels_NIST.txt rename to src/sunbather/RT_tables/F+4_levels_NIST.txt diff --git a/src/RT_tables/F+4_levels_processed.txt b/src/sunbather/RT_tables/F+4_levels_processed.txt similarity index 100% rename from src/RT_tables/F+4_levels_processed.txt rename to src/sunbather/RT_tables/F+4_levels_processed.txt diff --git a/src/RT_tables/F+4_lines_NIST.txt b/src/sunbather/RT_tables/F+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+4_lines_NIST.txt rename to src/sunbather/RT_tables/F+4_lines_NIST.txt diff --git a/src/RT_tables/F+5_levels_NIST.txt b/src/sunbather/RT_tables/F+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+5_levels_NIST.txt rename to src/sunbather/RT_tables/F+5_levels_NIST.txt diff --git a/src/RT_tables/F+5_levels_processed.txt b/src/sunbather/RT_tables/F+5_levels_processed.txt similarity index 100% rename from src/RT_tables/F+5_levels_processed.txt rename to src/sunbather/RT_tables/F+5_levels_processed.txt diff --git a/src/RT_tables/F+5_lines_NIST.txt b/src/sunbather/RT_tables/F+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+5_lines_NIST.txt rename to src/sunbather/RT_tables/F+5_lines_NIST.txt diff --git a/src/RT_tables/F+6_levels_NIST.txt b/src/sunbather/RT_tables/F+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+6_levels_NIST.txt rename to src/sunbather/RT_tables/F+6_levels_NIST.txt diff --git a/src/RT_tables/F+6_levels_processed.txt b/src/sunbather/RT_tables/F+6_levels_processed.txt similarity index 100% rename from src/RT_tables/F+6_levels_processed.txt rename to src/sunbather/RT_tables/F+6_levels_processed.txt diff --git a/src/RT_tables/F+6_lines_NIST.txt b/src/sunbather/RT_tables/F+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+6_lines_NIST.txt rename to src/sunbather/RT_tables/F+6_lines_NIST.txt diff --git a/src/RT_tables/F+7_levels_NIST.txt b/src/sunbather/RT_tables/F+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+7_levels_NIST.txt rename to src/sunbather/RT_tables/F+7_levels_NIST.txt diff --git a/src/RT_tables/F+7_levels_processed.txt b/src/sunbather/RT_tables/F+7_levels_processed.txt similarity index 100% rename from src/RT_tables/F+7_levels_processed.txt rename to src/sunbather/RT_tables/F+7_levels_processed.txt diff --git a/src/RT_tables/F+7_lines_NIST.txt b/src/sunbather/RT_tables/F+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+7_lines_NIST.txt rename to src/sunbather/RT_tables/F+7_lines_NIST.txt diff --git 
a/src/RT_tables/F+8_levels_NIST.txt b/src/sunbather/RT_tables/F+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+8_levels_NIST.txt rename to src/sunbather/RT_tables/F+8_levels_NIST.txt diff --git a/src/RT_tables/F+8_levels_processed.txt b/src/sunbather/RT_tables/F+8_levels_processed.txt similarity index 100% rename from src/RT_tables/F+8_levels_processed.txt rename to src/sunbather/RT_tables/F+8_levels_processed.txt diff --git a/src/RT_tables/F+8_lines_NIST.txt b/src/sunbather/RT_tables/F+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+8_lines_NIST.txt rename to src/sunbather/RT_tables/F+8_lines_NIST.txt diff --git a/src/RT_tables/F+_levels_NIST.txt b/src/sunbather/RT_tables/F+_levels_NIST.txt similarity index 100% rename from src/RT_tables/F+_levels_NIST.txt rename to src/sunbather/RT_tables/F+_levels_NIST.txt diff --git a/src/RT_tables/F+_levels_processed.txt b/src/sunbather/RT_tables/F+_levels_processed.txt similarity index 100% rename from src/RT_tables/F+_levels_processed.txt rename to src/sunbather/RT_tables/F+_levels_processed.txt diff --git a/src/RT_tables/F+_lines_NIST.txt b/src/sunbather/RT_tables/F+_lines_NIST.txt similarity index 100% rename from src/RT_tables/F+_lines_NIST.txt rename to src/sunbather/RT_tables/F+_lines_NIST.txt diff --git a/src/RT_tables/F_levels_NIST.txt b/src/sunbather/RT_tables/F_levels_NIST.txt similarity index 100% rename from src/RT_tables/F_levels_NIST.txt rename to src/sunbather/RT_tables/F_levels_NIST.txt diff --git a/src/RT_tables/F_lines_NIST.txt b/src/sunbather/RT_tables/F_lines_NIST.txt similarity index 100% rename from src/RT_tables/F_lines_NIST.txt rename to src/sunbather/RT_tables/F_lines_NIST.txt diff --git a/src/RT_tables/Fe+10_levels_NIST.txt b/src/sunbather/RT_tables/Fe+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+10_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+10_levels_NIST.txt diff --git a/src/RT_tables/Fe+10_levels_processed.txt b/src/sunbather/RT_tables/Fe+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+10_levels_processed.txt rename to src/sunbather/RT_tables/Fe+10_levels_processed.txt diff --git a/src/RT_tables/Fe+10_lines_NIST.txt b/src/sunbather/RT_tables/Fe+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+10_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+10_lines_NIST.txt diff --git a/src/RT_tables/Fe+11_levels_NIST.txt b/src/sunbather/RT_tables/Fe+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+11_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+11_levels_NIST.txt diff --git a/src/RT_tables/Fe+11_levels_processed.txt b/src/sunbather/RT_tables/Fe+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+11_levels_processed.txt rename to src/sunbather/RT_tables/Fe+11_levels_processed.txt diff --git a/src/RT_tables/Fe+11_lines_NIST.txt b/src/sunbather/RT_tables/Fe+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+11_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+11_lines_NIST.txt diff --git a/src/RT_tables/Fe+12_levels_NIST.txt b/src/sunbather/RT_tables/Fe+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+12_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+12_levels_NIST.txt diff --git a/src/RT_tables/Fe+12_levels_processed.txt b/src/sunbather/RT_tables/Fe+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+12_levels_processed.txt rename to src/sunbather/RT_tables/Fe+12_levels_processed.txt diff 
--git a/src/RT_tables/Fe+12_lines_NIST.txt b/src/sunbather/RT_tables/Fe+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+12_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+12_lines_NIST.txt diff --git a/src/RT_tables/Fe+2_levels_NIST.txt b/src/sunbather/RT_tables/Fe+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+2_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+2_levels_NIST.txt diff --git a/src/RT_tables/Fe+2_levels_processed.txt b/src/sunbather/RT_tables/Fe+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+2_levels_processed.txt rename to src/sunbather/RT_tables/Fe+2_levels_processed.txt diff --git a/src/RT_tables/Fe+2_lines_NIST.txt b/src/sunbather/RT_tables/Fe+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+2_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+2_lines_NIST.txt diff --git a/src/RT_tables/Fe+3_levels_NIST.txt b/src/sunbather/RT_tables/Fe+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+3_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+3_levels_NIST.txt diff --git a/src/RT_tables/Fe+3_levels_processed.txt b/src/sunbather/RT_tables/Fe+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+3_levels_processed.txt rename to src/sunbather/RT_tables/Fe+3_levels_processed.txt diff --git a/src/RT_tables/Fe+3_lines_NIST.txt b/src/sunbather/RT_tables/Fe+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+3_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+3_lines_NIST.txt diff --git a/src/RT_tables/Fe+4_levels_NIST.txt b/src/sunbather/RT_tables/Fe+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+4_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+4_levels_NIST.txt diff --git a/src/RT_tables/Fe+4_levels_processed.txt b/src/sunbather/RT_tables/Fe+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+4_levels_processed.txt rename to src/sunbather/RT_tables/Fe+4_levels_processed.txt diff --git a/src/RT_tables/Fe+4_lines_NIST.txt b/src/sunbather/RT_tables/Fe+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+4_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+4_lines_NIST.txt diff --git a/src/RT_tables/Fe+5_levels_NIST.txt b/src/sunbather/RT_tables/Fe+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+5_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+5_levels_NIST.txt diff --git a/src/RT_tables/Fe+5_levels_processed.txt b/src/sunbather/RT_tables/Fe+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+5_levels_processed.txt rename to src/sunbather/RT_tables/Fe+5_levels_processed.txt diff --git a/src/RT_tables/Fe+5_lines_NIST.txt b/src/sunbather/RT_tables/Fe+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+5_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+5_lines_NIST.txt diff --git a/src/RT_tables/Fe+6_levels_NIST.txt b/src/sunbather/RT_tables/Fe+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+6_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+6_levels_NIST.txt diff --git a/src/RT_tables/Fe+6_levels_processed.txt b/src/sunbather/RT_tables/Fe+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+6_levels_processed.txt rename to src/sunbather/RT_tables/Fe+6_levels_processed.txt diff --git a/src/RT_tables/Fe+6_lines_NIST.txt b/src/sunbather/RT_tables/Fe+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+6_lines_NIST.txt rename to 
src/sunbather/RT_tables/Fe+6_lines_NIST.txt diff --git a/src/RT_tables/Fe+7_levels_NIST.txt b/src/sunbather/RT_tables/Fe+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+7_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+7_levels_NIST.txt diff --git a/src/RT_tables/Fe+7_levels_processed.txt b/src/sunbather/RT_tables/Fe+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+7_levels_processed.txt rename to src/sunbather/RT_tables/Fe+7_levels_processed.txt diff --git a/src/RT_tables/Fe+7_lines_NIST.txt b/src/sunbather/RT_tables/Fe+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+7_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+7_lines_NIST.txt diff --git a/src/RT_tables/Fe+8_levels_NIST.txt b/src/sunbather/RT_tables/Fe+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+8_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+8_levels_NIST.txt diff --git a/src/RT_tables/Fe+8_levels_processed.txt b/src/sunbather/RT_tables/Fe+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+8_levels_processed.txt rename to src/sunbather/RT_tables/Fe+8_levels_processed.txt diff --git a/src/RT_tables/Fe+8_lines_NIST.txt b/src/sunbather/RT_tables/Fe+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+8_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+8_lines_NIST.txt diff --git a/src/RT_tables/Fe+9_levels_NIST.txt b/src/sunbather/RT_tables/Fe+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+9_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+9_levels_NIST.txt diff --git a/src/RT_tables/Fe+9_levels_processed.txt b/src/sunbather/RT_tables/Fe+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+9_levels_processed.txt rename to src/sunbather/RT_tables/Fe+9_levels_processed.txt diff --git a/src/RT_tables/Fe+9_lines_NIST.txt b/src/sunbather/RT_tables/Fe+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+9_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+9_lines_NIST.txt diff --git a/src/RT_tables/Fe+_levels_NIST.txt b/src/sunbather/RT_tables/Fe+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe+_levels_NIST.txt rename to src/sunbather/RT_tables/Fe+_levels_NIST.txt diff --git a/src/RT_tables/Fe+_levels_processed.txt b/src/sunbather/RT_tables/Fe+_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe+_levels_processed.txt rename to src/sunbather/RT_tables/Fe+_levels_processed.txt diff --git a/src/RT_tables/Fe+_lines_NIST.txt b/src/sunbather/RT_tables/Fe+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe+_lines_NIST.txt rename to src/sunbather/RT_tables/Fe+_lines_NIST.txt diff --git a/src/RT_tables/Fe_levels_NIST.txt b/src/sunbather/RT_tables/Fe_levels_NIST.txt similarity index 100% rename from src/RT_tables/Fe_levels_NIST.txt rename to src/sunbather/RT_tables/Fe_levels_NIST.txt diff --git a/src/RT_tables/Fe_levels_processed.txt b/src/sunbather/RT_tables/Fe_levels_processed.txt similarity index 100% rename from src/RT_tables/Fe_levels_processed.txt rename to src/sunbather/RT_tables/Fe_levels_processed.txt diff --git a/src/RT_tables/Fe_lines_NIST.txt b/src/sunbather/RT_tables/Fe_lines_NIST.txt similarity index 100% rename from src/RT_tables/Fe_lines_NIST.txt rename to src/sunbather/RT_tables/Fe_lines_NIST.txt diff --git a/src/RT_tables/H_levels_NIST.txt b/src/sunbather/RT_tables/H_levels_NIST.txt similarity index 100% rename from src/RT_tables/H_levels_NIST.txt rename to 
src/sunbather/RT_tables/H_levels_NIST.txt diff --git a/src/RT_tables/H_levels_processed.txt b/src/sunbather/RT_tables/H_levels_processed.txt similarity index 100% rename from src/RT_tables/H_levels_processed.txt rename to src/sunbather/RT_tables/H_levels_processed.txt diff --git a/src/RT_tables/H_lines_NIST.txt b/src/sunbather/RT_tables/H_lines_NIST.txt similarity index 100% rename from src/RT_tables/H_lines_NIST.txt rename to src/sunbather/RT_tables/H_lines_NIST.txt diff --git a/src/RT_tables/H_lines_NIST_all.txt b/src/sunbather/RT_tables/H_lines_NIST_all.txt similarity index 100% rename from src/RT_tables/H_lines_NIST_all.txt rename to src/sunbather/RT_tables/H_lines_NIST_all.txt diff --git a/src/RT_tables/He+_levels_NIST.txt b/src/sunbather/RT_tables/He+_levels_NIST.txt similarity index 100% rename from src/RT_tables/He+_levels_NIST.txt rename to src/sunbather/RT_tables/He+_levels_NIST.txt diff --git a/src/RT_tables/He+_levels_processed.txt b/src/sunbather/RT_tables/He+_levels_processed.txt similarity index 100% rename from src/RT_tables/He+_levels_processed.txt rename to src/sunbather/RT_tables/He+_levels_processed.txt diff --git a/src/RT_tables/He+_lines_NIST.txt b/src/sunbather/RT_tables/He+_lines_NIST.txt similarity index 100% rename from src/RT_tables/He+_lines_NIST.txt rename to src/sunbather/RT_tables/He+_lines_NIST.txt diff --git a/src/RT_tables/He_levels_NIST.txt b/src/sunbather/RT_tables/He_levels_NIST.txt similarity index 100% rename from src/RT_tables/He_levels_NIST.txt rename to src/sunbather/RT_tables/He_levels_NIST.txt diff --git a/src/RT_tables/He_levels_processed.txt b/src/sunbather/RT_tables/He_levels_processed.txt similarity index 100% rename from src/RT_tables/He_levels_processed.txt rename to src/sunbather/RT_tables/He_levels_processed.txt diff --git a/src/RT_tables/He_lines_NIST.txt b/src/sunbather/RT_tables/He_lines_NIST.txt similarity index 100% rename from src/RT_tables/He_lines_NIST.txt rename to src/sunbather/RT_tables/He_lines_NIST.txt diff --git a/src/RT_tables/K+10_levels_NIST.txt b/src/sunbather/RT_tables/K+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+10_levels_NIST.txt rename to src/sunbather/RT_tables/K+10_levels_NIST.txt diff --git a/src/RT_tables/K+10_levels_processed.txt b/src/sunbather/RT_tables/K+10_levels_processed.txt similarity index 100% rename from src/RT_tables/K+10_levels_processed.txt rename to src/sunbather/RT_tables/K+10_levels_processed.txt diff --git a/src/RT_tables/K+10_lines_NIST.txt b/src/sunbather/RT_tables/K+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+10_lines_NIST.txt rename to src/sunbather/RT_tables/K+10_lines_NIST.txt diff --git a/src/RT_tables/K+11_levels_NIST.txt b/src/sunbather/RT_tables/K+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+11_levels_NIST.txt rename to src/sunbather/RT_tables/K+11_levels_NIST.txt diff --git a/src/RT_tables/K+11_levels_processed.txt b/src/sunbather/RT_tables/K+11_levels_processed.txt similarity index 100% rename from src/RT_tables/K+11_levels_processed.txt rename to src/sunbather/RT_tables/K+11_levels_processed.txt diff --git a/src/RT_tables/K+11_lines_NIST.txt b/src/sunbather/RT_tables/K+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+11_lines_NIST.txt rename to src/sunbather/RT_tables/K+11_lines_NIST.txt diff --git a/src/RT_tables/K+12_levels_NIST.txt b/src/sunbather/RT_tables/K+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+12_levels_NIST.txt rename to 
src/sunbather/RT_tables/K+12_levels_NIST.txt diff --git a/src/RT_tables/K+12_levels_processed.txt b/src/sunbather/RT_tables/K+12_levels_processed.txt similarity index 100% rename from src/RT_tables/K+12_levels_processed.txt rename to src/sunbather/RT_tables/K+12_levels_processed.txt diff --git a/src/RT_tables/K+12_lines_NIST.txt b/src/sunbather/RT_tables/K+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+12_lines_NIST.txt rename to src/sunbather/RT_tables/K+12_lines_NIST.txt diff --git a/src/RT_tables/K+2_levels_NIST.txt b/src/sunbather/RT_tables/K+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+2_levels_NIST.txt rename to src/sunbather/RT_tables/K+2_levels_NIST.txt diff --git a/src/RT_tables/K+2_levels_processed.txt b/src/sunbather/RT_tables/K+2_levels_processed.txt similarity index 100% rename from src/RT_tables/K+2_levels_processed.txt rename to src/sunbather/RT_tables/K+2_levels_processed.txt diff --git a/src/RT_tables/K+2_lines_NIST.txt b/src/sunbather/RT_tables/K+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+2_lines_NIST.txt rename to src/sunbather/RT_tables/K+2_lines_NIST.txt diff --git a/src/RT_tables/K+3_levels_NIST.txt b/src/sunbather/RT_tables/K+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+3_levels_NIST.txt rename to src/sunbather/RT_tables/K+3_levels_NIST.txt diff --git a/src/RT_tables/K+3_levels_processed.txt b/src/sunbather/RT_tables/K+3_levels_processed.txt similarity index 100% rename from src/RT_tables/K+3_levels_processed.txt rename to src/sunbather/RT_tables/K+3_levels_processed.txt diff --git a/src/RT_tables/K+3_lines_NIST.txt b/src/sunbather/RT_tables/K+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+3_lines_NIST.txt rename to src/sunbather/RT_tables/K+3_lines_NIST.txt diff --git a/src/RT_tables/K+4_levels_NIST.txt b/src/sunbather/RT_tables/K+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+4_levels_NIST.txt rename to src/sunbather/RT_tables/K+4_levels_NIST.txt diff --git a/src/RT_tables/K+4_levels_processed.txt b/src/sunbather/RT_tables/K+4_levels_processed.txt similarity index 100% rename from src/RT_tables/K+4_levels_processed.txt rename to src/sunbather/RT_tables/K+4_levels_processed.txt diff --git a/src/RT_tables/K+4_lines_NIST.txt b/src/sunbather/RT_tables/K+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+4_lines_NIST.txt rename to src/sunbather/RT_tables/K+4_lines_NIST.txt diff --git a/src/RT_tables/K+5_levels_NIST.txt b/src/sunbather/RT_tables/K+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+5_levels_NIST.txt rename to src/sunbather/RT_tables/K+5_levels_NIST.txt diff --git a/src/RT_tables/K+5_levels_processed.txt b/src/sunbather/RT_tables/K+5_levels_processed.txt similarity index 100% rename from src/RT_tables/K+5_levels_processed.txt rename to src/sunbather/RT_tables/K+5_levels_processed.txt diff --git a/src/RT_tables/K+5_lines_NIST.txt b/src/sunbather/RT_tables/K+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+5_lines_NIST.txt rename to src/sunbather/RT_tables/K+5_lines_NIST.txt diff --git a/src/RT_tables/K+6_levels_NIST.txt b/src/sunbather/RT_tables/K+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+6_levels_NIST.txt rename to src/sunbather/RT_tables/K+6_levels_NIST.txt diff --git a/src/RT_tables/K+6_levels_processed.txt b/src/sunbather/RT_tables/K+6_levels_processed.txt similarity index 100% rename from src/RT_tables/K+6_levels_processed.txt rename to 
src/sunbather/RT_tables/K+6_levels_processed.txt diff --git a/src/RT_tables/K+6_lines_NIST.txt b/src/sunbather/RT_tables/K+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+6_lines_NIST.txt rename to src/sunbather/RT_tables/K+6_lines_NIST.txt diff --git a/src/RT_tables/K+7_levels_NIST.txt b/src/sunbather/RT_tables/K+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+7_levels_NIST.txt rename to src/sunbather/RT_tables/K+7_levels_NIST.txt diff --git a/src/RT_tables/K+7_levels_processed.txt b/src/sunbather/RT_tables/K+7_levels_processed.txt similarity index 100% rename from src/RT_tables/K+7_levels_processed.txt rename to src/sunbather/RT_tables/K+7_levels_processed.txt diff --git a/src/RT_tables/K+7_lines_NIST.txt b/src/sunbather/RT_tables/K+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+7_lines_NIST.txt rename to src/sunbather/RT_tables/K+7_lines_NIST.txt diff --git a/src/RT_tables/K+8_levels_NIST.txt b/src/sunbather/RT_tables/K+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+8_levels_NIST.txt rename to src/sunbather/RT_tables/K+8_levels_NIST.txt diff --git a/src/RT_tables/K+8_levels_processed.txt b/src/sunbather/RT_tables/K+8_levels_processed.txt similarity index 100% rename from src/RT_tables/K+8_levels_processed.txt rename to src/sunbather/RT_tables/K+8_levels_processed.txt diff --git a/src/RT_tables/K+8_lines_NIST.txt b/src/sunbather/RT_tables/K+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+8_lines_NIST.txt rename to src/sunbather/RT_tables/K+8_lines_NIST.txt diff --git a/src/RT_tables/K+9_levels_NIST.txt b/src/sunbather/RT_tables/K+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+9_levels_NIST.txt rename to src/sunbather/RT_tables/K+9_levels_NIST.txt diff --git a/src/RT_tables/K+9_levels_processed.txt b/src/sunbather/RT_tables/K+9_levels_processed.txt similarity index 100% rename from src/RT_tables/K+9_levels_processed.txt rename to src/sunbather/RT_tables/K+9_levels_processed.txt diff --git a/src/RT_tables/K+9_lines_NIST.txt b/src/sunbather/RT_tables/K+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+9_lines_NIST.txt rename to src/sunbather/RT_tables/K+9_lines_NIST.txt diff --git a/src/RT_tables/K+_levels_NIST.txt b/src/sunbather/RT_tables/K+_levels_NIST.txt similarity index 100% rename from src/RT_tables/K+_levels_NIST.txt rename to src/sunbather/RT_tables/K+_levels_NIST.txt diff --git a/src/RT_tables/K+_levels_processed.txt b/src/sunbather/RT_tables/K+_levels_processed.txt similarity index 100% rename from src/RT_tables/K+_levels_processed.txt rename to src/sunbather/RT_tables/K+_levels_processed.txt diff --git a/src/RT_tables/K+_lines_NIST.txt b/src/sunbather/RT_tables/K+_lines_NIST.txt similarity index 100% rename from src/RT_tables/K+_lines_NIST.txt rename to src/sunbather/RT_tables/K+_lines_NIST.txt diff --git a/src/RT_tables/K_levels_NIST.txt b/src/sunbather/RT_tables/K_levels_NIST.txt similarity index 100% rename from src/RT_tables/K_levels_NIST.txt rename to src/sunbather/RT_tables/K_levels_NIST.txt diff --git a/src/RT_tables/K_levels_processed.txt b/src/sunbather/RT_tables/K_levels_processed.txt similarity index 100% rename from src/RT_tables/K_levels_processed.txt rename to src/sunbather/RT_tables/K_levels_processed.txt diff --git a/src/RT_tables/K_lines_NIST.txt b/src/sunbather/RT_tables/K_lines_NIST.txt similarity index 100% rename from src/RT_tables/K_lines_NIST.txt rename to src/sunbather/RT_tables/K_lines_NIST.txt diff --git 
a/src/RT_tables/Li+2_levels_NIST.txt b/src/sunbather/RT_tables/Li+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Li+2_levels_NIST.txt rename to src/sunbather/RT_tables/Li+2_levels_NIST.txt diff --git a/src/RT_tables/Li+2_levels_processed.txt b/src/sunbather/RT_tables/Li+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Li+2_levels_processed.txt rename to src/sunbather/RT_tables/Li+2_levels_processed.txt diff --git a/src/RT_tables/Li+2_lines_NIST.txt b/src/sunbather/RT_tables/Li+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Li+2_lines_NIST.txt rename to src/sunbather/RT_tables/Li+2_lines_NIST.txt diff --git a/src/RT_tables/Li+_levels_NIST.txt b/src/sunbather/RT_tables/Li+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Li+_levels_NIST.txt rename to src/sunbather/RT_tables/Li+_levels_NIST.txt diff --git a/src/RT_tables/Li+_levels_processed.txt b/src/sunbather/RT_tables/Li+_levels_processed.txt similarity index 100% rename from src/RT_tables/Li+_levels_processed.txt rename to src/sunbather/RT_tables/Li+_levels_processed.txt diff --git a/src/RT_tables/Li+_lines_NIST.txt b/src/sunbather/RT_tables/Li+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Li+_lines_NIST.txt rename to src/sunbather/RT_tables/Li+_lines_NIST.txt diff --git a/src/RT_tables/Li_levels_NIST.txt b/src/sunbather/RT_tables/Li_levels_NIST.txt similarity index 100% rename from src/RT_tables/Li_levels_NIST.txt rename to src/sunbather/RT_tables/Li_levels_NIST.txt diff --git a/src/RT_tables/Li_levels_processed.txt b/src/sunbather/RT_tables/Li_levels_processed.txt similarity index 100% rename from src/RT_tables/Li_levels_processed.txt rename to src/sunbather/RT_tables/Li_levels_processed.txt diff --git a/src/RT_tables/Li_lines_NIST.txt b/src/sunbather/RT_tables/Li_lines_NIST.txt similarity index 100% rename from src/RT_tables/Li_lines_NIST.txt rename to src/sunbather/RT_tables/Li_lines_NIST.txt diff --git a/src/RT_tables/Mg+10_levels_NIST.txt b/src/sunbather/RT_tables/Mg+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+10_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+10_levels_NIST.txt diff --git a/src/RT_tables/Mg+10_levels_processed.txt b/src/sunbather/RT_tables/Mg+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+10_levels_processed.txt rename to src/sunbather/RT_tables/Mg+10_levels_processed.txt diff --git a/src/RT_tables/Mg+10_lines_NIST.txt b/src/sunbather/RT_tables/Mg+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+10_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+10_lines_NIST.txt diff --git a/src/RT_tables/Mg+11_levels_NIST.txt b/src/sunbather/RT_tables/Mg+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+11_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+11_levels_NIST.txt diff --git a/src/RT_tables/Mg+11_levels_processed.txt b/src/sunbather/RT_tables/Mg+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+11_levels_processed.txt rename to src/sunbather/RT_tables/Mg+11_levels_processed.txt diff --git a/src/RT_tables/Mg+11_lines_NIST.txt b/src/sunbather/RT_tables/Mg+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+11_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+11_lines_NIST.txt diff --git a/src/RT_tables/Mg+2_levels_NIST.txt b/src/sunbather/RT_tables/Mg+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+2_levels_NIST.txt rename to 
src/sunbather/RT_tables/Mg+2_levels_NIST.txt diff --git a/src/RT_tables/Mg+2_levels_processed.txt b/src/sunbather/RT_tables/Mg+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+2_levels_processed.txt rename to src/sunbather/RT_tables/Mg+2_levels_processed.txt diff --git a/src/RT_tables/Mg+2_lines_NIST.txt b/src/sunbather/RT_tables/Mg+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+2_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+2_lines_NIST.txt diff --git a/src/RT_tables/Mg+3_levels_NIST.txt b/src/sunbather/RT_tables/Mg+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+3_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+3_levels_NIST.txt diff --git a/src/RT_tables/Mg+3_levels_processed.txt b/src/sunbather/RT_tables/Mg+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+3_levels_processed.txt rename to src/sunbather/RT_tables/Mg+3_levels_processed.txt diff --git a/src/RT_tables/Mg+3_lines_NIST.txt b/src/sunbather/RT_tables/Mg+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+3_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+3_lines_NIST.txt diff --git a/src/RT_tables/Mg+4_levels_NIST.txt b/src/sunbather/RT_tables/Mg+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+4_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+4_levels_NIST.txt diff --git a/src/RT_tables/Mg+4_levels_processed.txt b/src/sunbather/RT_tables/Mg+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+4_levels_processed.txt rename to src/sunbather/RT_tables/Mg+4_levels_processed.txt diff --git a/src/RT_tables/Mg+4_lines_NIST.txt b/src/sunbather/RT_tables/Mg+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+4_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+4_lines_NIST.txt diff --git a/src/RT_tables/Mg+5_levels_NIST.txt b/src/sunbather/RT_tables/Mg+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+5_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+5_levels_NIST.txt diff --git a/src/RT_tables/Mg+5_levels_processed.txt b/src/sunbather/RT_tables/Mg+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+5_levels_processed.txt rename to src/sunbather/RT_tables/Mg+5_levels_processed.txt diff --git a/src/RT_tables/Mg+5_lines_NIST.txt b/src/sunbather/RT_tables/Mg+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+5_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+5_lines_NIST.txt diff --git a/src/RT_tables/Mg+6_levels_NIST.txt b/src/sunbather/RT_tables/Mg+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+6_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+6_levels_NIST.txt diff --git a/src/RT_tables/Mg+6_levels_processed.txt b/src/sunbather/RT_tables/Mg+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+6_levels_processed.txt rename to src/sunbather/RT_tables/Mg+6_levels_processed.txt diff --git a/src/RT_tables/Mg+6_lines_NIST.txt b/src/sunbather/RT_tables/Mg+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+6_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+6_lines_NIST.txt diff --git a/src/RT_tables/Mg+7_levels_NIST.txt b/src/sunbather/RT_tables/Mg+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+7_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+7_levels_NIST.txt diff --git a/src/RT_tables/Mg+7_levels_processed.txt b/src/sunbather/RT_tables/Mg+7_levels_processed.txt similarity index 100% rename from 
src/RT_tables/Mg+7_levels_processed.txt rename to src/sunbather/RT_tables/Mg+7_levels_processed.txt diff --git a/src/RT_tables/Mg+7_lines_NIST.txt b/src/sunbather/RT_tables/Mg+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+7_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+7_lines_NIST.txt diff --git a/src/RT_tables/Mg+8_levels_NIST.txt b/src/sunbather/RT_tables/Mg+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+8_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+8_levels_NIST.txt diff --git a/src/RT_tables/Mg+8_levels_processed.txt b/src/sunbather/RT_tables/Mg+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+8_levels_processed.txt rename to src/sunbather/RT_tables/Mg+8_levels_processed.txt diff --git a/src/RT_tables/Mg+8_lines_NIST.txt b/src/sunbather/RT_tables/Mg+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+8_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+8_lines_NIST.txt diff --git a/src/RT_tables/Mg+9_levels_NIST.txt b/src/sunbather/RT_tables/Mg+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+9_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+9_levels_NIST.txt diff --git a/src/RT_tables/Mg+9_levels_processed.txt b/src/sunbather/RT_tables/Mg+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+9_levels_processed.txt rename to src/sunbather/RT_tables/Mg+9_levels_processed.txt diff --git a/src/RT_tables/Mg+9_lines_NIST.txt b/src/sunbather/RT_tables/Mg+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+9_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+9_lines_NIST.txt diff --git a/src/RT_tables/Mg+_levels_NIST.txt b/src/sunbather/RT_tables/Mg+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg+_levels_NIST.txt rename to src/sunbather/RT_tables/Mg+_levels_NIST.txt diff --git a/src/RT_tables/Mg+_levels_processed.txt b/src/sunbather/RT_tables/Mg+_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg+_levels_processed.txt rename to src/sunbather/RT_tables/Mg+_levels_processed.txt diff --git a/src/RT_tables/Mg+_lines_NIST.txt b/src/sunbather/RT_tables/Mg+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg+_lines_NIST.txt rename to src/sunbather/RT_tables/Mg+_lines_NIST.txt diff --git a/src/RT_tables/Mg_levels_NIST.txt b/src/sunbather/RT_tables/Mg_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mg_levels_NIST.txt rename to src/sunbather/RT_tables/Mg_levels_NIST.txt diff --git a/src/RT_tables/Mg_levels_processed.txt b/src/sunbather/RT_tables/Mg_levels_processed.txt similarity index 100% rename from src/RT_tables/Mg_levels_processed.txt rename to src/sunbather/RT_tables/Mg_levels_processed.txt diff --git a/src/RT_tables/Mg_lines_NIST.txt b/src/sunbather/RT_tables/Mg_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mg_lines_NIST.txt rename to src/sunbather/RT_tables/Mg_lines_NIST.txt diff --git a/src/RT_tables/Mn+10_levels_NIST.txt b/src/sunbather/RT_tables/Mn+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+10_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+10_levels_NIST.txt diff --git a/src/RT_tables/Mn+10_levels_processed.txt b/src/sunbather/RT_tables/Mn+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+10_levels_processed.txt rename to src/sunbather/RT_tables/Mn+10_levels_processed.txt diff --git a/src/RT_tables/Mn+10_lines_NIST.txt b/src/sunbather/RT_tables/Mn+10_lines_NIST.txt similarity index 100% 
rename from src/RT_tables/Mn+10_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+10_lines_NIST.txt diff --git a/src/RT_tables/Mn+11_levels_NIST.txt b/src/sunbather/RT_tables/Mn+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+11_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+11_levels_NIST.txt diff --git a/src/RT_tables/Mn+11_levels_processed.txt b/src/sunbather/RT_tables/Mn+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+11_levels_processed.txt rename to src/sunbather/RT_tables/Mn+11_levels_processed.txt diff --git a/src/RT_tables/Mn+11_lines_NIST.txt b/src/sunbather/RT_tables/Mn+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+11_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+11_lines_NIST.txt diff --git a/src/RT_tables/Mn+12_levels_NIST.txt b/src/sunbather/RT_tables/Mn+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+12_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+12_levels_NIST.txt diff --git a/src/RT_tables/Mn+12_levels_processed.txt b/src/sunbather/RT_tables/Mn+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+12_levels_processed.txt rename to src/sunbather/RT_tables/Mn+12_levels_processed.txt diff --git a/src/RT_tables/Mn+12_lines_NIST.txt b/src/sunbather/RT_tables/Mn+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+12_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+12_lines_NIST.txt diff --git a/src/RT_tables/Mn+2_levels_NIST.txt b/src/sunbather/RT_tables/Mn+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+2_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+2_levels_NIST.txt diff --git a/src/RT_tables/Mn+2_lines_NIST.txt b/src/sunbather/RT_tables/Mn+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+2_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+2_lines_NIST.txt diff --git a/src/RT_tables/Mn+3_levels_NIST.txt b/src/sunbather/RT_tables/Mn+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+3_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+3_levels_NIST.txt diff --git a/src/RT_tables/Mn+3_lines_NIST.txt b/src/sunbather/RT_tables/Mn+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+3_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+3_lines_NIST.txt diff --git a/src/RT_tables/Mn+4_levels_NIST.txt b/src/sunbather/RT_tables/Mn+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+4_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+4_levels_NIST.txt diff --git a/src/RT_tables/Mn+4_levels_processed.txt b/src/sunbather/RT_tables/Mn+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+4_levels_processed.txt rename to src/sunbather/RT_tables/Mn+4_levels_processed.txt diff --git a/src/RT_tables/Mn+4_lines_NIST.txt b/src/sunbather/RT_tables/Mn+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+4_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+4_lines_NIST.txt diff --git a/src/RT_tables/Mn+5_levels_NIST.txt b/src/sunbather/RT_tables/Mn+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+5_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+5_levels_NIST.txt diff --git a/src/RT_tables/Mn+5_lines_NIST.txt b/src/sunbather/RT_tables/Mn+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+5_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+5_lines_NIST.txt diff --git a/src/RT_tables/Mn+6_levels_NIST.txt b/src/sunbather/RT_tables/Mn+6_levels_NIST.txt similarity 
index 100% rename from src/RT_tables/Mn+6_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+6_levels_NIST.txt diff --git a/src/RT_tables/Mn+6_lines_NIST.txt b/src/sunbather/RT_tables/Mn+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+6_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+6_lines_NIST.txt diff --git a/src/RT_tables/Mn+7_levels_NIST.txt b/src/sunbather/RT_tables/Mn+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+7_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+7_levels_NIST.txt diff --git a/src/RT_tables/Mn+7_levels_processed.txt b/src/sunbather/RT_tables/Mn+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+7_levels_processed.txt rename to src/sunbather/RT_tables/Mn+7_levels_processed.txt diff --git a/src/RT_tables/Mn+7_lines_NIST.txt b/src/sunbather/RT_tables/Mn+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+7_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+7_lines_NIST.txt diff --git a/src/RT_tables/Mn+8_levels_NIST.txt b/src/sunbather/RT_tables/Mn+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+8_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+8_levels_NIST.txt diff --git a/src/RT_tables/Mn+8_levels_processed.txt b/src/sunbather/RT_tables/Mn+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+8_levels_processed.txt rename to src/sunbather/RT_tables/Mn+8_levels_processed.txt diff --git a/src/RT_tables/Mn+8_lines_NIST.txt b/src/sunbather/RT_tables/Mn+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+8_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+8_lines_NIST.txt diff --git a/src/RT_tables/Mn+9_levels_NIST.txt b/src/sunbather/RT_tables/Mn+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+9_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+9_levels_NIST.txt diff --git a/src/RT_tables/Mn+9_levels_processed.txt b/src/sunbather/RT_tables/Mn+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+9_levels_processed.txt rename to src/sunbather/RT_tables/Mn+9_levels_processed.txt diff --git a/src/RT_tables/Mn+9_lines_NIST.txt b/src/sunbather/RT_tables/Mn+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+9_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+9_lines_NIST.txt diff --git a/src/RT_tables/Mn+_levels_NIST.txt b/src/sunbather/RT_tables/Mn+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn+_levels_NIST.txt rename to src/sunbather/RT_tables/Mn+_levels_NIST.txt diff --git a/src/RT_tables/Mn+_levels_processed.txt b/src/sunbather/RT_tables/Mn+_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn+_levels_processed.txt rename to src/sunbather/RT_tables/Mn+_levels_processed.txt diff --git a/src/RT_tables/Mn+_lines_NIST.txt b/src/sunbather/RT_tables/Mn+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Mn+_lines_NIST.txt rename to src/sunbather/RT_tables/Mn+_lines_NIST.txt diff --git a/src/RT_tables/Mn_levels_NIST.txt b/src/sunbather/RT_tables/Mn_levels_NIST.txt similarity index 100% rename from src/RT_tables/Mn_levels_NIST.txt rename to src/sunbather/RT_tables/Mn_levels_NIST.txt diff --git a/src/RT_tables/Mn_levels_processed.txt b/src/sunbather/RT_tables/Mn_levels_processed.txt similarity index 100% rename from src/RT_tables/Mn_levels_processed.txt rename to src/sunbather/RT_tables/Mn_levels_processed.txt diff --git a/src/RT_tables/Mn_lines_NIST.txt b/src/sunbather/RT_tables/Mn_lines_NIST.txt similarity 
index 100% rename from src/RT_tables/Mn_lines_NIST.txt rename to src/sunbather/RT_tables/Mn_lines_NIST.txt diff --git a/src/RT_tables/N+2_levels_NIST.txt b/src/sunbather/RT_tables/N+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/N+2_levels_NIST.txt rename to src/sunbather/RT_tables/N+2_levels_NIST.txt diff --git a/src/RT_tables/N+2_levels_processed.txt b/src/sunbather/RT_tables/N+2_levels_processed.txt similarity index 100% rename from src/RT_tables/N+2_levels_processed.txt rename to src/sunbather/RT_tables/N+2_levels_processed.txt diff --git a/src/RT_tables/N+2_lines_NIST.txt b/src/sunbather/RT_tables/N+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+2_lines_NIST.txt rename to src/sunbather/RT_tables/N+2_lines_NIST.txt diff --git a/src/RT_tables/N+3_levels_NIST.txt b/src/sunbather/RT_tables/N+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/N+3_levels_NIST.txt rename to src/sunbather/RT_tables/N+3_levels_NIST.txt diff --git a/src/RT_tables/N+3_levels_processed.txt b/src/sunbather/RT_tables/N+3_levels_processed.txt similarity index 100% rename from src/RT_tables/N+3_levels_processed.txt rename to src/sunbather/RT_tables/N+3_levels_processed.txt diff --git a/src/RT_tables/N+3_lines_NIST.txt b/src/sunbather/RT_tables/N+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+3_lines_NIST.txt rename to src/sunbather/RT_tables/N+3_lines_NIST.txt diff --git a/src/RT_tables/N+4_levels_NIST.txt b/src/sunbather/RT_tables/N+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/N+4_levels_NIST.txt rename to src/sunbather/RT_tables/N+4_levels_NIST.txt diff --git a/src/RT_tables/N+4_levels_processed.txt b/src/sunbather/RT_tables/N+4_levels_processed.txt similarity index 100% rename from src/RT_tables/N+4_levels_processed.txt rename to src/sunbather/RT_tables/N+4_levels_processed.txt diff --git a/src/RT_tables/N+4_lines_NIST.txt b/src/sunbather/RT_tables/N+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+4_lines_NIST.txt rename to src/sunbather/RT_tables/N+4_lines_NIST.txt diff --git a/src/RT_tables/N+5_levels_NIST.txt b/src/sunbather/RT_tables/N+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/N+5_levels_NIST.txt rename to src/sunbather/RT_tables/N+5_levels_NIST.txt diff --git a/src/RT_tables/N+5_levels_processed.txt b/src/sunbather/RT_tables/N+5_levels_processed.txt similarity index 100% rename from src/RT_tables/N+5_levels_processed.txt rename to src/sunbather/RT_tables/N+5_levels_processed.txt diff --git a/src/RT_tables/N+5_lines_NIST.txt b/src/sunbather/RT_tables/N+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+5_lines_NIST.txt rename to src/sunbather/RT_tables/N+5_lines_NIST.txt diff --git a/src/RT_tables/N+6_levels_NIST.txt b/src/sunbather/RT_tables/N+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/N+6_levels_NIST.txt rename to src/sunbather/RT_tables/N+6_levels_NIST.txt diff --git a/src/RT_tables/N+6_levels_processed.txt b/src/sunbather/RT_tables/N+6_levels_processed.txt similarity index 100% rename from src/RT_tables/N+6_levels_processed.txt rename to src/sunbather/RT_tables/N+6_levels_processed.txt diff --git a/src/RT_tables/N+6_lines_NIST.txt b/src/sunbather/RT_tables/N+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+6_lines_NIST.txt rename to src/sunbather/RT_tables/N+6_lines_NIST.txt diff --git a/src/RT_tables/N+_levels_NIST.txt b/src/sunbather/RT_tables/N+_levels_NIST.txt similarity index 100% rename from 
src/RT_tables/N+_levels_NIST.txt rename to src/sunbather/RT_tables/N+_levels_NIST.txt diff --git a/src/RT_tables/N+_levels_processed.txt b/src/sunbather/RT_tables/N+_levels_processed.txt similarity index 100% rename from src/RT_tables/N+_levels_processed.txt rename to src/sunbather/RT_tables/N+_levels_processed.txt diff --git a/src/RT_tables/N+_lines_NIST.txt b/src/sunbather/RT_tables/N+_lines_NIST.txt similarity index 100% rename from src/RT_tables/N+_lines_NIST.txt rename to src/sunbather/RT_tables/N+_lines_NIST.txt diff --git a/src/RT_tables/N_levels_NIST.txt b/src/sunbather/RT_tables/N_levels_NIST.txt similarity index 100% rename from src/RT_tables/N_levels_NIST.txt rename to src/sunbather/RT_tables/N_levels_NIST.txt diff --git a/src/RT_tables/N_levels_processed.txt b/src/sunbather/RT_tables/N_levels_processed.txt similarity index 100% rename from src/RT_tables/N_levels_processed.txt rename to src/sunbather/RT_tables/N_levels_processed.txt diff --git a/src/RT_tables/N_lines_NIST.txt b/src/sunbather/RT_tables/N_lines_NIST.txt similarity index 100% rename from src/RT_tables/N_lines_NIST.txt rename to src/sunbather/RT_tables/N_lines_NIST.txt diff --git a/src/RT_tables/Na+10_levels_NIST.txt b/src/sunbather/RT_tables/Na+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+10_levels_NIST.txt rename to src/sunbather/RT_tables/Na+10_levels_NIST.txt diff --git a/src/RT_tables/Na+10_levels_processed.txt b/src/sunbather/RT_tables/Na+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+10_levels_processed.txt rename to src/sunbather/RT_tables/Na+10_levels_processed.txt diff --git a/src/RT_tables/Na+10_lines_NIST.txt b/src/sunbather/RT_tables/Na+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+10_lines_NIST.txt rename to src/sunbather/RT_tables/Na+10_lines_NIST.txt diff --git a/src/RT_tables/Na+2_levels_NIST.txt b/src/sunbather/RT_tables/Na+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+2_levels_NIST.txt rename to src/sunbather/RT_tables/Na+2_levels_NIST.txt diff --git a/src/RT_tables/Na+2_levels_processed.txt b/src/sunbather/RT_tables/Na+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+2_levels_processed.txt rename to src/sunbather/RT_tables/Na+2_levels_processed.txt diff --git a/src/RT_tables/Na+2_lines_NIST.txt b/src/sunbather/RT_tables/Na+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+2_lines_NIST.txt rename to src/sunbather/RT_tables/Na+2_lines_NIST.txt diff --git a/src/RT_tables/Na+3_levels_NIST.txt b/src/sunbather/RT_tables/Na+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+3_levels_NIST.txt rename to src/sunbather/RT_tables/Na+3_levels_NIST.txt diff --git a/src/RT_tables/Na+3_levels_processed.txt b/src/sunbather/RT_tables/Na+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+3_levels_processed.txt rename to src/sunbather/RT_tables/Na+3_levels_processed.txt diff --git a/src/RT_tables/Na+3_lines_NIST.txt b/src/sunbather/RT_tables/Na+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+3_lines_NIST.txt rename to src/sunbather/RT_tables/Na+3_lines_NIST.txt diff --git a/src/RT_tables/Na+4_levels_NIST.txt b/src/sunbather/RT_tables/Na+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+4_levels_NIST.txt rename to src/sunbather/RT_tables/Na+4_levels_NIST.txt diff --git a/src/RT_tables/Na+4_levels_processed.txt b/src/sunbather/RT_tables/Na+4_levels_processed.txt similarity index 100% rename from 
src/RT_tables/Na+4_levels_processed.txt rename to src/sunbather/RT_tables/Na+4_levels_processed.txt diff --git a/src/RT_tables/Na+4_lines_NIST.txt b/src/sunbather/RT_tables/Na+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+4_lines_NIST.txt rename to src/sunbather/RT_tables/Na+4_lines_NIST.txt diff --git a/src/RT_tables/Na+5_levels_NIST.txt b/src/sunbather/RT_tables/Na+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+5_levels_NIST.txt rename to src/sunbather/RT_tables/Na+5_levels_NIST.txt diff --git a/src/RT_tables/Na+5_levels_processed.txt b/src/sunbather/RT_tables/Na+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+5_levels_processed.txt rename to src/sunbather/RT_tables/Na+5_levels_processed.txt diff --git a/src/RT_tables/Na+5_lines_NIST.txt b/src/sunbather/RT_tables/Na+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+5_lines_NIST.txt rename to src/sunbather/RT_tables/Na+5_lines_NIST.txt diff --git a/src/RT_tables/Na+6_levels_NIST.txt b/src/sunbather/RT_tables/Na+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+6_levels_NIST.txt rename to src/sunbather/RT_tables/Na+6_levels_NIST.txt diff --git a/src/RT_tables/Na+6_levels_processed.txt b/src/sunbather/RT_tables/Na+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+6_levels_processed.txt rename to src/sunbather/RT_tables/Na+6_levels_processed.txt diff --git a/src/RT_tables/Na+6_lines_NIST.txt b/src/sunbather/RT_tables/Na+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+6_lines_NIST.txt rename to src/sunbather/RT_tables/Na+6_lines_NIST.txt diff --git a/src/RT_tables/Na+7_levels_NIST.txt b/src/sunbather/RT_tables/Na+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+7_levels_NIST.txt rename to src/sunbather/RT_tables/Na+7_levels_NIST.txt diff --git a/src/RT_tables/Na+7_levels_processed.txt b/src/sunbather/RT_tables/Na+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+7_levels_processed.txt rename to src/sunbather/RT_tables/Na+7_levels_processed.txt diff --git a/src/RT_tables/Na+7_lines_NIST.txt b/src/sunbather/RT_tables/Na+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+7_lines_NIST.txt rename to src/sunbather/RT_tables/Na+7_lines_NIST.txt diff --git a/src/RT_tables/Na+8_levels_NIST.txt b/src/sunbather/RT_tables/Na+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+8_levels_NIST.txt rename to src/sunbather/RT_tables/Na+8_levels_NIST.txt diff --git a/src/RT_tables/Na+8_levels_processed.txt b/src/sunbather/RT_tables/Na+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+8_levels_processed.txt rename to src/sunbather/RT_tables/Na+8_levels_processed.txt diff --git a/src/RT_tables/Na+8_lines_NIST.txt b/src/sunbather/RT_tables/Na+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+8_lines_NIST.txt rename to src/sunbather/RT_tables/Na+8_lines_NIST.txt diff --git a/src/RT_tables/Na+9_levels_NIST.txt b/src/sunbather/RT_tables/Na+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+9_levels_NIST.txt rename to src/sunbather/RT_tables/Na+9_levels_NIST.txt diff --git a/src/RT_tables/Na+9_levels_processed.txt b/src/sunbather/RT_tables/Na+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+9_levels_processed.txt rename to src/sunbather/RT_tables/Na+9_levels_processed.txt diff --git a/src/RT_tables/Na+9_lines_NIST.txt 
b/src/sunbather/RT_tables/Na+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+9_lines_NIST.txt rename to src/sunbather/RT_tables/Na+9_lines_NIST.txt diff --git a/src/RT_tables/Na+_levels_NIST.txt b/src/sunbather/RT_tables/Na+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na+_levels_NIST.txt rename to src/sunbather/RT_tables/Na+_levels_NIST.txt diff --git a/src/RT_tables/Na+_levels_processed.txt b/src/sunbather/RT_tables/Na+_levels_processed.txt similarity index 100% rename from src/RT_tables/Na+_levels_processed.txt rename to src/sunbather/RT_tables/Na+_levels_processed.txt diff --git a/src/RT_tables/Na+_lines_NIST.txt b/src/sunbather/RT_tables/Na+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na+_lines_NIST.txt rename to src/sunbather/RT_tables/Na+_lines_NIST.txt diff --git a/src/RT_tables/Na_levels_NIST.txt b/src/sunbather/RT_tables/Na_levels_NIST.txt similarity index 100% rename from src/RT_tables/Na_levels_NIST.txt rename to src/sunbather/RT_tables/Na_levels_NIST.txt diff --git a/src/RT_tables/Na_levels_processed.txt b/src/sunbather/RT_tables/Na_levels_processed.txt similarity index 100% rename from src/RT_tables/Na_levels_processed.txt rename to src/sunbather/RT_tables/Na_levels_processed.txt diff --git a/src/RT_tables/Na_lines_NIST.txt b/src/sunbather/RT_tables/Na_lines_NIST.txt similarity index 100% rename from src/RT_tables/Na_lines_NIST.txt rename to src/sunbather/RT_tables/Na_lines_NIST.txt diff --git a/src/RT_tables/Ne+2_levels_NIST.txt b/src/sunbather/RT_tables/Ne+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+2_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+2_levels_NIST.txt diff --git a/src/RT_tables/Ne+2_levels_processed.txt b/src/sunbather/RT_tables/Ne+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+2_levels_processed.txt rename to src/sunbather/RT_tables/Ne+2_levels_processed.txt diff --git a/src/RT_tables/Ne+2_lines_NIST.txt b/src/sunbather/RT_tables/Ne+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+2_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+2_lines_NIST.txt diff --git a/src/RT_tables/Ne+3_levels_NIST.txt b/src/sunbather/RT_tables/Ne+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+3_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+3_levels_NIST.txt diff --git a/src/RT_tables/Ne+3_levels_processed.txt b/src/sunbather/RT_tables/Ne+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+3_levels_processed.txt rename to src/sunbather/RT_tables/Ne+3_levels_processed.txt diff --git a/src/RT_tables/Ne+3_lines_NIST.txt b/src/sunbather/RT_tables/Ne+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+3_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+3_lines_NIST.txt diff --git a/src/RT_tables/Ne+4_levels_NIST.txt b/src/sunbather/RT_tables/Ne+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+4_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+4_levels_NIST.txt diff --git a/src/RT_tables/Ne+4_levels_processed.txt b/src/sunbather/RT_tables/Ne+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+4_levels_processed.txt rename to src/sunbather/RT_tables/Ne+4_levels_processed.txt diff --git a/src/RT_tables/Ne+4_lines_NIST.txt b/src/sunbather/RT_tables/Ne+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+4_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+4_lines_NIST.txt diff --git a/src/RT_tables/Ne+5_levels_NIST.txt 
b/src/sunbather/RT_tables/Ne+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+5_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+5_levels_NIST.txt diff --git a/src/RT_tables/Ne+5_levels_processed.txt b/src/sunbather/RT_tables/Ne+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+5_levels_processed.txt rename to src/sunbather/RT_tables/Ne+5_levels_processed.txt diff --git a/src/RT_tables/Ne+5_lines_NIST.txt b/src/sunbather/RT_tables/Ne+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+5_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+5_lines_NIST.txt diff --git a/src/RT_tables/Ne+6_levels_NIST.txt b/src/sunbather/RT_tables/Ne+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+6_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+6_levels_NIST.txt diff --git a/src/RT_tables/Ne+6_levels_processed.txt b/src/sunbather/RT_tables/Ne+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+6_levels_processed.txt rename to src/sunbather/RT_tables/Ne+6_levels_processed.txt diff --git a/src/RT_tables/Ne+6_lines_NIST.txt b/src/sunbather/RT_tables/Ne+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+6_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+6_lines_NIST.txt diff --git a/src/RT_tables/Ne+7_levels_NIST.txt b/src/sunbather/RT_tables/Ne+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+7_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+7_levels_NIST.txt diff --git a/src/RT_tables/Ne+7_levels_processed.txt b/src/sunbather/RT_tables/Ne+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+7_levels_processed.txt rename to src/sunbather/RT_tables/Ne+7_levels_processed.txt diff --git a/src/RT_tables/Ne+7_lines_NIST.txt b/src/sunbather/RT_tables/Ne+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+7_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+7_lines_NIST.txt diff --git a/src/RT_tables/Ne+8_levels_NIST.txt b/src/sunbather/RT_tables/Ne+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+8_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+8_levels_NIST.txt diff --git a/src/RT_tables/Ne+8_levels_processed.txt b/src/sunbather/RT_tables/Ne+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+8_levels_processed.txt rename to src/sunbather/RT_tables/Ne+8_levels_processed.txt diff --git a/src/RT_tables/Ne+8_lines_NIST.txt b/src/sunbather/RT_tables/Ne+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+8_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+8_lines_NIST.txt diff --git a/src/RT_tables/Ne+9_levels_NIST.txt b/src/sunbather/RT_tables/Ne+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+9_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+9_levels_NIST.txt diff --git a/src/RT_tables/Ne+9_levels_processed.txt b/src/sunbather/RT_tables/Ne+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+9_levels_processed.txt rename to src/sunbather/RT_tables/Ne+9_levels_processed.txt diff --git a/src/RT_tables/Ne+9_lines_NIST.txt b/src/sunbather/RT_tables/Ne+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+9_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+9_lines_NIST.txt diff --git a/src/RT_tables/Ne+_levels_NIST.txt b/src/sunbather/RT_tables/Ne+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne+_levels_NIST.txt rename to src/sunbather/RT_tables/Ne+_levels_NIST.txt diff --git 
a/src/RT_tables/Ne+_levels_processed.txt b/src/sunbather/RT_tables/Ne+_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne+_levels_processed.txt rename to src/sunbather/RT_tables/Ne+_levels_processed.txt diff --git a/src/RT_tables/Ne+_lines_NIST.txt b/src/sunbather/RT_tables/Ne+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne+_lines_NIST.txt rename to src/sunbather/RT_tables/Ne+_lines_NIST.txt diff --git a/src/RT_tables/Ne_levels_NIST.txt b/src/sunbather/RT_tables/Ne_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ne_levels_NIST.txt rename to src/sunbather/RT_tables/Ne_levels_NIST.txt diff --git a/src/RT_tables/Ne_levels_processed.txt b/src/sunbather/RT_tables/Ne_levels_processed.txt similarity index 100% rename from src/RT_tables/Ne_levels_processed.txt rename to src/sunbather/RT_tables/Ne_levels_processed.txt diff --git a/src/RT_tables/Ne_lines_NIST.txt b/src/sunbather/RT_tables/Ne_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ne_lines_NIST.txt rename to src/sunbather/RT_tables/Ne_lines_NIST.txt diff --git a/src/RT_tables/Ni+10_levels_NIST.txt b/src/sunbather/RT_tables/Ni+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+10_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+10_levels_NIST.txt diff --git a/src/RT_tables/Ni+10_levels_processed.txt b/src/sunbather/RT_tables/Ni+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+10_levels_processed.txt rename to src/sunbather/RT_tables/Ni+10_levels_processed.txt diff --git a/src/RT_tables/Ni+10_lines_NIST.txt b/src/sunbather/RT_tables/Ni+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+10_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+10_lines_NIST.txt diff --git a/src/RT_tables/Ni+11_levels_NIST.txt b/src/sunbather/RT_tables/Ni+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+11_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+11_levels_NIST.txt diff --git a/src/RT_tables/Ni+11_levels_processed.txt b/src/sunbather/RT_tables/Ni+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+11_levels_processed.txt rename to src/sunbather/RT_tables/Ni+11_levels_processed.txt diff --git a/src/RT_tables/Ni+11_lines_NIST.txt b/src/sunbather/RT_tables/Ni+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+11_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+11_lines_NIST.txt diff --git a/src/RT_tables/Ni+12_levels_NIST.txt b/src/sunbather/RT_tables/Ni+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+12_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+12_levels_NIST.txt diff --git a/src/RT_tables/Ni+12_levels_processed.txt b/src/sunbather/RT_tables/Ni+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+12_levels_processed.txt rename to src/sunbather/RT_tables/Ni+12_levels_processed.txt diff --git a/src/RT_tables/Ni+12_lines_NIST.txt b/src/sunbather/RT_tables/Ni+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+12_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+12_lines_NIST.txt diff --git a/src/RT_tables/Ni+2_levels_NIST.txt b/src/sunbather/RT_tables/Ni+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+2_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+2_levels_NIST.txt diff --git a/src/RT_tables/Ni+2_levels_processed.txt b/src/sunbather/RT_tables/Ni+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+2_levels_processed.txt rename to 
src/sunbather/RT_tables/Ni+2_levels_processed.txt diff --git a/src/RT_tables/Ni+2_lines_NIST.txt b/src/sunbather/RT_tables/Ni+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+2_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+2_lines_NIST.txt diff --git a/src/RT_tables/Ni+3_levels_NIST.txt b/src/sunbather/RT_tables/Ni+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+3_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+3_levels_NIST.txt diff --git a/src/RT_tables/Ni+3_levels_processed.txt b/src/sunbather/RT_tables/Ni+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+3_levels_processed.txt rename to src/sunbather/RT_tables/Ni+3_levels_processed.txt diff --git a/src/RT_tables/Ni+3_lines_NIST.txt b/src/sunbather/RT_tables/Ni+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+3_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+3_lines_NIST.txt diff --git a/src/RT_tables/Ni+4_levels_NIST.txt b/src/sunbather/RT_tables/Ni+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+4_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+4_levels_NIST.txt diff --git a/src/RT_tables/Ni+4_levels_processed.txt b/src/sunbather/RT_tables/Ni+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+4_levels_processed.txt rename to src/sunbather/RT_tables/Ni+4_levels_processed.txt diff --git a/src/RT_tables/Ni+4_lines_NIST.txt b/src/sunbather/RT_tables/Ni+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+4_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+4_lines_NIST.txt diff --git a/src/RT_tables/Ni+5_levels_NIST.txt b/src/sunbather/RT_tables/Ni+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+5_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+5_levels_NIST.txt diff --git a/src/RT_tables/Ni+5_lines_NIST.txt b/src/sunbather/RT_tables/Ni+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+5_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+5_lines_NIST.txt diff --git a/src/RT_tables/Ni+6_levels_NIST.txt b/src/sunbather/RT_tables/Ni+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+6_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+6_levels_NIST.txt diff --git a/src/RT_tables/Ni+6_levels_processed.txt b/src/sunbather/RT_tables/Ni+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+6_levels_processed.txt rename to src/sunbather/RT_tables/Ni+6_levels_processed.txt diff --git a/src/RT_tables/Ni+6_lines_NIST.txt b/src/sunbather/RT_tables/Ni+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+6_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+6_lines_NIST.txt diff --git a/src/RT_tables/Ni+7_levels_NIST.txt b/src/sunbather/RT_tables/Ni+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+7_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+7_levels_NIST.txt diff --git a/src/RT_tables/Ni+7_levels_processed.txt b/src/sunbather/RT_tables/Ni+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+7_levels_processed.txt rename to src/sunbather/RT_tables/Ni+7_levels_processed.txt diff --git a/src/RT_tables/Ni+7_lines_NIST.txt b/src/sunbather/RT_tables/Ni+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+7_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+7_lines_NIST.txt diff --git a/src/RT_tables/Ni+8_levels_NIST.txt b/src/sunbather/RT_tables/Ni+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+8_levels_NIST.txt 
rename to src/sunbather/RT_tables/Ni+8_levels_NIST.txt diff --git a/src/RT_tables/Ni+8_levels_processed.txt b/src/sunbather/RT_tables/Ni+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+8_levels_processed.txt rename to src/sunbather/RT_tables/Ni+8_levels_processed.txt diff --git a/src/RT_tables/Ni+8_lines_NIST.txt b/src/sunbather/RT_tables/Ni+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+8_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+8_lines_NIST.txt diff --git a/src/RT_tables/Ni+9_levels_NIST.txt b/src/sunbather/RT_tables/Ni+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+9_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+9_levels_NIST.txt diff --git a/src/RT_tables/Ni+9_levels_processed.txt b/src/sunbather/RT_tables/Ni+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+9_levels_processed.txt rename to src/sunbather/RT_tables/Ni+9_levels_processed.txt diff --git a/src/RT_tables/Ni+9_lines_NIST.txt b/src/sunbather/RT_tables/Ni+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+9_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+9_lines_NIST.txt diff --git a/src/RT_tables/Ni+_levels_NIST.txt b/src/sunbather/RT_tables/Ni+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni+_levels_NIST.txt rename to src/sunbather/RT_tables/Ni+_levels_NIST.txt diff --git a/src/RT_tables/Ni+_levels_processed.txt b/src/sunbather/RT_tables/Ni+_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni+_levels_processed.txt rename to src/sunbather/RT_tables/Ni+_levels_processed.txt diff --git a/src/RT_tables/Ni+_lines_NIST.txt b/src/sunbather/RT_tables/Ni+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni+_lines_NIST.txt rename to src/sunbather/RT_tables/Ni+_lines_NIST.txt diff --git a/src/RT_tables/Ni_levels_NIST.txt b/src/sunbather/RT_tables/Ni_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ni_levels_NIST.txt rename to src/sunbather/RT_tables/Ni_levels_NIST.txt diff --git a/src/RT_tables/Ni_levels_processed.txt b/src/sunbather/RT_tables/Ni_levels_processed.txt similarity index 100% rename from src/RT_tables/Ni_levels_processed.txt rename to src/sunbather/RT_tables/Ni_levels_processed.txt diff --git a/src/RT_tables/Ni_lines_NIST.txt b/src/sunbather/RT_tables/Ni_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ni_lines_NIST.txt rename to src/sunbather/RT_tables/Ni_lines_NIST.txt diff --git a/src/RT_tables/O+2_levels_NIST.txt b/src/sunbather/RT_tables/O+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+2_levels_NIST.txt rename to src/sunbather/RT_tables/O+2_levels_NIST.txt diff --git a/src/RT_tables/O+2_levels_processed.txt b/src/sunbather/RT_tables/O+2_levels_processed.txt similarity index 100% rename from src/RT_tables/O+2_levels_processed.txt rename to src/sunbather/RT_tables/O+2_levels_processed.txt diff --git a/src/RT_tables/O+2_lines_NIST.txt b/src/sunbather/RT_tables/O+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+2_lines_NIST.txt rename to src/sunbather/RT_tables/O+2_lines_NIST.txt diff --git a/src/RT_tables/O+3_levels_NIST.txt b/src/sunbather/RT_tables/O+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+3_levels_NIST.txt rename to src/sunbather/RT_tables/O+3_levels_NIST.txt diff --git a/src/RT_tables/O+3_levels_processed.txt b/src/sunbather/RT_tables/O+3_levels_processed.txt similarity index 100% rename from src/RT_tables/O+3_levels_processed.txt rename to 
src/sunbather/RT_tables/O+3_levels_processed.txt diff --git a/src/RT_tables/O+3_lines_NIST.txt b/src/sunbather/RT_tables/O+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+3_lines_NIST.txt rename to src/sunbather/RT_tables/O+3_lines_NIST.txt diff --git a/src/RT_tables/O+4_levels_NIST.txt b/src/sunbather/RT_tables/O+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+4_levels_NIST.txt rename to src/sunbather/RT_tables/O+4_levels_NIST.txt diff --git a/src/RT_tables/O+4_levels_processed.txt b/src/sunbather/RT_tables/O+4_levels_processed.txt similarity index 100% rename from src/RT_tables/O+4_levels_processed.txt rename to src/sunbather/RT_tables/O+4_levels_processed.txt diff --git a/src/RT_tables/O+4_lines_NIST.txt b/src/sunbather/RT_tables/O+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+4_lines_NIST.txt rename to src/sunbather/RT_tables/O+4_lines_NIST.txt diff --git a/src/RT_tables/O+5_levels_NIST.txt b/src/sunbather/RT_tables/O+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+5_levels_NIST.txt rename to src/sunbather/RT_tables/O+5_levels_NIST.txt diff --git a/src/RT_tables/O+5_levels_processed.txt b/src/sunbather/RT_tables/O+5_levels_processed.txt similarity index 100% rename from src/RT_tables/O+5_levels_processed.txt rename to src/sunbather/RT_tables/O+5_levels_processed.txt diff --git a/src/RT_tables/O+5_lines_NIST.txt b/src/sunbather/RT_tables/O+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+5_lines_NIST.txt rename to src/sunbather/RT_tables/O+5_lines_NIST.txt diff --git a/src/RT_tables/O+6_levels_NIST.txt b/src/sunbather/RT_tables/O+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+6_levels_NIST.txt rename to src/sunbather/RT_tables/O+6_levels_NIST.txt diff --git a/src/RT_tables/O+6_levels_processed.txt b/src/sunbather/RT_tables/O+6_levels_processed.txt similarity index 100% rename from src/RT_tables/O+6_levels_processed.txt rename to src/sunbather/RT_tables/O+6_levels_processed.txt diff --git a/src/RT_tables/O+6_lines_NIST.txt b/src/sunbather/RT_tables/O+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+6_lines_NIST.txt rename to src/sunbather/RT_tables/O+6_lines_NIST.txt diff --git a/src/RT_tables/O+7_levels_NIST.txt b/src/sunbather/RT_tables/O+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+7_levels_NIST.txt rename to src/sunbather/RT_tables/O+7_levels_NIST.txt diff --git a/src/RT_tables/O+7_levels_processed.txt b/src/sunbather/RT_tables/O+7_levels_processed.txt similarity index 100% rename from src/RT_tables/O+7_levels_processed.txt rename to src/sunbather/RT_tables/O+7_levels_processed.txt diff --git a/src/RT_tables/O+7_lines_NIST.txt b/src/sunbather/RT_tables/O+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+7_lines_NIST.txt rename to src/sunbather/RT_tables/O+7_lines_NIST.txt diff --git a/src/RT_tables/O+_levels_NIST.txt b/src/sunbather/RT_tables/O+_levels_NIST.txt similarity index 100% rename from src/RT_tables/O+_levels_NIST.txt rename to src/sunbather/RT_tables/O+_levels_NIST.txt diff --git a/src/RT_tables/O+_levels_processed.txt b/src/sunbather/RT_tables/O+_levels_processed.txt similarity index 100% rename from src/RT_tables/O+_levels_processed.txt rename to src/sunbather/RT_tables/O+_levels_processed.txt diff --git a/src/RT_tables/O+_lines_NIST.txt b/src/sunbather/RT_tables/O+_lines_NIST.txt similarity index 100% rename from src/RT_tables/O+_lines_NIST.txt rename to src/sunbather/RT_tables/O+_lines_NIST.txt 
diff --git a/src/RT_tables/O_levels_NIST.txt b/src/sunbather/RT_tables/O_levels_NIST.txt similarity index 100% rename from src/RT_tables/O_levels_NIST.txt rename to src/sunbather/RT_tables/O_levels_NIST.txt diff --git a/src/RT_tables/O_levels_processed.txt b/src/sunbather/RT_tables/O_levels_processed.txt similarity index 100% rename from src/RT_tables/O_levels_processed.txt rename to src/sunbather/RT_tables/O_levels_processed.txt diff --git a/src/RT_tables/O_lines_NIST.txt b/src/sunbather/RT_tables/O_lines_NIST.txt similarity index 100% rename from src/RT_tables/O_lines_NIST.txt rename to src/sunbather/RT_tables/O_lines_NIST.txt diff --git a/src/RT_tables/P+10_levels_NIST.txt b/src/sunbather/RT_tables/P+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+10_levels_NIST.txt rename to src/sunbather/RT_tables/P+10_levels_NIST.txt diff --git a/src/RT_tables/P+10_levels_processed.txt b/src/sunbather/RT_tables/P+10_levels_processed.txt similarity index 100% rename from src/RT_tables/P+10_levels_processed.txt rename to src/sunbather/RT_tables/P+10_levels_processed.txt diff --git a/src/RT_tables/P+10_lines_NIST.txt b/src/sunbather/RT_tables/P+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+10_lines_NIST.txt rename to src/sunbather/RT_tables/P+10_lines_NIST.txt diff --git a/src/RT_tables/P+11_levels_NIST.txt b/src/sunbather/RT_tables/P+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+11_levels_NIST.txt rename to src/sunbather/RT_tables/P+11_levels_NIST.txt diff --git a/src/RT_tables/P+11_levels_processed.txt b/src/sunbather/RT_tables/P+11_levels_processed.txt similarity index 100% rename from src/RT_tables/P+11_levels_processed.txt rename to src/sunbather/RT_tables/P+11_levels_processed.txt diff --git a/src/RT_tables/P+11_lines_NIST.txt b/src/sunbather/RT_tables/P+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+11_lines_NIST.txt rename to src/sunbather/RT_tables/P+11_lines_NIST.txt diff --git a/src/RT_tables/P+12_levels_NIST.txt b/src/sunbather/RT_tables/P+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+12_levels_NIST.txt rename to src/sunbather/RT_tables/P+12_levels_NIST.txt diff --git a/src/RT_tables/P+12_levels_processed.txt b/src/sunbather/RT_tables/P+12_levels_processed.txt similarity index 100% rename from src/RT_tables/P+12_levels_processed.txt rename to src/sunbather/RT_tables/P+12_levels_processed.txt diff --git a/src/RT_tables/P+12_lines_NIST.txt b/src/sunbather/RT_tables/P+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+12_lines_NIST.txt rename to src/sunbather/RT_tables/P+12_lines_NIST.txt diff --git a/src/RT_tables/P+2_levels_NIST.txt b/src/sunbather/RT_tables/P+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+2_levels_NIST.txt rename to src/sunbather/RT_tables/P+2_levels_NIST.txt diff --git a/src/RT_tables/P+2_levels_processed.txt b/src/sunbather/RT_tables/P+2_levels_processed.txt similarity index 100% rename from src/RT_tables/P+2_levels_processed.txt rename to src/sunbather/RT_tables/P+2_levels_processed.txt diff --git a/src/RT_tables/P+2_lines_NIST.txt b/src/sunbather/RT_tables/P+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+2_lines_NIST.txt rename to src/sunbather/RT_tables/P+2_lines_NIST.txt diff --git a/src/RT_tables/P+3_levels_NIST.txt b/src/sunbather/RT_tables/P+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+3_levels_NIST.txt rename to src/sunbather/RT_tables/P+3_levels_NIST.txt diff --git 
a/src/RT_tables/P+3_levels_processed.txt b/src/sunbather/RT_tables/P+3_levels_processed.txt similarity index 100% rename from src/RT_tables/P+3_levels_processed.txt rename to src/sunbather/RT_tables/P+3_levels_processed.txt diff --git a/src/RT_tables/P+3_lines_NIST.txt b/src/sunbather/RT_tables/P+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+3_lines_NIST.txt rename to src/sunbather/RT_tables/P+3_lines_NIST.txt diff --git a/src/RT_tables/P+4_levels_NIST.txt b/src/sunbather/RT_tables/P+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+4_levels_NIST.txt rename to src/sunbather/RT_tables/P+4_levels_NIST.txt diff --git a/src/RT_tables/P+4_levels_processed.txt b/src/sunbather/RT_tables/P+4_levels_processed.txt similarity index 100% rename from src/RT_tables/P+4_levels_processed.txt rename to src/sunbather/RT_tables/P+4_levels_processed.txt diff --git a/src/RT_tables/P+4_lines_NIST.txt b/src/sunbather/RT_tables/P+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+4_lines_NIST.txt rename to src/sunbather/RT_tables/P+4_lines_NIST.txt diff --git a/src/RT_tables/P+5_levels_NIST.txt b/src/sunbather/RT_tables/P+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+5_levels_NIST.txt rename to src/sunbather/RT_tables/P+5_levels_NIST.txt diff --git a/src/RT_tables/P+5_levels_processed.txt b/src/sunbather/RT_tables/P+5_levels_processed.txt similarity index 100% rename from src/RT_tables/P+5_levels_processed.txt rename to src/sunbather/RT_tables/P+5_levels_processed.txt diff --git a/src/RT_tables/P+5_lines_NIST.txt b/src/sunbather/RT_tables/P+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+5_lines_NIST.txt rename to src/sunbather/RT_tables/P+5_lines_NIST.txt diff --git a/src/RT_tables/P+6_levels_NIST.txt b/src/sunbather/RT_tables/P+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+6_levels_NIST.txt rename to src/sunbather/RT_tables/P+6_levels_NIST.txt diff --git a/src/RT_tables/P+6_levels_processed.txt b/src/sunbather/RT_tables/P+6_levels_processed.txt similarity index 100% rename from src/RT_tables/P+6_levels_processed.txt rename to src/sunbather/RT_tables/P+6_levels_processed.txt diff --git a/src/RT_tables/P+6_lines_NIST.txt b/src/sunbather/RT_tables/P+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+6_lines_NIST.txt rename to src/sunbather/RT_tables/P+6_lines_NIST.txt diff --git a/src/RT_tables/P+7_levels_NIST.txt b/src/sunbather/RT_tables/P+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+7_levels_NIST.txt rename to src/sunbather/RT_tables/P+7_levels_NIST.txt diff --git a/src/RT_tables/P+7_levels_processed.txt b/src/sunbather/RT_tables/P+7_levels_processed.txt similarity index 100% rename from src/RT_tables/P+7_levels_processed.txt rename to src/sunbather/RT_tables/P+7_levels_processed.txt diff --git a/src/RT_tables/P+7_lines_NIST.txt b/src/sunbather/RT_tables/P+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+7_lines_NIST.txt rename to src/sunbather/RT_tables/P+7_lines_NIST.txt diff --git a/src/RT_tables/P+8_levels_NIST.txt b/src/sunbather/RT_tables/P+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+8_levels_NIST.txt rename to src/sunbather/RT_tables/P+8_levels_NIST.txt diff --git a/src/RT_tables/P+8_levels_processed.txt b/src/sunbather/RT_tables/P+8_levels_processed.txt similarity index 100% rename from src/RT_tables/P+8_levels_processed.txt rename to src/sunbather/RT_tables/P+8_levels_processed.txt diff --git 
a/src/RT_tables/P+8_lines_NIST.txt b/src/sunbather/RT_tables/P+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+8_lines_NIST.txt rename to src/sunbather/RT_tables/P+8_lines_NIST.txt diff --git a/src/RT_tables/P+9_levels_NIST.txt b/src/sunbather/RT_tables/P+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+9_levels_NIST.txt rename to src/sunbather/RT_tables/P+9_levels_NIST.txt diff --git a/src/RT_tables/P+9_levels_processed.txt b/src/sunbather/RT_tables/P+9_levels_processed.txt similarity index 100% rename from src/RT_tables/P+9_levels_processed.txt rename to src/sunbather/RT_tables/P+9_levels_processed.txt diff --git a/src/RT_tables/P+9_lines_NIST.txt b/src/sunbather/RT_tables/P+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+9_lines_NIST.txt rename to src/sunbather/RT_tables/P+9_lines_NIST.txt diff --git a/src/RT_tables/P+_levels_NIST.txt b/src/sunbather/RT_tables/P+_levels_NIST.txt similarity index 100% rename from src/RT_tables/P+_levels_NIST.txt rename to src/sunbather/RT_tables/P+_levels_NIST.txt diff --git a/src/RT_tables/P+_levels_processed.txt b/src/sunbather/RT_tables/P+_levels_processed.txt similarity index 100% rename from src/RT_tables/P+_levels_processed.txt rename to src/sunbather/RT_tables/P+_levels_processed.txt diff --git a/src/RT_tables/P+_lines_NIST.txt b/src/sunbather/RT_tables/P+_lines_NIST.txt similarity index 100% rename from src/RT_tables/P+_lines_NIST.txt rename to src/sunbather/RT_tables/P+_lines_NIST.txt diff --git a/src/RT_tables/P_levels_NIST.txt b/src/sunbather/RT_tables/P_levels_NIST.txt similarity index 100% rename from src/RT_tables/P_levels_NIST.txt rename to src/sunbather/RT_tables/P_levels_NIST.txt diff --git a/src/RT_tables/P_levels_processed.txt b/src/sunbather/RT_tables/P_levels_processed.txt similarity index 100% rename from src/RT_tables/P_levels_processed.txt rename to src/sunbather/RT_tables/P_levels_processed.txt diff --git a/src/RT_tables/P_lines_NIST.txt b/src/sunbather/RT_tables/P_lines_NIST.txt similarity index 100% rename from src/RT_tables/P_lines_NIST.txt rename to src/sunbather/RT_tables/P_lines_NIST.txt diff --git a/src/RT_tables/S+10_levels_NIST.txt b/src/sunbather/RT_tables/S+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+10_levels_NIST.txt rename to src/sunbather/RT_tables/S+10_levels_NIST.txt diff --git a/src/RT_tables/S+10_levels_processed.txt b/src/sunbather/RT_tables/S+10_levels_processed.txt similarity index 100% rename from src/RT_tables/S+10_levels_processed.txt rename to src/sunbather/RT_tables/S+10_levels_processed.txt diff --git a/src/RT_tables/S+10_lines_NIST.txt b/src/sunbather/RT_tables/S+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+10_lines_NIST.txt rename to src/sunbather/RT_tables/S+10_lines_NIST.txt diff --git a/src/RT_tables/S+11_levels_NIST.txt b/src/sunbather/RT_tables/S+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+11_levels_NIST.txt rename to src/sunbather/RT_tables/S+11_levels_NIST.txt diff --git a/src/RT_tables/S+11_levels_processed.txt b/src/sunbather/RT_tables/S+11_levels_processed.txt similarity index 100% rename from src/RT_tables/S+11_levels_processed.txt rename to src/sunbather/RT_tables/S+11_levels_processed.txt diff --git a/src/RT_tables/S+11_lines_NIST.txt b/src/sunbather/RT_tables/S+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+11_lines_NIST.txt rename to src/sunbather/RT_tables/S+11_lines_NIST.txt diff --git a/src/RT_tables/S+12_levels_NIST.txt 
b/src/sunbather/RT_tables/S+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+12_levels_NIST.txt rename to src/sunbather/RT_tables/S+12_levels_NIST.txt diff --git a/src/RT_tables/S+12_levels_processed.txt b/src/sunbather/RT_tables/S+12_levels_processed.txt similarity index 100% rename from src/RT_tables/S+12_levels_processed.txt rename to src/sunbather/RT_tables/S+12_levels_processed.txt diff --git a/src/RT_tables/S+12_lines_NIST.txt b/src/sunbather/RT_tables/S+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+12_lines_NIST.txt rename to src/sunbather/RT_tables/S+12_lines_NIST.txt diff --git a/src/RT_tables/S+2_levels_NIST.txt b/src/sunbather/RT_tables/S+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+2_levels_NIST.txt rename to src/sunbather/RT_tables/S+2_levels_NIST.txt diff --git a/src/RT_tables/S+2_levels_processed.txt b/src/sunbather/RT_tables/S+2_levels_processed.txt similarity index 100% rename from src/RT_tables/S+2_levels_processed.txt rename to src/sunbather/RT_tables/S+2_levels_processed.txt diff --git a/src/RT_tables/S+2_lines_NIST.txt b/src/sunbather/RT_tables/S+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+2_lines_NIST.txt rename to src/sunbather/RT_tables/S+2_lines_NIST.txt diff --git a/src/RT_tables/S+3_levels_NIST.txt b/src/sunbather/RT_tables/S+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+3_levels_NIST.txt rename to src/sunbather/RT_tables/S+3_levels_NIST.txt diff --git a/src/RT_tables/S+3_levels_processed.txt b/src/sunbather/RT_tables/S+3_levels_processed.txt similarity index 100% rename from src/RT_tables/S+3_levels_processed.txt rename to src/sunbather/RT_tables/S+3_levels_processed.txt diff --git a/src/RT_tables/S+3_lines_NIST.txt b/src/sunbather/RT_tables/S+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+3_lines_NIST.txt rename to src/sunbather/RT_tables/S+3_lines_NIST.txt diff --git a/src/RT_tables/S+4_levels_NIST.txt b/src/sunbather/RT_tables/S+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+4_levels_NIST.txt rename to src/sunbather/RT_tables/S+4_levels_NIST.txt diff --git a/src/RT_tables/S+4_levels_processed.txt b/src/sunbather/RT_tables/S+4_levels_processed.txt similarity index 100% rename from src/RT_tables/S+4_levels_processed.txt rename to src/sunbather/RT_tables/S+4_levels_processed.txt diff --git a/src/RT_tables/S+4_lines_NIST.txt b/src/sunbather/RT_tables/S+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+4_lines_NIST.txt rename to src/sunbather/RT_tables/S+4_lines_NIST.txt diff --git a/src/RT_tables/S+5_levels_NIST.txt b/src/sunbather/RT_tables/S+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+5_levels_NIST.txt rename to src/sunbather/RT_tables/S+5_levels_NIST.txt diff --git a/src/RT_tables/S+5_levels_processed.txt b/src/sunbather/RT_tables/S+5_levels_processed.txt similarity index 100% rename from src/RT_tables/S+5_levels_processed.txt rename to src/sunbather/RT_tables/S+5_levels_processed.txt diff --git a/src/RT_tables/S+5_lines_NIST.txt b/src/sunbather/RT_tables/S+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+5_lines_NIST.txt rename to src/sunbather/RT_tables/S+5_lines_NIST.txt diff --git a/src/RT_tables/S+6_levels_NIST.txt b/src/sunbather/RT_tables/S+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+6_levels_NIST.txt rename to src/sunbather/RT_tables/S+6_levels_NIST.txt diff --git a/src/RT_tables/S+6_levels_processed.txt 
b/src/sunbather/RT_tables/S+6_levels_processed.txt similarity index 100% rename from src/RT_tables/S+6_levels_processed.txt rename to src/sunbather/RT_tables/S+6_levels_processed.txt diff --git a/src/RT_tables/S+6_lines_NIST.txt b/src/sunbather/RT_tables/S+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+6_lines_NIST.txt rename to src/sunbather/RT_tables/S+6_lines_NIST.txt diff --git a/src/RT_tables/S+7_levels_NIST.txt b/src/sunbather/RT_tables/S+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+7_levels_NIST.txt rename to src/sunbather/RT_tables/S+7_levels_NIST.txt diff --git a/src/RT_tables/S+7_levels_processed.txt b/src/sunbather/RT_tables/S+7_levels_processed.txt similarity index 100% rename from src/RT_tables/S+7_levels_processed.txt rename to src/sunbather/RT_tables/S+7_levels_processed.txt diff --git a/src/RT_tables/S+7_lines_NIST.txt b/src/sunbather/RT_tables/S+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+7_lines_NIST.txt rename to src/sunbather/RT_tables/S+7_lines_NIST.txt diff --git a/src/RT_tables/S+8_levels_NIST.txt b/src/sunbather/RT_tables/S+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+8_levels_NIST.txt rename to src/sunbather/RT_tables/S+8_levels_NIST.txt diff --git a/src/RT_tables/S+8_levels_processed.txt b/src/sunbather/RT_tables/S+8_levels_processed.txt similarity index 100% rename from src/RT_tables/S+8_levels_processed.txt rename to src/sunbather/RT_tables/S+8_levels_processed.txt diff --git a/src/RT_tables/S+8_lines_NIST.txt b/src/sunbather/RT_tables/S+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+8_lines_NIST.txt rename to src/sunbather/RT_tables/S+8_lines_NIST.txt diff --git a/src/RT_tables/S+9_levels_NIST.txt b/src/sunbather/RT_tables/S+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+9_levels_NIST.txt rename to src/sunbather/RT_tables/S+9_levels_NIST.txt diff --git a/src/RT_tables/S+9_levels_processed.txt b/src/sunbather/RT_tables/S+9_levels_processed.txt similarity index 100% rename from src/RT_tables/S+9_levels_processed.txt rename to src/sunbather/RT_tables/S+9_levels_processed.txt diff --git a/src/RT_tables/S+9_lines_NIST.txt b/src/sunbather/RT_tables/S+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+9_lines_NIST.txt rename to src/sunbather/RT_tables/S+9_lines_NIST.txt diff --git a/src/RT_tables/S+_levels_NIST.txt b/src/sunbather/RT_tables/S+_levels_NIST.txt similarity index 100% rename from src/RT_tables/S+_levels_NIST.txt rename to src/sunbather/RT_tables/S+_levels_NIST.txt diff --git a/src/RT_tables/S+_levels_processed.txt b/src/sunbather/RT_tables/S+_levels_processed.txt similarity index 100% rename from src/RT_tables/S+_levels_processed.txt rename to src/sunbather/RT_tables/S+_levels_processed.txt diff --git a/src/RT_tables/S+_lines_NIST.txt b/src/sunbather/RT_tables/S+_lines_NIST.txt similarity index 100% rename from src/RT_tables/S+_lines_NIST.txt rename to src/sunbather/RT_tables/S+_lines_NIST.txt diff --git a/src/RT_tables/S_levels_NIST.txt b/src/sunbather/RT_tables/S_levels_NIST.txt similarity index 100% rename from src/RT_tables/S_levels_NIST.txt rename to src/sunbather/RT_tables/S_levels_NIST.txt diff --git a/src/RT_tables/S_levels_processed.txt b/src/sunbather/RT_tables/S_levels_processed.txt similarity index 100% rename from src/RT_tables/S_levels_processed.txt rename to src/sunbather/RT_tables/S_levels_processed.txt diff --git a/src/RT_tables/S_lines_NIST.txt b/src/sunbather/RT_tables/S_lines_NIST.txt 
similarity index 100% rename from src/RT_tables/S_lines_NIST.txt rename to src/sunbather/RT_tables/S_lines_NIST.txt diff --git a/src/RT_tables/Sc+10_levels_NIST.txt b/src/sunbather/RT_tables/Sc+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+10_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+10_levels_NIST.txt diff --git a/src/RT_tables/Sc+10_levels_processed.txt b/src/sunbather/RT_tables/Sc+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+10_levels_processed.txt rename to src/sunbather/RT_tables/Sc+10_levels_processed.txt diff --git a/src/RT_tables/Sc+10_lines_NIST.txt b/src/sunbather/RT_tables/Sc+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+10_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+10_lines_NIST.txt diff --git a/src/RT_tables/Sc+11_levels_NIST.txt b/src/sunbather/RT_tables/Sc+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+11_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+11_levels_NIST.txt diff --git a/src/RT_tables/Sc+11_levels_processed.txt b/src/sunbather/RT_tables/Sc+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+11_levels_processed.txt rename to src/sunbather/RT_tables/Sc+11_levels_processed.txt diff --git a/src/RT_tables/Sc+11_lines_NIST.txt b/src/sunbather/RT_tables/Sc+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+11_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+11_lines_NIST.txt diff --git a/src/RT_tables/Sc+12_levels_NIST.txt b/src/sunbather/RT_tables/Sc+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+12_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+12_levels_NIST.txt diff --git a/src/RT_tables/Sc+12_levels_processed.txt b/src/sunbather/RT_tables/Sc+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+12_levels_processed.txt rename to src/sunbather/RT_tables/Sc+12_levels_processed.txt diff --git a/src/RT_tables/Sc+12_lines_NIST.txt b/src/sunbather/RT_tables/Sc+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+12_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+12_lines_NIST.txt diff --git a/src/RT_tables/Sc+2_levels_NIST.txt b/src/sunbather/RT_tables/Sc+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+2_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+2_levels_NIST.txt diff --git a/src/RT_tables/Sc+2_levels_processed.txt b/src/sunbather/RT_tables/Sc+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+2_levels_processed.txt rename to src/sunbather/RT_tables/Sc+2_levels_processed.txt diff --git a/src/RT_tables/Sc+2_lines_NIST.txt b/src/sunbather/RT_tables/Sc+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+2_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+2_lines_NIST.txt diff --git a/src/RT_tables/Sc+3_levels_NIST.txt b/src/sunbather/RT_tables/Sc+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+3_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+3_levels_NIST.txt diff --git a/src/RT_tables/Sc+3_levels_processed.txt b/src/sunbather/RT_tables/Sc+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+3_levels_processed.txt rename to src/sunbather/RT_tables/Sc+3_levels_processed.txt diff --git a/src/RT_tables/Sc+3_lines_NIST.txt b/src/sunbather/RT_tables/Sc+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+3_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+3_lines_NIST.txt diff --git 
a/src/RT_tables/Sc+4_levels_NIST.txt b/src/sunbather/RT_tables/Sc+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+4_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+4_levels_NIST.txt diff --git a/src/RT_tables/Sc+4_levels_processed.txt b/src/sunbather/RT_tables/Sc+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+4_levels_processed.txt rename to src/sunbather/RT_tables/Sc+4_levels_processed.txt diff --git a/src/RT_tables/Sc+4_lines_NIST.txt b/src/sunbather/RT_tables/Sc+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+4_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+4_lines_NIST.txt diff --git a/src/RT_tables/Sc+5_levels_NIST.txt b/src/sunbather/RT_tables/Sc+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+5_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+5_levels_NIST.txt diff --git a/src/RT_tables/Sc+5_levels_processed.txt b/src/sunbather/RT_tables/Sc+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+5_levels_processed.txt rename to src/sunbather/RT_tables/Sc+5_levels_processed.txt diff --git a/src/RT_tables/Sc+5_lines_NIST.txt b/src/sunbather/RT_tables/Sc+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+5_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+5_lines_NIST.txt diff --git a/src/RT_tables/Sc+6_levels_NIST.txt b/src/sunbather/RT_tables/Sc+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+6_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+6_levels_NIST.txt diff --git a/src/RT_tables/Sc+6_levels_processed.txt b/src/sunbather/RT_tables/Sc+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+6_levels_processed.txt rename to src/sunbather/RT_tables/Sc+6_levels_processed.txt diff --git a/src/RT_tables/Sc+6_lines_NIST.txt b/src/sunbather/RT_tables/Sc+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+6_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+6_lines_NIST.txt diff --git a/src/RT_tables/Sc+7_levels_NIST.txt b/src/sunbather/RT_tables/Sc+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+7_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+7_levels_NIST.txt diff --git a/src/RT_tables/Sc+7_levels_processed.txt b/src/sunbather/RT_tables/Sc+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+7_levels_processed.txt rename to src/sunbather/RT_tables/Sc+7_levels_processed.txt diff --git a/src/RT_tables/Sc+7_lines_NIST.txt b/src/sunbather/RT_tables/Sc+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+7_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+7_lines_NIST.txt diff --git a/src/RT_tables/Sc+8_levels_NIST.txt b/src/sunbather/RT_tables/Sc+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+8_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+8_levels_NIST.txt diff --git a/src/RT_tables/Sc+8_levels_processed.txt b/src/sunbather/RT_tables/Sc+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+8_levels_processed.txt rename to src/sunbather/RT_tables/Sc+8_levels_processed.txt diff --git a/src/RT_tables/Sc+8_lines_NIST.txt b/src/sunbather/RT_tables/Sc+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+8_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+8_lines_NIST.txt diff --git a/src/RT_tables/Sc+9_levels_NIST.txt b/src/sunbather/RT_tables/Sc+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+9_levels_NIST.txt rename to 
src/sunbather/RT_tables/Sc+9_levels_NIST.txt diff --git a/src/RT_tables/Sc+9_levels_processed.txt b/src/sunbather/RT_tables/Sc+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+9_levels_processed.txt rename to src/sunbather/RT_tables/Sc+9_levels_processed.txt diff --git a/src/RT_tables/Sc+9_lines_NIST.txt b/src/sunbather/RT_tables/Sc+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+9_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+9_lines_NIST.txt diff --git a/src/RT_tables/Sc+_levels_NIST.txt b/src/sunbather/RT_tables/Sc+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc+_levels_NIST.txt rename to src/sunbather/RT_tables/Sc+_levels_NIST.txt diff --git a/src/RT_tables/Sc+_levels_processed.txt b/src/sunbather/RT_tables/Sc+_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc+_levels_processed.txt rename to src/sunbather/RT_tables/Sc+_levels_processed.txt diff --git a/src/RT_tables/Sc+_lines_NIST.txt b/src/sunbather/RT_tables/Sc+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc+_lines_NIST.txt rename to src/sunbather/RT_tables/Sc+_lines_NIST.txt diff --git a/src/RT_tables/Sc_levels_NIST.txt b/src/sunbather/RT_tables/Sc_levels_NIST.txt similarity index 100% rename from src/RT_tables/Sc_levels_NIST.txt rename to src/sunbather/RT_tables/Sc_levels_NIST.txt diff --git a/src/RT_tables/Sc_levels_processed.txt b/src/sunbather/RT_tables/Sc_levels_processed.txt similarity index 100% rename from src/RT_tables/Sc_levels_processed.txt rename to src/sunbather/RT_tables/Sc_levels_processed.txt diff --git a/src/RT_tables/Sc_lines_NIST.txt b/src/sunbather/RT_tables/Sc_lines_NIST.txt similarity index 100% rename from src/RT_tables/Sc_lines_NIST.txt rename to src/sunbather/RT_tables/Sc_lines_NIST.txt diff --git a/src/RT_tables/Si+10_levels_NIST.txt b/src/sunbather/RT_tables/Si+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+10_levels_NIST.txt rename to src/sunbather/RT_tables/Si+10_levels_NIST.txt diff --git a/src/RT_tables/Si+10_levels_processed.txt b/src/sunbather/RT_tables/Si+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+10_levels_processed.txt rename to src/sunbather/RT_tables/Si+10_levels_processed.txt diff --git a/src/RT_tables/Si+10_lines_NIST.txt b/src/sunbather/RT_tables/Si+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+10_lines_NIST.txt rename to src/sunbather/RT_tables/Si+10_lines_NIST.txt diff --git a/src/RT_tables/Si+11_levels_NIST.txt b/src/sunbather/RT_tables/Si+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+11_levels_NIST.txt rename to src/sunbather/RT_tables/Si+11_levels_NIST.txt diff --git a/src/RT_tables/Si+11_levels_processed.txt b/src/sunbather/RT_tables/Si+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+11_levels_processed.txt rename to src/sunbather/RT_tables/Si+11_levels_processed.txt diff --git a/src/RT_tables/Si+11_lines_NIST.txt b/src/sunbather/RT_tables/Si+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+11_lines_NIST.txt rename to src/sunbather/RT_tables/Si+11_lines_NIST.txt diff --git a/src/RT_tables/Si+12_levels_NIST.txt b/src/sunbather/RT_tables/Si+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+12_levels_NIST.txt rename to src/sunbather/RT_tables/Si+12_levels_NIST.txt diff --git a/src/RT_tables/Si+12_levels_processed.txt b/src/sunbather/RT_tables/Si+12_levels_processed.txt similarity index 100% rename from 
src/RT_tables/Si+12_levels_processed.txt rename to src/sunbather/RT_tables/Si+12_levels_processed.txt diff --git a/src/RT_tables/Si+12_lines_NIST.txt b/src/sunbather/RT_tables/Si+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+12_lines_NIST.txt rename to src/sunbather/RT_tables/Si+12_lines_NIST.txt diff --git a/src/RT_tables/Si+2_levels_NIST.txt b/src/sunbather/RT_tables/Si+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+2_levels_NIST.txt rename to src/sunbather/RT_tables/Si+2_levels_NIST.txt diff --git a/src/RT_tables/Si+2_levels_processed.txt b/src/sunbather/RT_tables/Si+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+2_levels_processed.txt rename to src/sunbather/RT_tables/Si+2_levels_processed.txt diff --git a/src/RT_tables/Si+2_lines_NIST.txt b/src/sunbather/RT_tables/Si+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+2_lines_NIST.txt rename to src/sunbather/RT_tables/Si+2_lines_NIST.txt diff --git a/src/RT_tables/Si+3_levels_NIST.txt b/src/sunbather/RT_tables/Si+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+3_levels_NIST.txt rename to src/sunbather/RT_tables/Si+3_levels_NIST.txt diff --git a/src/RT_tables/Si+3_levels_processed.txt b/src/sunbather/RT_tables/Si+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+3_levels_processed.txt rename to src/sunbather/RT_tables/Si+3_levels_processed.txt diff --git a/src/RT_tables/Si+3_lines_NIST.txt b/src/sunbather/RT_tables/Si+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+3_lines_NIST.txt rename to src/sunbather/RT_tables/Si+3_lines_NIST.txt diff --git a/src/RT_tables/Si+4_levels_NIST.txt b/src/sunbather/RT_tables/Si+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+4_levels_NIST.txt rename to src/sunbather/RT_tables/Si+4_levels_NIST.txt diff --git a/src/RT_tables/Si+4_levels_processed.txt b/src/sunbather/RT_tables/Si+4_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+4_levels_processed.txt rename to src/sunbather/RT_tables/Si+4_levels_processed.txt diff --git a/src/RT_tables/Si+4_lines_NIST.txt b/src/sunbather/RT_tables/Si+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+4_lines_NIST.txt rename to src/sunbather/RT_tables/Si+4_lines_NIST.txt diff --git a/src/RT_tables/Si+5_levels_NIST.txt b/src/sunbather/RT_tables/Si+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+5_levels_NIST.txt rename to src/sunbather/RT_tables/Si+5_levels_NIST.txt diff --git a/src/RT_tables/Si+5_levels_processed.txt b/src/sunbather/RT_tables/Si+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+5_levels_processed.txt rename to src/sunbather/RT_tables/Si+5_levels_processed.txt diff --git a/src/RT_tables/Si+5_lines_NIST.txt b/src/sunbather/RT_tables/Si+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+5_lines_NIST.txt rename to src/sunbather/RT_tables/Si+5_lines_NIST.txt diff --git a/src/RT_tables/Si+6_levels_NIST.txt b/src/sunbather/RT_tables/Si+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+6_levels_NIST.txt rename to src/sunbather/RT_tables/Si+6_levels_NIST.txt diff --git a/src/RT_tables/Si+6_levels_processed.txt b/src/sunbather/RT_tables/Si+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+6_levels_processed.txt rename to src/sunbather/RT_tables/Si+6_levels_processed.txt diff --git a/src/RT_tables/Si+6_lines_NIST.txt 
b/src/sunbather/RT_tables/Si+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+6_lines_NIST.txt rename to src/sunbather/RT_tables/Si+6_lines_NIST.txt diff --git a/src/RT_tables/Si+7_levels_NIST.txt b/src/sunbather/RT_tables/Si+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+7_levels_NIST.txt rename to src/sunbather/RT_tables/Si+7_levels_NIST.txt diff --git a/src/RT_tables/Si+7_levels_processed.txt b/src/sunbather/RT_tables/Si+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+7_levels_processed.txt rename to src/sunbather/RT_tables/Si+7_levels_processed.txt diff --git a/src/RT_tables/Si+7_lines_NIST.txt b/src/sunbather/RT_tables/Si+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+7_lines_NIST.txt rename to src/sunbather/RT_tables/Si+7_lines_NIST.txt diff --git a/src/RT_tables/Si+8_levels_NIST.txt b/src/sunbather/RT_tables/Si+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+8_levels_NIST.txt rename to src/sunbather/RT_tables/Si+8_levels_NIST.txt diff --git a/src/RT_tables/Si+8_levels_processed.txt b/src/sunbather/RT_tables/Si+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+8_levels_processed.txt rename to src/sunbather/RT_tables/Si+8_levels_processed.txt diff --git a/src/RT_tables/Si+8_lines_NIST.txt b/src/sunbather/RT_tables/Si+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+8_lines_NIST.txt rename to src/sunbather/RT_tables/Si+8_lines_NIST.txt diff --git a/src/RT_tables/Si+9_levels_NIST.txt b/src/sunbather/RT_tables/Si+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+9_levels_NIST.txt rename to src/sunbather/RT_tables/Si+9_levels_NIST.txt diff --git a/src/RT_tables/Si+9_levels_processed.txt b/src/sunbather/RT_tables/Si+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+9_levels_processed.txt rename to src/sunbather/RT_tables/Si+9_levels_processed.txt diff --git a/src/RT_tables/Si+9_lines_NIST.txt b/src/sunbather/RT_tables/Si+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+9_lines_NIST.txt rename to src/sunbather/RT_tables/Si+9_lines_NIST.txt diff --git a/src/RT_tables/Si+_levels_NIST.txt b/src/sunbather/RT_tables/Si+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si+_levels_NIST.txt rename to src/sunbather/RT_tables/Si+_levels_NIST.txt diff --git a/src/RT_tables/Si+_levels_processed.txt b/src/sunbather/RT_tables/Si+_levels_processed.txt similarity index 100% rename from src/RT_tables/Si+_levels_processed.txt rename to src/sunbather/RT_tables/Si+_levels_processed.txt diff --git a/src/RT_tables/Si+_lines_NIST.txt b/src/sunbather/RT_tables/Si+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si+_lines_NIST.txt rename to src/sunbather/RT_tables/Si+_lines_NIST.txt diff --git a/src/RT_tables/Si_levels_NIST.txt b/src/sunbather/RT_tables/Si_levels_NIST.txt similarity index 100% rename from src/RT_tables/Si_levels_NIST.txt rename to src/sunbather/RT_tables/Si_levels_NIST.txt diff --git a/src/RT_tables/Si_levels_processed.txt b/src/sunbather/RT_tables/Si_levels_processed.txt similarity index 100% rename from src/RT_tables/Si_levels_processed.txt rename to src/sunbather/RT_tables/Si_levels_processed.txt diff --git a/src/RT_tables/Si_lines_NIST.txt b/src/sunbather/RT_tables/Si_lines_NIST.txt similarity index 100% rename from src/RT_tables/Si_lines_NIST.txt rename to src/sunbather/RT_tables/Si_lines_NIST.txt diff --git a/src/RT_tables/Ti+10_levels_NIST.txt 
b/src/sunbather/RT_tables/Ti+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+10_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+10_levels_NIST.txt diff --git a/src/RT_tables/Ti+10_levels_processed.txt b/src/sunbather/RT_tables/Ti+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+10_levels_processed.txt rename to src/sunbather/RT_tables/Ti+10_levels_processed.txt diff --git a/src/RT_tables/Ti+10_lines_NIST.txt b/src/sunbather/RT_tables/Ti+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+10_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+10_lines_NIST.txt diff --git a/src/RT_tables/Ti+11_levels_NIST.txt b/src/sunbather/RT_tables/Ti+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+11_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+11_levels_NIST.txt diff --git a/src/RT_tables/Ti+11_levels_processed.txt b/src/sunbather/RT_tables/Ti+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+11_levels_processed.txt rename to src/sunbather/RT_tables/Ti+11_levels_processed.txt diff --git a/src/RT_tables/Ti+11_lines_NIST.txt b/src/sunbather/RT_tables/Ti+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+11_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+11_lines_NIST.txt diff --git a/src/RT_tables/Ti+12_levels_NIST.txt b/src/sunbather/RT_tables/Ti+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+12_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+12_levels_NIST.txt diff --git a/src/RT_tables/Ti+12_levels_processed.txt b/src/sunbather/RT_tables/Ti+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+12_levels_processed.txt rename to src/sunbather/RT_tables/Ti+12_levels_processed.txt diff --git a/src/RT_tables/Ti+12_lines_NIST.txt b/src/sunbather/RT_tables/Ti+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+12_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+12_lines_NIST.txt diff --git a/src/RT_tables/Ti+2_levels_NIST.txt b/src/sunbather/RT_tables/Ti+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+2_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+2_levels_NIST.txt diff --git a/src/RT_tables/Ti+2_levels_processed.txt b/src/sunbather/RT_tables/Ti+2_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+2_levels_processed.txt rename to src/sunbather/RT_tables/Ti+2_levels_processed.txt diff --git a/src/RT_tables/Ti+2_lines_NIST.txt b/src/sunbather/RT_tables/Ti+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+2_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+2_lines_NIST.txt diff --git a/src/RT_tables/Ti+3_levels_NIST.txt b/src/sunbather/RT_tables/Ti+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+3_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+3_levels_NIST.txt diff --git a/src/RT_tables/Ti+3_levels_processed.txt b/src/sunbather/RT_tables/Ti+3_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+3_levels_processed.txt rename to src/sunbather/RT_tables/Ti+3_levels_processed.txt diff --git a/src/RT_tables/Ti+3_lines_NIST.txt b/src/sunbather/RT_tables/Ti+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+3_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+3_lines_NIST.txt diff --git a/src/RT_tables/Ti+4_levels_NIST.txt b/src/sunbather/RT_tables/Ti+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+4_levels_NIST.txt rename to 
src/sunbather/RT_tables/Ti+4_levels_NIST.txt diff --git a/src/RT_tables/Ti+4_lines_NIST.txt b/src/sunbather/RT_tables/Ti+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+4_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+4_lines_NIST.txt diff --git a/src/RT_tables/Ti+5_levels_NIST.txt b/src/sunbather/RT_tables/Ti+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+5_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+5_levels_NIST.txt diff --git a/src/RT_tables/Ti+5_levels_processed.txt b/src/sunbather/RT_tables/Ti+5_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+5_levels_processed.txt rename to src/sunbather/RT_tables/Ti+5_levels_processed.txt diff --git a/src/RT_tables/Ti+5_lines_NIST.txt b/src/sunbather/RT_tables/Ti+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+5_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+5_lines_NIST.txt diff --git a/src/RT_tables/Ti+6_levels_NIST.txt b/src/sunbather/RT_tables/Ti+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+6_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+6_levels_NIST.txt diff --git a/src/RT_tables/Ti+6_levels_processed.txt b/src/sunbather/RT_tables/Ti+6_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+6_levels_processed.txt rename to src/sunbather/RT_tables/Ti+6_levels_processed.txt diff --git a/src/RT_tables/Ti+6_lines_NIST.txt b/src/sunbather/RT_tables/Ti+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+6_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+6_lines_NIST.txt diff --git a/src/RT_tables/Ti+7_levels_NIST.txt b/src/sunbather/RT_tables/Ti+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+7_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+7_levels_NIST.txt diff --git a/src/RT_tables/Ti+7_levels_processed.txt b/src/sunbather/RT_tables/Ti+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+7_levels_processed.txt rename to src/sunbather/RT_tables/Ti+7_levels_processed.txt diff --git a/src/RT_tables/Ti+7_lines_NIST.txt b/src/sunbather/RT_tables/Ti+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+7_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+7_lines_NIST.txt diff --git a/src/RT_tables/Ti+8_levels_NIST.txt b/src/sunbather/RT_tables/Ti+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+8_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+8_levels_NIST.txt diff --git a/src/RT_tables/Ti+8_levels_processed.txt b/src/sunbather/RT_tables/Ti+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+8_levels_processed.txt rename to src/sunbather/RT_tables/Ti+8_levels_processed.txt diff --git a/src/RT_tables/Ti+8_lines_NIST.txt b/src/sunbather/RT_tables/Ti+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+8_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+8_lines_NIST.txt diff --git a/src/RT_tables/Ti+9_levels_NIST.txt b/src/sunbather/RT_tables/Ti+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+9_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+9_levels_NIST.txt diff --git a/src/RT_tables/Ti+9_levels_processed.txt b/src/sunbather/RT_tables/Ti+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+9_levels_processed.txt rename to src/sunbather/RT_tables/Ti+9_levels_processed.txt diff --git a/src/RT_tables/Ti+9_lines_NIST.txt b/src/sunbather/RT_tables/Ti+9_lines_NIST.txt similarity index 100% rename from 
src/RT_tables/Ti+9_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+9_lines_NIST.txt diff --git a/src/RT_tables/Ti+_levels_NIST.txt b/src/sunbather/RT_tables/Ti+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti+_levels_NIST.txt rename to src/sunbather/RT_tables/Ti+_levels_NIST.txt diff --git a/src/RT_tables/Ti+_levels_processed.txt b/src/sunbather/RT_tables/Ti+_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti+_levels_processed.txt rename to src/sunbather/RT_tables/Ti+_levels_processed.txt diff --git a/src/RT_tables/Ti+_lines_NIST.txt b/src/sunbather/RT_tables/Ti+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti+_lines_NIST.txt rename to src/sunbather/RT_tables/Ti+_lines_NIST.txt diff --git a/src/RT_tables/Ti_levels_NIST.txt b/src/sunbather/RT_tables/Ti_levels_NIST.txt similarity index 100% rename from src/RT_tables/Ti_levels_NIST.txt rename to src/sunbather/RT_tables/Ti_levels_NIST.txt diff --git a/src/RT_tables/Ti_levels_processed.txt b/src/sunbather/RT_tables/Ti_levels_processed.txt similarity index 100% rename from src/RT_tables/Ti_levels_processed.txt rename to src/sunbather/RT_tables/Ti_levels_processed.txt diff --git a/src/RT_tables/Ti_lines_NIST.txt b/src/sunbather/RT_tables/Ti_lines_NIST.txt similarity index 100% rename from src/RT_tables/Ti_lines_NIST.txt rename to src/sunbather/RT_tables/Ti_lines_NIST.txt diff --git a/src/RT_tables/V+10_levels_NIST.txt b/src/sunbather/RT_tables/V+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+10_levels_NIST.txt rename to src/sunbather/RT_tables/V+10_levels_NIST.txt diff --git a/src/RT_tables/V+10_levels_processed.txt b/src/sunbather/RT_tables/V+10_levels_processed.txt similarity index 100% rename from src/RT_tables/V+10_levels_processed.txt rename to src/sunbather/RT_tables/V+10_levels_processed.txt diff --git a/src/RT_tables/V+10_lines_NIST.txt b/src/sunbather/RT_tables/V+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+10_lines_NIST.txt rename to src/sunbather/RT_tables/V+10_lines_NIST.txt diff --git a/src/RT_tables/V+11_levels_NIST.txt b/src/sunbather/RT_tables/V+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+11_levels_NIST.txt rename to src/sunbather/RT_tables/V+11_levels_NIST.txt diff --git a/src/RT_tables/V+11_levels_processed.txt b/src/sunbather/RT_tables/V+11_levels_processed.txt similarity index 100% rename from src/RT_tables/V+11_levels_processed.txt rename to src/sunbather/RT_tables/V+11_levels_processed.txt diff --git a/src/RT_tables/V+11_lines_NIST.txt b/src/sunbather/RT_tables/V+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+11_lines_NIST.txt rename to src/sunbather/RT_tables/V+11_lines_NIST.txt diff --git a/src/RT_tables/V+12_levels_NIST.txt b/src/sunbather/RT_tables/V+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+12_levels_NIST.txt rename to src/sunbather/RT_tables/V+12_levels_NIST.txt diff --git a/src/RT_tables/V+12_levels_processed.txt b/src/sunbather/RT_tables/V+12_levels_processed.txt similarity index 100% rename from src/RT_tables/V+12_levels_processed.txt rename to src/sunbather/RT_tables/V+12_levels_processed.txt diff --git a/src/RT_tables/V+12_lines_NIST.txt b/src/sunbather/RT_tables/V+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+12_lines_NIST.txt rename to src/sunbather/RT_tables/V+12_lines_NIST.txt diff --git a/src/RT_tables/V+2_levels_NIST.txt b/src/sunbather/RT_tables/V+2_levels_NIST.txt similarity index 100% rename from 
src/RT_tables/V+2_levels_NIST.txt rename to src/sunbather/RT_tables/V+2_levels_NIST.txt diff --git a/src/RT_tables/V+2_levels_processed.txt b/src/sunbather/RT_tables/V+2_levels_processed.txt similarity index 100% rename from src/RT_tables/V+2_levels_processed.txt rename to src/sunbather/RT_tables/V+2_levels_processed.txt diff --git a/src/RT_tables/V+2_lines_NIST.txt b/src/sunbather/RT_tables/V+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+2_lines_NIST.txt rename to src/sunbather/RT_tables/V+2_lines_NIST.txt diff --git a/src/RT_tables/V+3_levels_NIST.txt b/src/sunbather/RT_tables/V+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+3_levels_NIST.txt rename to src/sunbather/RT_tables/V+3_levels_NIST.txt diff --git a/src/RT_tables/V+3_levels_processed.txt b/src/sunbather/RT_tables/V+3_levels_processed.txt similarity index 100% rename from src/RT_tables/V+3_levels_processed.txt rename to src/sunbather/RT_tables/V+3_levels_processed.txt diff --git a/src/RT_tables/V+3_lines_NIST.txt b/src/sunbather/RT_tables/V+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+3_lines_NIST.txt rename to src/sunbather/RT_tables/V+3_lines_NIST.txt diff --git a/src/RT_tables/V+4_levels_NIST.txt b/src/sunbather/RT_tables/V+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+4_levels_NIST.txt rename to src/sunbather/RT_tables/V+4_levels_NIST.txt diff --git a/src/RT_tables/V+4_lines_NIST.txt b/src/sunbather/RT_tables/V+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+4_lines_NIST.txt rename to src/sunbather/RT_tables/V+4_lines_NIST.txt diff --git a/src/RT_tables/V+5_levels_NIST.txt b/src/sunbather/RT_tables/V+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+5_levels_NIST.txt rename to src/sunbather/RT_tables/V+5_levels_NIST.txt diff --git a/src/RT_tables/V+5_lines_NIST.txt b/src/sunbather/RT_tables/V+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+5_lines_NIST.txt rename to src/sunbather/RT_tables/V+5_lines_NIST.txt diff --git a/src/RT_tables/V+6_levels_NIST.txt b/src/sunbather/RT_tables/V+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+6_levels_NIST.txt rename to src/sunbather/RT_tables/V+6_levels_NIST.txt diff --git a/src/RT_tables/V+6_levels_processed.txt b/src/sunbather/RT_tables/V+6_levels_processed.txt similarity index 100% rename from src/RT_tables/V+6_levels_processed.txt rename to src/sunbather/RT_tables/V+6_levels_processed.txt diff --git a/src/RT_tables/V+6_lines_NIST.txt b/src/sunbather/RT_tables/V+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+6_lines_NIST.txt rename to src/sunbather/RT_tables/V+6_lines_NIST.txt diff --git a/src/RT_tables/V+7_levels_NIST.txt b/src/sunbather/RT_tables/V+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+7_levels_NIST.txt rename to src/sunbather/RT_tables/V+7_levels_NIST.txt diff --git a/src/RT_tables/V+7_levels_processed.txt b/src/sunbather/RT_tables/V+7_levels_processed.txt similarity index 100% rename from src/RT_tables/V+7_levels_processed.txt rename to src/sunbather/RT_tables/V+7_levels_processed.txt diff --git a/src/RT_tables/V+7_lines_NIST.txt b/src/sunbather/RT_tables/V+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+7_lines_NIST.txt rename to src/sunbather/RT_tables/V+7_lines_NIST.txt diff --git a/src/RT_tables/V+8_levels_NIST.txt b/src/sunbather/RT_tables/V+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+8_levels_NIST.txt rename to 
src/sunbather/RT_tables/V+8_levels_NIST.txt diff --git a/src/RT_tables/V+8_levels_processed.txt b/src/sunbather/RT_tables/V+8_levels_processed.txt similarity index 100% rename from src/RT_tables/V+8_levels_processed.txt rename to src/sunbather/RT_tables/V+8_levels_processed.txt diff --git a/src/RT_tables/V+8_lines_NIST.txt b/src/sunbather/RT_tables/V+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+8_lines_NIST.txt rename to src/sunbather/RT_tables/V+8_lines_NIST.txt diff --git a/src/RT_tables/V+9_levels_NIST.txt b/src/sunbather/RT_tables/V+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+9_levels_NIST.txt rename to src/sunbather/RT_tables/V+9_levels_NIST.txt diff --git a/src/RT_tables/V+9_levels_processed.txt b/src/sunbather/RT_tables/V+9_levels_processed.txt similarity index 100% rename from src/RT_tables/V+9_levels_processed.txt rename to src/sunbather/RT_tables/V+9_levels_processed.txt diff --git a/src/RT_tables/V+9_lines_NIST.txt b/src/sunbather/RT_tables/V+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+9_lines_NIST.txt rename to src/sunbather/RT_tables/V+9_lines_NIST.txt diff --git a/src/RT_tables/V+_levels_NIST.txt b/src/sunbather/RT_tables/V+_levels_NIST.txt similarity index 100% rename from src/RT_tables/V+_levels_NIST.txt rename to src/sunbather/RT_tables/V+_levels_NIST.txt diff --git a/src/RT_tables/V+_levels_processed.txt b/src/sunbather/RT_tables/V+_levels_processed.txt similarity index 100% rename from src/RT_tables/V+_levels_processed.txt rename to src/sunbather/RT_tables/V+_levels_processed.txt diff --git a/src/RT_tables/V+_lines_NIST.txt b/src/sunbather/RT_tables/V+_lines_NIST.txt similarity index 100% rename from src/RT_tables/V+_lines_NIST.txt rename to src/sunbather/RT_tables/V+_lines_NIST.txt diff --git a/src/RT_tables/V_levels_NIST.txt b/src/sunbather/RT_tables/V_levels_NIST.txt similarity index 100% rename from src/RT_tables/V_levels_NIST.txt rename to src/sunbather/RT_tables/V_levels_NIST.txt diff --git a/src/RT_tables/V_levels_processed.txt b/src/sunbather/RT_tables/V_levels_processed.txt similarity index 100% rename from src/RT_tables/V_levels_processed.txt rename to src/sunbather/RT_tables/V_levels_processed.txt diff --git a/src/RT_tables/V_lines_NIST.txt b/src/sunbather/RT_tables/V_lines_NIST.txt similarity index 100% rename from src/RT_tables/V_lines_NIST.txt rename to src/sunbather/RT_tables/V_lines_NIST.txt diff --git a/src/RT_tables/Zn+10_levels_NIST.txt b/src/sunbather/RT_tables/Zn+10_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+10_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+10_levels_NIST.txt diff --git a/src/RT_tables/Zn+10_levels_processed.txt b/src/sunbather/RT_tables/Zn+10_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+10_levels_processed.txt rename to src/sunbather/RT_tables/Zn+10_levels_processed.txt diff --git a/src/RT_tables/Zn+10_lines_NIST.txt b/src/sunbather/RT_tables/Zn+10_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+10_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+10_lines_NIST.txt diff --git a/src/RT_tables/Zn+11_levels_NIST.txt b/src/sunbather/RT_tables/Zn+11_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+11_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+11_levels_NIST.txt diff --git a/src/RT_tables/Zn+11_levels_processed.txt b/src/sunbather/RT_tables/Zn+11_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+11_levels_processed.txt rename to 
src/sunbather/RT_tables/Zn+11_levels_processed.txt diff --git a/src/RT_tables/Zn+11_lines_NIST.txt b/src/sunbather/RT_tables/Zn+11_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+11_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+11_lines_NIST.txt diff --git a/src/RT_tables/Zn+12_levels_NIST.txt b/src/sunbather/RT_tables/Zn+12_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+12_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+12_levels_NIST.txt diff --git a/src/RT_tables/Zn+12_levels_processed.txt b/src/sunbather/RT_tables/Zn+12_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+12_levels_processed.txt rename to src/sunbather/RT_tables/Zn+12_levels_processed.txt diff --git a/src/RT_tables/Zn+12_lines_NIST.txt b/src/sunbather/RT_tables/Zn+12_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+12_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+12_lines_NIST.txt diff --git a/src/RT_tables/Zn+2_levels_NIST.txt b/src/sunbather/RT_tables/Zn+2_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+2_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+2_levels_NIST.txt diff --git a/src/RT_tables/Zn+2_lines_NIST.txt b/src/sunbather/RT_tables/Zn+2_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+2_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+2_lines_NIST.txt diff --git a/src/RT_tables/Zn+3_levels_NIST.txt b/src/sunbather/RT_tables/Zn+3_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+3_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+3_levels_NIST.txt diff --git a/src/RT_tables/Zn+3_lines_NIST.txt b/src/sunbather/RT_tables/Zn+3_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+3_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+3_lines_NIST.txt diff --git a/src/RT_tables/Zn+4_levels_NIST.txt b/src/sunbather/RT_tables/Zn+4_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+4_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+4_levels_NIST.txt diff --git a/src/RT_tables/Zn+4_lines_NIST.txt b/src/sunbather/RT_tables/Zn+4_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+4_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+4_lines_NIST.txt diff --git a/src/RT_tables/Zn+5_levels_NIST.txt b/src/sunbather/RT_tables/Zn+5_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+5_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+5_levels_NIST.txt diff --git a/src/RT_tables/Zn+5_lines_NIST.txt b/src/sunbather/RT_tables/Zn+5_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+5_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+5_lines_NIST.txt diff --git a/src/RT_tables/Zn+6_levels_NIST.txt b/src/sunbather/RT_tables/Zn+6_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+6_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+6_levels_NIST.txt diff --git a/src/RT_tables/Zn+6_lines_NIST.txt b/src/sunbather/RT_tables/Zn+6_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+6_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+6_lines_NIST.txt diff --git a/src/RT_tables/Zn+7_levels_NIST.txt b/src/sunbather/RT_tables/Zn+7_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+7_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+7_levels_NIST.txt diff --git a/src/RT_tables/Zn+7_levels_processed.txt b/src/sunbather/RT_tables/Zn+7_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+7_levels_processed.txt rename to 
src/sunbather/RT_tables/Zn+7_levels_processed.txt diff --git a/src/RT_tables/Zn+7_lines_NIST.txt b/src/sunbather/RT_tables/Zn+7_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+7_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+7_lines_NIST.txt diff --git a/src/RT_tables/Zn+8_levels_NIST.txt b/src/sunbather/RT_tables/Zn+8_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+8_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+8_levels_NIST.txt diff --git a/src/RT_tables/Zn+8_levels_processed.txt b/src/sunbather/RT_tables/Zn+8_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+8_levels_processed.txt rename to src/sunbather/RT_tables/Zn+8_levels_processed.txt diff --git a/src/RT_tables/Zn+8_lines_NIST.txt b/src/sunbather/RT_tables/Zn+8_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+8_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+8_lines_NIST.txt diff --git a/src/RT_tables/Zn+9_levels_NIST.txt b/src/sunbather/RT_tables/Zn+9_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+9_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+9_levels_NIST.txt diff --git a/src/RT_tables/Zn+9_levels_processed.txt b/src/sunbather/RT_tables/Zn+9_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn+9_levels_processed.txt rename to src/sunbather/RT_tables/Zn+9_levels_processed.txt diff --git a/src/RT_tables/Zn+9_lines_NIST.txt b/src/sunbather/RT_tables/Zn+9_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+9_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+9_lines_NIST.txt diff --git a/src/RT_tables/Zn+_levels_NIST.txt b/src/sunbather/RT_tables/Zn+_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn+_levels_NIST.txt rename to src/sunbather/RT_tables/Zn+_levels_NIST.txt diff --git a/src/RT_tables/Zn+_lines_NIST.txt b/src/sunbather/RT_tables/Zn+_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn+_lines_NIST.txt rename to src/sunbather/RT_tables/Zn+_lines_NIST.txt diff --git a/src/RT_tables/Zn_levels_NIST.txt b/src/sunbather/RT_tables/Zn_levels_NIST.txt similarity index 100% rename from src/RT_tables/Zn_levels_NIST.txt rename to src/sunbather/RT_tables/Zn_levels_NIST.txt diff --git a/src/RT_tables/Zn_levels_processed.txt b/src/sunbather/RT_tables/Zn_levels_processed.txt similarity index 100% rename from src/RT_tables/Zn_levels_processed.txt rename to src/sunbather/RT_tables/Zn_levels_processed.txt diff --git a/src/RT_tables/Zn_lines_NIST.txt b/src/sunbather/RT_tables/Zn_lines_NIST.txt similarity index 100% rename from src/RT_tables/Zn_lines_NIST.txt rename to src/sunbather/RT_tables/Zn_lines_NIST.txt diff --git a/src/RT_tables/clean_H_lines.py b/src/sunbather/RT_tables/clean_H_lines.py similarity index 100% rename from src/RT_tables/clean_H_lines.py rename to src/sunbather/RT_tables/clean_H_lines.py diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/construct_parker.py b/src/sunbather/construct_parker.py similarity index 100% rename from src/construct_parker.py rename to src/sunbather/construct_parker.py diff --git a/src/convergeT_parker.py b/src/sunbather/convergeT_parker.py similarity index 100% rename from src/convergeT_parker.py rename to src/sunbather/convergeT_parker.py diff --git a/src/solveT.py b/src/sunbather/solveT.py similarity index 100% rename from src/solveT.py rename to src/sunbather/solveT.py diff --git a/src/species_enlim.txt b/src/sunbather/species_enlim.txt 
similarity index 100% rename from src/species_enlim.txt rename to src/sunbather/species_enlim.txt diff --git a/src/tools.py b/src/sunbather/tools.py similarity index 100% rename from src/tools.py rename to src/sunbather/tools.py From 9d758786e9c99ff0aa29e7d301fd5b2bd59b061c Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 23 Oct 2024 15:00:29 +0200 Subject: [PATCH 02/63] update .gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index fa2cd40..0a6e545 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ examples/WASP52b_dT.csv examples/WASP52b_sigmaT.csv examples/WASP52b_nsig_fit.csv env/ +dist/ From d36ff6d0b07f4aedcad03c3ce4642d8d03e7294b Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 23 Oct 2024 15:04:00 +0200 Subject: [PATCH 03/63] update test to use sunbather module --- tests/test.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/test.py b/tests/test.py index 3f88f12..720cbdd 100644 --- a/tests/test.py +++ b/tests/test.py @@ -1,12 +1,9 @@ import os import sys -this_path = os.path.dirname(os.path.abspath(__file__)) #the absolute path where this code lives -src_path = this_path.split('tests')[-2] + 'src/' -sys.path.append(src_path) #sunbather imports -import tools -import RT +import sunbather.tools as tools +import sunbather.RT as RT #other imports import pandas as pd From fc8b3aca673626894e6556ad488d99109ed17784 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 23 Oct 2024 15:04:24 +0200 Subject: [PATCH 04/63] add initial pyproject.toml file --- pyproject.toml | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0e03b6f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = ["setuptools >= 61.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "sunbather" +version = "2024.10.0" +# dynamic = ["version"] +dependencies = [ + "numpy >= 1.18", + "pandas", + "matplotlib >= 2.0", + "scipy", +] +requires-python = ">= 3.7" +authors = [ + {name = "Dion Linssen", email = "d.c.linssen@uva.nl"}, + {name = "Antonija Oklopčić", email = "a.oklopcic@uva.nl"}, +] +description = "Sunbather" +readme = {file = "README.md", content-type = "text/markdown"} +license = {file = "LICENSE"} +keywords = ["astrophysics"] + +[project.urls] +Issues = "https://github.com/antonpannekoek/sunbather/issues" +Repository = "https://github.com/antonpannekoek/sunbather" From b90fb1a7235273383bbc382607b760bdc5b47853 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 23 Oct 2024 15:14:19 +0200 Subject: [PATCH 05/63] Let imports work when sunbather is a module --- src/sunbather/RT.py | 4 ++-- src/sunbather/construct_parker.py | 2 +- src/sunbather/convergeT_parker.py | 4 ++-- src/sunbather/solveT.py | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/sunbather/RT.py b/src/sunbather/RT.py index 8120009..8596078 100644 --- a/src/sunbather/RT.py +++ b/src/sunbather/RT.py @@ -1,5 +1,5 @@ #sunbather imports -import tools +import .tools #other imports import pandas as pd @@ -903,4 +903,4 @@ def convolve_spectrum_R(wavs, flux, R, verbose=False): convolved_spectrum = gaussian_filter1d(flux, sigma) - return convolved_spectrum \ No newline at end of file + return convolved_spectrum diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index b04f4cc..b454911 100644 --- a/src/sunbather/construct_parker.py +++ 
b/src/sunbather/construct_parker.py @@ -1,5 +1,5 @@ #sunbather imports -import tools +import .tools #other imports import numpy as np diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index d7a5446..1f7b2fb 100644 --- a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -1,6 +1,6 @@ #sunbather imports -import tools -import solveT +import .tools +import .solveT #other imports import pandas as pd diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index 338f247..89799aa 100644 --- a/src/sunbather/solveT.py +++ b/src/sunbather/solveT.py @@ -1,5 +1,5 @@ #sunbather imports -import tools +import .tools #other imports import pandas as pd From 2fd596f6f06f84ee28df0349f06de64da9f6d28c Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 23 Oct 2024 15:26:25 +0200 Subject: [PATCH 06/63] update prerequisites --- pyproject.toml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0e03b6f..2b12977 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,12 +7,14 @@ name = "sunbather" version = "2024.10.0" # dynamic = ["version"] dependencies = [ - "numpy >= 1.18", - "pandas", - "matplotlib >= 2.0", - "scipy", + "numpy >= 1.24.3", + "pandas >= 1.1.4", + "matplotlib >= 3.7.1", + "scipy >= 1.8.0", + "astropy >= 5.3", + "p-winds >= 1.3.4", ] -requires-python = ">= 3.7" +requires-python = ">= 3.9" authors = [ {name = "Dion Linssen", email = "d.c.linssen@uva.nl"}, {name = "Antonija Oklopčić", email = "a.oklopcic@uva.nl"}, From ea2c856456e1f888f80ce8e4830a22f4e7d80ad8 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 24 Oct 2024 14:11:51 +0200 Subject: [PATCH 07/63] add script to download and build Cloudy --- src/sunbather/install_cloudy.py | 48 +++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 src/sunbather/install_cloudy.py diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py new file mode 100644 index 0000000..b90812b --- /dev/null +++ b/src/sunbather/install_cloudy.py @@ -0,0 +1,48 @@ +import os +import pathlib +import urllib.request +import tarfile +import subprocess + + +class get_cloudy: + def __init__(self, version="23.01"): + self.version = version + self.path = "./" + major = version.split(".")[0] + self.url = f"https://data.nublado.org/cloudy_releases/c{major}/" + self.filename = "c{version}.tar.gz" + self.cloudypath = f"{pathlib.Path(__file__).parent.resolve()}/cloudy/" + + def download(self): + if not pathlib.Path(self.cloudypath).is_dir(): + os.mkdir(self.cloudypath) + else: + print("Directory already exists! Skipping download.") + return + os.chdir(self.cloudypath) + with urllib.request.urlopen(f"{self.url}{self.filename}") as g: + with open(self.filename, "b+w") as f: + f.write(g.read()) + # Go to the v23 download page and download the "c23.01.tar.gz" file + return + + def compile(self): + # Extract it in a location where you want to install Cloudy. + os.chdir(self.cloudypath) + tar = tarfile.open(self.filename, "r:gz") + tar.extractall(filter="data") + tar.close() + + # cd into the /c23.01/source/ or /c17.02/source/ folder and compile the code by running make. + os.chdir(f"{self.cloudypath}/c{version}/source/") + subprocess.Popen(["make",]).wait() + + def test(self): + # Quickly test the Cloudy installation: in the source folder, run ./cloudy.exe, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. 
+ os.chdir(f"{self.cloudypath}/c{version}/source/") + print( + "Type \"test\" and hit return twice. " + "It should print \"Cloudy exited OK\" at the end." + ) + subprocess.Popen(["./cloudy.exe",]).wait() From 3cc18f3051171001b52f865badaf114129d43f02 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 24 Oct 2024 14:12:43 +0200 Subject: [PATCH 08/63] fix name, switch to semantic versioning --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2b12977..5dde784 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "sunbather" -version = "2024.10.0" +version = "1.0.0a1" # dynamic = ["version"] dependencies = [ "numpy >= 1.24.3", @@ -19,7 +19,7 @@ authors = [ {name = "Dion Linssen", email = "d.c.linssen@uva.nl"}, {name = "Antonija Oklopčić", email = "a.oklopcic@uva.nl"}, ] -description = "Sunbather" +description = "sunbather" readme = {file = "README.md", content-type = "text/markdown"} license = {file = "LICENSE"} keywords = ["astrophysics"] From cd30b14faa36f7937f999941376c4f7e2c5ff796 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 24 Oct 2024 14:33:09 +0200 Subject: [PATCH 09/63] add max versions to dependencies --- pyproject.toml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5dde784..59751c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools >= 61.0"] +requires = ["setuptools >= 75.0"] build-backend = "setuptools.build_meta" [project] @@ -7,12 +7,12 @@ name = "sunbather" version = "1.0.0a1" # dynamic = ["version"] dependencies = [ - "numpy >= 1.24.3", - "pandas >= 1.1.4", - "matplotlib >= 3.7.1", - "scipy >= 1.8.0", - "astropy >= 5.3", - "p-winds >= 1.3.4", + "numpy >= 1.24.3, <3", + "pandas >= 1.1.4, <3", + "matplotlib >= 3.7.1, <4", + "scipy >= 1.8.0, <2", + "astropy >= 5.3, <7", + "p-winds >= 1.3.4, <2", ] requires-python = ">= 3.9" authors = [ From 016e263c8fa642e2285ed656d246a45fec589031 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 24 Oct 2024 14:39:46 +0200 Subject: [PATCH 10/63] fix errors --- src/sunbather/install_cloudy.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index b90812b..6874d49 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -5,7 +5,10 @@ import subprocess -class get_cloudy: +class GetCloudy: + """ + Class to download and compile the Cloudy program + """ def __init__(self, version="23.01"): self.version = version self.path = "./" @@ -15,6 +18,9 @@ def __init__(self, version="23.01"): self.cloudypath = f"{pathlib.Path(__file__).parent.resolve()}/cloudy/" def download(self): + """ + Creates the cloudy directory and downloads the cloudy version specified. + """ if not pathlib.Path(self.cloudypath).is_dir(): os.mkdir(self.cloudypath) else: @@ -28,19 +34,19 @@ def download(self): return def compile(self): - # Extract it in a location where you want to install Cloudy. + """ + Extracts and builds Cloudy. + """ os.chdir(self.cloudypath) - tar = tarfile.open(self.filename, "r:gz") - tar.extractall(filter="data") - tar.close() + with tarfile.open(self.filename, "r:gz") as tar: + tar.extractall(filter="data") - # cd into the /c23.01/source/ or /c17.02/source/ folder and compile the code by running make. 
- os.chdir(f"{self.cloudypath}/c{version}/source/") + os.chdir(f"{self.cloudypath}/c{self.version}/source/") subprocess.Popen(["make",]).wait() def test(self): # Quickly test the Cloudy installation: in the source folder, run ./cloudy.exe, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. - os.chdir(f"{self.cloudypath}/c{version}/source/") + os.chdir(f"{self.cloudypath}/c{self.version}/source/") print( "Type \"test\" and hit return twice. " "It should print \"Cloudy exited OK\" at the end." From 9a38176e967b740d0c1dabd35633b03928d2eb7b Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 13:23:52 +0100 Subject: [PATCH 11/63] Create pylint.yml --- .github/workflows/pylint.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/pylint.yml diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml new file mode 100644 index 0000000..c73e032 --- /dev/null +++ b/.github/workflows/pylint.yml @@ -0,0 +1,23 @@ +name: Pylint + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + - name: Analysing the code with pylint + run: | + pylint $(git ls-files '*.py') From fe852993c74e5b8cf91fd942301a2d1d13264453 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:23:21 +0100 Subject: [PATCH 12/63] Moved SED files --- {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ1132_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ1214_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ15A_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ163_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ176_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ436_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ581_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ649_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ667C_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ674_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ676A_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ699_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ729_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ832_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ849_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ876_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HATP12_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HATP26_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HD149026_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HD40307_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HD85512_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/HD97658_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/K4_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/L-678-39_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/L-98-59_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/L-980-5_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/LHS-2686_binned.spec | 0 
.../sunbather/stellar_SEDs}/LP-791-18_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/TOI193_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/TOI2134.spec | 0 .../sunbather/stellar_SEDs}/TRAPPIST-1_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP127_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP17_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP43_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP77A_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/eps_Eri_binned.spec | 0 {stellar_SEDs => src/sunbather/stellar_SEDs}/solar.spec | 0 37 files changed, 0 insertions(+), 0 deletions(-) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ1132_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ1214_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ15A_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ163_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ176_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ436_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ581_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ649_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ667C_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ674_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ676A_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ699_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ729_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ832_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ849_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/GJ876_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HATP12_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HATP26_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HD149026_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HD40307_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HD85512_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/HD97658_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/K4_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/L-678-39_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/L-98-59_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/L-980-5_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/LHS-2686_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/LP-791-18_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/TOI193_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/TOI2134.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/TRAPPIST-1_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP127_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP17_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP43_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/WASP77A_binned.spec (100%) rename {stellar_SEDs => src/sunbather/stellar_SEDs}/eps_Eri_binned.spec (100%) rename {stellar_SEDs => 
src/sunbather/stellar_SEDs}/solar.spec (100%) diff --git a/stellar_SEDs/GJ1132_binned.spec b/src/sunbather/stellar_SEDs/GJ1132_binned.spec similarity index 100% rename from stellar_SEDs/GJ1132_binned.spec rename to src/sunbather/stellar_SEDs/GJ1132_binned.spec diff --git a/stellar_SEDs/GJ1214_binned.spec b/src/sunbather/stellar_SEDs/GJ1214_binned.spec similarity index 100% rename from stellar_SEDs/GJ1214_binned.spec rename to src/sunbather/stellar_SEDs/GJ1214_binned.spec diff --git a/stellar_SEDs/GJ15A_binned.spec b/src/sunbather/stellar_SEDs/GJ15A_binned.spec similarity index 100% rename from stellar_SEDs/GJ15A_binned.spec rename to src/sunbather/stellar_SEDs/GJ15A_binned.spec diff --git a/stellar_SEDs/GJ163_binned.spec b/src/sunbather/stellar_SEDs/GJ163_binned.spec similarity index 100% rename from stellar_SEDs/GJ163_binned.spec rename to src/sunbather/stellar_SEDs/GJ163_binned.spec diff --git a/stellar_SEDs/GJ176_binned.spec b/src/sunbather/stellar_SEDs/GJ176_binned.spec similarity index 100% rename from stellar_SEDs/GJ176_binned.spec rename to src/sunbather/stellar_SEDs/GJ176_binned.spec diff --git a/stellar_SEDs/GJ436_binned.spec b/src/sunbather/stellar_SEDs/GJ436_binned.spec similarity index 100% rename from stellar_SEDs/GJ436_binned.spec rename to src/sunbather/stellar_SEDs/GJ436_binned.spec diff --git a/stellar_SEDs/GJ581_binned.spec b/src/sunbather/stellar_SEDs/GJ581_binned.spec similarity index 100% rename from stellar_SEDs/GJ581_binned.spec rename to src/sunbather/stellar_SEDs/GJ581_binned.spec diff --git a/stellar_SEDs/GJ649_binned.spec b/src/sunbather/stellar_SEDs/GJ649_binned.spec similarity index 100% rename from stellar_SEDs/GJ649_binned.spec rename to src/sunbather/stellar_SEDs/GJ649_binned.spec diff --git a/stellar_SEDs/GJ667C_binned.spec b/src/sunbather/stellar_SEDs/GJ667C_binned.spec similarity index 100% rename from stellar_SEDs/GJ667C_binned.spec rename to src/sunbather/stellar_SEDs/GJ667C_binned.spec diff --git a/stellar_SEDs/GJ674_binned.spec b/src/sunbather/stellar_SEDs/GJ674_binned.spec similarity index 100% rename from stellar_SEDs/GJ674_binned.spec rename to src/sunbather/stellar_SEDs/GJ674_binned.spec diff --git a/stellar_SEDs/GJ676A_binned.spec b/src/sunbather/stellar_SEDs/GJ676A_binned.spec similarity index 100% rename from stellar_SEDs/GJ676A_binned.spec rename to src/sunbather/stellar_SEDs/GJ676A_binned.spec diff --git a/stellar_SEDs/GJ699_binned.spec b/src/sunbather/stellar_SEDs/GJ699_binned.spec similarity index 100% rename from stellar_SEDs/GJ699_binned.spec rename to src/sunbather/stellar_SEDs/GJ699_binned.spec diff --git a/stellar_SEDs/GJ729_binned.spec b/src/sunbather/stellar_SEDs/GJ729_binned.spec similarity index 100% rename from stellar_SEDs/GJ729_binned.spec rename to src/sunbather/stellar_SEDs/GJ729_binned.spec diff --git a/stellar_SEDs/GJ832_binned.spec b/src/sunbather/stellar_SEDs/GJ832_binned.spec similarity index 100% rename from stellar_SEDs/GJ832_binned.spec rename to src/sunbather/stellar_SEDs/GJ832_binned.spec diff --git a/stellar_SEDs/GJ849_binned.spec b/src/sunbather/stellar_SEDs/GJ849_binned.spec similarity index 100% rename from stellar_SEDs/GJ849_binned.spec rename to src/sunbather/stellar_SEDs/GJ849_binned.spec diff --git a/stellar_SEDs/GJ876_binned.spec b/src/sunbather/stellar_SEDs/GJ876_binned.spec similarity index 100% rename from stellar_SEDs/GJ876_binned.spec rename to src/sunbather/stellar_SEDs/GJ876_binned.spec diff --git a/stellar_SEDs/HATP12_binned.spec b/src/sunbather/stellar_SEDs/HATP12_binned.spec similarity index 100% 
rename from stellar_SEDs/HATP12_binned.spec rename to src/sunbather/stellar_SEDs/HATP12_binned.spec diff --git a/stellar_SEDs/HATP26_binned.spec b/src/sunbather/stellar_SEDs/HATP26_binned.spec similarity index 100% rename from stellar_SEDs/HATP26_binned.spec rename to src/sunbather/stellar_SEDs/HATP26_binned.spec diff --git a/stellar_SEDs/HD149026_binned.spec b/src/sunbather/stellar_SEDs/HD149026_binned.spec similarity index 100% rename from stellar_SEDs/HD149026_binned.spec rename to src/sunbather/stellar_SEDs/HD149026_binned.spec diff --git a/stellar_SEDs/HD40307_binned.spec b/src/sunbather/stellar_SEDs/HD40307_binned.spec similarity index 100% rename from stellar_SEDs/HD40307_binned.spec rename to src/sunbather/stellar_SEDs/HD40307_binned.spec diff --git a/stellar_SEDs/HD85512_binned.spec b/src/sunbather/stellar_SEDs/HD85512_binned.spec similarity index 100% rename from stellar_SEDs/HD85512_binned.spec rename to src/sunbather/stellar_SEDs/HD85512_binned.spec diff --git a/stellar_SEDs/HD97658_binned.spec b/src/sunbather/stellar_SEDs/HD97658_binned.spec similarity index 100% rename from stellar_SEDs/HD97658_binned.spec rename to src/sunbather/stellar_SEDs/HD97658_binned.spec diff --git a/stellar_SEDs/K4_binned.spec b/src/sunbather/stellar_SEDs/K4_binned.spec similarity index 100% rename from stellar_SEDs/K4_binned.spec rename to src/sunbather/stellar_SEDs/K4_binned.spec diff --git a/stellar_SEDs/L-678-39_binned.spec b/src/sunbather/stellar_SEDs/L-678-39_binned.spec similarity index 100% rename from stellar_SEDs/L-678-39_binned.spec rename to src/sunbather/stellar_SEDs/L-678-39_binned.spec diff --git a/stellar_SEDs/L-98-59_binned.spec b/src/sunbather/stellar_SEDs/L-98-59_binned.spec similarity index 100% rename from stellar_SEDs/L-98-59_binned.spec rename to src/sunbather/stellar_SEDs/L-98-59_binned.spec diff --git a/stellar_SEDs/L-980-5_binned.spec b/src/sunbather/stellar_SEDs/L-980-5_binned.spec similarity index 100% rename from stellar_SEDs/L-980-5_binned.spec rename to src/sunbather/stellar_SEDs/L-980-5_binned.spec diff --git a/stellar_SEDs/LHS-2686_binned.spec b/src/sunbather/stellar_SEDs/LHS-2686_binned.spec similarity index 100% rename from stellar_SEDs/LHS-2686_binned.spec rename to src/sunbather/stellar_SEDs/LHS-2686_binned.spec diff --git a/stellar_SEDs/LP-791-18_binned.spec b/src/sunbather/stellar_SEDs/LP-791-18_binned.spec similarity index 100% rename from stellar_SEDs/LP-791-18_binned.spec rename to src/sunbather/stellar_SEDs/LP-791-18_binned.spec diff --git a/stellar_SEDs/TOI193_binned.spec b/src/sunbather/stellar_SEDs/TOI193_binned.spec similarity index 100% rename from stellar_SEDs/TOI193_binned.spec rename to src/sunbather/stellar_SEDs/TOI193_binned.spec diff --git a/stellar_SEDs/TOI2134.spec b/src/sunbather/stellar_SEDs/TOI2134.spec similarity index 100% rename from stellar_SEDs/TOI2134.spec rename to src/sunbather/stellar_SEDs/TOI2134.spec diff --git a/stellar_SEDs/TRAPPIST-1_binned.spec b/src/sunbather/stellar_SEDs/TRAPPIST-1_binned.spec similarity index 100% rename from stellar_SEDs/TRAPPIST-1_binned.spec rename to src/sunbather/stellar_SEDs/TRAPPIST-1_binned.spec diff --git a/stellar_SEDs/WASP127_binned.spec b/src/sunbather/stellar_SEDs/WASP127_binned.spec similarity index 100% rename from stellar_SEDs/WASP127_binned.spec rename to src/sunbather/stellar_SEDs/WASP127_binned.spec diff --git a/stellar_SEDs/WASP17_binned.spec b/src/sunbather/stellar_SEDs/WASP17_binned.spec similarity index 100% rename from stellar_SEDs/WASP17_binned.spec rename to 
src/sunbather/stellar_SEDs/WASP17_binned.spec diff --git a/stellar_SEDs/WASP43_binned.spec b/src/sunbather/stellar_SEDs/WASP43_binned.spec similarity index 100% rename from stellar_SEDs/WASP43_binned.spec rename to src/sunbather/stellar_SEDs/WASP43_binned.spec diff --git a/stellar_SEDs/WASP77A_binned.spec b/src/sunbather/stellar_SEDs/WASP77A_binned.spec similarity index 100% rename from stellar_SEDs/WASP77A_binned.spec rename to src/sunbather/stellar_SEDs/WASP77A_binned.spec diff --git a/stellar_SEDs/eps_Eri_binned.spec b/src/sunbather/stellar_SEDs/eps_Eri_binned.spec similarity index 100% rename from stellar_SEDs/eps_Eri_binned.spec rename to src/sunbather/stellar_SEDs/eps_Eri_binned.spec diff --git a/stellar_SEDs/solar.spec b/src/sunbather/stellar_SEDs/solar.spec similarity index 100% rename from stellar_SEDs/solar.spec rename to src/sunbather/stellar_SEDs/solar.spec From 06201ff3c5d1364085c22c1a77fd269cfa3070be Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:24:11 +0100 Subject: [PATCH 13/63] update project file --- pyproject.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 59751c6..fa231ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,7 +10,7 @@ dependencies = [ "numpy >= 1.24.3, <3", "pandas >= 1.1.4, <3", "matplotlib >= 3.7.1, <4", - "scipy >= 1.8.0, <2", + "scipy >= 1.9.0, <1.14", "astropy >= 5.3, <7", "p-winds >= 1.3.4, <2", ] @@ -27,3 +27,6 @@ keywords = ["astrophysics"] [project.urls] Issues = "https://github.com/antonpannekoek/sunbather/issues" Repository = "https://github.com/antonpannekoek/sunbather" + +[tool.pylint] +max-line-length = 88 From a4d38b681c68bb3474f9eb4e111b207c38765c86 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:25:37 +0100 Subject: [PATCH 14/63] update syntax --- src/sunbather/RT.py | 7 +- src/sunbather/construct_parker.py | 919 ++++++++++--- src/sunbather/solveT.py | 4 +- src/sunbather/tools.py | 2142 +++++++++++++++++++---------- 4 files changed, 2158 insertions(+), 914 deletions(-) diff --git a/src/sunbather/RT.py b/src/sunbather/RT.py index 8596078..565a155 100644 --- a/src/sunbather/RT.py +++ b/src/sunbather/RT.py @@ -1,7 +1,4 @@ -#sunbather imports -import .tools - -#other imports +# other imports import pandas as pd import numpy as np import numpy.ma as ma @@ -11,6 +8,8 @@ from scipy.ndimage import gaussian_filter1d import warnings +# sunbather imports +import sunbather.tools as tools sigt0 = 2.654e-2 #cm2 s-1 = cm2 Hz, from Axner et al. 2004 diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index b454911..5ab5082 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -1,7 +1,4 @@ -#sunbather imports -import .tools - -#other imports +# other imports import numpy as np import os import time @@ -18,6 +15,9 @@ import traceback import warnings +# sunbather imports +import sunbather.tools as tools + def cloudy_spec_to_pwinds(SEDfilename, dist_SED, dist_planet): """ @@ -45,28 +45,32 @@ def cloudy_spec_to_pwinds(SEDfilename, dist_SED, dist_planet): SED at the planet distance in the dictionary format that p-winds expects. 
""" - with open(SEDfilename, 'r') as f: + with open(SEDfilename, "r") as f: for line in f: - if not line.startswith('#'): #skip through the comments at the top - assert ('angstrom' in line) or ('Angstrom' in line) #verify the units - assert 'nuFnu' in line #verify the units - first_spec_point = np.array(line.split(' ')[:2]).astype(float) + if not line.startswith("#"): # skip through the comments at the top + assert ("angstrom" in line) or ("Angstrom" in line) # verify the units + assert "nuFnu" in line # verify the units + first_spec_point = np.array(line.split(" ")[:2]).astype(float) break rest_data = np.genfromtxt(f, skip_header=1) - SED = np.concatenate(([first_spec_point], rest_data)) #rejoin with the first spectrum point that we read separately + SED = np.concatenate( + ([first_spec_point], rest_data) + ) # rejoin with the first spectrum point that we read separately - flux = SED[:,1] / SED[:,0] #from nuFnu = wavFwav to Fwav in erg s-1 cm-2 A-1 - flux = flux * (dist_SED / dist_planet)**2 #scale to planet distance + flux = SED[:, 1] / SED[:, 0] # from nuFnu = wavFwav to Fwav in erg s-1 cm-2 A-1 + flux = flux * (dist_SED / dist_planet) ** 2 # scale to planet distance - assert SED[1,0] > SED[0,0] #check ascending wavelengths + assert SED[1, 0] > SED[0, 0] # check ascending wavelengths - #make a dictionary like p_winds expects it - spectrum = {'wavelength':SED[:,0], - 'flux_lambda':flux, - 'wavelength_unit':u.angstrom, - 'flux_unit':u.erg / u.s / u.cm ** 2 / u.angstrom, - 'SEDname':SEDfilename.split('/')[-1][:-5]} #SEDname added by me (without extension) + # make a dictionary like p_winds expects it + spectrum = { + "wavelength": SED[:, 0], + "flux_lambda": flux, + "wavelength_unit": u.angstrom, + "flux_unit": u.erg / u.s / u.cm**2 / u.angstrom, + "SEDname": SEDfilename.split("/")[-1][:-5], + } # SEDname added by me (without extension) return spectrum @@ -89,13 +93,24 @@ def calc_neutral_mu(zdict): """ abundances = tools.get_abundances(zdict) - neutral_mu = tools.calc_mu(1., 0., abundances=abundances) #set ne=0 so completely neutral + neutral_mu = tools.calc_mu( + 1.0, 0.0, abundances=abundances + ) # set ne=0 so completely neutral return neutral_mu -def save_plain_parker_profile(planet, Mdot, T, spectrum, h_fraction=0.9, - pdir='fH_0.9', overwrite=False, no_tidal=False, altmax=20): +def save_plain_parker_profile( + planet, + Mdot, + T, + spectrum, + h_fraction=0.9, + pdir="fH_0.9", + overwrite=False, + no_tidal=False, + altmax=20, +): """ Uses the p-winds code (dos Santos et al. 2022). Runs p-winds and saves a 'pprof' txt file with the r, rho, v, mu structure. 
@@ -137,59 +152,123 @@ def save_plain_parker_profile(planet, Mdot, T, spectrum, h_fraction=0.9, Mdot = float(Mdot) T = int(T) - save_name = tools.projectpath+'/parker_profiles/'+planet.name+'/'+pdir+'/pprof_'+planet.name+'_T='+str(T)+'_M='+ \ - "%.3f" %Mdot +".txt" + save_name = ( + tools.projectpath + + "/parker_profiles/" + + planet.name + + "/" + + pdir + + "/pprof_" + + planet.name + + "_T=" + + str(T) + + "_M=" + + "%.3f" % Mdot + + ".txt" + ) if os.path.exists(save_name) and not overwrite: - print("Parker profile already exists and overwrite = False:", planet.name, pdir, "%.3f" %Mdot, T) - return #this quits the function but if we're running a grid, it doesn't quit the whole Python code - - R_pl = planet.R / tools.RJ #convert from cm to Rjup - M_pl = planet.M / tools.MJ #convert from g to Mjup - - m_dot = 10 ** Mdot # Total atmospheric escape rate in g / s - r = np.logspace(0, np.log10(altmax), 1000) # Radial distance profile in unit of planetary radii + print( + "Parker profile already exists and overwrite = False:", + planet.name, + pdir, + "%.3f" % Mdot, + T, + ) + return # this quits the function but if we're running a grid, it doesn't quit the whole Python code + + R_pl = planet.R / tools.RJ # convert from cm to Rjup + M_pl = planet.M / tools.MJ # convert from g to Mjup + + m_dot = 10**Mdot # Total atmospheric escape rate in g / s + r = np.logspace( + 0, np.log10(altmax), 1000 + ) # Radial distance profile in unit of planetary radii # A few assumptions about the planet's atmosphere he_fraction = 1 - h_fraction # He number fraction he_h_fraction = he_fraction / h_fraction - mean_f_ion = 0.0 # Mean ionization fraction (will be self-consistently calculated later) + mean_f_ion = ( + 0.0 # Mean ionization fraction (will be self-consistently calculated later) + ) mu_0 = (1 + 4 * he_h_fraction) / (1 + he_h_fraction + mean_f_ion) # mu_0 is the constant mean molecular weight (assumed for now, will be updated later) - initial_f_ion = 0. 
- f_r, mu_bar = pw_hydrogen.ion_fraction(r, R_pl, T, h_fraction, - m_dot, M_pl, mu_0, - spectrum_at_planet=spectrum, exact_phi=True, - initial_f_ion=initial_f_ion, relax_solution=True, - return_mu=True, atol=1e-8, rtol=1e-5) - - vs = pw_parker.sound_speed(T, mu_bar) # Speed of sound (km/s, assumed to be constant) + initial_f_ion = 0.0 + f_r, mu_bar = pw_hydrogen.ion_fraction( + r, + R_pl, + T, + h_fraction, + m_dot, + M_pl, + mu_0, + spectrum_at_planet=spectrum, + exact_phi=True, + initial_f_ion=initial_f_ion, + relax_solution=True, + return_mu=True, + atol=1e-8, + rtol=1e-5, + ) + + vs = pw_parker.sound_speed( + T, mu_bar + ) # Speed of sound (km/s, assumed to be constant) if no_tidal: - rs = pw_parker.radius_sonic_point(M_pl, vs) # Radius at the sonic point (jupiterRad) - rhos = pw_parker.density_sonic_point(m_dot, rs, vs) # Density at the sonic point (g/cm^3) + rs = pw_parker.radius_sonic_point( + M_pl, vs + ) # Radius at the sonic point (jupiterRad) + rhos = pw_parker.density_sonic_point( + m_dot, rs, vs + ) # Density at the sonic point (g/cm^3) r_array = r * R_pl / rs v_array, rho_array = pw_parker.structure(r_array) else: - Mstar = planet.Mstar / tools.Msun #convert from g to Msun - a = planet.a / tools.AU #convert from cm to AU - rs = pw_parker.radius_sonic_point_tidal(M_pl, vs, Mstar, a) #radius at the sonic point (jupiterRad) - rhos = pw_parker.density_sonic_point(m_dot, rs, vs) # Density at the sonic point (g/cm^3) + Mstar = planet.Mstar / tools.Msun # convert from g to Msun + a = planet.a / tools.AU # convert from cm to AU + rs = pw_parker.radius_sonic_point_tidal( + M_pl, vs, Mstar, a + ) # radius at the sonic point (jupiterRad) + rhos = pw_parker.density_sonic_point( + m_dot, rs, vs + ) # Density at the sonic point (g/cm^3) r_array = r * R_pl / rs v_array, rho_array = pw_parker.structure_tidal(r_array, vs, rs, M_pl, Mstar, a) - mu_array = ((1-h_fraction)*4.0 + h_fraction)/(h_fraction*(1+f_r)+(1-h_fraction)) #this assumes no Helium ionization - - save_array = np.column_stack((r*planet.R, rho_array*rhos, v_array*vs*1e5, mu_array)) - np.savetxt(save_name, save_array, delimiter='\t', header=f"hydrogen fraction: {h_fraction:.3f}\nalt rho v mu") + mu_array = ((1 - h_fraction) * 4.0 + h_fraction) / ( + h_fraction * (1 + f_r) + (1 - h_fraction) + ) # this assumes no Helium ionization + + save_array = np.column_stack( + (r * planet.R, rho_array * rhos, v_array * vs * 1e5, mu_array) + ) + np.savetxt( + save_name, + save_array, + delimiter="\t", + header=f"hydrogen fraction: {h_fraction:.3f}\nalt rho v mu", + ) print("Parker wind profile done:", save_name) - launch_velocity = v_array[0] #velocity at Rp in units of sonic speed + launch_velocity = v_array[0] # velocity at Rp in units of sonic speed if launch_velocity > 1: - warnings.warn(f"This Parker wind profile is supersonic already at Rp: {save_name}") - - -def save_temp_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, - mu_bar=None, mu_struc=None, no_tidal=False, altmax=20): + warnings.warn( + f"This Parker wind profile is supersonic already at Rp: {save_name}" + ) + + +def save_temp_parker_profile( + planet, + Mdot, + T, + spectrum, + zdict, + pdir, + mu_bar=None, + mu_struc=None, + no_tidal=False, + altmax=20, +): """ Uses the p-winds code (dos Santos et al. 2022) Runs p_winds and saves a 'pprof' txt file with the r, rho, v, mu structure. 
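The sonic-point quantities that the two profile functions above obtain from p-winds (pw_parker.sound_speed, radius_sonic_point and density_sonic_point) follow from the standard isothermal Parker-wind relations. Below is a minimal back-of-the-envelope sketch in cgs units for the non-tidal case only; all input values are placeholders, and p-winds itself works in km/s and Jupiter radii rather than cgs. With tidal forces included (the default path in the code above), the sonic radius instead comes from radius_sonic_point_tidal, which adds the stellar term.

import numpy as np

# cgs constants
k_B = 1.380649e-16   # erg/K
m_H = 1.6726e-24     # g
G   = 6.674e-8       # cm3 g-1 s-2

T      = 8000.0          # K, placeholder isothermal temperature
mu_bar = 0.6             # mean particle mass in units of m_H, placeholder
M_pl   = 0.3 * 1.898e30  # g, placeholder planet mass (~0.3 Jupiter masses)
Mdot   = 10**10.0        # g/s, placeholder mass-loss rate

v_s   = np.sqrt(k_B * T / (mu_bar * m_H))     # isothermal sound speed [cm/s]
r_s   = G * M_pl / (2 * v_s**2)               # sonic radius, no tidal term [cm]
rho_s = Mdot / (4 * np.pi * r_s**2 * v_s)     # density at the sonic point [g/cm3]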
@@ -248,62 +327,115 @@ def save_temp_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, Mdot = float(Mdot) T = int(T) - R_pl = planet.R / tools.RJ #convert from cm to Rjup - M_pl = planet.M / tools.MJ #convert from g to Mjup - - m_dot = 10 ** Mdot # Total atmospheric escape rate in g / s - r = np.logspace(0, np.log10(altmax), 1000) # Radial distance profile in unit of planetary radii + R_pl = planet.R / tools.RJ # convert from cm to Rjup + M_pl = planet.M / tools.MJ # convert from g to Mjup + m_dot = 10**Mdot # Total atmospheric escape rate in g / s + r = np.logspace( + 0, np.log10(altmax), 1000 + ) # Radial distance profile in unit of planetary radii - if mu_bar is None: #if not given by a Cloudy run, let p-winds calculate it (used the first iteration) - #pretend that the metals don't exist and just calculate the h_fraction with only H and He abundances - abundances = tools.get_abundances(zdict) #solar abundances - h_fraction = abundances['H'] / (abundances['H'] + abundances['He']) #approximate it by this for now, later Cloudy will give mu + if ( + mu_bar is None + ): # if not given by a Cloudy run, let p-winds calculate it (used the first iteration) + # pretend that the metals don't exist and just calculate the h_fraction with only H and He abundances + abundances = tools.get_abundances(zdict) # solar abundances + h_fraction = abundances["H"] / ( + abundances["H"] + abundances["He"] + ) # approximate it by this for now, later Cloudy will give mu # A few assumptions about the planet's atmosphere he_fraction = 1 - h_fraction # He number fraction he_h_fraction = he_fraction / h_fraction - mean_f_ion = 0.0 # Mean ionization fraction (will be self-consistently calculated later) + mean_f_ion = ( + 0.0 # Mean ionization fraction (will be self-consistently calculated later) + ) mu_0 = (1 + 4 * he_h_fraction) / (1 + he_h_fraction + mean_f_ion) # mu_0 is the constant mean molecular weight (assumed for now, will be updated later) - initial_f_ion = 0. - - f_r, mu_bar = pw_hydrogen.ion_fraction(r, R_pl, T, h_fraction, - m_dot, M_pl, mu_0, - spectrum_at_planet=spectrum, exact_phi=True, - initial_f_ion=initial_f_ion, relax_solution=True, - return_mu=True, atol=1e-8, rtol=1e-5, - convergence=0.0001, max_n_relax=30) #I personally think we can use more than 0.01 convergence - - mu_array = ((1-h_fraction)*4.0 + h_fraction)/(h_fraction*(1+f_r)+(1-h_fraction)) #this assumes no Helium ionization - - else: #used later iterations - assert np.abs(mu_struc[0,0] - 1.) 
< 0.03 and np.abs(mu_struc[-1,0] - altmax) < 0.0001, "Looks like Cloudy didn't simulate to 1Rp: "+str(mu_struc[0,0]) #ensure safe extrapolation - mu_array = interp1d(mu_struc[:,0], mu_struc[:,1], fill_value='extrapolate')(r) - - vs = pw_parker.sound_speed(T, mu_bar) # Speed of sound (km/s, assumed to be constant) + initial_f_ion = 0.0 + + f_r, mu_bar = pw_hydrogen.ion_fraction( + r, + R_pl, + T, + h_fraction, + m_dot, + M_pl, + mu_0, + spectrum_at_planet=spectrum, + exact_phi=True, + initial_f_ion=initial_f_ion, + relax_solution=True, + return_mu=True, + atol=1e-8, + rtol=1e-5, + convergence=0.0001, + max_n_relax=30, + ) # I personally think we can use more than 0.01 convergence + + mu_array = ((1 - h_fraction) * 4.0 + h_fraction) / ( + h_fraction * (1 + f_r) + (1 - h_fraction) + ) # this assumes no Helium ionization + + else: # used later iterations + assert ( + np.abs(mu_struc[0, 0] - 1.0) < 0.03 + and np.abs(mu_struc[-1, 0] - altmax) < 0.0001 + ), "Looks like Cloudy didn't simulate to 1Rp: " + str( + mu_struc[0, 0] + ) # ensure safe extrapolation + mu_array = interp1d(mu_struc[:, 0], mu_struc[:, 1], fill_value="extrapolate")(r) + + vs = pw_parker.sound_speed( + T, mu_bar + ) # Speed of sound (km/s, assumed to be constant) if no_tidal: - rs = pw_parker.radius_sonic_point(M_pl, vs) # Radius at the sonic point (jupiterRad) - rhos = pw_parker.density_sonic_point(m_dot, rs, vs) # Density at the sonic point (g/cm^3) + rs = pw_parker.radius_sonic_point( + M_pl, vs + ) # Radius at the sonic point (jupiterRad) + rhos = pw_parker.density_sonic_point( + m_dot, rs, vs + ) # Density at the sonic point (g/cm^3) r_array = r * R_pl / rs v_array, rho_array = pw_parker.structure(r_array) else: - Mstar = planet.Mstar / tools.Msun #convert from g to Msun - a = planet.a / tools.AU #convert from cm to AU - rs = pw_parker.radius_sonic_point_tidal(M_pl, vs, Mstar, a) #radius at the sonic point (jupiterRad) - rhos = pw_parker.density_sonic_point(m_dot, rs, vs) # Density at the sonic point (g/cm^3) + Mstar = planet.Mstar / tools.Msun # convert from g to Msun + a = planet.a / tools.AU # convert from cm to AU + rs = pw_parker.radius_sonic_point_tidal( + M_pl, vs, Mstar, a + ) # radius at the sonic point (jupiterRad) + rhos = pw_parker.density_sonic_point( + m_dot, rs, vs + ) # Density at the sonic point (g/cm^3) r_array = r * R_pl / rs v_array, rho_array = pw_parker.structure_tidal(r_array, vs, rs, M_pl, Mstar, a) - save_array = np.column_stack((r*planet.R, rho_array*rhos, v_array*vs*1e5, mu_array)) - save_name = tools.projectpath+'/parker_profiles/'+planet.name+'/'+pdir+'/temp/pprof_'+planet.name+'_T='+str(T)+'_M='+"%.3f" %Mdot +".txt" + save_array = np.column_stack( + (r * planet.R, rho_array * rhos, v_array * vs * 1e5, mu_array) + ) + save_name = ( + tools.projectpath + + "/parker_profiles/" + + planet.name + + "/" + + pdir + + "/temp/pprof_" + + planet.name + + "_T=" + + str(T) + + "_M=" + + "%.3f" % Mdot + + ".txt" + ) zdictstr = "abundance scale factors relative to solar:" for sp in zdict.keys(): - zdictstr += " "+sp+"="+"%.1f" %zdict[sp] - np.savetxt(save_name, save_array, delimiter='\t', header=zdictstr+"\nalt rho v mu") + zdictstr += " " + sp + "=" + "%.1f" % zdict[sp] + np.savetxt( + save_name, save_array, delimiter="\t", header=zdictstr + "\nalt rho v mu" + ) - launch_velocity = v_array[0] #velocity at Rp in units of sonic speed + launch_velocity = v_array[0] # velocity at Rp in units of sonic speed return save_name, mu_bar, launch_velocity @@ -332,20 +464,34 @@ def run_parker_with_cloudy(filename, T, 
planet, zdict): Radial density, velocity and mean particle mass profiles of the isothermal Parker wind profile. """ - pprof = tools.read_parker('', '', '', '', filename=filename) + pprof = tools.read_parker("", "", "", "", filename=filename) - altmax = pprof.alt.iloc[-1] / planet.R #maximum altitude of the profile in units of Rp + altmax = ( + pprof.alt.iloc[-1] / planet.R + ) # maximum altitude of the profile in units of Rp alt = pprof.alt.values hden = tools.rho_to_hden(pprof.rho.values, abundances=tools.get_abundances(zdict)) dlaw = tools.alt_array_to_Cloudy(alt, hden, altmax, planet.R, 1000, log=True) nuFnu_1AU_linear, Ryd = tools.get_SED_norm_1AU(planet.SEDname) - nuFnu_a_log = np.log10(nuFnu_1AU_linear / ((planet.a - altmax*planet.R)/tools.AU)**2) - - simname = filename.split('.txt')[0] - tools.write_Cloudy_in(simname, title='Simulation of '+filename, overwrite=True, - flux_scaling=[nuFnu_a_log, Ryd], SED=planet.SEDname, - dlaw=dlaw, double_tau=True, cosmic_rays=True, zdict=zdict, constantT=T, outfiles=['.ovr']) + nuFnu_a_log = np.log10( + nuFnu_1AU_linear / ((planet.a - altmax * planet.R) / tools.AU) ** 2 + ) + + simname = filename.split(".txt")[0] + tools.write_Cloudy_in( + simname, + title="Simulation of " + filename, + overwrite=True, + flux_scaling=[nuFnu_a_log, Ryd], + SED=planet.SEDname, + dlaw=dlaw, + double_tau=True, + cosmic_rays=True, + zdict=zdict, + constantT=T, + outfiles=[".ovr"], + ) tools.run_Cloudy(simname) @@ -355,7 +501,7 @@ def run_parker_with_cloudy(filename, T, planet, zdict): def calc_mu_bar(sim): """ Calculates the weighted mean of the radial mean particle mass profile, - according to Eq. A.3 of Lampon et al. (2020). Code adapted from + according to Eq. A.3 of Lampon et al. (2020). Code adapted from p_winds.parker.average_molecular_weight(). Parameters @@ -370,10 +516,10 @@ def calc_mu_bar(sim): """ # Converting units - m_planet = sim.p.M / 1000. #planet mass in kg - r = sim.ovr.alt.values[::-1] / 100. # Radius profile in m - v_r = sim.ovr.v.values[::-1] / 100. # Velocity profile in unit of m / s - temperature = sim.ovr.Te.values[0] # (Isothermal) temperature in units of K + m_planet = sim.p.M / 1000.0 # planet mass in kg + r = sim.ovr.alt.values[::-1] / 100.0 # Radius profile in m + v_r = sim.ovr.v.values[::-1] / 100.0 # Velocity profile in unit of m / s + temperature = sim.ovr.Te.values[0] # (Isothermal) temperature in units of K # Physical constants k_b = 1.380649e-23 # Boltzmann's constant in J / K @@ -384,10 +530,10 @@ def calc_mu_bar(sim): # Eq. A.3 of Lampón et al. 
2020 is a combination of several integrals, which # we calculate here - int_1 = simpson(mu_r / r ** 2, r) + int_1 = simpson(mu_r / r**2, r) int_2 = simpson(mu_r * v_r, v_r) int_3 = trapz(mu_r, 1 / mu_r) - int_4 = simpson(1 / r ** 2, r) + int_4 = simpson(1 / r**2, r) int_5 = simpson(v_r, v_r) int_6 = 1 / mu_r[-1] - 1 / mu_r[0] term_1 = grav * m_planet * int_1 + int_2 + k_b * temperature * int_3 @@ -397,13 +543,25 @@ def calc_mu_bar(sim): return mu_bar -def save_cloudy_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, - convergence=0.01, maxit=7, cleantemp=False, - overwrite=False, verbose=False, avoid_pwinds_mubar=False, - no_tidal=False, altmax=20): +def save_cloudy_parker_profile( + planet, + Mdot, + T, + spectrum, + zdict, + pdir, + convergence=0.01, + maxit=7, + cleantemp=False, + overwrite=False, + verbose=False, + avoid_pwinds_mubar=False, + no_tidal=False, + altmax=20, +): """ Calculates an isothermal Parker wind profile with any composition by iteratively - running the p-winds code (dos Santos et al. 2022) and Cloudy (Ferland et al. 1998; 2017, + running the p-winds code (dos Santos et al. 2022) and Cloudy (Ferland et al. 1998; 2017, Chatziokos et al. 2023). This function works iteratively as follows: p_winds calculates a density profile, Cloudy calculates the mean particle mass profile, we calculate the associated mu_bar value, which is passed to p-winds to calculate a new @@ -453,62 +611,147 @@ def save_cloudy_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, Maximum altitude of the profile in units of the planet radius. By default 20. """ - save_name = tools.projectpath+'/parker_profiles/'+planet.name+'/'+pdir+'/pprof_'+planet.name+'_T='+str(T)+'_M='+ \ - "%.3f" %Mdot +".txt" + save_name = ( + tools.projectpath + + "/parker_profiles/" + + planet.name + + "/" + + pdir + + "/pprof_" + + planet.name + + "_T=" + + str(T) + + "_M=" + + "%.3f" % Mdot + + ".txt" + ) if os.path.exists(save_name) and not overwrite: - print("Parker profile already exists and overwrite = False:", planet.name, pdir, "%.3f" %Mdot, T) - return #this quits the function but if we're running a grid, it doesn't quit the whole Python code + print( + "Parker profile already exists and overwrite = False:", + planet.name, + pdir, + "%.3f" % Mdot, + T, + ) + return # this quits the function but if we're running a grid, it doesn't quit the whole Python code if avoid_pwinds_mubar: - tools.verbose_print("Making initial parker profile while assuming a completely neutral mu_bar...", verbose=verbose) + tools.verbose_print( + "Making initial parker profile while assuming a completely neutral mu_bar...", + verbose=verbose, + ) neutral_mu_bar = calc_neutral_mu(zdict) - neutral_mu_struc = np.array([[1., neutral_mu_bar], [altmax, neutral_mu_bar]]) #set up an array with constant mu(r) at the neutral value - filename, previous_mu_bar, launch_velocity = save_temp_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, - mu_bar=neutral_mu_bar, mu_struc=neutral_mu_struc, no_tidal=no_tidal, altmax=altmax) - tools.verbose_print(f"Saved temp parker profile with neutral mu_bar: {previous_mu_bar}" , verbose=verbose) + neutral_mu_struc = np.array( + [[1.0, neutral_mu_bar], [altmax, neutral_mu_bar]] + ) # set up an array with constant mu(r) at the neutral value + filename, previous_mu_bar, launch_velocity = save_temp_parker_profile( + planet, + Mdot, + T, + spectrum, + zdict, + pdir, + mu_bar=neutral_mu_bar, + mu_struc=neutral_mu_struc, + no_tidal=no_tidal, + altmax=altmax, + ) + tools.verbose_print( + f"Saved temp parker profile 
with neutral mu_bar: {previous_mu_bar}", + verbose=verbose, + ) else: - tools.verbose_print("Making initial parker profile with p-winds...", verbose=verbose) - filename, previous_mu_bar, launch_velocity = save_temp_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, mu_bar=None, no_tidal=no_tidal, altmax=altmax) - tools.verbose_print(f"Saved temp parker profile with p-winds's mu_bar: {previous_mu_bar}" , verbose=verbose) + tools.verbose_print( + "Making initial parker profile with p-winds...", verbose=verbose + ) + filename, previous_mu_bar, launch_velocity = save_temp_parker_profile( + planet, + Mdot, + T, + spectrum, + zdict, + pdir, + mu_bar=None, + no_tidal=no_tidal, + altmax=altmax, + ) + tools.verbose_print( + f"Saved temp parker profile with p-winds's mu_bar: {previous_mu_bar}", + verbose=verbose, + ) for itno in range(maxit): tools.verbose_print(f"Iteration number: {itno+1}", verbose=verbose) - + tools.verbose_print("Running parker profile through Cloudy...", verbose=verbose) simname, pprof = run_parker_with_cloudy(filename, T, planet, zdict) tools.verbose_print("Cloudy run done.", verbose=verbose) sim = tools.Sim(simname, altmax=altmax, planet=planet) - sim.addv(pprof.alt, pprof.v) #add the velocity structure to the sim, so that calc_mu_bar() works. + sim.addv( + pprof.alt, pprof.v + ) # add the velocity structure to the sim, so that calc_mu_bar() works. mu_bar = calc_mu_bar(sim) - tools.verbose_print(f"Making new parker profile with p-winds based on Cloudy's reported mu_bar: {mu_bar}", verbose=verbose) - mu_struc = np.column_stack((sim.ovr.alt.values[::-1]/planet.R, sim.ovr.mu[::-1].values)) #pass Cloudy's mu structure to save in the pprof - filename, mu_bar, launch_velocity = save_temp_parker_profile(planet, Mdot, T, spectrum, zdict, pdir, - mu_bar=mu_bar, mu_struc=mu_struc, no_tidal=no_tidal, altmax=altmax) + tools.verbose_print( + f"Making new parker profile with p-winds based on Cloudy's reported mu_bar: {mu_bar}", + verbose=verbose, + ) + mu_struc = np.column_stack( + (sim.ovr.alt.values[::-1] / planet.R, sim.ovr.mu[::-1].values) + ) # pass Cloudy's mu structure to save in the pprof + filename, mu_bar, launch_velocity = save_temp_parker_profile( + planet, + Mdot, + T, + spectrum, + zdict, + pdir, + mu_bar=mu_bar, + mu_struc=mu_struc, + no_tidal=no_tidal, + altmax=altmax, + ) tools.verbose_print("Saved temp parker profile.", verbose=verbose) - if np.abs(mu_bar - previous_mu_bar)/previous_mu_bar < convergence: + if np.abs(mu_bar - previous_mu_bar) / previous_mu_bar < convergence: print("mu_bar converged:", save_name) if launch_velocity > 1: - warnings.warn(f"This Parker wind profile is supersonic already at Rp: {save_name}") + warnings.warn( + f"This Parker wind profile is supersonic already at Rp: {save_name}" + ) break else: previous_mu_bar = mu_bar - copyfile(filename, filename.split('temp/')[0] + filename.split('temp/')[1]) - tools.verbose_print("Copied final parker profile from temp to parent folder.", verbose=verbose) + copyfile(filename, filename.split("temp/")[0] + filename.split("temp/")[1]) + tools.verbose_print( + "Copied final parker profile from temp to parent folder.", verbose=verbose + ) - if cleantemp: #then we remove the temp files - os.remove(simname+'.in') - os.remove(simname+'.out') - os.remove(simname+'.ovr') + if cleantemp: # then we remove the temp files + os.remove(simname + ".in") + os.remove(simname + ".out") + os.remove(simname + ".ovr") os.remove(filename) tools.verbose_print("Temporary files removed.", verbose=verbose) -def run_s(plname, pdir, 
Mdot, T, SEDname, fH, zdict, mu_conv, - mu_maxit, overwrite, verbose, avoid_pwinds_mubar, no_tidal): +def run_s( + plname, + pdir, + Mdot, + T, + SEDname, + fH, + zdict, + mu_conv, + mu_maxit, + overwrite, + verbose, + avoid_pwinds_mubar, + no_tidal, +): """ Calculates a single isothermal Parker wind profile. @@ -562,18 +805,46 @@ def run_s(plname, pdir, Mdot, T, SEDname, fH, zdict, mu_conv, """ p = tools.Planet(plname) - if SEDname != 'real': + if SEDname != "real": p.set_var(SEDname=SEDname) - altmax = min(20, int((p.a - p.Rstar) / p.R)) #solve profile up to 20 Rp, unless the star is closer than that - spectrum = cloudy_spec_to_pwinds(tools.cloudypath+'/data/SED/'+p.SEDname, 1., (p.a - altmax*p.R)/tools.AU) #assumes SED is at 1 AU - - if fH != None: #then run p_winds standalone - save_plain_parker_profile(p, Mdot, T, spectrum, h_fraction=fH, pdir=pdir, overwrite=overwrite, no_tidal=no_tidal, altmax=altmax) - else: #then run p_winds/Cloudy iterative scheme - save_cloudy_parker_profile(p, Mdot, T, spectrum, zdict, pdir, - convergence=mu_conv, maxit=mu_maxit, cleantemp=True, - overwrite=overwrite, verbose=verbose, avoid_pwinds_mubar=avoid_pwinds_mubar, - no_tidal=no_tidal, altmax=altmax) + altmax = min( + 20, int((p.a - p.Rstar) / p.R) + ) # solve profile up to 20 Rp, unless the star is closer than that + spectrum = cloudy_spec_to_pwinds( + tools.cloudypath + "/data/SED/" + p.SEDname, + 1.0, + (p.a - altmax * p.R) / tools.AU, + ) # assumes SED is at 1 AU + + if fH != None: # then run p_winds standalone + save_plain_parker_profile( + p, + Mdot, + T, + spectrum, + h_fraction=fH, + pdir=pdir, + overwrite=overwrite, + no_tidal=no_tidal, + altmax=altmax, + ) + else: # then run p_winds/Cloudy iterative scheme + save_cloudy_parker_profile( + p, + Mdot, + T, + spectrum, + zdict, + pdir, + convergence=mu_conv, + maxit=mu_maxit, + cleantemp=True, + overwrite=overwrite, + verbose=verbose, + avoid_pwinds_mubar=avoid_pwinds_mubar, + no_tidal=no_tidal, + altmax=altmax, + ) def catch_errors_run_s(*args): @@ -587,10 +858,26 @@ def catch_errors_run_s(*args): traceback.print_exc() -def run_g(plname, pdir, cores, Mdot_l, Mdot_u, Mdot_s, - T_l, T_u, T_s, SEDname, fH, zdict, mu_conv, - mu_maxit, overwrite, verbose, avoid_pwinds_mubar, - no_tidal): +def run_g( + plname, + pdir, + cores, + Mdot_l, + Mdot_u, + Mdot_s, + T_l, + T_u, + T_s, + SEDname, + fH, + zdict, + mu_conv, + mu_maxit, + overwrite, + verbose, + avoid_pwinds_mubar, + no_tidal, +): """ Calculates a grid of isothermal Parker wind models, by executing the run_s() function in parallel. 
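A minimal usage sketch of run_s() for a single profile (illustrative only: the planet name and profile directory are hypothetical and must exist in $SUNBATHER_PROJECT_PATH/planets.txt and under parker_profiles/, and the call to tools.get_zdict() assumes the signature visible further below in this patch):

    zdict = tools.get_zdict(z=1.0, zelem={})  # solar-scaled abundances, no per-element tweaks
    run_s(
        plname="WASP52b",          # hypothetical planet; must be listed in planets.txt
        pdir="z_1",                # hypothetical subdirectory under parker_profiles/<plname>/
        Mdot=11.0,                 # log10 of the mass-loss rate
        T=9000,                    # isothermal wind temperature in K
        SEDname="real",            # "real" keeps the SED set in planets.txt
        fH=None,                   # None selects the iterative p-winds/Cloudy scheme
        zdict=zdict,
        mu_conv=0.01,              # relative convergence criterion on mu_bar
        mu_maxit=7,                # maximum number of p-winds/Cloudy iterations
        overwrite=False,
        verbose=True,
        avoid_pwinds_mubar=False,
        no_tidal=False,            # keep the stellar tidal gravity term
    )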
@@ -656,23 +943,40 @@ def run_g(plname, pdir, cores, Mdot_l, Mdot_u, Mdot_s, p = multiprocessing.Pool(cores) pars = [] - for Mdot in np.arange(float(Mdot_l), float(Mdot_u)+1e-6, float(Mdot_s)): #1e-6 so that upper bound is inclusive - for T in np.arange(int(T_l), int(T_u)+1e-6, int(T_s)).astype(int): - pars.append((plname, pdir, Mdot, T, SEDname, fH, zdict, mu_conv, mu_maxit, overwrite, verbose, avoid_pwinds_mubar, no_tidal)) + for Mdot in np.arange( + float(Mdot_l), float(Mdot_u) + 1e-6, float(Mdot_s) + ): # 1e-6 so that upper bound is inclusive + for T in np.arange(int(T_l), int(T_u) + 1e-6, int(T_s)).astype(int): + pars.append( + ( + plname, + pdir, + Mdot, + T, + SEDname, + fH, + zdict, + mu_conv, + mu_maxit, + overwrite, + verbose, + avoid_pwinds_mubar, + no_tidal, + ) + ) p.starmap(catch_errors_run_s, pars) p.close() p.join() - - -if __name__ == '__main__': +if __name__ == "__main__": class OneOrThreeAction(argparse.Action): """ Custom class for an argparse argument with exactly 1 or 3 values. """ + def __call__(self, parser, namespace, values, option_string=None): if len(values) not in (1, 3): parser.error("Exactly one or three values are required.") @@ -682,69 +986,254 @@ class AddDictAction(argparse.Action): """ Custom class to add an argparse argument to a dictionary. """ + def __call__(self, parser, namespace, values, option_string=None): - if not hasattr(namespace, self.dest) or getattr(namespace, self.dest) is None: + if ( + not hasattr(namespace, self.dest) + or getattr(namespace, self.dest) is None + ): setattr(namespace, self.dest, {}) for value in values: - key, val = value.split('=') + key, val = value.split("=") getattr(namespace, self.dest)[key] = float(val) - t0 = time.time() - parser = argparse.ArgumentParser(description="Creates 1D Parker profile(s) using the p_winds code and Cloudy.") - - parser.add_argument("-plname", required=True, help="planet name (must be in planets.txt)") - parser.add_argument("-pdir", required=True, help="directory where the profiles are saved. It is adviced to choose a name that " \ - "somehow represents the chosen parameters, e.g. 'fH_0.9' or 'z=10'. The path will be $SUNBATHER_PROJECT_PATH/parker_profiles/pdir/") - parser.add_argument("-Mdot", required=True, type=float, nargs='+', action=OneOrThreeAction, help="log10(mass-loss rate), or three values specifying a grid of " \ - "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.") - parser.add_argument("-T", required=True, type=int, nargs='+', action=OneOrThreeAction, help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.") - parser.add_argument("-SEDname", type=str, default='real', help="name of SED to use. Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]") - parser.add_argument("-overwrite", action='store_true', help="overwrite existing profile if passed [default=False]") + parser = argparse.ArgumentParser( + description="Creates 1D Parker profile(s) using the p_winds code and Cloudy." + ) + + parser.add_argument( + "-plname", required=True, help="planet name (must be in planets.txt)" + ) + parser.add_argument( + "-pdir", + required=True, + help="directory where the profiles are saved. It is adviced to choose a name that " + "somehow represents the chosen parameters, e.g. 'fH_0.9' or 'z=10'. 
The path will be $SUNBATHER_PROJECT_PATH/parker_profiles/pdir/", + ) + parser.add_argument( + "-Mdot", + required=True, + type=float, + nargs="+", + action=OneOrThreeAction, + help="log10(mass-loss rate), or three values specifying a grid of " + "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.", + ) + parser.add_argument( + "-T", + required=True, + type=int, + nargs="+", + action=OneOrThreeAction, + help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.", + ) + parser.add_argument( + "-SEDname", + type=str, + default="real", + help="name of SED to use. Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]", + ) + parser.add_argument( + "-overwrite", + action="store_true", + help="overwrite existing profile if passed [default=False]", + ) composition_group = parser.add_mutually_exclusive_group(required=True) - composition_group.add_argument("-fH", type=float, help="hydrogen fraction by number. Using this command results in running standalone p_winds without invoking Cloudy.") - composition_group.add_argument("-z", type=float, help="metallicity (=scale factor relative to solar for all elements except H and He). Using this " \ - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.") - parser.add_argument("-zelem", action = AddDictAction, nargs='+', default = {}, help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " \ - "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. Using this " \ - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.") - parser.add_argument("-cores", type=int, default=1, help="number of parallel runs [default=1]") - parser.add_argument("-mu_conv", type=float, default=0.01, help="relative change in mu allowed for convergence, when using p_winds/Cloudy iterative scheme [default=0.01]") - parser.add_argument("-mu_maxit", type=int, default=7, help="maximum number of iterations the p_winds/Cloudy iterative scheme is ran " \ - "if convergence is not reached [default =7]") - parser.add_argument("-verbose", action='store_true', help="print out mu-bar values of each iteration [default=False]") - parser.add_argument("-avoid_pwinds_mubar", action='store_true', help="avoid using the mu-bar value predicted by p-winds for the first iteration. Instead, " \ - "start with a mu_bar of a completely neutral atmosphere. Helps to avoid the p-winds 'solve_ivp' errors. You may need to " \ - "use a -mu_maxit higher than 7 when toggling this on. [default=False]") - parser.add_argument("-no_tidal", action='store_true', help="neglect the stellar tidal gravity term [default=False, i.e. tidal term included]") + composition_group.add_argument( + "-fH", + type=float, + help="hydrogen fraction by number. Using this command results in running standalone p_winds without invoking Cloudy.", + ) + composition_group.add_argument( + "-z", + type=float, + help="metallicity (=scale factor relative to solar for all elements except H and He). Using this " + "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + ) + parser.add_argument( + "-zelem", + action=AddDictAction, + nargs="+", + default={}, + help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " + "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. 
Using this " + "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + ) + parser.add_argument( + "-cores", type=int, default=1, help="number of parallel runs [default=1]" + ) + parser.add_argument( + "-mu_conv", + type=float, + default=0.01, + help="relative change in mu allowed for convergence, when using p_winds/Cloudy iterative scheme [default=0.01]", + ) + parser.add_argument( + "-mu_maxit", + type=int, + default=7, + help="maximum number of iterations the p_winds/Cloudy iterative scheme is ran " + "if convergence is not reached [default =7]", + ) + parser.add_argument( + "-verbose", + action="store_true", + help="print out mu-bar values of each iteration [default=False]", + ) + parser.add_argument( + "-avoid_pwinds_mubar", + action="store_true", + help="avoid using the mu-bar value predicted by p-winds for the first iteration. Instead, " + "start with a mu_bar of a completely neutral atmosphere. Helps to avoid the p-winds 'solve_ivp' errors. You may need to " + "use a -mu_maxit higher than 7 when toggling this on. [default=False]", + ) + parser.add_argument( + "-no_tidal", + action="store_true", + help="neglect the stellar tidal gravity term [default=False, i.e. tidal term included]", + ) args = parser.parse_args() if args.z != None: zdict = tools.get_zdict(z=args.z, zelem=args.zelem) - else: #if z==None we should not pass that to the tools.get_zdict function + else: # if z==None we should not pass that to the tools.get_zdict function zdict = tools.get_zdict(zelem=args.zelem) - if args.fH != None and (args.zelem != {} or args.mu_conv != 0.01 or args.mu_maxit != 7 or args.avoid_pwinds_mubar): - warnings.warn("The -zelem, -mu_conv -mu_maxit, and -avoid_pwinds_mubar commands only combine with -z, not with -fH, so I will ignore their input.") - - #set up the folder structure if it doesn't exist yet - if not os.path.isdir(tools.projectpath+'/parker_profiles/'): - os.mkdir(tools.projectpath+'/parker_profiles') - if not os.path.isdir(tools.projectpath+'/parker_profiles/'+args.plname+'/'): - os.mkdir(tools.projectpath+'/parker_profiles/'+args.plname) - if not os.path.isdir(tools.projectpath+'/parker_profiles/'+args.plname+'/'+args.pdir+'/'): - os.mkdir(tools.projectpath+'/parker_profiles/'+args.plname+'/'+args.pdir+'/') - if (args.fH == None) and (not os.path.isdir(tools.projectpath+'/parker_profiles/'+args.plname+'/'+args.pdir+'/temp/')): - os.mkdir(tools.projectpath+'/parker_profiles/'+args.plname+'/'+args.pdir+'/temp') - - if (len(args.T) == 1 and len(args.Mdot) == 1): #then we run a single model - run_s(args.plname, args.pdir, args.Mdot[0], args.T[0], args.SEDname, args.fH, zdict, args.mu_conv, args.mu_maxit, args.overwrite, args.verbose, args.avoid_pwinds_mubar, args.no_tidal) - elif (len(args.T) == 3 and len(args.Mdot) == 3): #then we run a grid over both parameters - run_g(args.plname, args.pdir, args.cores, args.Mdot[0], args.Mdot[1], args.Mdot[2], args.T[0], args.T[1], args.T[2], args.SEDname, args.fH, zdict, args.mu_conv, args.mu_maxit, args.overwrite, args.verbose, args.avoid_pwinds_mubar, args.no_tidal) - elif (len(args.T) == 3 and len(args.Mdot) == 1): #then we run a grid over only T - run_g(args.plname, args.pdir, args.cores, args.Mdot[0], args.Mdot[0], args.Mdot[0], args.T[0], args.T[1], args.T[2], args.SEDname, args.fH, zdict, args.mu_conv, args.mu_maxit, args.overwrite, args.verbose, args.avoid_pwinds_mubar, args.no_tidal) - elif (len(args.T) == 1 and len(args.Mdot) == 3): #then we run a grid over only Mdot - 
run_g(args.plname, args.pdir, args.cores, args.Mdot[0], args.Mdot[1], args.Mdot[2], args.T[0], args.T[0], args.T[0], args.SEDname, args.fH, zdict, args.mu_conv, args.mu_maxit, args.overwrite, args.verbose, args.avoid_pwinds_mubar, args.no_tidal) - - print("\nCalculations took", int(time.time()-t0) // 3600, "hours, ", (int(time.time()-t0)%3600) // 60, "minutes and ", (int(time.time()-t0)%60), "seconds.\n") + if args.fH != None and ( + args.zelem != {} + or args.mu_conv != 0.01 + or args.mu_maxit != 7 + or args.avoid_pwinds_mubar + ): + warnings.warn( + "The -zelem, -mu_conv -mu_maxit, and -avoid_pwinds_mubar commands only combine with -z, not with -fH, so I will ignore their input." + ) + + # set up the folder structure if it doesn't exist yet + if not os.path.isdir(tools.projectpath + "/parker_profiles/"): + os.mkdir(tools.projectpath + "/parker_profiles") + if not os.path.isdir(tools.projectpath + "/parker_profiles/" + args.plname + "/"): + os.mkdir(tools.projectpath + "/parker_profiles/" + args.plname) + if not os.path.isdir( + tools.projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" + ): + os.mkdir( + tools.projectpath + + "/parker_profiles/" + + args.plname + + "/" + + args.pdir + + "/" + ) + if (args.fH == None) and ( + not os.path.isdir( + tools.projectpath + + "/parker_profiles/" + + args.plname + + "/" + + args.pdir + + "/temp/" + ) + ): + os.mkdir( + tools.projectpath + + "/parker_profiles/" + + args.plname + + "/" + + args.pdir + + "/temp" + ) + + if len(args.T) == 1 and len(args.Mdot) == 1: # then we run a single model + run_s( + args.plname, + args.pdir, + args.Mdot[0], + args.T[0], + args.SEDname, + args.fH, + zdict, + args.mu_conv, + args.mu_maxit, + args.overwrite, + args.verbose, + args.avoid_pwinds_mubar, + args.no_tidal, + ) + elif ( + len(args.T) == 3 and len(args.Mdot) == 3 + ): # then we run a grid over both parameters + run_g( + args.plname, + args.pdir, + args.cores, + args.Mdot[0], + args.Mdot[1], + args.Mdot[2], + args.T[0], + args.T[1], + args.T[2], + args.SEDname, + args.fH, + zdict, + args.mu_conv, + args.mu_maxit, + args.overwrite, + args.verbose, + args.avoid_pwinds_mubar, + args.no_tidal, + ) + elif len(args.T) == 3 and len(args.Mdot) == 1: # then we run a grid over only T + run_g( + args.plname, + args.pdir, + args.cores, + args.Mdot[0], + args.Mdot[0], + args.Mdot[0], + args.T[0], + args.T[1], + args.T[2], + args.SEDname, + args.fH, + zdict, + args.mu_conv, + args.mu_maxit, + args.overwrite, + args.verbose, + args.avoid_pwinds_mubar, + args.no_tidal, + ) + elif len(args.T) == 1 and len(args.Mdot) == 3: # then we run a grid over only Mdot + run_g( + args.plname, + args.pdir, + args.cores, + args.Mdot[0], + args.Mdot[1], + args.Mdot[2], + args.T[0], + args.T[0], + args.T[0], + args.SEDname, + args.fH, + zdict, + args.mu_conv, + args.mu_maxit, + args.overwrite, + args.verbose, + args.avoid_pwinds_mubar, + args.no_tidal, + ) + + print( + "\nCalculations took", + int(time.time() - t0) // 3600, + "hours, ", + (int(time.time() - t0) % 3600) // 60, + "minutes and ", + (int(time.time() - t0) % 60), + "seconds.\n", + ) diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index 89799aa..af00927 100644 --- a/src/sunbather/solveT.py +++ b/src/sunbather/solveT.py @@ -1,5 +1,5 @@ #sunbather imports -import .tools +import sunbather.tools #other imports import pandas as pd @@ -44,7 +44,7 @@ def calc_expansion(r, rho, v, Te, mu): def calc_advection(r, rho, v, Te, mu): """ - Calcules advection heating/cooling (Linssen et al. 2024 Eq. 
3 first term). + Calculates advection heating/cooling (Linssen et al. 2024 Eq. 3 first term). Parameters ---------- diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index 10c6d69..d59e5a5 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -1,89 +1,161 @@ -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt import os import glob import re from shutil import copyfile +from fractions import Fraction +import warnings +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt from scipy.interpolate import interp1d from scipy.signal import savgol_filter import scipy.stats as sps from scipy.ndimage import gaussian_filter1d -from fractions import Fraction -import warnings -####################################### -########### GLOBAL CONSTANTS ########## -####################################### +# ###################################### +# ########## GLOBAL CONSTANTS ########## +# ###################################### -sunbatherpath = os.path.dirname(os.path.abspath(__file__)) #the absolute path where this code lives +sunbatherpath = os.path.dirname( + os.path.abspath(__file__) +) # the absolute path where this code lives try: - cloudypath = os.environ['CLOUDY_PATH'] #the path where the Cloudy installation is -except KeyError: - raise KeyError("The environment variable 'CLOUDY_PATH' is not set. " \ - "Please set this variable in your .bashrc/.zshrc file " \ - "to the path where the Cloudy installation is located. " \ - "Do not point it to the /source/ subfolder, but to the main folder.") + # the path where Cloudy is installed + cloudypath = os.environ["CLOUDY_PATH"] +except KeyError as exc: + cloudypath = f"{sunbatherpath}/cloudy/c23.01" + if not os.path.exists(f"{cloudypath}/source/cloudy.exe"): + raise KeyError( + "The environment variable 'CLOUDY_PATH' is not set. " + "Please set this variable in your .bashrc/.zshrc file " + "to the path where the Cloudy installation is located. " + "Do not point it to the /source/ subfolder, but to the main folder." + ) from exc try: - projectpath = os.environ['SUNBATHER_PROJECT_PATH'] #the path where you save your simulations and do analysis -except KeyError: - raise KeyError("The environment variable 'SUNBATHER_PROJECT_PATH' is not set. " \ - "Please set this variable in your .bashrc/.zshrc file " \ - "to the path where you want the sunbather models to be saved. " \ - "Make sure that the 'planets.txt' file is present in that folder.") + projectpath = os.environ[ + "SUNBATHER_PROJECT_PATH" + ] # the path where you save your simulations and do analysis +except KeyError as exc: + raise KeyError( + "The environment variable 'SUNBATHER_PROJECT_PATH' is not set. " + "Please set this variable in your .bashrc/.zshrc file " + "to the path where you want the sunbather models to be saved. " + "Make sure that the 'planets.txt' file is present in that folder." + ) from exc try: - #read planet parameters globally instead of in the Planets class (so we do it only once) - planets_file = pd.read_csv(projectpath+'/planets.txt', dtype={'name':str, 'full name':str, 'R [RJ]':np.float64, - 'Rstar [Rsun]':np.float64, 'a [AU]':np.float64, 'M [MJ]':np.float64, 'Mstar [Msun]':np.float64, - 'transit impact parameter':np.float64, 'SEDname':str}, comment='#') -except FileNotFoundError: - raise FileNotFoundError("The $SUNBATHER_PROJECT_PATH/planets.txt file cannot be found. " \ - "Please check if your $SUNBATHER_PROJECT_PATH actually exists on your machine. 
"\ - "Then, copy /sunbather/planets.txt to your project path.") - -#define constants: -c = 2.99792458e10 #cm/s -h = 4.135667696e-15 #eV s, used to plot wavelengths in keV units -mH = 1.674e-24 #g -k = 1.381e-16 #erg/K -AU = 1.49597871e13 #cm -pc = 3.08567758e18 #cm -RJ = 7.1492e9 #cm -RE = 6.371e8 #cm -Rsun = 69634000000 #cm -Msun = 1.9891e33 #g -MJ = 1.898e30 #g -ME = 5.9722e27 #g -G = 6.6743e-8 #cm3/g/s2 -Ldict = {'S':0, 'P':1, 'D':2, 'F':3, 'G':4, 'H':5, 'I':6, 'K':7, 'L':8, - 'M':9, 'N':10, 'O':11, 'Q':12, 'R':13, 'T':14} #atom number of states per L orbital - -element_names = {'H':'hydrogen', 'He':'helium', 'Li':'lithium', 'Be':'beryllium', 'B':'boron', 'C':'carbon', - 'N':'nitrogen', 'O':'oxygen', 'F':'fluorine', 'Ne':'neon', 'Na':'sodium', - 'Mg':'magnesium', 'Al':'aluminium', 'Si':'silicon', 'P':'phosphorus', - 'S':'sulphur', 'Cl':'chlorine', 'Ar':'argon', 'K':'potassium', 'Ca':'calcium', - 'Sc':'scandium', 'Ti':'titanium', 'V':'vanadium', 'Cr':'chromium', 'Mn':'manganese', - 'Fe':'iron', 'Co':'cobalt', 'Ni':'nickel', 'Cu':'copper', 'Zn':'zinc'} -element_symbols = dict((reversed(item) for item in element_names.items())) #reverse dictionary mapping e.g. 'hydrogen'->'H' - -#number of corresponding energy levels between Cloudy and NIST - read txt file header for more info -species_enlim = pd.read_csv(sunbatherpath+"/species_enlim.txt", index_col=0, header=1) + # read planet parameters globally instead of in the Planets class (so we do it only + # once) + planets_file = pd.read_csv( + projectpath + "/planets.txt", + dtype={ + "name": str, + "full name": str, + "R [RJ]": np.float64, + "Rstar [Rsun]": np.float64, + "a [AU]": np.float64, + "M [MJ]": np.float64, + "Mstar [Msun]": np.float64, + "transit impact parameter": np.float64, + "SEDname": str, + }, + comment="#", + ) +except FileNotFoundError as exc: + raise FileNotFoundError( + "The $SUNBATHER_PROJECT_PATH/planets.txt file cannot be found. " + "Please check if your $SUNBATHER_PROJECT_PATH actually exists on your machine. " + "Then, copy /sunbather/planets.txt to your project path." + ) from exc + +# define constants: +c = 2.99792458e10 # cm/s +h = 4.135667696e-15 # eV s, used to plot wavelengths in keV units +mH = 1.674e-24 # g +k = 1.381e-16 # erg/K +AU = 1.49597871e13 # cm +pc = 3.08567758e18 # cm +RJ = 7.1492e9 # cm +RE = 6.371e8 # cm +Rsun = 69634000000 # cm +Msun = 1.9891e33 # g +MJ = 1.898e30 # g +ME = 5.9722e27 # g +G = 6.6743e-8 # cm3/g/s2 +Ldict = { + "S": 0, + "P": 1, + "D": 2, + "F": 3, + "G": 4, + "H": 5, + "I": 6, + "K": 7, + "L": 8, + "M": 9, + "N": 10, + "O": 11, + "Q": 12, + "R": 13, + "T": 14, +} # atom number of states per L orbital + +element_names = { + "H": "hydrogen", + "He": "helium", + "Li": "lithium", + "Be": "beryllium", + "B": "boron", + "C": "carbon", + "N": "nitrogen", + "O": "oxygen", + "F": "fluorine", + "Ne": "neon", + "Na": "sodium", + "Mg": "magnesium", + "Al": "aluminium", + "Si": "silicon", + "P": "phosphorus", + "S": "sulphur", + "Cl": "chlorine", + "Ar": "argon", + "K": "potassium", + "Ca": "calcium", + "Sc": "scandium", + "Ti": "titanium", + "V": "vanadium", + "Cr": "chromium", + "Mn": "manganese", + "Fe": "iron", + "Co": "cobalt", + "Ni": "nickel", + "Cu": "copper", + "Zn": "zinc", +} +element_symbols = dict( + (reversed(item) for item in element_names.items()) +) # reverse dictionary mapping e.g. 
'hydrogen'->'H' + +# number of corresponding energy levels between Cloudy and NIST - read txt file +# header for more info +species_enlim = pd.read_csv(sunbatherpath + "/species_enlim.txt", index_col=0, header=1) + + +# ###################################### +# ########## CLOUDY SPECIES ########## +# ###################################### -####################################### -########### CLOUDY SPECIES ########## -####################################### - def get_specieslist(max_ion=6, exclude_elements=[]): """ - Returns a list of atomic and ionic species names. Default returns all species up to 6+ - ionization. Higher than 6+ ionization is rarely attained in an exoplanet atmosphere, - but it can occur in high XUV flux scenarios such as young planetary systems. - The species list only includes species for which the NIST database has any spectral - line coefficients, as there is little use saving other species as well. + Returns a list of atomic and ionic species names. Default returns all + species up to 6+ ionization. Higher than 6+ ionization is rarely attained + in an exoplanet atmosphere, but it can occur in high XUV flux scenarios + such as young planetary systems. The species list only includes species + for which the NIST database has any spectral line coefficients, as there is + little use saving other species as well. Parameters ---------- @@ -95,28 +167,32 @@ def get_specieslist(max_ion=6, exclude_elements=[]): Returns ------- specieslist : list - List of atomic and ionic species names in the string format expected by Cloudy. + List of atomic and ionic species names in the string format expected by + Cloudy. """ if max_ion > 12: - warnings.warn("tools.get_specieslist(): You have set max_ion > 12, but " \ - "sunbather is currently only able to process species up to 12+ ionized. " \ - "However, this should typically be enough, even when using a strong XUV flux.") - - if isinstance(exclude_elements, str): #turn into list with one element + warnings.warn( + "tools.get_specieslist(): You have set max_ion > 12, but " + "sunbather is currently only able to process species up to 12+ ionized. " + "However, this should typically be enough, even when using a strong " + "XUV flux." + ) + + if isinstance(exclude_elements, str): # turn into list with one element exclude_elements = [exclude_elements] - specieslist = species_enlim.index.tolist() #all species up to 12+ + specieslist = species_enlim.index.tolist() # all species up to 12+ for element in exclude_elements: - specieslist = [sp for sp in specieslist if sp.split('+')[0] != element] + specieslist = [sp for sp in specieslist if sp.split("+")[0] != element] for sp in specieslist[:]: - sp_split = sp.split('+') + sp_split = sp.split("+") if len(sp_split) == 1: deg_ion = 0 - elif sp_split[1] == '': + elif sp_split[1] == "": deg_ion = 1 else: deg_ion = int(sp_split[1]) @@ -143,19 +219,43 @@ def get_mass(species): Mass of the species in units of g. 
""" - atom = species.split('+')[0] - - mass_dict = {'H':1.6735575e-24, 'He':6.646477e-24, 'Li':1.15e-23, 'Be':1.4965082e-23, - 'B':1.795e-23, 'C':1.9945e-23, 'N':2.3259e-23, 'O':2.6567e-23, - 'F':3.1547e-23, 'Ne':3.35092e-23, 'Na':3.817541e-23, 'Mg':4.0359e-23, - 'Al':4.48038988e-23, 'Si':4.6636e-23, 'P':5.14331418e-23, 'S':5.324e-23, - 'Cl':5.887e-23, 'Ar':6.6335e-23, 'K':6.49243e-23, 'Ca':6.6551e-23, - 'Sc':7.4651042e-23, 'Ti':7.9485e-23, 'V':8.45904e-23, 'Cr':8.63416e-23, - 'Mn':9.1226768e-23, 'Fe':9.2733e-23, 'Co':9.786087e-23, 'Ni':9.74627e-23, - 'Cu':1.0552e-22, 'Zn':1.086e-22} #g + atom = species.split("+")[0] + + mass_dict = { + "H": 1.6735575e-24, + "He": 6.646477e-24, + "Li": 1.15e-23, + "Be": 1.4965082e-23, + "B": 1.795e-23, + "C": 1.9945e-23, + "N": 2.3259e-23, + "O": 2.6567e-23, + "F": 3.1547e-23, + "Ne": 3.35092e-23, + "Na": 3.817541e-23, + "Mg": 4.0359e-23, + "Al": 4.48038988e-23, + "Si": 4.6636e-23, + "P": 5.14331418e-23, + "S": 5.324e-23, + "Cl": 5.887e-23, + "Ar": 6.6335e-23, + "K": 6.49243e-23, + "Ca": 6.6551e-23, + "Sc": 7.4651042e-23, + "Ti": 7.9485e-23, + "V": 8.45904e-23, + "Cr": 8.63416e-23, + "Mn": 9.1226768e-23, + "Fe": 9.2733e-23, + "Co": 9.786087e-23, + "Ni": 9.74627e-23, + "Cu": 1.0552e-22, + "Zn": 1.086e-22, + } # g mass = mass_dict[atom] - + return mass @@ -163,10 +263,11 @@ def get_mass(species): ########### CLOUDY FILES ########## ####################################### + def process_continuum(filename, nonzero=False): """ - Rreads a .con file from the 'save continuum units angstrom' command. - It renames the columns and adds a wavelength column. + Reads a .con file from the 'save continuum units angstrom' command. + It renames the columns and adds a wavelength column. The flux units of the continuum are as follows: Take the SED in spectral flux density, so F(nu) instead of nu*F(nu), and find the total area by integration. Then multiply with the frequency, @@ -178,7 +279,8 @@ def process_continuum(filename, nonzero=False): filename : str Filename of a 'save continuum' Cloudy output file. nonzero : bool, optional - Whether to remove rows where the incident spectrum is 0 (i.e., not defined), by default False + Whether to remove rows where the incident spectrum is 0 (i.e., not + defined), by default False Returns ------- @@ -187,7 +289,7 @@ def process_continuum(filename, nonzero=False): """ con_df = pd.read_table(filename) - con_df.rename(columns={'#Cont nu':'wav', 'net trans':'nettrans'}, inplace=True) + con_df.rename(columns={"#Cont nu": "wav", "net trans": "nettrans"}, inplace=True) if nonzero: con_df = con_df[con_df.incident != 0] @@ -222,60 +324,82 @@ def process_heating(filename, Rp=None, altmax=None, cloudy_version="17"): If a Cloudy version was used that is not supported by sunbather. 
""" - #determine max number of columns (otherwise pd.read_table assumes it is the number of the first row) + # determine max number of columns (otherwise pd.read_table assumes it is the number + # of the first row) max_columns = 0 - with open(filename, 'r') as file: + with open(filename, "r") as file: for line in file: - num_columns = len(line.split('\t')) + num_columns = len(line.split("\t")) max_columns = max(max_columns, num_columns) - #set up the column names + # set up the column names if cloudy_version == "17": - fixed_column_names = ['depth', 'temp', 'htot', 'ctot'] + fixed_column_names = ["depth", "temp", "htot", "ctot"] elif cloudy_version == "23": - fixed_column_names = ['depth', 'temp', 'htot', 'ctot', 'adv'] + fixed_column_names = ["depth", "temp", "htot", "ctot", "adv"] else: raise TypeError("Only C17.02 and C23.01 are currently supported.") num_additional_columns = (max_columns - 4) // 2 - additional_column_names = [f'htype{i}' for i in range(1, num_additional_columns + 1) for _ in range(2)] - additional_column_names[1::2] = [f'hfrac{i}' for i in range(1, num_additional_columns + 1)] + additional_column_names = [ + f"htype{i}" for i in range(1, num_additional_columns + 1) for _ in range(2) + ] + additional_column_names[1::2] = [ + f"hfrac{i}" for i in range(1, num_additional_columns + 1) + ] all_column_names = fixed_column_names + additional_column_names - heat = pd.read_table(filename, delimiter='\t', skiprows=1, header=None, names=all_column_names) - - if heat['depth'].eq("#>>>> Ionization not converged.").any(): - warnings.warn(f"The simulation you are reading in exited OK but does contain ionization convergence failures: {filename[:-5]}") - heat = heat[heat['depth'] != "#>>>> Ionization not converged."] #remove those extra lines from the heat DataFrame - - #remove the "second rows", which sometimes are in the .heat file and do not give the heating at a given depth - if type(heat.depth.iloc[0]) == str: #in some cases there are no second rows - heat = heat[heat.depth.map(len)<12] #delete second rows - - heat.depth = pd.to_numeric(heat.depth) #str to float - heat.reset_index(drop=True, inplace=True) #reindex so that it has same index as e.g. .ovr - - if Rp != None and altmax != None: #add altitude scale - heat['alt'] = altmax * Rp - heat.depth + heat = pd.read_table( + filename, delimiter="\t", skiprows=1, header=None, names=all_column_names + ) + + if heat["depth"].eq("#>>>> Ionization not converged.").any(): + warnings.warn( + f"The simulation you are reading in exited OK but does contain ionization " + f"convergence failures: {filename[:-5]}" + ) + heat = heat[ + heat["depth"] != "#>>>> Ionization not converged." + ] # remove those extra lines from the heat DataFrame + + # remove the "second rows", which sometimes are in the .heat file and do + # not give the heating at a given depth + if isinstance(heat.depth.iloc[0], str): # in some cases there are no second rows + heat = heat[heat.depth.map(len) < 12] # delete second rows + + heat.depth = pd.to_numeric(heat.depth) # str to float + heat.reset_index( + drop=True, inplace=True + ) # reindex so that it has same index as e.g. 
.ovr + + if Rp is not None and altmax is not None: # add altitude scale + heat["alt"] = altmax * Rp - heat.depth agents = [] for column in heat.columns: - if column.startswith('htype'): + if column.startswith("htype"): agents.extend(heat[column].unique()) - agents = list(set(agents)) #all unique heating agents that appear somewhere in the .heat file + agents = list( + set(agents) + ) # all unique heating agents that appear somewhere in the .heat file for agent in agents: - heat[agent] = np.nan #add 'empty' column for each agent + heat[agent] = np.nan # add 'empty' column for each agent - #now do a (probably sub-optimal) for-loop over the whole df to put all hfracs in the corresponding column - htypes = [f'htype{i+1}' for i in range(num_additional_columns)] - hfracs = [f'hfrac{i+1}' for i in range(num_additional_columns)] + # now do a (probably sub-optimal) for-loop over the whole df to put all hfracs in + # the corresponding column + htypes = [f"htype{i+1}" for i in range(num_additional_columns)] + hfracs = [f"hfrac{i+1}" for i in range(num_additional_columns)] for htype, hfrac in zip(htypes, hfracs): for index, agent in heat[htype].items(): rate = heat.loc[index, hfrac] heat.loc[index, agent] = rate - if np.nan in heat.columns: #sometimes columns are partially missing, resulting in columns called nan + if ( + np.nan in heat.columns + ): # sometimes columns are partially missing, resulting in columns called nan heat.drop(columns=[np.nan], inplace=True) - heat['sumfrac'] = heat.loc[:,[col for col in heat.columns if 'hfrac' in col]].sum(axis=1) + heat["sumfrac"] = heat.loc[:, [col for col in heat.columns if "hfrac" in col]].sum( + axis=1 + ) return heat @@ -308,57 +432,73 @@ def process_cooling(filename, Rp=None, altmax=None, cloudy_version="17"): If a Cloudy version was used that is not supported by sunbather. 
""" - #determine max number of columns (otherwise pd.read_table assumes it is the number of the first row) + # determine max number of columns (otherwise pd.read_table assumes it is + # the number of the first row) max_columns = 0 - with open(filename, 'r') as file: + with open(filename, "r") as file: for line in file: - num_columns = len(line.split('\t')) + num_columns = len(line.split("\t")) max_columns = max(max_columns, num_columns) - #set up the column names + # set up the column names if cloudy_version == "17": - fixed_column_names = ['depth', 'temp', 'htot', 'ctot'] + fixed_column_names = ["depth", "temp", "htot", "ctot"] elif cloudy_version == "23": - fixed_column_names = ['depth', 'temp', 'htot', 'ctot', 'adv'] + fixed_column_names = ["depth", "temp", "htot", "ctot", "adv"] else: raise Exception("Only C17.02 and C23.01 are currently supported.") num_additional_columns = (max_columns - 4) // 2 - additional_column_names = [f'ctype{i}' for i in range(1, num_additional_columns + 1) for _ in range(2)] - additional_column_names[1::2] = [f'cfrac{i}' for i in range(1, num_additional_columns + 1)] + additional_column_names = [ + f"ctype{i}" for i in range(1, num_additional_columns + 1) for _ in range(2) + ] + additional_column_names[1::2] = [ + f"cfrac{i}" for i in range(1, num_additional_columns + 1) + ] all_column_names = fixed_column_names + additional_column_names - cool = pd.read_table(filename, delimiter='\t', skiprows=1, header=None, names=all_column_names) - - if cool['depth'].eq("#>>>> Ionization not converged.").any(): - warnings.warn(f"The simulation you are reading in exited OK but does contain ionization convergence failures: {filename[:-5]}") - #remove those extra lines from the cool DataFrame - cool = cool[cool['depth'] != "#>>>> Ionization not converged."] - cool['depth'] = cool['depth'].astype(float) - cool = cool.reset_index(drop=True) #so it matches other dfs like .ovr - - - if Rp != None and altmax != None: #add altitude scale - cool['alt'] = altmax * Rp - cool.depth + cool = pd.read_table( + filename, delimiter="\t", skiprows=1, header=None, names=all_column_names + ) + + if cool["depth"].eq("#>>>> Ionization not converged.").any(): + warnings.warn( + f"The simulation you are reading in exited OK but does contain ionization " + f"convergence failures: {filename[:-5]}" + ) + # remove those extra lines from the cool DataFrame + cool = cool[cool["depth"] != "#>>>> Ionization not converged."] + cool["depth"] = cool["depth"].astype(float) + cool = cool.reset_index(drop=True) # so it matches other dfs like .ovr + + if Rp is not None and altmax is not None: # add altitude scale + cool["alt"] = altmax * Rp - cool.depth agents = [] for column in cool.columns: - if column.startswith('ctype'): + if column.startswith("ctype"): agents.extend(cool[column].unique()) - agents = list(set(agents)) #all unique cooling agents that appear somewhere in the .cool file + agents = list( + set(agents) + ) # all unique cooling agents that appear somewhere in the .cool file for agent in agents: - cool[agent] = np.nan #add 'empty' column for each agent + cool[agent] = np.nan # add 'empty' column for each agent - #now do a (probably sub-optimal) for-loop over the whole df to put all cfracs in the corresponding column - ctypes = [f'ctype{i+1}' for i in range(num_additional_columns)] - cfracs = [f'cfrac{i+1}' for i in range(num_additional_columns)] + # now do a (probably sub-optimal) for-loop over the whole df to put all cfracs in + # the corresponding column + ctypes = [f"ctype{i+1}" for i in 
range(num_additional_columns)] + cfracs = [f"cfrac{i+1}" for i in range(num_additional_columns)] for ctype, cfrac in zip(ctypes, cfracs): for index, agent in cool[ctype].items(): rate = cool.loc[index, cfrac] cool.loc[index, agent] = rate - if np.nan in cool.columns: #sometimes columns are partially missing, resulting in columns called nan + if ( + np.nan in cool.columns + ): # sometimes columns are partially missing, resulting in columns called nan cool.drop(columns=[np.nan], inplace=True) - cool['sumfrac'] = cool.loc[:,[col for col in cool.columns if 'cfrac' in col]].sum(axis=1) + cool["sumfrac"] = cool.loc[:, [col for col in cool.columns if "cfrac" in col]].sum( + axis=1 + ) return cool @@ -374,7 +514,7 @@ def process_coolingH2(filename, Rp=None, altmax=None): heating dissoc. electronic exited states, cooling collisions in X (neg = heating), "HeatDexc"=net heat, "-HeatDexc/abundance"=net cool per particle - + If Rp and altmax are given, it adds an altitude/radius scale. Parameters @@ -392,11 +532,24 @@ def process_coolingH2(filename, Rp=None, altmax=None): Parsed output of the 'save H2 cooling' Cloudy command. """ - coolH2 = pd.read_table(filename, names=['depth', 'Te', 'ctot', 'desTH85', - 'desbigH2', 'phdisheat', 'eedisheat', 'collcool', - 'netheat', 'netcoolpp'], header=1) - if Rp != None and altmax != None: - coolH2['alt'] = altmax*Rp - coolH2['depth'] + coolH2 = pd.read_table( + filename, + names=[ + "depth", + "Te", + "ctot", + "desTH85", + "desbigH2", + "phdisheat", + "eedisheat", + "collcool", + "netheat", + "netcoolpp", + ], + header=1, + ) + if Rp is not None and altmax is not None: + coolH2["alt"] = altmax * Rp - coolH2["depth"] return coolH2 @@ -417,8 +570,8 @@ def process_overview(filename, Rp=None, altmax=None, abundances=None): altmax : numeric, optional Maximum altitude of the simulation in units of planet radius, by default None abundances : dict, optional - Dictionary with the abudance of each element, expressed as a fraction of the total. - Can be easily created with get_abundances(). By default None, which + Dictionary with the abudance of each element, expressed as a fraction of the + total. Can be easily created with get_abundances(). By default None, which results in solar composition. 
Returns @@ -428,14 +581,21 @@ def process_overview(filename, Rp=None, altmax=None, abundances=None): """ ovr = pd.read_table(filename) - ovr.rename(columns={'#depth':'depth'}, inplace=True) - ovr['rho'] = hden_to_rho(ovr.hden, abundances=abundances) #Hdens to total dens - if Rp != None and altmax != None: - ovr['alt'] = altmax * Rp - ovr['depth'] - ovr['mu'] = calc_mu(ovr.rho, ovr.eden, abundances=abundances) - - if (ovr['2H_2/H'].max() > 0.1) or (ovr['CO/C'].max() > 0.1) or (ovr['H2O/O'].max() > 0.1): - warnings.warn(f"Molecules are significant, the calculated mean particle mass could be inaccurate: {filename}") + ovr.rename(columns={"#depth": "depth"}, inplace=True) + ovr["rho"] = hden_to_rho(ovr.hden, abundances=abundances) # Hdens to total dens + if Rp is not None and altmax is not None: + ovr["alt"] = altmax * Rp - ovr["depth"] + ovr["mu"] = calc_mu(ovr.rho, ovr.eden, abundances=abundances) + + if ( + (ovr["2H_2/H"].max() > 0.1) + or (ovr["CO/C"].max() > 0.1) + or (ovr["H2O/O"].max() > 0.1) + ): + warnings.warn( + f"Molecules are significant, the calculated mean particle mass could be " + f"inaccurate: {filename}" + ) return ovr @@ -461,10 +621,10 @@ def process_densities(filename, Rp=None, altmax=None): """ den = pd.read_table(filename) - den.rename(columns={'#depth densities':'depth'}, inplace=True) + den.rename(columns={"#depth densities": "depth"}, inplace=True) - if Rp != None and altmax != None: - den['alt'] = altmax*Rp - den['depth'] + if Rp is not None and altmax is not None: + den["alt"] = altmax * Rp - den["depth"] return den @@ -488,7 +648,7 @@ def process_energies(filename, rewrite=True, cloudy_version="17"): rewrite : bool, optional Whether to rewrite the file to only keeping only the first row. Normally, the energies of each energy level are stored per depth cell of the simulation, - but they should be the same at each depth. Retaining only the values of the + but they should be the same at each depth. Retaining only the values of the first row in this way helps to compress file size. By default True. cloudy_version : str, optional Major Cloudy release version, by default "17" @@ -504,62 +664,94 @@ def process_energies(filename, rewrite=True, cloudy_version="17"): If the energy values are not the same at each depth. """ - en = pd.read_table(filename, float_precision='round_trip') #use round_trip to prevent exp numerical errors - - if en.columns.values[0][0] == '#': #condition checks whether it has already been rewritten, if not, we do all following stuff: - - for col in range(len(en.columns)): #check if all rows are the same - if len(en.iloc[:,col].unique()) != 1: - raise ValueError("In reading .en file, found a column with not identical values!" 
- +" filename:", filename, "col:", col, "colname:", en.columns[col], "unique values:", - en.iloc[:,col].unique()) - - en.rename(columns={en.columns.values[0] : en.columns.values[0][10:]}, inplace=True) #rename the column - - if rewrite: #save with only first row to save file size - en.iloc[[0],:].to_csv(filename, sep='\t', index=False, float_format='%.5e') - - en_df = pd.DataFrame(index = en.columns.values) - en_df['species'] = [k.split('[')[0] for k in en_df.index.values] #we want to match 'He12' to species='He', for example - en_df['energy'] = en.iloc[0,:].values - en_df['configuration'] = "" - en_df['term'] = "" - en_df['J'] = "" - - - #the & set action takes the intersection of all unique species of the .en file, and those known with NIST levels + en = pd.read_table( + filename, float_precision="round_trip" + ) # use round_trip to prevent exp numerical errors + + if ( + en.columns.values[0][0] == "#" + ): # condition checks whether it has already been rewritten, if not, we do all following stuff: + + for col in range(len(en.columns)): # check if all rows are the same + if len(en.iloc[:, col].unique()) != 1: + raise ValueError( + "In reading .en file, found a column with not identical values!" + + " filename:", + filename, + "col:", + col, + "colname:", + en.columns[col], + "unique values:", + en.iloc[:, col].unique(), + ) + + en.rename( + columns={en.columns.values[0]: en.columns.values[0][10:]}, inplace=True + ) # rename the column + + if rewrite: # save with only first row to save file size + en.iloc[[0], :].to_csv(filename, sep="\t", index=False, float_format="%.5e") + + en_df = pd.DataFrame(index=en.columns.values) + en_df["species"] = [ + k.split("[")[0] for k in en_df.index.values + ] # we want to match 'He12' to species='He', for example + en_df["energy"] = en.iloc[0, :].values + en_df["configuration"] = "" + en_df["term"] = "" + en_df["J"] = "" + + # the & set action takes the intersection of all unique species of the .en file, and those known with NIST levels unique_species = list(set(en_df.species.values) & set(species_enlim.index.tolist())) for species in unique_species: - species_levels = pd.read_table(sunbatherpath+'/RT_tables/'+species+'_levels_processed.txt') #get the NIST levels - species_energies = en_df[en_df.species == species].energy #get Cloudy's energies - - #tolerance of difference between Cloudy's and NISTs energy levels. They usually differ at the decimal level so we need some tolerance. + species_levels = pd.read_table( + sunbatherpath + "/RT_tables/" + species + "_levels_processed.txt" + ) # get the NIST levels + species_energies = en_df[ + en_df.species == species + ].energy # get Cloudy's energies + + # tolerance of difference between Cloudy's and NISTs energy levels. They usually differ at the decimal level so we need some tolerance. atol = species_enlim.loc[species, f"atol_C{cloudy_version}"] - #start by assuming we can match this many energy levels - n_matching = species_enlim.loc[species, f"idx_C{cloudy_version}"] + # start by assuming we can match this many energy levels + n_matching = species_enlim.loc[species, f"idx_C{cloudy_version}"] for n in range(n_matching): - if not np.abs(species_energies.iloc[n] - species_levels.energy.iloc[n]) < atol: - warnings.warn(f"In {filename} while getting atomic states for species {species}, I expected to be able to match the first {n_matching} " + \ - f"energy levels between Cloudy and NIST to a precision of {atol} but I have an energy mismatch at energy level {n+1}. 
" + \ - f"This should not introduce bugs, as I will now only parse the first {n} levels.") - - #for debugging, you can print the energy levels of Cloudy and NIST: - #print("\nCloudy, NIST, Match?") - #for i in range(n_matching): + if ( + not np.abs(species_energies.iloc[n] - species_levels.energy.iloc[n]) + < atol + ): + warnings.warn( + f"In {filename} while getting atomic states for species {species}, I expected to be able to match the first {n_matching} " + f"energy levels between Cloudy and NIST to a precision of {atol} but I have an energy mismatch at energy level {n+1}. " + f"This should not introduce bugs, as I will now only parse the first {n} levels." + ) + + # for debugging, you can print the energy levels of Cloudy and NIST: + # print("\nCloudy, NIST, Match?") + # for i in range(n_matching): # print(species_energies.iloc[i], species_levels.energy.iloc[i], np.isclose(species_energies.iloc[:n_matching], species_levels.energy.iloc[:n_matching], rtol=0.0, atol=atol)[i]) - n_matching = n #reset n_matching to how many actually match + n_matching = n # reset n_matching to how many actually match break - #Now assign the first n_matching columns to their expected values as given by the NIST species_levels DataFrame - first_iloc = np.where(en_df.species == species)[0][0] #iloc at which the species (e.g. He or Ca+3) starts. - en_df.iloc[first_iloc:first_iloc+n_matching, en_df.columns.get_loc('configuration')] = species_levels.configuration.iloc[:n_matching].values - en_df.iloc[first_iloc:first_iloc+n_matching, en_df.columns.get_loc('term')] = species_levels.term.iloc[:n_matching].values - en_df.iloc[first_iloc:first_iloc+n_matching, en_df.columns.get_loc('J')] = species_levels.J.iloc[:n_matching].values - + # Now assign the first n_matching columns to their expected values as given by the NIST species_levels DataFrame + first_iloc = np.where(en_df.species == species)[0][ + 0 + ] # iloc at which the species (e.g. He or Ca+3) starts. + en_df.iloc[ + first_iloc : first_iloc + n_matching, en_df.columns.get_loc("configuration") + ] = species_levels.configuration.iloc[:n_matching].values + en_df.iloc[ + first_iloc : first_iloc + n_matching, en_df.columns.get_loc("term") + ] = species_levels.term.iloc[:n_matching].values + en_df.iloc[first_iloc : first_iloc + n_matching, en_df.columns.get_loc("J")] = ( + species_levels.J.iloc[:n_matching].values + ) + return en_df @@ -570,8 +762,8 @@ def find_line_lowerstate_in_en_df(species, lineinfo, en_df, verbose=False): and we are looking for the metastable helium line, it will return 'He[2]', meaning the 'He[2]' column of the .den file contains the number densities of the metastable helium atom. - - Additionally, it calculates a multiplication factor <1 for the number + + Additionally, it calculates a multiplication factor <1 for the number density of this energy level. This is for spectral lines that originate from a specific J (total angular momentum quantum number) configuration, but Cloudy does not save the densities of this specific J-value, only of the parent LS state. @@ -587,7 +779,7 @@ def find_line_lowerstate_in_en_df(species, lineinfo, en_df, verbose=False): Parameters ---------- species : str - Name of the atomic or ionic species in the string format expected by Cloudy. + Name of the atomic or ionic species in the string format expected by Cloudy. lineinfo : pandas.DataFrame One row containing the spectral line coefficients from NIST, from the RT.read_NIST_lines() function. 
@@ -607,70 +799,125 @@ def find_line_lowerstate_in_en_df(species, lineinfo, en_df, verbose=False): the number density of the specific J-state that the spectral line originates from. """ - en_df = en_df[en_df.species == species] #keep only the part for this species to not mix up the energy levels of different ones - match, lineweight = None, None #start with the assumption that we cannot match it - - #check if the line originates from a J sublevel, a term, or only principal quantum number - if str(lineinfo['term_i']) != 'nan' and str(lineinfo['J_i']) != 'nan': - linetype = 'J' #then now match with configuration and term: - matchedrow = en_df[(en_df.configuration == lineinfo.conf_i) & (en_df.term == lineinfo.term_i) & (en_df.J == lineinfo.J_i)] + en_df = en_df[ + en_df.species == species + ] # keep only the part for this species to not mix up the energy levels of different ones + match, lineweight = None, None # start with the assumption that we cannot match it + + # check if the line originates from a J sublevel, a term, or only principal quantum number + if str(lineinfo["term_i"]) != "nan" and str(lineinfo["J_i"]) != "nan": + linetype = "J" # then now match with configuration and term: + matchedrow = en_df[ + (en_df.configuration == lineinfo.conf_i) + & (en_df.term == lineinfo.term_i) + & (en_df.J == lineinfo.J_i) + ] assert len(matchedrow) <= 1 if len(matchedrow) == 1: match = matchedrow.index.item() - lineweight = 1. #since the Cloudy column is for this J specifically, we don't need to downweigh the density + # since the Cloudy column is for this J specifically, we don't need + # to downweigh the density + lineweight = 1.0 elif len(matchedrow) == 0: - #the exact J was not found in Cloudy's levels, but maybe the term is there in Cloudy, just not resolved. - matchedtermrow = en_df[(en_df.configuration == lineinfo.conf_i) & (en_df.term == lineinfo.term_i)] + # the exact J was not found in Cloudy's levels, but maybe the term + # is there in Cloudy, just not resolved. + matchedtermrow = en_df[ + (en_df.configuration == lineinfo.conf_i) + & (en_df.term == lineinfo.term_i) + ] if len(matchedtermrow) == 1: - if str(matchedtermrow.J.values[0]) == 'nan': #this can only happen if the Cloudy level is a term with no J resolved. - #then we use statistical weights to guess how many of the atoms in this term state would be in the J state of the level and use this as lineweight - L = Ldict[''.join(x for x in matchedtermrow.loc[:,'term'].item() if x.isalpha())[-1]] #last letter in term string - S = (float(re.search(r'\d+', matchedtermrow.loc[:,'term'].item()).group())-1.)/2. #first number in term string - J_states = np.arange(np.abs(L-S), np.abs(L+S)+1, 1.0) - J_statweights = 2*J_states + 1 + if ( + str(matchedtermrow.J.values[0]) == "nan" + ): + # This can only happen if the Cloudy level is a term with + # no J resolved. 
Then we use statistical weights to guess + # how many of the atoms in this term state would be in the + # J state of the level and use this as lineweight + L = Ldict[ + "".join( + x + for x in matchedtermrow.loc[:, "term"].item() + if x.isalpha() + )[-1] + ] # last letter in term string + S = ( + float( + re.search( + r"\d+", matchedtermrow.loc[:, "term"].item() + ).group() + ) + - 1.0 + ) / 2.0 # first number in term string + J_states = np.arange(np.abs(L - S), np.abs(L + S) + 1, 1.0) + J_statweights = 2 * J_states + 1 J_probweights = J_statweights / np.sum(J_statweights) - lineweight = J_probweights[J_states == Fraction(lineinfo.loc['J_i'])][0] + lineweight = J_probweights[ + J_states == Fraction(lineinfo.loc["J_i"]) + ][0] match = matchedtermrow.index.item() else: - verbose_print(f"One J level of the term is resolved, but not the one of this line: {species} "+ lineinfo.conf_i, verbose=verbose) + verbose_print( + f"One J level of the term is resolved, but not the one of this line: {species} " + + lineinfo.conf_i, + verbose=verbose, + ) else: - verbose_print(f"Multiple J levels of the term are resolved, but not the one of this line: {species} "+ lineinfo.conf_i, verbose=verbose) + verbose_print( + f"Multiple J levels of the term are resolved, but not the one of this line: {species} " + + lineinfo.conf_i, + verbose=verbose, + ) - elif str(lineinfo['term_i']) != 'nan': + elif str(lineinfo["term_i"]) != "nan": linetype = "LS" - verbose_print("Currently not able to do lines originating from LS state without J number.", verbose=verbose) - verbose_print(f"Lower state configuration: {species} "+ lineinfo.conf_i, verbose=verbose) + verbose_print( + "Currently not able to do lines originating from LS state without J number.", + verbose=verbose, + ) + verbose_print( + f"Lower state configuration: {species} " + lineinfo.conf_i, verbose=verbose + ) else: linetype = "n" - verbose_print("Currently not able to do lines originating from n state without term. This is not a problem "+ - 'if this line is also in the NIST database with its different term components, such as for e.g. '+ - "H n=2, but only if they aren't such as for H n>6, or if they go to an upper level n>6 from any given level.", verbose=verbose) - verbose_print(f"Lower state configuration: {species} "+ lineinfo.conf_i, verbose=verbose) + verbose_print( + "Currently not able to do lines originating from n state without term. " + "This is not a problem if this line is also in the NIST database with its " + "different term components, such as for e.g. H n=2, but only if they " + "aren't such as for H n>6, or if they go to an upper level n>6 from any " + "given level.", + verbose=verbose, + ) + verbose_print( + f"Lower state configuration: {species} " + lineinfo.conf_i, verbose=verbose + ) - ''' + """ DEVELOPERS NOTE: - If we do decide to make this functionality, for example by summing the densities of all sublevels of a - particular n, we also need to tweak the cleaning of hydrogen lines algorithm. Right now, we remove - double lines only for the upper state, so e.g. for Ly alpha, we remove the separate 2p 3/2 and 2p 1/2 etc. component - and leave only the one line with upper state n=2. - However, we don't do this for lower states, which is not a problem yet because the lower n state lines are ignored as - stated above. However if we make the functionality, we should also remove double lines in the lower level. 
- ''' + If we do decide to make this functionality, for example by summing the densities + of all sublevels of a particular n, we also need to tweak the cleaning of + hydrogen lines algorithm. Right now, we remove double lines only for the upper + state, so e.g. for Ly alpha, we remove the separate 2p 3/2 and 2p 1/2 etc. + component and leave only the one line with upper state n=2. However, we don't + do this for lower states, which is not a problem yet because the lower n state + lines are ignored as stated above. However if we make the functionality, we + should also remove double lines in the lower level. + """ return match, lineweight -####################################### -########### MISCELLANEOUS ########### -####################################### +# ###################################### +# ########## MISCELLANEOUS ########### +# ###################################### + def verbose_print(message, verbose=False): """ @@ -683,7 +930,7 @@ def verbose_print(message, verbose=False): verbose : bool, optional Whether to print the provided message, by default False """ - + if verbose: print(message) @@ -694,7 +941,7 @@ def get_SED_norm_1AU(SEDname): (i.e., nu*F_nu or lambda*F_lambda) and Ryd units. These are needed because Cloudy does not preserve the normalization of user-specified SEDs. To do a simulation of an atmosphere, the normalization - of the SED must afterwards still be scaled to the planet distance. + of the SED must afterwards still be scaled to the planet distance. Then, the log10 of nuFnu can be passed to Cloudy using the "nuFnu(nu) = ... at ... Ryd" command. This function requires that the units of the SED are Å and @@ -713,16 +960,18 @@ def get_SED_norm_1AU(SEDname): Energy where the monochromatic flux of the nuFnu output variable is specified. """ - with open(cloudypath+'/data/SED/'+SEDname, 'r') as f: + with open(cloudypath + "/data/SED/" + SEDname, "r") as f: for line in f: - if not line.startswith('#'): #skip through the comments at the top - assert ('angstrom' in line) or ('Angstrom' in line) #verify the units - assert 'nuFnu' in line #verify the units + if not line.startswith("#"): # skip through the comments at the top + assert ("angstrom" in line) or ("Angstrom" in line) # verify the units + assert "nuFnu" in line # verify the units break - data = np.genfromtxt(f, skip_header=1) #skip first line, which has extra words specifying the units + data = np.genfromtxt( + f, skip_header=1 + ) # skip first line, which has extra words specifying the units - ang, nuFnu = data[-2,0], data[-2,1] #read out intensity somewhere - Ryd = 911.560270107676 / ang #convert wavelength in Å to energy in Ryd + ang, nuFnu = data[-2, 0], data[-2, 1] # read out intensity somewhere + Ryd = 911.560270107676 / ang # convert wavelength in Å to energy in Ryd return nuFnu, Ryd @@ -753,17 +1002,31 @@ def speciesstring(specieslist, selected_levels=False, cloudy_version="17"): One long string containing the species and the energy level numbers. 
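+
+        A short illustration (the species names here are arbitrary examples):
+        for specieslist=["He", "C+2"] and selected_levels=False this returns
+        '"He[:]"\n"C+2[:]"', i.e. one double-quoted species per line with the
+        requested level range in square brackets.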
""" - if not selected_levels: #so just all levels available in cloudy - speciesstr = '"'+specieslist[0]+'[:]"' + if not selected_levels: # so just all levels available in cloudy + speciesstr = '"' + specieslist[0] + '[:]"' if len(specieslist) > 1: for species in specieslist[1:]: - speciesstr += '\n"'+species+'[:]"' - - elif selected_levels: #then we read out the max level that we expect to match the energy of - speciesstr = '"'+specieslist[0]+'[:'+str(species_enlim.loc[specieslist[0], f"idx_C{cloudy_version}"])+']"' + speciesstr += '\n"' + species + '[:]"' + + elif ( + selected_levels + ): # then we read out the max level that we expect to match the energy of + speciesstr = ( + '"' + + specieslist[0] + + "[:" + + str(species_enlim.loc[specieslist[0], f"idx_C{cloudy_version}"]) + + ']"' + ) if len(specieslist) > 1: for species in specieslist[1:]: - speciesstr += '\n"'+species+'[:'+str(species_enlim.loc[species, f"idx_C{cloudy_version}"])+']"' + speciesstr += ( + '\n"' + + species + + "[:" + + str(species_enlim.loc[species, f"idx_C{cloudy_version}"]) + + ']"' + ) return speciesstr @@ -775,7 +1038,8 @@ def read_parker(plname, T, Mdot, pdir, filename=None): Parameters ---------- plname : str - Planet name (must have parameters stored in $SUNBATHER_PROJECT_PATH/planets.txt). + Planet name (must have parameters stored in + $SUNBATHER_PROJECT_PATH/planets.txt). T : str or numeric Temperature in units of K. Mdot : str or numeric @@ -783,26 +1047,43 @@ def read_parker(plname, T, Mdot, pdir, filename=None): pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/*plname*/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for example + profiles with different assumptions such as stellar SED/semi-major + axis/composition. filename : str, optional If None, the profile as specified by plname, T, Mdot, pdir is read. If not None, - filename must specfy the full path + filename of the isothermal Parker wind profile - to read in. By default None. + filename must specify the full path + filename of the isothermal Parker wind + profile to read in. By default None. Returns ------- pprof : pandas.DataFrame - Radial density, velocity and mean particle mass profiles of the isothermal Parker wind profile. + Radial density, velocity and mean particle mass profiles of the isothermal + Parker wind profile. """ - if filename == None: - Mdot = "%.3f" % float(Mdot) + if filename is None: + Mdot = f"{float(Mdot):.3f}" T = str(int(T)) - filename = projectpath+'/parker_profiles/'+plname+'/'+pdir+'/pprof_'+plname+'_T='+T+'_M='+Mdot+'.txt' - - pprof = pd.read_table(filename, names=['alt', 'rho', 'v', 'mu'], dtype=np.float64, comment='#') - pprof['drhodr'] = np.gradient(pprof['rho'], pprof['alt']) + filename = ( + projectpath + + "/parker_profiles/" + + plname + + "/" + + pdir + + "/pprof_" + + plname + + "_T=" + + T + + "_M=" + + Mdot + + ".txt" + ) + + pprof = pd.read_table( + filename, names=["alt", "rho", "v", "mu"], dtype=np.float64, comment="#" + ) + pprof["drhodr"] = np.gradient(pprof["rho"], pprof["alt"]) return pprof @@ -813,7 +1094,7 @@ def calc_mu(rho, ne, abundances=None, mass=False): but neglecting molecules (and the negligible mass contributed by electrons). 
Based on formula: mu = sum(ni*mi) / (sum(ni) + ne) - where ni and mi are the number density and mass of element i, and + where ni and mi are the number density and mass of element i, and ne is the electron number density. Use ni = ntot * fi and ntot = rho / sum(fi*mi) where ntot is the total number density, fi the abundance of element i @@ -828,11 +1109,12 @@ def calc_mu(rho, ne, abundances=None, mass=False): ne : array-like or numeric Electron number density in units of cm-3. abundances : dict, optional - Dictionary with the abundance of each element, expressed as a fraction of the total. - Can be easily created with get_abundances(). By default None, which + Dictionary with the abundance of each element, expressed as a fraction of the + total. Can be easily created with get_abundances(). By default None, which results in solar composition. mass : bool, optional - If True returns mu in units of g, if False returns mu in units of amu, by default False. + If True returns mu in units of g, if False returns mu in units of amu, by + default False. Returns ------- @@ -840,21 +1122,21 @@ def calc_mu(rho, ne, abundances=None, mass=False): Mean particle mass. """ - if abundances == None: + if abundances is None: abundances = get_abundances() - sum_all = 0. + sum_all = 0.0 for element in abundances.keys(): sum_all += abundances[element] * get_mass(element) - mu = sum_all / (1 + ne*sum_all / rho) #mu in g + mu = sum_all / (1 + ne * sum_all / rho) # mu in g if not mass: - mu = mu / mH #mu in amu + mu = mu / mH # mu in amu return mu -def get_zdict(z=1., zelem={}): +def get_zdict(z=1.0, zelem={}): """ Returns a dictionary of the scale factors of each element relative to solar. @@ -873,11 +1155,41 @@ def get_zdict(z=1., zelem={}): to the default solar composition. """ - assert 'H' not in zelem.keys(), "You cannot scale hydrogen, scale everything else instead." - - zdict = {'He':1., 'Li':z, 'Be':z, 'B':z, 'C':z, 'N':z, 'O':z, 'F':z, 'Ne':z, - 'Na':z, 'Mg':z, 'Al':z, 'Si':z, 'P':z, 'S':z, 'Cl':z, 'Ar':z, 'K':z, 'Ca':z, - 'Sc':z, 'Ti':z, 'V':z, 'Cr':z, 'Mn':z, 'Fe':z, 'Co':z, 'Ni':z, 'Cu':z, 'Zn':z} + assert ( + "H" not in zelem.keys() + ), "You cannot scale hydrogen, scale everything else instead." + + zdict = { + "He": 1.0, + "Li": z, + "Be": z, + "B": z, + "C": z, + "N": z, + "O": z, + "F": z, + "Ne": z, + "Na": z, + "Mg": z, + "Al": z, + "Si": z, + "P": z, + "S": z, + "Cl": z, + "Ar": z, + "K": z, + "Ca": z, + "Sc": z, + "Ti": z, + "V": z, + "Cr": z, + "Mn": z, + "Fe": z, + "Co": z, + "Ni": z, + "Cu": z, + "Zn": z, + } for element in zelem.keys(): zdict[element] *= zelem[element] @@ -902,16 +1214,44 @@ def get_abundances(zdict=None): Dictionary with the abundances of all elements, expressed as a fraction of the total. """ - #solar abundance relative to hydrogen (Hazy table 7.1): - rel_abundances = {'H':1., 'He':0.1, 'Li':2.04e-9, 'Be':2.63e-11, 'B':6.17e-10, - 'C':2.45e-4, 'N':8.51e-5, 'O':4.9e-4, 'F':3.02e-8, 'Ne':1e-4, - 'Na':2.14e-6, 'Mg':3.47e-5, 'Al':2.95e-6, 'Si':3.47e-5, 'P':3.2e-7, - 'S':1.84e-5, 'Cl':1.91e-7, 'Ar':2.51e-6, 'K':1.32e-7, 'Ca':2.29e-6, - 'Sc':1.48e-9, 'Ti':1.05e-7, 'V':1e-8, 'Cr':4.68e-7, 'Mn':2.88e-7, - 'Fe':2.82e-5, 'Co':8.32e-8, 'Ni':1.78e-6, 'Cu':1.62e-8, 'Zn':3.98e-8} - - if zdict != None: - assert 'H' not in zdict.keys(), "You cannot scale hydrogen, scale everything else instead." 
+ # solar abundance relative to hydrogen (Hazy table 7.1): + rel_abundances = { + "H": 1.0, + "He": 0.1, + "Li": 2.04e-9, + "Be": 2.63e-11, + "B": 6.17e-10, + "C": 2.45e-4, + "N": 8.51e-5, + "O": 4.9e-4, + "F": 3.02e-8, + "Ne": 1e-4, + "Na": 2.14e-6, + "Mg": 3.47e-5, + "Al": 2.95e-6, + "Si": 3.47e-5, + "P": 3.2e-7, + "S": 1.84e-5, + "Cl": 1.91e-7, + "Ar": 2.51e-6, + "K": 1.32e-7, + "Ca": 2.29e-6, + "Sc": 1.48e-9, + "Ti": 1.05e-7, + "V": 1e-8, + "Cr": 4.68e-7, + "Mn": 2.88e-7, + "Fe": 2.82e-5, + "Co": 8.32e-8, + "Ni": 1.78e-6, + "Cu": 1.62e-8, + "Zn": 3.98e-8, + } + + if zdict is not None: + assert ( + "H" not in zdict.keys() + ), "You cannot scale hydrogen, scale everything else instead." for element in zdict.keys(): rel_abundances[element] *= zdict[element] @@ -924,8 +1264,8 @@ def get_abundances(zdict=None): def rho_to_hden(rho, abundances=None): """ Converts a mass density in units of g cm-3 to a hydrogen number density - in units of cm-3, for a given chemical composition. Based on formula: - rho = nH*mH + ntot*sum(fj*mj) + in units of cm-3, for a given chemical composition. Based on formula: + rho = nH*mH + ntot*sum(fj*mj) where nH is the hydrogen number density, mH the hydrogen atom mass, mj and fj the mass and abundance (=fraction) of element j (the sum excludes hydrogen) and ntot=rho/sum(fi*mi) @@ -939,7 +1279,7 @@ def rho_to_hden(rho, abundances=None): Mass density in units of g cm-3. abundances : dict, optional Dictionary with the abundance of each element, expressed as a fraction of the total. - Can be easily created with get_abundances(). By default None, which + Can be easily created with get_abundances(). By default None, which results in solar composition. Returns @@ -948,16 +1288,18 @@ def rho_to_hden(rho, abundances=None): Hydrogen number density in units of cm-3. """ - if abundances == None: - abundances = get_abundances() #get a solar composition + if abundances is None: + abundances = get_abundances() # get a solar composition - sum_all = 0. + sum_all = 0.0 for element in abundances.keys(): sum_all += abundances[element] * get_mass(element) - sum_noH = sum_all - abundances['H'] * get_mass('H') #subtract hydrogen to get the sum without H + sum_noH = sum_all - abundances["H"] * get_mass( + "H" + ) # subtract hydrogen to get the sum without H - hden = rho/mH * (1 - sum_noH / sum_all) + hden = rho / mH * (1 - sum_noH / sum_all) return hden @@ -965,8 +1307,8 @@ def rho_to_hden(rho, abundances=None): def hden_to_rho(hden, abundances=None): """ Converts a hydrogen number density in units of cm-3 to a mass density - in units of g cm-3, for a given chemical composition. Based on formula: - rho = nH*mH + ntot*sum(fj*mj) + in units of g cm-3, for a given chemical composition. Based on formula: + rho = nH*mH + ntot*sum(fj*mj) where nH is the hydrogen number density, mH the hydrogen atom mass, mj and fj the mass and abundance (=fraction) of element j (the sum excludes hydrogen) and ntot=rho/sum(fi*mi) @@ -980,7 +1322,7 @@ def hden_to_rho(hden, abundances=None): Hydrogen number density in units of cm-3. abundances : dict, optional Dictionary with the abundance of each element, expressed as a fraction of the total. - Can be easily created with get_abundances(). By default None, which + Can be easily created with get_abundances(). By default None, which results in solar composition. Returns @@ -989,16 +1331,18 @@ def hden_to_rho(hden, abundances=None): Mass density in units of g cm-3. 
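+
+        As a quick sanity check of the formula above: for a pure-hydrogen
+        composition the sum over the non-hydrogen elements vanishes and this
+        reduces to rho = hden * mH.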
""" - if abundances == None: - abundances = get_abundances() #get a solar composition + if abundances is None: + abundances = get_abundances() # get a solar composition - sum_all = 0. + sum_all = 0.0 for element in abundances.keys(): sum_all += abundances[element] * get_mass(element) - sum_noH = sum_all - abundances['H'] * get_mass('H') #subtract hydrogen to get the sum without H + sum_noH = sum_all - abundances["H"] * get_mass( + "H" + ) # subtract hydrogen to get the sum without H - rho = hden*mH / (1 - sum_noH / sum_all) + rho = hden * mH / (1 - sum_noH / sum_all) return rho @@ -1023,7 +1367,7 @@ def roche_radius(a, Mp, Mstar): Hill/Roche radius in units of cm. """ - Rroche = a * pow(Mp/(3.0*(Mstar+Mp)), 1.0/3.0) + Rroche = a * pow(Mp / (3.0 * (Mstar + Mp)), 1.0 / 3.0) return Rroche @@ -1044,22 +1388,73 @@ def set_alt_ax(ax, altmax=8, labels=True): Whether to use an xlabel and xticklabels, by default True """ - ax.set_xscale('log') + ax.set_xscale("log") ax.set_xlim(1, altmax) - ticks = np.concatenate((np.arange(1, 2, 0.1), np.arange(2, altmax+1, 1))) + ticks = np.concatenate((np.arange(1, 2, 0.1), np.arange(2, altmax + 1, 1))) if altmax <= 3: - ticklabels = ['1', '1.1', '1.2', '1.3', '1.4', '1.5', '1.6', '1.7', '1.8', '1.9'] - ticklabels2 = ["%i" %t for t in np.arange(2, altmax+1, 1).astype(int)] + ticklabels = [ + "1", + "1.1", + "1.2", + "1.3", + "1.4", + "1.5", + "1.6", + "1.7", + "1.8", + "1.9", + ] + ticklabels2 = ["%i" % t for t in np.arange(2, altmax + 1, 1).astype(int)] elif altmax <= 10: - ticklabels = ['1', '', '', '', '', '1.5', '', '', '', ''] - ticklabels2 = ["%i" %t for t in np.arange(2, altmax+1, 1).astype(int)] + ticklabels = ["1", "", "", "", "", "1.5", "", "", "", ""] + ticklabels2 = ["%i" % t for t in np.arange(2, altmax + 1, 1).astype(int)] elif altmax <= 14: - ticklabels = ['1', '', '', '', '', '', '', '', '', '', '2', '3', '4', '5', '', '7', '', '', '10'] - ticklabels2 = ['']*(altmax-10) + ticklabels = [ + "1", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "2", + "3", + "4", + "5", + "", + "7", + "", + "", + "10", + ] + ticklabels2 = [""] * (altmax - 10) else: - ticklabels = ['1', '', '', '', '', '', '', '', '', '', '2', '3', '4', '5', '', '7', '', '', '10'] - ticklabels2 = ['']*(altmax-10) - ticklabels2b = np.arange(15, altmax+0.1, 5).astype(int) + ticklabels = [ + "1", + "", + "", + "", + "", + "", + "", + "", + "", + "", + "2", + "3", + "4", + "5", + "", + "7", + "", + "", + "10", + ] + ticklabels2 = [""] * (altmax - 10) + ticklabels2b = np.arange(15, altmax + 0.1, 5).astype(int) index = 4 for t2b in ticklabels2b: ticklabels2[index] = str(t2b) @@ -1070,7 +1465,7 @@ def set_alt_ax(ax, altmax=8, labels=True): ax.set_xticks(ticks) if labels: ax.set_xticklabels(ticklabels) - ax.set_xlabel(r'Radius [$R_p$]') + ax.set_xlabel(r"Radius [$R_p$]") else: ax.set_xticklabels([]) @@ -1111,28 +1506,34 @@ def alt_array_to_Cloudy(alt, quantity, altmax, Rp, nmax, log=True): if isinstance(quantity, pd.Series): quantity = quantity.values - assert alt[1] > alt[0] #should be in ascending alt order - assert alt[-1] - altmax*Rp > -1. #For extrapolation: the alt scale should extend at least to within 1 cm of altmax*Rp + assert alt[1] > alt[0] # should be in ascending alt order + assert ( + alt[-1] - altmax * Rp > -1.0 + ) # For extrapolation: the alt scale should extend at least to within 1 cm of altmax*Rp if not np.isclose(alt[0], Rp, rtol=1e-2, atol=0.0): - warnings.warn(f"Are you sure the altitude array starts at Rp? 
alt[0]/Rp = {alt[0]/Rp}") - - depth = altmax*Rp - alt - ifunc = interp1d(depth, quantity, fill_value='extrapolate') - - - Clgridr1 = np.logspace(np.log10(alt[0]), np.log10(altmax*Rp), num=int(0.8*nmax)) - Clgridr1[0], Clgridr1[-1] = alt[0], altmax*Rp #reset these for potential log-numerical errors + warnings.warn( + f"Are you sure the altitude array starts at Rp? alt[0]/Rp = {alt[0]/Rp}" + ) + + depth = altmax * Rp - alt + ifunc = interp1d(depth, quantity, fill_value="extrapolate") + + Clgridr1 = np.logspace(np.log10(alt[0]), np.log10(altmax * Rp), num=int(0.8 * nmax)) + Clgridr1[0], Clgridr1[-1] = ( + alt[0], + altmax * Rp, + ) # reset these for potential log-numerical errors Clgridr1 = (Clgridr1[-1] - Clgridr1)[::-1] - #sample the first 10 points better since Cloudy messes up with log-space interpolation there - Clgridr2 = np.logspace(-2, np.log10(Clgridr1[9]), num=(nmax-len(Clgridr1))) + # sample the first 10 points better since Cloudy messes up with log-space interpolation there + Clgridr2 = np.logspace(-2, np.log10(Clgridr1[9]), num=(nmax - len(Clgridr1))) Clgridr = np.concatenate((Clgridr2, Clgridr1[10:])) Clgridr[0] = 1e-35 Clgridq = ifunc(Clgridr) law = np.column_stack((Clgridr, Clgridq)) if log: - law[law[:,1]==0., 1] = 1e-100 + law[law[:, 1] == 0.0, 1] = 1e-100 law = np.log10(law) return law @@ -1166,22 +1567,24 @@ def smooth_gaus_savgol(y, size=None, fraction=None): If neither or both size and fraction were provided. """ - if size != None and fraction == None: + if size is not None and fraction is None: size = max(3, size) - elif fraction != None and size == None: - assert 0. < fraction < 1., "fraction must be greater than 0 and smaller than 1" - size = int(np.ceil(len(y)*fraction) // 2 * 2 + 1) #make it odd + elif fraction is not None and size is None: + assert ( + 0.0 < fraction < 1.0 + ), "fraction must be greater than 0 and smaller than 1" + size = int(np.ceil(len(y) * fraction) // 2 * 2 + 1) # make it odd size = max(3, size) else: raise ValueError("Please provide either 'size' or 'fraction'.") ygaus = gaussian_filter1d(y, size) - ysavgol = savgol_filter(y, 2*int(size/2)+1, polyorder=2) + ysavgol = savgol_filter(y, 2 * int(size / 2) + 1, polyorder=2) savgolweight = np.zeros(len(y)) savgolweight += sps.norm.pdf(range(len(y)), 0, size) savgolweight += sps.norm.pdf(range(len(y)), len(y), size) - savgolweight /= np.max(savgolweight) #normalize + savgolweight /= np.max(savgolweight) # normalize gausweight = 1 - savgolweight ysmooth = ygaus * gausweight + ysavgol * savgolweight @@ -1189,9 +1592,10 @@ def smooth_gaus_savgol(y, size=None, fraction=None): return ysmooth -####################################### -########### CLOUDY I/O ########## -####################################### +# ###################################### +# ########## CLOUDY I/O ########## +# ###################################### + def run_Cloudy(filename, folder=None): """ @@ -1202,7 +1606,7 @@ def run_Cloudy(filename, folder=None): filename : str Name of the simulation input file. If the folder argument is not specfied, filename must include the full path to the simulation. - If the folder argument is specified, the filename should only + If the folder argument is specified, the filename should only specify the filename. folder : str, optional Full path to the directory where the file is located, excluding @@ -1211,13 +1615,15 @@ def run_Cloudy(filename, folder=None): full path. By default None. 
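+
+    A minimal usage sketch (the folder path is purely illustrative):
+    run_Cloudy("mysim", folder="/path/to/sims") changes into that folder and
+    invokes the Cloudy executable on mysim.in there.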
""" - if folder is None: #then the folder should be in the simname + if folder is None: # then the folder should be in the simname folder, filename = os.path.split(filename) if filename.endswith(".in"): - filename = filename[:-3] #filename should not contain the extension + filename = filename[:-3] # filename should not contain the extension - os.system('cd '+folder+' && '+cloudypath+'/source/cloudy.exe -p '+filename) + os.system( + "cd " + folder + " && " + cloudypath + "/source/cloudy.exe -p " + filename + ) def remove_duplicates(law, fmt): @@ -1245,8 +1651,10 @@ def remove_duplicates(law, fmt): """ nonduplicates = [0] - for i in range(1, len(law)-1): - if format(law[i,1], fmt) != format(law[i-1,1], fmt) or format(law[i,1], fmt) != format(law[i+1,1], fmt): + for i in range(1, len(law) - 1): + if format(law[i, 1], fmt) != format(law[i - 1, 1], fmt) or format( + law[i, 1], fmt + ) != format(law[i + 1, 1], fmt): nonduplicates.append(i) nonduplicates.append(-1) @@ -1255,10 +1663,23 @@ def remove_duplicates(law, fmt): return new_law -def copyadd_Cloudy_in(oldsimname, newsimname, set_thickness=False, - dlaw=None, tlaw=None, cextra=None, hextra=None, - othercommands=None, outfiles=[], denspecies=[], selected_den_levels=False, - constantT=None, double_tau=False, hcfrac=None, cloudy_version="17"): +def copyadd_Cloudy_in( + oldsimname, + newsimname, + set_thickness=False, + dlaw=None, + tlaw=None, + cextra=None, + hextra=None, + othercommands=None, + outfiles=[], + denspecies=[], + selected_den_levels=False, + constantT=None, + double_tau=False, + hcfrac=None, + cloudy_version="17", +): """ Makes a copy of a Cloudy input file and appends commands. @@ -1274,7 +1695,8 @@ def copyadd_Cloudy_in(oldsimname, newsimname, set_thickness=False, dlaw : numpy.ndarray, optional Hydrogen number density in units of cm-3, as a 2D array where dlaw[:,0] specifies the log10 of the depth into the cloud in cm, and dlaw[:,1] - specifies the log10 of the hydrogen number density in units of cm-3, by default None + specifies the log10 of the hydrogen number density in units of cm-3, by default + None tlaw : numpy.ndarray, optional Temperature in units of K as a 2D array where tlaw[:,0] specifies the log10 of the depth into the cloud in cm, and tlaw[:,1] @@ -1294,15 +1716,16 @@ def copyadd_Cloudy_in(oldsimname, newsimname, set_thickness=False, by this function can be included here, by default None outfiles : list, optional List of file extensions indicating which Cloudy output to save. For example, - include '.heat' to include the 'save heating' command, by default ['.ovr', '.cool'] + include '.heat' to include the 'save heating' command, by default ['.ovr', + '.cool'] denspecies : list, optional List of atomic/ionic species for which to save densities and energies, which are needed to do radiative transfer. The list can easily be created by the get_specieslist() function. By default []. selected_den_levels : bool, optional If True, only energy levels up to the number that can be matched to NIST - will be included in the 'save densities' command. If False, all energy levels - of each species will be included, regardless of whether we can match them + will be included in the 'save densities' command. If False, all energy levels + of each species will be included, regardless of whether we can match them to NIST. By default False. 
constantT : str or numeric, optional Constant temperature in units of K, by default None @@ -1324,14 +1747,18 @@ def copyadd_Cloudy_in(oldsimname, newsimname, set_thickness=False, assert ".den" in outfiles and ".en" in outfiles if ".den" in outfiles or ".en" in outfiles: assert ".den" in outfiles and ".en" in outfiles - if constantT != None: - assert not np.any(tlaw != None) + if constantT is not None: + assert not np.any(tlaw is not None) - copyfile(oldsimname+".in", newsimname+".in") + copyfile(oldsimname + ".in", newsimname + ".in") - with open(newsimname+".in", "a") as f: + with open(newsimname + ".in", "a") as f: if set_thickness: - f.write('\nstop thickness '+'{:.7f}'.format(dlaw[-1,0])+'\t#last dlaw point') + f.write( + "\nstop thickness " + + "{:.7f}".format(dlaw[-1, 0]) + + "\t#last dlaw point" + ) if ".ovr" in outfiles: f.write('\nsave overview ".ovr" last') if ".cool" in outfiles: @@ -1342,63 +1769,114 @@ def copyadd_Cloudy_in(oldsimname, newsimname, set_thickness=False, f.write('\nsave heating ".heat" last') if ".con" in outfiles: f.write('\nsave continuum ".con" last units angstrom') - if ".den" in outfiles: #then ".en" is always there as well due to the assertion above + if ( + ".den" in outfiles + ): # then ".en" is always there as well due to the assertion above if denspecies != []: - f.write('\nsave species densities last ".den"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") - f.write('\nsave species energies last ".en"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") - if constantT != None: - f.write('\nconstant temperature t= '+str(constantT)+' linear') + f.write( + '\nsave species densities last ".den"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) + f.write( + '\nsave species energies last ".en"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) + if constantT is not None: + f.write("\nconstant temperature t= " + str(constantT) + " linear") if double_tau: - f.write('\ndouble optical depths #so radiation does not escape into planet core freely') + f.write( + "\ndouble optical depths #so radiation does not escape into planet " + "core freely" + ) if hcfrac: - f.write('\nset WeakHeatCool '+str(hcfrac)+' #for .heat and .cool output files') - if othercommands != None: - f.write("\n"+othercommands) - if np.any(dlaw != None): + f.write( + "\nset WeakHeatCool " + + str(hcfrac) + + " #for .heat and .cool output files" + ) + if othercommands is not None: + f.write("\n" + othercommands) + if np.any(dlaw is not None): dlaw = remove_duplicates(dlaw, "1.7f") f.write("\n# ========= density law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\ndlaw table depth\n") - np.savetxt(f, dlaw, fmt='%1.7f') - f.write('{:.7f}'.format(dlaw[-1,0]+0.1)+ - ' '+'{:.7f}'.format(dlaw[-1,1])) + np.savetxt(f, dlaw, fmt="%1.7f") + f.write( + "{:.7f}".format(dlaw[-1, 0] + 0.1) + " " + "{:.7f}".format(dlaw[-1, 1]) + ) f.write("\nend of dlaw #last point added to prevent roundoff") - if np.any(tlaw != None): + if np.any(tlaw is not None): tlaw = remove_duplicates(tlaw, "1.7f") f.write("\n# ========= temperature law ============") f.write("\n#depth sets distances from edge of cloud") f.write("\ntlaw table depth\n") - np.savetxt(f, tlaw, fmt='%1.7f') - f.write('{:.7f}'.format(tlaw[-1,0]+0.1)+ - ' 
'+'{:.7f}'.format(tlaw[-1,1])) + np.savetxt(f, tlaw, fmt="%1.7f") + f.write( + "{:.7f}".format(tlaw[-1, 0] + 0.1) + " " + "{:.7f}".format(tlaw[-1, 1]) + ) f.write("\nend of tlaw #last point added to prevent roundoff") - if np.any(cextra != None): + if np.any(cextra is not None): cextra = remove_duplicates(cextra, "1.7f") f.write("\n# ========= cextra law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\ncextra table depth\n") - np.savetxt(f, cextra, fmt='%1.7f') - f.write('{:.7f}'.format(cextra[-1,0]+0.1)+ - ' '+'{:.7f}'.format(cextra[-1,1])) + np.savetxt(f, cextra, fmt="%1.7f") + f.write( + "{:.7f}".format(cextra[-1, 0] + 0.1) + + " " + + "{:.7f}".format(cextra[-1, 1]) + ) f.write("\nend of cextra #last point added to prevent roundoff") - if np.any(hextra != None): + if np.any(hextra is not None): hextra = remove_duplicates(hextra, "1.7f") f.write("\n# ========= hextra law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\nhextra table depth\n") - np.savetxt(f, hextra, fmt='%1.7f') - f.write('{:.7f}'.format(hextra[-1,0]+0.1)+ - ' '+'{:.7f}'.format(hextra[-1,1])) + np.savetxt(f, hextra, fmt="%1.7f") + f.write( + "{:.7f}".format(hextra[-1, 0] + 0.1) + + " " + + "{:.7f}".format(hextra[-1, 1]) + ) f.write("\nend of hextra #last point added to prevent roundoff") -def write_Cloudy_in(simname, title=None, flux_scaling=None, - SED=None, set_thickness=True, - dlaw=None, tlaw=None, cextra=None, hextra=None, - othercommands=None, overwrite=False, iterate='convergence', - nend=3000, outfiles=['.ovr', '.cool'], denspecies=[], selected_den_levels=False, - constantT=None, double_tau=False, cosmic_rays=False, zdict=None, hcfrac=None, - comments=None, cloudy_version="17"): +def write_Cloudy_in( + simname, + title=None, + flux_scaling=None, + SED=None, + set_thickness=True, + dlaw=None, + tlaw=None, + cextra=None, + hextra=None, + othercommands=None, + overwrite=False, + iterate="convergence", + nend=3000, + outfiles=[".ovr", ".cool"], + denspecies=[], + selected_den_levels=False, + constantT=None, + double_tau=False, + cosmic_rays=False, + zdict=None, + hcfrac=None, + comments=None, + cloudy_version="17", +): """ Writes a Cloudy input file for simulating an exoplanet atmosphere. @@ -1455,8 +1933,8 @@ def write_Cloudy_in(simname, title=None, flux_scaling=None, get_specieslist() function. By default []. selected_den_levels : bool, optional If True, only energy levels up to the number that can be matched to NIST - will be included in the 'save densities' command. If False, all energy levels - of each species will be included, regardless of whether we can match them + will be included in the 'save densities' command. If False, all energy levels + of each species will be included, regardless of whether we can match them to NIST. By default False. constantT : str or numeric, optional Constant temperature in units of K, by default None @@ -1483,55 +1961,84 @@ def write_Cloudy_in(simname, title=None, flux_scaling=None, argument, by default "17". """ - assert flux_scaling is not None #we need this to proceed. Give in format [F,E] like nuF(nu) = F at E Ryd - assert SED != None + assert ( + flux_scaling is not None + ) # we need this to proceed. 
Give in format [F,E] like nuF(nu) = F at E Ryd + assert SED is not None if denspecies != []: assert ".den" in outfiles and ".en" in outfiles if ".den" in outfiles or ".en" in outfiles: assert ".den" in outfiles and ".en" in outfiles and denspecies != [] if not overwrite: - assert not os.path.isfile(simname+".in") - if constantT != None: - assert not np.any(tlaw != None) - - with open(simname+".in", "w") as f: - if comments != None: - f.write(comments+'\n') - if title != None: - f.write('title '+title) + assert not os.path.isfile(simname + ".in") + if constantT is not None: + assert not np.any(tlaw is not None) + + with open(simname + ".in", "w") as f: + if comments is not None: + f.write(comments + "\n") + if title is not None: + f.write("title " + title) f.write("\n# ========= input spectrum ================") - f.write("\nnuF(nu) = "+str(flux_scaling[0])+" at "+str(flux_scaling[1])+" Ryd") - f.write('\ntable SED "'+SED+'"') + f.write( + "\nnuF(nu) = " + + str(flux_scaling[0]) + + " at " + + str(flux_scaling[1]) + + " Ryd" + ) + f.write('\ntable SED "' + SED + '"') if cosmic_rays: - f.write('\ncosmic rays background') + f.write("\ncosmic rays background") f.write("\n# ========= chemistry ================") f.write("\n# solar abundances and metallicity is standard") - if zdict != None: + if zdict is not None: for element in zdict.keys(): - if zdict[element] == 0.: - f.write("\nelement "+element_names[element]+" off") - elif zdict[element] != 1.: #only write it to Cloudy if the scale factor is not 1 - f.write("\nelement scale factor "+element_names[element]+" "+str(zdict[element])) + if zdict[element] == 0.0: + f.write("\nelement " + element_names[element] + " off") + elif ( + zdict[element] != 1.0 + ): # only write it to Cloudy if the scale factor is not 1 + f.write( + "\nelement scale factor " + + element_names[element] + + " " + + str(zdict[element]) + ) f.write("\n# ========= other ================") - if nend != None: - f.write("\nset nend "+str(nend)+" #models at high density need >1400 zones") + if nend is not None: + f.write( + "\nset nend " + + str(nend) + + " #models at high density need >1400 zones" + ) f.write("\nset temperature floor 5 linear") f.write("\nstop temperature off #otherwise it stops at 1e4 K") - if iterate == 'convergence': + if iterate == "convergence": f.write("\niterate to convergence") else: - f.write("niterate "+str(iterate)) + f.write("niterate " + str(iterate)) f.write("\nprint last iteration") if set_thickness: - f.write('\nstop thickness '+'{:.7f}'.format(dlaw[-1,0])+'\t#last dlaw point') - if constantT != None: - f.write('\nconstant temperature t= '+str(constantT)+' linear') + f.write( + "\nstop thickness " + + "{:.7f}".format(dlaw[-1, 0]) + + "\t#last dlaw point" + ) + if constantT is not None: + f.write("\nconstant temperature t= " + str(constantT) + " linear") if double_tau: - f.write('\ndouble optical depths #so radiation does not escape into planet core freely') + f.write( + "\ndouble optical depths #so radiation does not escape into planet core freely" + ) if hcfrac: - f.write('\nset WeakHeatCool '+str(hcfrac)+' #for .heat and .cool output files') - if othercommands != None: - f.write("\n"+othercommands) + f.write( + "\nset WeakHeatCool " + + str(hcfrac) + + " #for .heat and .cool output files" + ) + if othercommands is not None: + f.write("\n" + othercommands) f.write("\n# ========= output ================") if ".ovr" in outfiles: f.write('\nsave overview ".ovr" last') @@ -1543,52 +2050,78 @@ def write_Cloudy_in(simname, title=None, 
flux_scaling=None, f.write('\nsave heating ".heat" last') if ".con" in outfiles: f.write('\nsave continuum ".con" last units angstrom') - if ".den" in outfiles: #then ".en" is always there as well. - f.write('\nsave species densities last ".den"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") - f.write('\nsave species energies last ".en"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") + if ".den" in outfiles: # then ".en" is always there as well. + f.write( + '\nsave species densities last ".den"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) + f.write( + '\nsave species energies last ".en"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) if dlaw is not None: dlaw = remove_duplicates(dlaw, "1.7f") f.write("\n# ========= density law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\ndlaw table depth\n") - np.savetxt(f, dlaw, fmt='%1.7f') - f.write('{:.7f}'.format(dlaw[-1,0]+0.1)+ - ' '+'{:.7f}'.format(dlaw[-1,1])) + np.savetxt(f, dlaw, fmt="%1.7f") + f.write( + "{:.7f}".format(dlaw[-1, 0] + 0.1) + " " + "{:.7f}".format(dlaw[-1, 1]) + ) f.write("\nend of dlaw #last point added to prevent roundoff") if tlaw is not None: tlaw = remove_duplicates(tlaw, "1.7f") f.write("\n# ========= temperature law ============") f.write("\n#depth sets distances from edge of cloud") f.write("\ntlaw table depth\n") - np.savetxt(f, tlaw, fmt='%1.7f') - f.write('{:.7f}'.format(tlaw[-1,0]+0.1)+ - ' '+'{:.7f}'.format(tlaw[-1,1])) + np.savetxt(f, tlaw, fmt="%1.7f") + f.write( + "{:.7f}".format(tlaw[-1, 0] + 0.1) + " " + "{:.7f}".format(tlaw[-1, 1]) + ) f.write("\nend of tlaw #last point added to prevent roundoff") if cextra is not None: cextra = remove_duplicates(cextra, "1.7f") f.write("\n# ========= cextra law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\ncextra table depth\n") - np.savetxt(f, cextra, fmt='%1.7f') - f.write('{:.7f}'.format(cextra[-1,0]+0.1)+ - ' '+'{:.7f}'.format(cextra[-1,1])) + np.savetxt(f, cextra, fmt="%1.7f") + f.write( + "{:.7f}".format(cextra[-1, 0] + 0.1) + + " " + + "{:.7f}".format(cextra[-1, 1]) + ) f.write("\nend of cextra #last point added to prevent roundoff") if hextra is not None: hextra = remove_duplicates(hextra, "1.7f") f.write("\n# ========= hextra law ================") f.write("\n#depth sets distances from edge of cloud") f.write("\nhextra table depth\n") - np.savetxt(f, hextra, fmt='%1.7f') - f.write('{:.7f}'.format(hextra[-1,0]+0.1)+ - ' '+'{:.7f}'.format(hextra[-1,1])) + np.savetxt(f, hextra, fmt="%1.7f") + f.write( + "{:.7f}".format(hextra[-1, 0] + 0.1) + + " " + + "{:.7f}".format(hextra[-1, 1]) + ) f.write("\nend of hextra #last point added to prevent roundoff") -def insertden_Cloudy_in(simname, denspecies, selected_den_levels=True, rerun=False, cloudy_version="17"): +def insertden_Cloudy_in( + simname, denspecies, selected_den_levels=True, rerun=False, cloudy_version="17" +): """ Takes a Cloudy .in input file and adds extra species to the 'save species densities' command. This is useful for example if you first went - through the convergeT_parker.py temperature convergence scheme, + through the convergeT_parker.py temperature convergence scheme, but later want to add additional species to the 'converged' simulation. 
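+
+    A minimal sketch of typical use (the simulation name and species list are
+    illustrative only): insertden_Cloudy_in("converged", ["He"], rerun=False)
+    appends helium to the existing 'save species densities' and
+    'save species energies' commands of converged.in, or adds those commands
+    if they are not present yet.
+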
Parameters @@ -1601,8 +2134,8 @@ def insertden_Cloudy_in(simname, denspecies, selected_den_levels=True, rerun=Fal get_specieslist() function. selected_den_levels : bool, optional If True, only energy levels up to the number that can be matched to NIST - will be included in the 'save densities' command. If False, all energy levels - of each species will be included, regardless of whether we can match them + will be included in the 'save densities' command. If False, all energy levels + of each species will be included, regardless of whether we can match them to NIST. By default True. rerun : bool, optional Whether to run the new Cloudy input file, by default False @@ -1615,33 +2148,73 @@ def insertden_Cloudy_in(simname, denspecies, selected_den_levels=True, rerun=Fal If there are multiple 'save species densities' commands in the Cloudy input file. """ - with open(simname+".in", "r") as f: + with open(simname + ".in", "r") as f: oldcontent = f.readlines() newcontent = oldcontent - indices = [i for i, s in enumerate(oldcontent) if 'save species densities' in s] - if len(indices) == 0: #then there is no 'save species densities' command yet - newcontent.append('\nsave species densities last ".den"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") - newcontent.append('\nsave species energies last ".en"\n'+speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\nend") - - elif len(indices) == 1: #then there already is a 'save species densities' command with some species + indices = [i for i, s in enumerate(oldcontent) if "save species densities" in s] + if len(indices) == 0: # then there is no 'save species densities' command yet + newcontent.append( + '\nsave species densities last ".den"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) + newcontent.append( + '\nsave species energies last ".en"\n' + + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\nend" + ) + + elif ( + len(indices) == 1 + ): # then there already is a 'save species densities' command with some species for sp in denspecies.copy(): - if len([i for i, s in enumerate(oldcontent) if sp+"[" in s]) != 0: #check if this species is already in the file + if ( + len([i for i, s in enumerate(oldcontent) if sp + "[" in s]) != 0 + ): # check if this species is already in the file denspecies.remove(sp) print(sp, "was already in the .in file so I did not add it again.") if len(denspecies) >= 1: - newcontent.insert(indices[0]+1, speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\n") - #also add them to the 'save species energies' list - indices2 = [i for i, s in enumerate(oldcontent) if 'save species energies' in s] - newcontent.insert(indices2[0]+1, speciesstring(denspecies, selected_levels=selected_den_levels, cloudy_version=cloudy_version)+"\n") + newcontent.insert( + indices[0] + 1, + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\n", + ) + # also add them to the 'save species energies' list + indices2 = [ + i for i, s in enumerate(oldcontent) if "save species energies" in s + ] + newcontent.insert( + indices2[0] + 1, + speciesstring( + denspecies, + selected_levels=selected_den_levels, + cloudy_version=cloudy_version, + ) + + "\n", + ) else: return else: - raise ValueError("There are multiple 'save 
species densities' commands in the .in file. This shouldn't be the case, please check.") + raise ValueError( + "There are multiple 'save species densities' commands in the .in file. This shouldn't be the case, please check." + ) - newcontent = "".join(newcontent) #turn list into string - with open(simname+".in", "w") as f: #overwrite the old file + newcontent = "".join(newcontent) # turn list into string + with open(simname + ".in", "w") as f: # overwrite the old file f.write(newcontent) if rerun: @@ -1652,12 +2225,15 @@ def insertden_Cloudy_in(simname, denspecies, selected_den_levels=True, rerun=Fal ########### CLASSES ########### ####################################### + class Parker: """ Class that stores a Parker wind profile and its parameters. """ - def __init__(self, plname, T, Mdot, pdir, fH=None, zdict=None, SED=None, readin=True): + def __init__( + self, plname, T, Mdot, pdir, fH=None, zdict=None, SED=None, readin=True + ): """ Parameters ---------- @@ -1692,11 +2268,11 @@ def __init__(self, plname, T, Mdot, pdir, fH=None, zdict=None, SED=None, readin= elif type(Mdot) == float or type(Mdot) == int: self.Mdot = "%.3f" % Mdot self.Mdotf = Mdot - if fH != None: + if fH is not None: self.fH = fH - if zdict != None: + if zdict is not None: self.zdict = zdict - if SED != None: + if SED is not None: self.SED = SED if readin: self.prof = read_parker(plname, T, Mdot, pdir) @@ -1707,7 +2283,18 @@ class Planet: Class that stores planet/star parameters. """ - def __init__(self, name, fullname=None, R=None, Rstar=None, a=None, M=None, Mstar=None, bp=None, SEDname=None): + def __init__( + self, + name, + fullname=None, + R=None, + Rstar=None, + a=None, + M=None, + Mstar=None, + bp=None, + SEDname=None, + ): """ Parameters ---------- @@ -1735,43 +2322,59 @@ def __init__(self, name, fullname=None, R=None, Rstar=None, a=None, M=None, Msta Stellar SED name, by default None """ - #check if we can fetch planet parameters from planets.txt: - if name in planets_file['name'].values or name in planets_file['full name'].values: - this_planet = planets_file[(planets_file['name'] == name) | (planets_file['full name'] == name)] - assert len(this_planet) == 1, "Multiple entries were found in planets.txt for this planet name." - - self.name = this_planet['name'].values[0] - self.fullname = this_planet['full name'].values[0] - self.R = this_planet['R [RJ]'].values[0] * RJ #in cm - self.Rstar = this_planet['Rstar [Rsun]'].values[0] *Rsun #in cm - self.a = this_planet['a [AU]'].values[0] * AU #in cm - self.M = this_planet['M [MJ]'].values[0] * MJ #in g - self.Mstar = this_planet['Mstar [Msun]'].values[0] * Msun #in g - self.bp = this_planet['transit impact parameter'].values[0] #dimensionless - self.SEDname = this_planet['SEDname'].values[0].strip() #strip to remove whitespace from beginning and end - - #if any specified, overwrite values read from planets.txt - if fullname != None: + # check if we can fetch planet parameters from planets.txt: + if ( + name in planets_file["name"].values + or name in planets_file["full name"].values + ): + this_planet = planets_file[ + (planets_file["name"] == name) | (planets_file["full name"] == name) + ] + assert ( + len(this_planet) == 1 + ), "Multiple entries were found in planets.txt for this planet name." 
+ + self.name = this_planet["name"].values[0] + self.fullname = this_planet["full name"].values[0] + self.R = this_planet["R [RJ]"].values[0] * RJ # in cm + self.Rstar = this_planet["Rstar [Rsun]"].values[0] * Rsun # in cm + self.a = this_planet["a [AU]"].values[0] * AU # in cm + self.M = this_planet["M [MJ]"].values[0] * MJ # in g + self.Mstar = this_planet["Mstar [Msun]"].values[0] * Msun # in g + self.bp = this_planet["transit impact parameter"].values[0] # dimensionless + self.SEDname = ( + this_planet["SEDname"].values[0].strip() + ) # strip to remove whitespace from beginning and end + + # if any specified, overwrite values read from planets.txt + if fullname is not None: self.fullname = fullname - if R != None: + if R is not None: self.R = R - if Rstar != None: + if Rstar is not None: self.Rstar = Rstar - if a != None: + if a is not None: self.a = a - if M != None: + if M is not None: self.M = M - if Mstar != None: + if Mstar is not None: self.Mstar = Mstar - if bp != None: + if bp is not None: self.bp = bp - if SEDname != None: + if SEDname is not None: self.SEDname = SEDname else: - assert fullname is not None and R is not None and Rstar is not None and a is not None and M is not None and \ - Mstar is not None and bp is not None and SEDname is not None, \ - "I'm trying to make a Planet that is not in the planets.txt file, but I don't have all required arguments." + assert ( + fullname is not None + and R is not None + and Rstar is not None + and a is not None + and M is not None + and Mstar is not None + and bp is not None + and SEDname is not None + ), "I'm trying to make a Planet that is not in the planets.txt file, but I don't have all required arguments." self.name = name self.fullname = fullname self.R = R @@ -1786,34 +2389,45 @@ def __init__(self, name, fullname=None, R=None, Rstar=None, a=None, M=None, Msta self.__update_phi() self.__update_Kp() - def set_var(self, name=None, fullname=None, R=None, Rstar=None, a=None, M=None, Mstar=None, bp=None, SEDname=None): + def set_var( + self, + name=None, + fullname=None, + R=None, + Rstar=None, + a=None, + M=None, + Mstar=None, + bp=None, + SEDname=None, + ): """ Change planet/star parameters after initialization. """ - if name != None: + if name is not None: self.name = name - if R != None: + if R is not None: self.R = R self.__update_phi() - if Rstar != None: + if Rstar is not None: self.Rstar = Rstar - if a != None: + if a is not None: self.a = a self.__update_Rroche() self.__update_Kp() - if M != None: + if M is not None: self.M = M self.__update_phi() self.__update_Rroche() self.__update_Kp() - if Mstar != None: + if Mstar is not None: self.Mstar = Mstar self.__update_Rroche() self.__update_Kp() - if bp != None: + if bp is not None: self.bp = bp - if SEDname != None: + if SEDname is not None: self.SEDname = SEDname def __update_phi(self): @@ -1821,7 +2435,7 @@ def __update_phi(self): Tries to set/update the gravitational potential. """ - if (self.M != None) and (self.R != None): + if (self.M is not None) and (self.R is not None): self.phi = G * self.M / self.R else: self.phi = None @@ -1831,7 +2445,7 @@ def __update_Rroche(self): Tries to set/update the Roche radius. """ - if (self.a != None) and (self.M != None) and (self.Mstar != None): + if (self.a is not None) and (self.M is not None) and (self.Mstar is not None): self.Rroche = roche_radius(self.a, self.M, self.Mstar) else: self.Rroche = None @@ -1841,7 +2455,7 @@ def __update_Kp(self): Tries to set/update the orbital velocity semi-amplitude. 
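+        Computed as Kp = sqrt(G * (M + Mstar) / a), assuming a circular orbit.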
""" - if (self.a != None) and (self.M != None) and (self.Mstar != None): + if (self.a is not None) and (self.M is not None) and (self.Mstar is not None): self.Kp = np.sqrt(G * (self.M + self.Mstar) / self.a) else: self.Kp = None @@ -1869,74 +2483,146 @@ def print_params(self): if self.SEDname is not None: print(f"Stellar spectrum name: {self.SEDname}") if self.Rroche is not None: - print(f"Roche radius: {self.Rroche} cm, {self.Rroche / RJ} RJ, {self.Rroche / self.R} Rp") + print( + f"Roche radius: {self.Rroche} cm, {self.Rroche / RJ} RJ, " + f"{self.Rroche / self.R} Rp" + ) if self.phi is not None: print(f"log10(Gravitational potential): {np.log10(self.phi)} log10(erg/g)") if self.Kp is not None: - print(f"Orbital velocity semi-amplitude: {self.Kp} cm/s, {self.Kp/1e5} km/s") + print( + f"Orbital velocity semi-amplitude: {self.Kp} cm/s, {self.Kp/1e5} km/s" + ) - def plot_transit_geometry(self, phase=0., altmax=None): + def plot_transit_geometry(self, phase=0.0, altmax=None): """ - Plots a schematic of the transit geometry. Helpful to understand - where the planet and its atmosphere are relative to the stellar disk, - for a given planet impact parameter and phase. The dotted line shows - the planet Roche radius. The altmax argument can be used to draw - another dashed line in units of the planet radius, for example the - extent of the sunbather simulation (typically 8 Rp). + Plots a schematic of the transit geometry. Helpful to understand where the + planet and its atmosphere are relative to the stellar disk, for a given planet + impact parameter and phase. The dotted line shows the planet Roche radius. The + altmax argument can be used to draw another dashed line in units of the planet + radius, for example the extent of the sunbather simulation (typically 8 Rp). 
""" fig, ax = plt.subplots(1) - #draw star - ax.plot(self.Rstar*np.cos(np.linspace(0, 2*np.pi, 100)), self.Rstar*np.sin(np.linspace(0, 2*np.pi, 100)), c='k', zorder=0) - ax.text(1/np.sqrt(2)*self.Rstar, -1/np.sqrt(2)*self.Rstar, r"$R_s$", color="k", ha="left", va="top", zorder=0) - - #draw planet - pl_zorder = -1 if (phase%1 > 0.25 and phase%1 < 0.75) else 1 - ax.plot(self.a*np.sin(2*np.pi*phase) + self.R*np.cos(np.linspace(0, 2*np.pi, 100)), - self.bp*self.Rstar + self.R*np.sin(np.linspace(0, 2*np.pi, 100)), c='b', zorder=pl_zorder) - ax.text(self.a*np.sin(2*np.pi*phase) + 1/np.sqrt(2)*self.R, self.bp*self.Rstar - 1/np.sqrt(2)*self.R, - r"$R_P$", color="b", ha="left", va="top", zorder=pl_zorder) - - #draw planet vy direction - if phase%1 > 0.75 or phase%1 < 0.25: - ax.text(self.a*np.sin(2*np.pi*phase) + self.R, self.bp*self.Rstar, r"$\rightarrow$", color="b", ha="left", va="top", zorder=pl_zorder) - title = f"Phase: {phase} mod 1 = {phase%1}" - elif phase%1 > 0.25 and phase%1 < 0.75: - ax.text(self.a*np.sin(2*np.pi*phase) - self.R, self.bp*self.Rstar, r"$\leftarrow$", color="b", ha="right", va="top", zorder=pl_zorder) - title = f"Phase: {phase} mod 1 = {phase%1} (planet behind star)" - else: #at 0.25 or 0.75, only vx velocity + title = "" + # draw star + ax.plot( + self.Rstar * np.cos(np.linspace(0, 2 * np.pi, 100)), + self.Rstar * np.sin(np.linspace(0, 2 * np.pi, 100)), + c="k", + zorder=0, + ) + ax.text( + 1 / np.sqrt(2) * self.Rstar, + -1 / np.sqrt(2) * self.Rstar, + r"$R_s$", + color="k", + ha="left", + va="top", + zorder=0, + ) + + # draw planet + pl_zorder = -1 if (phase % 1 > 0.25 and phase % 1 < 0.75) else 1 + ax.plot( + self.a * np.sin(2 * np.pi * phase) + + self.R * np.cos(np.linspace(0, 2 * np.pi, 100)), + self.bp * self.Rstar + self.R * np.sin(np.linspace(0, 2 * np.pi, 100)), + c="b", + zorder=pl_zorder, + ) + ax.text( + self.a * np.sin(2 * np.pi * phase) + 1 / np.sqrt(2) * self.R, + self.bp * self.Rstar - 1 / np.sqrt(2) * self.R, + r"$R_P$", + color="b", + ha="left", + va="top", + zorder=pl_zorder, + ) + + # draw planet vy direction + if phase % 1 > 0.75 or phase % 1 < 0.25: + ax.text( + self.a * np.sin(2 * np.pi * phase) + self.R, + self.bp * self.Rstar, + r"$\rightarrow$", + color="b", + ha="left", + va="top", + zorder=pl_zorder, + ) + title = f"Phase: {phase} mod 1 = {phase % 1}" + elif phase % 1 > 0.25 and phase % 1 < 0.75: + ax.text( + self.a * np.sin(2 * np.pi * phase) - self.R, + self.bp * self.Rstar, + r"$\leftarrow$", + color="b", + ha="right", + va="top", + zorder=pl_zorder, + ) + title = f"Phase: {phase} mod 1 = {phase % 1} (planet behind star)" + else: # at 0.25 or 0.75, only vx velocity pass - - #draw Roche indication + + # draw Roche indication if self.Rroche is not None: - ax.plot(self.a*np.sin(2*np.pi*phase) + self.Rroche*np.cos(np.linspace(0, 2*np.pi, 100)), - self.bp*self.Rstar + self.Rroche*np.sin(np.linspace(0, 2*np.pi, 100)), c='b', linestyle='dotted') - ax.text(self.a*np.sin(2*np.pi*phase) + 1/np.sqrt(2)*self.Rroche, self.bp*self.Rstar - 1/np.sqrt(2)*self.Rroche, - r"$R_{Roche}$", color="b", ha="left", va="top", zorder=pl_zorder) - - #draw altmax indication + ax.plot( + self.a * np.sin(2 * np.pi * phase) + + self.Rroche * np.cos(np.linspace(0, 2 * np.pi, 100)), + self.bp * self.Rstar + + self.Rroche * np.sin(np.linspace(0, 2 * np.pi, 100)), + c="b", + linestyle="dotted", + ) + ax.text( + self.a * np.sin(2 * np.pi * phase) + 1 / np.sqrt(2) * self.Rroche, + self.bp * self.Rstar - 1 / np.sqrt(2) * self.Rroche, + r"$R_{Roche}$", + color="b", + ha="left", + 
va="top", + zorder=pl_zorder, + ) + + # draw altmax indication if altmax is not None: - ax.plot(self.a*np.sin(2*np.pi*phase) + altmax*self.R*np.cos(np.linspace(0, 2*np.pi, 100)), - self.bp*self.Rstar + altmax*self.R*np.sin(np.linspace(0, 2*np.pi, 100)), c='b', linestyle='dashed') - ax.text(self.a*np.sin(2*np.pi*phase) + altmax/np.sqrt(2)*self.R, self.bp*self.Rstar - altmax/np.sqrt(2)*self.R, - "altmax", color="b", ha="left", va="top", zorder=pl_zorder) - - plt.axis('equal') - ax.set_xlabel('y [cm]') - ax.set_ylabel('z [cm]') + ax.plot( + self.a * np.sin(2 * np.pi * phase) + + altmax * self.R * np.cos(np.linspace(0, 2 * np.pi, 100)), + self.bp * self.Rstar + + altmax * self.R * np.sin(np.linspace(0, 2 * np.pi, 100)), + c="b", + linestyle="dashed", + ) + ax.text( + self.a * np.sin(2 * np.pi * phase) + altmax / np.sqrt(2) * self.R, + self.bp * self.Rstar - altmax / np.sqrt(2) * self.R, + "altmax", + color="b", + ha="left", + va="top", + zorder=pl_zorder, + ) + + plt.axis("equal") + ax.set_xlabel("y [cm]") + ax.set_ylabel("z [cm]") ax.set_title(title) plt.show() - def max_T0(self, mu_bar=1.): + def max_T0(self, mu_bar=1.0): """ Calculates the maximum isothermal temperature T0 that the Parker wind can have, for it to still be transonic. If T0 is higher than this value, - Rp > Rs which breaks the assumption of the Parker wind. + Rp > Rs which breaks the assumption of the Parker wind. See Vissapragada et al. (2024) on TOI-1420 b. """ maxT0 = G * self.M * mH * mu_bar / (2 * self.R * k) - + return maxT0 @@ -1946,7 +2632,15 @@ class Sim: an escaping exoplanet atmosphere. """ - def __init__(self, simname, altmax=None, proceedFail=False, files=['all'], planet=None, parker=None): + def __init__( + self, + simname, + altmax=None, + proceedFail=False, + files=["all"], + planet=None, + parker=None, + ): """ Parameters ---------- @@ -1954,20 +2648,23 @@ def __init__(self, simname, altmax=None, proceedFail=False, files=['all'], plane Full path + simulation name excluding file extension. altmax : int, optional Maximum altitude of the simulation in units of the planet radius. Will also - be automatically read from the input file if written as a comment. By default None. + be automatically read from the input file if written as a comment. By + default None. proceedFail : bool, optional - Whether to proceed loading the simulation if Cloudy did not exit OK, by default False + Whether to proceed loading the simulation if Cloudy did not exit OK, by + default False files : list, optional - List of file extensions of Cloudy output to load. For example, - include '.heat' to read the output of the 'save heating' command. - By default ['all'], which reads in all output files present that are understood by - this class. + List of file extensions of Cloudy output to load. For example, include + '.heat' to read the output of the 'save heating' command. By default + ['all'], which reads in all output files present that are understood by this + class. planet : Planet, optional - Object storing planet parameters. Will also be automatically read from the input file - if written as a comment. By default None. + Object storing planet parameters. Will also be automatically read from the + input file if written as a comment. By default None. parker : Parker, optional - Object storing the isothermal Parker wind atmospheric profiles and parameters. Will - also be automatically read from the input file if written as a comment. By default None. + Object storing the isothermal Parker wind atmospheric profiles and + parameters. 
Will also be automatically read from the input file if written + as a comment. By default None. Raises ------ @@ -1985,137 +2682,189 @@ def __init__(self, simname, altmax=None, proceedFail=False, files=['all'], plane raise TypeError("simname must be set to a string") self.simname = simname - #check the Cloudy version, and if the simulation did not crash. + # check the Cloudy version, and if the simulation did not crash. _succesful = False - with open(simname+'.out', 'r') as f: + with open(simname + ".out", "r") as f: _outfile_content = f.read() if "Cloudy exited OK" in _outfile_content: _succesful = True else: _succesful = False - + if "Cloudy 17" in _outfile_content: self.cloudy_version = "17" - elif "Cloudy 23" in _outfile_content: + elif "Cloudy 23" in _outfile_content or "Cloudy (c23" in _outfile_content: self.cloudy_version = "23" elif _succesful: - raise TypeError(f"This simulation did not use Cloudy v17 or v23, which are the only supported versions: {simname}") + raise TypeError( + f"This simulation did not use Cloudy v17 or v23, which are the " + f"only supported versions: {simname}" + ) if not _succesful and not proceedFail: - raise FileNotFoundError(f"This simulation went wrong: {simname} Check the .out file!") + raise FileNotFoundError( + f"This simulation went wrong: {simname} Check the .out file!" + ) - #read the .in file to extract some sim info like changes to the chemical composition and altmax + # read the .in file to extract some sim info like changes to the chemical + # composition and altmax self.disabled_elements = [] zelem = {} - _parker_T, _parker_Mdot, _parker_dir = None, None, None #temp variables - with open(simname+'.in', 'r') as f: + _parker_T, _parker_Mdot, _parker_dir = None, None, None # temp variables + with open(simname + ".in", "r") as f: for line in f: - if line[0] == '#': #then it is a comment written by sunbather, extract info: - #check if a planet was defined - if 'plname' in line: - self.p = Planet(line.split('=')[-1].strip('\n')) - - #check if a Parker profile was defined - if 'parker_T' in line: - _parker_T = int(line.split('=')[-1].strip('\n')) - if 'parker_Mdot' in line: - _parker_Mdot = line.split('=')[-1].strip('\n') - if 'parker_dir' in line: - _parker_dir = line.split('=')[-1].strip('\n') - - #check if an altmax was defined - if 'altmax' in line: - self.altmax = int(line.split('=')[1].strip('\n')) - - #read SED - if 'table SED' in line: + if ( + line[0] == "#" + ): # then it is a comment written by sunbather, extract info: + # check if a planet was defined + if "plname" in line: + self.p = Planet(line.split("=")[-1].strip("\n")) + + # check if a Parker profile was defined + if "parker_T" in line: + _parker_T = int(line.split("=")[-1].strip("\n")) + if "parker_Mdot" in line: + _parker_Mdot = line.split("=")[-1].strip("\n") + if "parker_dir" in line: + _parker_dir = line.split("=")[-1].strip("\n") + + # check if an altmax was defined + if "altmax" in line: + self.altmax = int(line.split("=")[1].strip("\n")) + + # read SED + if "table SED" in line: self.SEDname = line.split('"')[1] - - #read chemical composition - if 'element scale factor' in line.rstrip(): - zelem[element_symbols[line.split(' ')[3]]] = float(line.rstrip().split(' ')[-1]) - elif 'element' in line.rstrip() and 'off' in line.rstrip(): - self.disabled_elements.append(element_symbols[line.split(' ')[1]]) - zelem[element_symbols[line.split(' ')[1]]] = 0. 
- - #set zdict and abundances as attributes + + # read chemical composition + if "element scale factor" in line.rstrip(): + zelem[element_symbols[line.split(" ")[3]]] = float( + line.rstrip().split(" ")[-1] + ) + elif "element" in line.rstrip() and "off" in line.rstrip(): + self.disabled_elements.append(element_symbols[line.split(" ")[1]]) + zelem[element_symbols[line.split(" ")[1]]] = 0.0 + + # set zdict and abundances as attributes self.zdict = get_zdict(zelem=zelem) self.abundances = get_abundances(zdict=self.zdict) - #overwrite/set manually given Planet object - if planet != None: + # overwrite/set manually given Planet object + if planet is not None: assert isinstance(planet, Planet) - if hasattr(self, 'p'): - warnings.warn("I had already read out the Planet object from the .in file, but I will overwrite that with the object you have given.") + if hasattr(self, "p"): + warnings.warn( + "I had already read out the Planet object from the .in file, but I " + "will overwrite that with the object you have given." + ) self.p = planet - #check if the SED of the Planet object matches the SED of the Cloudy simulation - if hasattr(self, 'p') and hasattr(self, 'SEDname'): + # check if the SED of the Planet object matches the SED of the Cloudy simulation + if hasattr(self, "p") and hasattr(self, "SEDname"): if self.p.SEDname != self.SEDname: - warnings.warn(f"I read in the .in file that the SED used is {self.SEDname} which is different from the one of your Planet object. " \ - "I will change the .SEDname attribute of the Planet object to match the one actually used in the simulation. Are you " \ - "sure that also the associated Parker wind profile is correct?") - self.p.set_var(SEDname = self.SEDname) - - #try to set a Parker object if the .in file had the required info for that - if hasattr(self, 'p') and (_parker_T != None) and (_parker_Mdot != None) and (_parker_dir != None): + warnings.warn( + f"I read in the .in file that the SED used is {self.SEDname} which " + f"is different from the one of your Planet object. " + f"I will change the .SEDname attribute of the Planet object to " + f"match the one actually used in the simulation. Are you " + f"sure that also the associated Parker wind profile is correct?" + ) + self.p.set_var(SEDname=self.SEDname) + + # try to set a Parker object if the .in file had the required info for that + if ( + hasattr(self, "p") + and (_parker_T is not None) + and (_parker_Mdot is not None) + and (_parker_dir is not None) + ): self.par = Parker(self.p.name, _parker_T, _parker_Mdot, _parker_dir) - - #overwrite/set manually given Parker object - if parker != None: + + # overwrite/set manually given Parker object + if parker is not None: assert isinstance(parker, Parker) - if hasattr(self, 'par'): - warnings.warn("I had already read out the Parker object from the .in file, but I will overwrite that with the object you have given.") + if hasattr(self, "par"): + warnings.warn( + "I had already read out the Parker object from the .in file, but I " + "will overwrite that with the object you have given." + ) self.par = parker - #overwrite/set manually given altmax - if altmax != None: - if not (isinstance(altmax, float) or isinstance(altmax, int)): - raise TypeError("altmax must be set to a float or int") #can it actually be a float? I'm not sure if the code can handle it - check and try. - if hasattr(self, 'altmax'): + # overwrite/set manually given altmax + if altmax is not None: + if not isinstance(altmax, (float, int)): + # can it actually be a float? 
I'm not sure if the code can handle it - + # check and try. + raise TypeError( + "altmax must be set to a float or int" + ) + if hasattr(self, "altmax"): if self.altmax != altmax: - warnings.warn("I read the altmax from the .in file, but the value you have explicitly passed is different. " \ - "I will use your value, but please make sure it is correct.") + warnings.warn( + "I read the altmax from the .in file, but the value you have " + "explicitly passed is different. " + "I will use your value, but please make sure it is correct." + ) self.altmax = altmax - - #temporary variables for adding the alt-columns to the pandas dataframes + # temporary variables for adding the alt-columns to the pandas dataframes _Rp, _altmax = None, None - if hasattr(self, 'p') and hasattr(self, 'altmax'): + if hasattr(self, "p") and hasattr(self, "altmax"): _Rp = self.p.R _altmax = self.altmax - - #read in the Cloudy simulation files + + # read in the Cloudy simulation files self.simfiles = [] - for simfile in glob.glob(simname+'.*', recursive=True): - filetype = simfile.split('.')[-1] - if filetype=='ovr' and ('ovr' in files or 'all' in files): - self.ovr = process_overview(self.simname+'.ovr', Rp=_Rp, altmax=_altmax, abundances=self.abundances) - self.simfiles.append('ovr') - if filetype=='con' and ('con' in files or 'all' in files): - self.con = process_continuum(self.simname+'.con') - self.simfiles.append('con') - if filetype=='heat' and ('heat' in files or 'all' in files): - self.heat = process_heating(self.simname+'.heat', Rp=_Rp, altmax=_altmax, cloudy_version=self.cloudy_version) - self.simfiles.append('heat') - if filetype=='cool' and ('cool' in files or 'all' in files): - self.cool = process_cooling(self.simname+'.cool', Rp=_Rp, altmax=_altmax, cloudy_version=self.cloudy_version) - self.simfiles.append('cool') - if filetype=='coolH2' and ('coolH2' in files or 'all' in files): - self.coolH2 = process_coolingH2(self.simname+'.coolH2', Rp=_Rp, altmax=_altmax) - self.simfiles.append('coolH2') - if filetype=='den' and ('den' in files or 'all' in files): - self.den = process_densities(self.simname+'.den', Rp=_Rp, altmax=_altmax) - self.simfiles.append('den') - if filetype=='en' and ('en' in files or 'all' in files): - self.en = process_energies(self.simname+'.en', cloudy_version=self.cloudy_version) - self.simfiles.append('en') - - #set the velocity structure in .ovr if we have an associated Parker profile - needed for radiative transfer - if hasattr(self, 'par') and hasattr(self, 'ovr'): - if hasattr(self.par, 'prof') and hasattr(self.ovr, 'alt'): + for simfile in glob.glob(simname + ".*", recursive=True): + filetype = simfile.split(".")[-1] + if filetype == "ovr" and ("ovr" in files or "all" in files): + self.ovr = process_overview( + self.simname + ".ovr", + Rp=_Rp, + altmax=_altmax, + abundances=self.abundances, + ) + self.simfiles.append("ovr") + if filetype == "con" and ("con" in files or "all" in files): + self.con = process_continuum(self.simname + ".con") + self.simfiles.append("con") + if filetype == "heat" and ("heat" in files or "all" in files): + self.heat = process_heating( + self.simname + ".heat", + Rp=_Rp, + altmax=_altmax, + cloudy_version=self.cloudy_version, + ) + self.simfiles.append("heat") + if filetype == "cool" and ("cool" in files or "all" in files): + self.cool = process_cooling( + self.simname + ".cool", + Rp=_Rp, + altmax=_altmax, + cloudy_version=self.cloudy_version, + ) + self.simfiles.append("cool") + if filetype == "coolH2" and ("coolH2" in files or "all" in files): + 
self.coolH2 = process_coolingH2( + self.simname + ".coolH2", Rp=_Rp, altmax=_altmax + ) + self.simfiles.append("coolH2") + if filetype == "den" and ("den" in files or "all" in files): + self.den = process_densities( + self.simname + ".den", Rp=_Rp, altmax=_altmax + ) + self.simfiles.append("den") + if filetype == "en" and ("en" in files or "all" in files): + self.en = process_energies( + self.simname + ".en", cloudy_version=self.cloudy_version + ) + self.simfiles.append("en") + + # set the velocity structure in .ovr if we have an associated Parker profile - + # needed for radiative transfer + if hasattr(self, "par") and hasattr(self, "ovr"): + if hasattr(self.par, "prof") and hasattr(self.ovr, "alt"): Sim.addv(self, self.par.prof.alt, self.par.prof.v) - def get_simfile(self, simfile): """ Returns the output of the requested simulation output file. @@ -2124,53 +2873,60 @@ def get_simfile(self, simfile): """ if simfile not in self.simfiles: - raise FileNotFoundError("This simulation does not have a", simfile, "output file.") + raise FileNotFoundError( + "This simulation does not have a", simfile, "output file." + ) - if simfile == 'ovr': + if simfile == "ovr": return self.ovr - elif simfile == 'con': + if simfile == "con": return self.con - elif simfile == 'heat': + if simfile == "heat": return self.heat - elif simfile == 'cool': + if simfile == "cool": return self.cool - elif simfile == 'coolH2': + if simfile == "coolH2": return self.coolH2 - elif simfile == 'den': + if simfile == "den": return self.den - elif simfile == 'en': + if simfile == "en": return self.en - elif simfile == 'ionFe': + if simfile == "ionFe": return self.ionFe - elif simfile == 'ionNa': + if simfile == "ionNa": return self.ionNa - + return None def add_parker(self, parker): """ - Adds a Parker profile object to the Sim, in case it wasn't added upon initialization. + Adds a Parker profile object to the Sim, in case it wasn't added upon + initialization. """ assert isinstance(parker, Parker) self.par = parker - if hasattr(parker, 'prof'): + if hasattr(parker, "prof"): Sim.addv(self, parker.prof.alt, parker.prof.v) - def addv(self, alt, v, delete_negative=True): """ - Adds a velocity profile in cm s-1 on the Cloudy grid. Will be added to the .ovr file, - but also available as the .v attribute for backwards compatability of sunbather. - Called automatically when adding a Parker object to the Sim. + Adds a velocity profile in cm s-1 on the Cloudy grid. Will be added to the .ovr + file, but also available as the .v attribute for backwards compatibility of + sunbather. Called automatically when adding a Parker object to the Sim. """ - assert 'ovr' in self.simfiles, "Simulation must have a 'save overview .ovr file" - assert 'alt' in self.ovr.columns, "The .ovr file must have an altitude column (which in turn requires a known Rp and altmax)" + assert "ovr" in self.simfiles, "Simulation must have a 'save overview .ovr file" + assert ( + "alt" in self.ovr.columns + ), ( + "The .ovr file must have an altitude column (which in turn requires a " + "known Rp and altmax)" + ) if delete_negative: - v[v < 0.] = 0. 
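
The addv() step here resamples the Parker-wind velocity profile onto the Cloudy .ovr altitude grid after zeroing unphysical negative values. A small self-contained sketch of that resampling, with invented grids:

import numpy as np
from scipy.interpolate import interp1d

alt_parker = np.linspace(1.0e10, 8.0e10, 50)    # hypothetical Parker altitude grid [cm]
v_parker = np.linspace(-1.0e4, 5.0e6, 50)       # hypothetical velocity profile [cm s-1]
alt_cloudy = np.linspace(1.1e10, 7.9e10, 300)   # hypothetical Cloudy .ovr grid [cm]

v_parker[v_parker < 0.0] = 0.0                  # the delete_negative=True behaviour
v_cloudy = interp1d(alt_parker, v_parker)(alt_cloudy)
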
+ v[v < 0.0] = 0.0 - self.ovr['v'] = interp1d(alt, v)(self.ovr.alt) + self.ovr["v"] = interp1d(alt, v)(self.ovr.alt) vseries = pd.Series(index=self.ovr.alt.index, dtype=float) vseries[self.ovr.alt.index] = interp1d(alt, v)(self.ovr.alt) From a9dafd0095b3e7f87ddd62d093d10640f3bc0988 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:26:04 +0100 Subject: [PATCH 15/63] update install cloudy script --- src/sunbather/install_cloudy.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index 6874d49..9d39156 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -3,6 +3,7 @@ import urllib.request import tarfile import subprocess +import shutil class GetCloudy: @@ -15,7 +16,8 @@ def __init__(self, version="23.01"): major = version.split(".")[0] self.url = f"https://data.nublado.org/cloudy_releases/c{major}/" self.filename = "c{version}.tar.gz" - self.cloudypath = f"{pathlib.Path(__file__).parent.resolve()}/cloudy/" + self.sunbatherpath = f"{pathlib.Path(__file__).parent.resolve()}" + self.cloudypath = f"{self.sunbatherpath}/cloudy/" def download(self): """ @@ -52,3 +54,9 @@ def test(self): "It should print \"Cloudy exited OK\" at the end." ) subprocess.Popen(["./cloudy.exe",]).wait() + + def copy_data(self): + shutil.copy2( + f"{self.sunbatherpath}/stellar_SEDs/*.spec", + f"{self.cloudypath}/c{self.version}/data/SED/", + ) From df083e648345f4248a28832aaadf3eb84c411b46 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:28:45 +0100 Subject: [PATCH 16/63] refactor files --- src/sunbather/convergeT_parker.py | 646 ++++++++++++++++++++++------- src/sunbather/install_cloudy.py | 17 +- src/sunbather/solveT.py | 649 ++++++++++++++++++++---------- src/sunbather/tools.py | 12 +- 4 files changed, 953 insertions(+), 371 deletions(-) diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index 1f7b2fb..ca14e4d 100644 --- a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -1,8 +1,4 @@ -#sunbather imports -import .tools -import .solveT - -#other imports +# other imports import pandas as pd import numpy as np import multiprocessing @@ -13,6 +9,10 @@ import argparse import traceback +# sunbather imports +import sunbather.tools +import sunbather.solveT + def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): """ @@ -41,31 +41,64 @@ def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): [T0, Mdot] of the closest found finished model, or [None, None] if none were found within the tolerance. 
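
Two small caveats on the hunks above, offered as hedged observations rather than fixes. First, shutil.copy2() does not expand shell wildcards, so passing a ".../*.spec" pattern as the source would raise FileNotFoundError; a sketch that iterates over the matches explicitly (directory layout assumed, not verified) could look like the following. Second, `import sunbather.tools` binds only the name `sunbather`, not `tools`, so the bare `tools.` / `solveT.` calls later in convergeT_parker.py would presumably need something like `from sunbather import tools, solveT`.

import glob
import shutil

def copy_seds(sunbatherpath, cloudypath, version="23.01"):
    """Copy every bundled *.spec SED into the Cloudy data/SED/ folder (sketch)."""
    for spec in glob.glob(f"{sunbatherpath}/stellar_SEDs/*.spec"):
        shutil.copy2(spec, f"{cloudypath}/c{version}/data/SED/")
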
""" - pattern = re.compile(r'parker_\d+_\d+\.\d{3}$') #this is how folder names should be + pattern = re.compile( + r"parker_\d+_\d+\.\d{3}$" + ) # this is how folder names should be all_files_and_folders = os.listdir(parentfolder) - allfolders = [os.path.join(parentfolder, folder)+'/' for folder in all_files_and_folders if pattern.match(folder) and os.path.isdir(os.path.join(parentfolder, folder))] - - convergedfolders = [] #stores the T and Mdot values of all folders with 0.out files + allfolders = [ + os.path.join(parentfolder, folder) + "/" + for folder in all_files_and_folders + if pattern.match(folder) and os.path.isdir(os.path.join(parentfolder, folder)) + ] + + convergedfolders = ( + [] + ) # stores the T and Mdot values of all folders with 0.out files for folder in allfolders: - if os.path.isfile(folder+'converged.out'): - folderparams = folder.split('/')[-2].split('_') + if os.path.isfile(folder + "converged.out"): + folderparams = folder.split("/")[-2].split("_") convergedfolders.append([int(folderparams[1]), float(folderparams[2])]) - if [int(T), float(Mdot)] in convergedfolders: #if the current folder is found, remove it + if [ + int(T), + float(Mdot), + ] in convergedfolders: # if the current folder is found, remove it convergedfolders.remove([int(T), float(Mdot)]) - if convergedfolders == []: #then we default to constant starting value + if convergedfolders == []: # then we default to constant starting value clconv = [None, None] - else: #find closest converged profile - dist = lambda x, y: (x[0]-y[0])**2 + (2000*(x[1]-y[1]))**2 #1 order of magnitude Mdot is now 'equal weighted' to 2000K - clconv = min(convergedfolders, key=lambda fol: dist(fol, [int(T), float(Mdot)])) #closest converged [T, Mdot] - if (np.abs(clconv[0] - int(T)) > tolT) or (np.abs(clconv[1] - float(Mdot)) > tolMdot): + else: # find closest converged profile + dist = ( + lambda x, y: (x[0] - y[0]) ** 2 + (2000 * (x[1] - y[1])) ** 2 + ) # 1 order of magnitude Mdot is now 'equal weighted' to 2000K + clconv = min( + convergedfolders, key=lambda fol: dist(fol, [int(T), float(Mdot)]) + ) # closest converged [T, Mdot] + if (np.abs(clconv[0] - int(T)) > tolT) or ( + np.abs(clconv[1] - float(Mdot)) > tolMdot + ): clconv = [None, None] return clconv -def run_s(plname, Mdot, T, itno, fc, dir, SEDname, overwrite, startT, pdir, zdict=None, altmax=8, save_sp=[], constantT=False, maxit=16): +def run_s( + plname, + Mdot, + T, + itno, + fc, + dir, + SEDname, + overwrite, + startT, + pdir, + zdict=None, + altmax=8, + save_sp=[], + constantT=False, + maxit=16, +): """ Solves for a nonisothermal temperature profile of a single isothermal Parker wind (density and velocity) profile. @@ -125,96 +158,201 @@ def run_s(plname, Mdot, T, itno, fc, dir, SEDname, overwrite, startT, pdir, zdic Maximum number of iterations, by default 16. """ - Mdot = "%.3f" % float(Mdot) #enforce this format to get standard file names. + Mdot = "%.3f" % float(Mdot) # enforce this format to get standard file names. 
T = str(T) - #set up the planet object + # set up the planet object planet = tools.Planet(plname) - if SEDname != 'real': + if SEDname != "real": planet.set_var(SEDname=SEDname) - #set up the folder structure - pathTstruc = tools.projectpath+'/sims/1D/'+planet.name+'/'+dir+'/' - path = pathTstruc+'parker_'+T+'_'+Mdot+'/' + # set up the folder structure + pathTstruc = tools.projectpath + "/sims/1D/" + planet.name + "/" + dir + "/" + path = pathTstruc + "parker_" + T + "_" + Mdot + "/" - #check if this parker profile exists in the given pdir + # check if this parker profile exists in the given pdir try: pprof = tools.read_parker(planet.name, T, Mdot, pdir) except FileNotFoundError: - print("This parker profile does not exist:", tools.projectpath+'/parker_profiles/'+planet.name+'/'+pdir+'/pprof_'+planet.name+'_T='+str(T)+'_M='+Mdot+'.txt') - return #quit the run_s function but not the code - - #check for overwriting - if os.path.isdir(path): #the simulation exists already + print( + "This parker profile does not exist:", + tools.projectpath + + "/parker_profiles/" + + planet.name + + "/" + + pdir + + "/pprof_" + + planet.name + + "_T=" + + str(T) + + "_M=" + + Mdot + + ".txt", + ) + return # quit the run_s function but not the code + + # check for overwriting + if os.path.isdir(path): # the simulation exists already if not overwrite: - print("Simulation already exists and overwrite = False:", plname, dir, Mdot, T) - return #this quits the function but if we're running a grid, it doesn't quit the whole Python code + print( + "Simulation already exists and overwrite = False:", plname, dir, Mdot, T + ) + return # this quits the function but if we're running a grid, it doesn't quit the whole Python code else: - os.mkdir(path[:-1]) #make the folder + os.mkdir(path[:-1]) # make the folder - #get profiles and parameters we need for the input file + # get profiles and parameters we need for the input file alt = pprof.alt.values hden = tools.rho_to_hden(pprof.rho.values, abundances=tools.get_abundances(zdict)) dlaw = tools.alt_array_to_Cloudy(alt, hden, altmax, planet.R, 1000, log=True) nuFnu_1AU_linear, Ryd = tools.get_SED_norm_1AU(planet.SEDname) - nuFnu_a_log = np.log10(nuFnu_1AU_linear / ((planet.a - altmax*planet.R)/tools.AU)**2) - - comments = '# plname='+planet.name+'\n# parker_T='+str(T)+'\n# parker_Mdot='+str(Mdot)+'\n# parker_dir='+pdir+'\n# altmax='+str(altmax) - - if constantT: #this will run the profile at the isothermal T value instead of converging a nonisothermal profile + nuFnu_a_log = np.log10( + nuFnu_1AU_linear / ((planet.a - altmax * planet.R) / tools.AU) ** 2 + ) + + comments = ( + "# plname=" + + planet.name + + "\n# parker_T=" + + str(T) + + "\n# parker_Mdot=" + + str(Mdot) + + "\n# parker_dir=" + + pdir + + "\n# altmax=" + + str(altmax) + ) + + if ( + constantT + ): # this will run the profile at the isothermal T value instead of converging a nonisothermal profile if save_sp == []: - tools.write_Cloudy_in(path+'constantT', title=planet.name+' 1D Parker with T='+str(T)+' and log(Mdot)='+str(Mdot), - flux_scaling=[nuFnu_a_log, Ryd], SED=planet.SEDname, dlaw=dlaw, double_tau=True, - overwrite=overwrite, cosmic_rays=True, zdict=zdict, comments=comments, constantT=T) + tools.write_Cloudy_in( + path + "constantT", + title=planet.name + + " 1D Parker with T=" + + str(T) + + " and log(Mdot)=" + + str(Mdot), + flux_scaling=[nuFnu_a_log, Ryd], + SED=planet.SEDname, + dlaw=dlaw, + double_tau=True, + overwrite=overwrite, + cosmic_rays=True, + zdict=zdict, + comments=comments, + 
constantT=T, + ) else: - tools.write_Cloudy_in(path+'constantT', title=planet.name+' 1D Parker with T='+str(T)+' and log(Mdot)='+str(Mdot), - flux_scaling=[nuFnu_a_log, Ryd], SED=planet.SEDname, dlaw=dlaw, double_tau=True, - overwrite=overwrite, cosmic_rays=True, zdict=zdict, comments=comments, constantT=T, - outfiles=['.den', '.en'], denspecies=save_sp, selected_den_levels=True) - - tools.run_Cloudy('constantT', folder=path) #run the Cloudy simulation + tools.write_Cloudy_in( + path + "constantT", + title=planet.name + + " 1D Parker with T=" + + str(T) + + " and log(Mdot)=" + + str(Mdot), + flux_scaling=[nuFnu_a_log, Ryd], + SED=planet.SEDname, + dlaw=dlaw, + double_tau=True, + overwrite=overwrite, + cosmic_rays=True, + zdict=zdict, + comments=comments, + constantT=T, + outfiles=[".den", ".en"], + denspecies=save_sp, + selected_den_levels=True, + ) + + tools.run_Cloudy("constantT", folder=path) # run the Cloudy simulation return - #if we got to here, we are not doing a constantT simulation, so we set up the convergence scheme files - #write Cloudy template input file - each iteration will add their current temperature structure to this template - tools.write_Cloudy_in(path+'template', title=planet.name+' 1D Parker with T='+str(T)+' and log(Mdot)='+str(Mdot), - flux_scaling=[nuFnu_a_log, Ryd], SED=planet.SEDname, dlaw=dlaw, double_tau=True, - overwrite=overwrite, cosmic_rays=True, zdict=zdict, comments=comments) - - if itno == 0: #this means we resume from the highest found previously ran iteration - pattern = r'iteration(\d+)\.out' #search pattern: iteration followed by an integer - max_iteration = -1 #set an impossible number - for filename in os.listdir(path): #loop through all files/folder in the path - if os.path.isfile(os.path.join(path, filename)): #if it is a file (not a folder) - if re.search(pattern, filename): #if it matches the pattern - iteration_number = int(re.search(pattern, filename).group(1)) #extract the iteration number - if iteration_number > max_iteration: #update highest found iteration number + # if we got to here, we are not doing a constantT simulation, so we set up the convergence scheme files + # write Cloudy template input file - each iteration will add their current temperature structure to this template + tools.write_Cloudy_in( + path + "template", + title=planet.name + + " 1D Parker with T=" + + str(T) + + " and log(Mdot)=" + + str(Mdot), + flux_scaling=[nuFnu_a_log, Ryd], + SED=planet.SEDname, + dlaw=dlaw, + double_tau=True, + overwrite=overwrite, + cosmic_rays=True, + zdict=zdict, + comments=comments, + ) + + if ( + itno == 0 + ): # this means we resume from the highest found previously ran iteration + pattern = ( + r"iteration(\d+)\.out" # search pattern: iteration followed by an integer + ) + max_iteration = -1 # set an impossible number + for filename in os.listdir(path): # loop through all files/folder in the path + if os.path.isfile( + os.path.join(path, filename) + ): # if it is a file (not a folder) + if re.search(pattern, filename): # if it matches the pattern + iteration_number = int( + re.search(pattern, filename).group(1) + ) # extract the iteration number + if ( + iteration_number > max_iteration + ): # update highest found iteration number max_iteration = iteration_number - if max_iteration == -1: #this means no files were found - print(f"This folder does not contain any iteration files {path}, so I cannot resume from the highest one. 
Will instead start at itno = 1.") + if max_iteration == -1: # this means no files were found + print( + f"This folder does not contain any iteration files {path}, so I cannot resume from the highest one. Will instead start at itno = 1." + ) itno = 1 else: - print(f"Found the highest iteration {path}iteration{max_iteration}, will resume at that same itno.") + print( + f"Found the highest iteration {path}iteration{max_iteration}, will resume at that same itno." + ) itno = max_iteration if itno == 1: - #get starting temperature structure - clconv = find_close_model(pathTstruc, T, Mdot) #find if there are any nearby models we can start from - if startT == 'constant': #then we start with the isothermal value - tools.copyadd_Cloudy_in(path+'template', path+'iteration1', constantT=T) - - elif clconv == [None, None] or startT == 'free': #then we start in free (=radiative eq.) mode - copyfile(path+'template.in', path+'iteration1.in') - - elif startT == 'nearby': #then clconv cannot be [None, None] and we start from a previous converged T(r) - print(f"Model {path} starting from previously converged temperature profile: T0 = {clconv[0]}, Mdot = {clconv[1]}") - prev_conv_T = pd.read_table(pathTstruc+'parker_'+str(clconv[0])+'_'+"{:.3f}".format(clconv[1])+'/converged.txt', delimiter=' ') - Cltlaw = tools.alt_array_to_Cloudy(prev_conv_T.R * planet.R, prev_conv_T.Te, altmax, planet.R, 1000) - tools.copyadd_Cloudy_in(path+'template', path+'iteration1', tlaw=Cltlaw) - - - #with everything in order, run the actual temperature convergence scheme + # get starting temperature structure + clconv = find_close_model( + pathTstruc, T, Mdot + ) # find if there are any nearby models we can start from + if startT == "constant": # then we start with the isothermal value + tools.copyadd_Cloudy_in(path + "template", path + "iteration1", constantT=T) + + elif ( + clconv == [None, None] or startT == "free" + ): # then we start in free (=radiative eq.) mode + copyfile(path + "template.in", path + "iteration1.in") + + elif ( + startT == "nearby" + ): # then clconv cannot be [None, None] and we start from a previous converged T(r) + print( + f"Model {path} starting from previously converged temperature profile: T0 = {clconv[0]}, Mdot = {clconv[1]}" + ) + prev_conv_T = pd.read_table( + pathTstruc + + "parker_" + + str(clconv[0]) + + "_" + + "{:.3f}".format(clconv[1]) + + "/converged.txt", + delimiter=" ", + ) + Cltlaw = tools.alt_array_to_Cloudy( + prev_conv_T.R * planet.R, prev_conv_T.Te, altmax, planet.R, 1000 + ) + tools.copyadd_Cloudy_in(path + "template", path + "iteration1", tlaw=Cltlaw) + + # with everything in order, run the actual temperature convergence scheme solveT.run_loop(path, itno, fc, save_sp, maxit) @@ -229,7 +367,27 @@ def catch_errors_run_s(*args): traceback.print_exc() -def run_g(plname, cores, Mdot_l, Mdot_u, Mdot_s, T_l, T_u, T_s, fc, dir, SEDname, overwrite, startT, pdir, zdict, altmax, save_sp, constantT, maxit): +def run_g( + plname, + cores, + Mdot_l, + Mdot_u, + Mdot_s, + T_l, + T_u, + T_s, + fc, + dir, + SEDname, + overwrite, + startT, + pdir, + zdict, + altmax, + save_sp, + constantT, + maxit, +): """ Solves for a nonisothermal temperature profile of a grid of isothermal Parker wind models, by executing the run_s() function in parallel. 
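
A minimal, self-contained sketch (with a dummy worker in place of run_s) of the dispatch pattern run_g() implements: build every (T0, Mdot) combination of the requested grid and farm it out with multiprocessing.Pool.starmap. The small +1e-6 offset keeps the upper grid bound inclusive, as in the loop that follows.

import multiprocessing
import numpy as np

def worker(T, Mdot):          # stand-in for run_s()
    return T, Mdot

if __name__ == "__main__":
    pars = [
        (T, Mdot)
        for Mdot in np.arange(10.0, 11.0 + 1e-6, 0.5)            # inclusive upper bound
        for T in np.arange(7000, 9000 + 1e-6, 1000).astype(int)
    ]
    with multiprocessing.Pool(2) as pool:
        print(pool.starmap(worker, pars))
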
@@ -298,23 +456,42 @@ def run_g(plname, cores, Mdot_l, Mdot_u, Mdot_s, T_l, T_u, T_s, fc, dir, SEDname p = multiprocessing.Pool(cores) pars = [] - for Mdot in np.arange(float(Mdot_l), float(Mdot_u)+1e-6, float(Mdot_s)): #1e-6 so that upper bound is inclusive - for T in np.arange(int(T_l), int(T_u)+1e-6, int(T_s)).astype(int): - pars.append((plname, Mdot, T, 1, fc, dir, SEDname, overwrite, startT, pdir, zdict, altmax, save_sp, constantT, maxit)) + for Mdot in np.arange( + float(Mdot_l), float(Mdot_u) + 1e-6, float(Mdot_s) + ): # 1e-6 so that upper bound is inclusive + for T in np.arange(int(T_l), int(T_u) + 1e-6, int(T_s)).astype(int): + pars.append( + ( + plname, + Mdot, + T, + 1, + fc, + dir, + SEDname, + overwrite, + startT, + pdir, + zdict, + altmax, + save_sp, + constantT, + maxit, + ) + ) p.starmap(catch_errors_run_s, pars) p.close() p.join() - - -if __name__ == '__main__': +if __name__ == "__main__": class OneOrThreeAction(argparse.Action): """ Custom class for an argparse argument with exactly 1 or 3 values. """ + def __call__(self, parser, namespace, values, option_string=None): if len(values) not in (1, 3): parser.error("Exactly one or three values are required.") @@ -324,70 +501,253 @@ class AddDictAction(argparse.Action): """ Custom class to add an argparse argument to a dictionary. """ + def __call__(self, parser, namespace, values, option_string=None): - if not hasattr(namespace, self.dest) or getattr(namespace, self.dest) is None: + if ( + not hasattr(namespace, self.dest) + or getattr(namespace, self.dest) is None + ): setattr(namespace, self.dest, {}) for value in values: - key, val = value.split('=') + key, val = value.split("=") getattr(namespace, self.dest)[key] = float(val) - t0 = time.time() - parser = argparse.ArgumentParser(description="Runs the temperature convergence for 1D Parker profile(s).") - - parser.add_argument("-plname", required=True, help="planet name (must be in planets.txt)") - parser.add_argument("-dir", required=True, type=str, help="folder where the temperature structures are solved. e.g. Tstruc_fH_0.9 or Tstruc_z_100_3xEUV etc.") - parser.add_argument("-pdir", required=True, type=str, help="parker profile folder/dir to use, e.g. fH_0.9 or z_100.") - parser.add_argument("-Mdot", required=True, type=float, nargs='+', action=OneOrThreeAction, help="log10(mass-loss rate), or three values specifying a grid of " \ - "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.") - parser.add_argument("-T", required=True, type=int, nargs='+', action=OneOrThreeAction, help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.") - parser.add_argument("-cores", type=int, default=1, help="number of parallel runs [default=1]") - parser.add_argument("-fc", type=float, default=1.1, help="convergence factor (heat/cool should be below this value) [default=1.1]") - parser.add_argument("-startT", choices=["nearby", "free", "constant"], default="nearby", help="initial T structure, either 'constant', 'free' or 'nearby' [default=nearby]") - parser.add_argument("-itno", type=int, default=1, help="starting iteration number (itno != 1 only works with -overwrite). As a special use, you can pass " \ - "-itno 0 which will automatically find the highest previously ran iteration number [default=1]") - parser.add_argument("-maxit", type=int, default=20, help="maximum number of iterations [default = 20]") - parser.add_argument("-SEDname", type=str, default='real', help="name of SED to use. 
Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]") - parser.add_argument("-overwrite", action='store_true', help="overwrite existing simulation if passed [default=False]") - parser.add_argument("-z", type=float, default=1., help="metallicity (=scale factor relative to solar for all elements except H and He) [default=1.]") - parser.add_argument("-zelem", action = AddDictAction, nargs='+', default = {}, help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " \ - "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. Using this " \ - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.") - parser.add_argument("-altmax", type=int, default=8, help="maximum altitude of the simulation in units of Rp. [default=8]") - parser.add_argument("-save_sp", type=str, nargs='+', default=['all'], help="atomic or ionic species to save densities for (needed for radiative transfer). " \ - "You can add multiple as e.g. -save_sp He Ca+ Fe3+ Passing 'all' includes all species that weren't turned off. In that case, you can "\ - "set the maximum degree of ionization with the -save_sp_max_ion flag. default=[] i.e. none.") - parser.add_argument("-save_sp_max_ion", type=int, default=6, help="only used when you set -save_sp all This command sets the maximum degree of ionization "\ - "that will be saved. [default=6] but using lower values saves significant file size if high ions are not needed. The maximum number is 12, "\ - "but such highly ionized species only occur at very high XUV flux, such as in young systems.") - parser.add_argument("-constantT", action='store_true', help="run the profile at the isothermal temperature instead of converging upon the temperature structure. [default=False]") - + parser = argparse.ArgumentParser( + description="Runs the temperature convergence for 1D Parker profile(s)." + ) + + parser.add_argument( + "-plname", required=True, help="planet name (must be in planets.txt)" + ) + parser.add_argument( + "-dir", + required=True, + type=str, + help="folder where the temperature structures are solved. e.g. Tstruc_fH_0.9 or Tstruc_z_100_3xEUV etc.", + ) + parser.add_argument( + "-pdir", + required=True, + type=str, + help="parker profile folder/dir to use, e.g. fH_0.9 or z_100.", + ) + parser.add_argument( + "-Mdot", + required=True, + type=float, + nargs="+", + action=OneOrThreeAction, + help="log10(mass-loss rate), or three values specifying a grid of " + "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.", + ) + parser.add_argument( + "-T", + required=True, + type=int, + nargs="+", + action=OneOrThreeAction, + help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.", + ) + parser.add_argument( + "-cores", type=int, default=1, help="number of parallel runs [default=1]" + ) + parser.add_argument( + "-fc", + type=float, + default=1.1, + help="convergence factor (heat/cool should be below this value) [default=1.1]", + ) + parser.add_argument( + "-startT", + choices=["nearby", "free", "constant"], + default="nearby", + help="initial T structure, either 'constant', 'free' or 'nearby' [default=nearby]", + ) + parser.add_argument( + "-itno", + type=int, + default=1, + help="starting iteration number (itno != 1 only works with -overwrite). 
As a special use, you can pass " + "-itno 0 which will automatically find the highest previously ran iteration number [default=1]", + ) + parser.add_argument( + "-maxit", + type=int, + default=20, + help="maximum number of iterations [default = 20]", + ) + parser.add_argument( + "-SEDname", + type=str, + default="real", + help="name of SED to use. Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]", + ) + parser.add_argument( + "-overwrite", + action="store_true", + help="overwrite existing simulation if passed [default=False]", + ) + parser.add_argument( + "-z", + type=float, + default=1.0, + help="metallicity (=scale factor relative to solar for all elements except H and He) [default=1.]", + ) + parser.add_argument( + "-zelem", + action=AddDictAction, + nargs="+", + default={}, + help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " + "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. Using this " + "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + ) + parser.add_argument( + "-altmax", + type=int, + default=8, + help="maximum altitude of the simulation in units of Rp. [default=8]", + ) + parser.add_argument( + "-save_sp", + type=str, + nargs="+", + default=["all"], + help="atomic or ionic species to save densities for (needed for radiative transfer). " + "You can add multiple as e.g. -save_sp He Ca+ Fe3+ Passing 'all' includes all species that weren't turned off. In that case, you can " + "set the maximum degree of ionization with the -save_sp_max_ion flag. default=[] i.e. none.", + ) + parser.add_argument( + "-save_sp_max_ion", + type=int, + default=6, + help="only used when you set -save_sp all This command sets the maximum degree of ionization " + "that will be saved. [default=6] but using lower values saves significant file size if high ions are not needed. The maximum number is 12, " + "but such highly ionized species only occur at very high XUV flux, such as in young systems.", + ) + parser.add_argument( + "-constantT", + action="store_true", + help="run the profile at the isothermal temperature instead of converging upon the temperature structure. 
[default=False]", + ) args = parser.parse_args() zdict = tools.get_zdict(z=args.z, zelem=args.zelem) - if 'all' in args.save_sp: - args.save_sp = tools.get_specieslist(exclude_elements=[sp for sp,zval in zdict.items() if zval == 0.], max_ion=args.save_sp_max_ion) - - #set up the folder structure if it doesn't exist yet - if not os.path.isdir(tools.projectpath+'/sims/'): - os.mkdir(tools.projectpath+'/sims') - if not os.path.isdir(tools.projectpath+'/sims/1D/'): - os.mkdir(tools.projectpath+'/sims/1D') - if not os.path.isdir(tools.projectpath+'/sims/1D/'+args.plname+'/'): - os.mkdir(tools.projectpath+'/sims/1D/'+args.plname) - if not os.path.isdir(tools.projectpath+'/sims/1D/'+args.plname+'/'+args.dir+'/'): - os.mkdir(tools.projectpath+'/sims/1D/'+args.plname+'/'+args.dir) - - if (len(args.T) == 1 and len(args.Mdot) == 1): #then we run a single model - run_s(args.plname, args.Mdot[0], str(args.T[0]), args.itno, args.fc, args.dir, args.SEDname, args.overwrite, args.startT, args.pdir, zdict, args.altmax, args.save_sp, args.constantT, args.maxit) - elif (len(args.T) == 3 and len(args.Mdot) == 3): #then we run a grid over both parameters - run_g(args.plname, args.cores, args.Mdot[0], args.Mdot[1], args.Mdot[2], args.T[0], args.T[1], args.T[2], args.fc, args.dir, args.SEDname, args.overwrite, args.startT, args.pdir, zdict, args.altmax, args.save_sp, args.constantT, args.maxit) - elif (len(args.T) == 3 and len(args.Mdot) == 1): #then we run a grid over only T - run_g(args.plname, args.cores, args.Mdot[0], args.Mdot[0], args.Mdot[0], args.T[0], args.T[1], args.T[2], args.fc, args.dir, args.SEDname, args.overwrite, args.startT, args.pdir, zdict, args.altmax, args.save_sp, args.constantT, args.maxit) - elif (len(args.T) == 1 and len(args.Mdot) == 3): #then we run a grid over only Mdot - run_g(args.plname, args.cores, args.Mdot[0], args.Mdot[1], args.Mdot[2], args.T[0], args.T[0], args.T[0], args.fc, args.dir, args.SEDname, args.overwrite, args.startT, args.pdir, zdict, args.altmax, args.save_sp, args.constantT, args.maxit) - - print("\nCalculations took", int(time.time()-t0) // 3600, "hours, ", (int(time.time()-t0)%3600) // 60, "minutes and ", (int(time.time()-t0)%60), "seconds.\n") + if "all" in args.save_sp: + args.save_sp = tools.get_specieslist( + exclude_elements=[sp for sp, zval in zdict.items() if zval == 0.0], + max_ion=args.save_sp_max_ion, + ) + + # set up the folder structure if it doesn't exist yet + if not os.path.isdir(tools.projectpath + "/sims/"): + os.mkdir(tools.projectpath + "/sims") + if not os.path.isdir(tools.projectpath + "/sims/1D/"): + os.mkdir(tools.projectpath + "/sims/1D") + if not os.path.isdir(tools.projectpath + "/sims/1D/" + args.plname + "/"): + os.mkdir(tools.projectpath + "/sims/1D/" + args.plname) + if not os.path.isdir( + tools.projectpath + "/sims/1D/" + args.plname + "/" + args.dir + "/" + ): + os.mkdir(tools.projectpath + "/sims/1D/" + args.plname + "/" + args.dir) + + if len(args.T) == 1 and len(args.Mdot) == 1: # then we run a single model + run_s( + args.plname, + args.Mdot[0], + str(args.T[0]), + args.itno, + args.fc, + args.dir, + args.SEDname, + args.overwrite, + args.startT, + args.pdir, + zdict, + args.altmax, + args.save_sp, + args.constantT, + args.maxit, + ) + elif ( + len(args.T) == 3 and len(args.Mdot) == 3 + ): # then we run a grid over both parameters + run_g( + args.plname, + args.cores, + args.Mdot[0], + args.Mdot[1], + args.Mdot[2], + args.T[0], + args.T[1], + args.T[2], + args.fc, + args.dir, + args.SEDname, + args.overwrite, + args.startT, 
+ args.pdir, + zdict, + args.altmax, + args.save_sp, + args.constantT, + args.maxit, + ) + elif len(args.T) == 3 and len(args.Mdot) == 1: # then we run a grid over only T + run_g( + args.plname, + args.cores, + args.Mdot[0], + args.Mdot[0], + args.Mdot[0], + args.T[0], + args.T[1], + args.T[2], + args.fc, + args.dir, + args.SEDname, + args.overwrite, + args.startT, + args.pdir, + zdict, + args.altmax, + args.save_sp, + args.constantT, + args.maxit, + ) + elif len(args.T) == 1 and len(args.Mdot) == 3: # then we run a grid over only Mdot + run_g( + args.plname, + args.cores, + args.Mdot[0], + args.Mdot[1], + args.Mdot[2], + args.T[0], + args.T[0], + args.T[0], + args.fc, + args.dir, + args.SEDname, + args.overwrite, + args.startT, + args.pdir, + zdict, + args.altmax, + args.save_sp, + args.constantT, + args.maxit, + ) + + print( + "\nCalculations took", + int(time.time() - t0) // 3600, + "hours, ", + (int(time.time() - t0) % 3600) // 60, + "minutes and ", + (int(time.time() - t0) % 60), + "seconds.\n", + ) diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index 9d39156..9246f7c 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -10,6 +10,7 @@ class GetCloudy: """ Class to download and compile the Cloudy program """ + def __init__(self, version="23.01"): self.version = version self.path = "./" @@ -44,16 +45,24 @@ def compile(self): tar.extractall(filter="data") os.chdir(f"{self.cloudypath}/c{self.version}/source/") - subprocess.Popen(["make",]).wait() + subprocess.Popen( + [ + "make", + ] + ).wait() def test(self): # Quickly test the Cloudy installation: in the source folder, run ./cloudy.exe, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. os.chdir(f"{self.cloudypath}/c{self.version}/source/") print( - "Type \"test\" and hit return twice. " - "It should print \"Cloudy exited OK\" at the end." + 'Type "test" and hit return twice. ' + 'It should print "Cloudy exited OK" at the end.' ) - subprocess.Popen(["./cloudy.exe",]).wait() + subprocess.Popen( + [ + "./cloudy.exe", + ] + ).wait() def copy_data(self): shutil.copy2( diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index af00927..3a4a9ba 100644 --- a/src/sunbather/solveT.py +++ b/src/sunbather/solveT.py @@ -1,7 +1,7 @@ -#sunbather imports +# sunbather imports import sunbather.tools -#other imports +# other imports import pandas as pd import numpy as np import matplotlib.pyplot as plt @@ -36,10 +36,12 @@ def calc_expansion(r, rho, v, Te, mu): Expansion cooling rate. """ - expansion = tools.k/tools.mH * Te * v / mu * np.gradient(rho, r) - assert np.max(expansion) <= 0, "Found positive expansion cooling rates (i.e., heating)." + expansion = tools.k / tools.mH * Te * v / mu * np.gradient(rho, r) + assert ( + np.max(expansion) <= 0 + ), "Found positive expansion cooling rates (i.e., heating)." - return expansion + return expansion def calc_advection(r, rho, v, Te, mu): @@ -65,7 +67,7 @@ def calc_advection(r, rho, v, Te, mu): Advection heating/cooling rate. """ - advection = -1 * tools.k/(tools.mH * 2/3) * rho * v * np.gradient(Te/mu, r) + advection = -1 * tools.k / (tools.mH * 2 / 3) * rho * v * np.gradient(Te / mu, r) return advection @@ -104,27 +106,33 @@ def simtogrid(sim, grid): Advection cooling rate in units of erg s-1 cm-3, as positive values. 
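
The two bulk terms defined above can be checked with a short, entirely made-up profile; the expressions follow calc_expansion() and calc_advection(), and the expansion term comes out negative for an outward-decreasing density, consistent with the assertion in the code.

import numpy as np

k, mH = 1.381e-16, 1.674e-24                        # CGS constants [erg K-1], [g]
r = np.linspace(1.0e10, 8.0e10, 100)                # hypothetical radius grid [cm]
rho = 1.0e-15 * (r[0] / r) ** 2                     # hypothetical density [g cm-3]
v = 1.0e6 * (r / r[0])                              # hypothetical velocity [cm s-1]
Te = 8000.0 + 2000.0 * (r - r[0]) / (r[-1] - r[0])  # hypothetical temperature [K]
mu = np.full_like(r, 0.65)                          # hypothetical mean particle mass

expansion = k / mH * Te * v / mu * np.gradient(rho, r)                 # <= 0 (cooling)
advection = -1 * k / (mH * 2 / 3) * rho * v * np.gradient(Te / mu, r)  # either sign
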
""" - #get Cloudy quantities - Te = interp1d(sim.ovr.alt, sim.ovr.Te, fill_value='extrapolate')(grid) - mu = interp1d(sim.ovr.alt[sim.ovr.alt < 0.999 * sim.altmax * sim.p.R], sim.ovr.mu[sim.ovr.alt < 0.999 * sim.altmax * sim.p.R], fill_value='extrapolate')(grid) - radheat = interp1d(sim.ovr.alt, sim.cool.htot, fill_value='extrapolate')(grid) - radcool = interp1d(sim.ovr.alt, sim.cool.ctot, fill_value='extrapolate')(grid) - - #get isothermal Parker wind quantities - rho = interp1d(sim.par.prof.alt, sim.par.prof.rho, fill_value='extrapolate')(grid) - v = interp1d(sim.par.prof.alt, sim.par.prof.v, fill_value='extrapolate')(grid) - - #calculate bulk terms - expcool = -1 * calc_expansion(grid, rho, v, Te, mu) #minus sign to get expansion cooling rates as positive values + # get Cloudy quantities + Te = interp1d(sim.ovr.alt, sim.ovr.Te, fill_value="extrapolate")(grid) + mu = interp1d( + sim.ovr.alt[sim.ovr.alt < 0.999 * sim.altmax * sim.p.R], + sim.ovr.mu[sim.ovr.alt < 0.999 * sim.altmax * sim.p.R], + fill_value="extrapolate", + )(grid) + radheat = interp1d(sim.ovr.alt, sim.cool.htot, fill_value="extrapolate")(grid) + radcool = interp1d(sim.ovr.alt, sim.cool.ctot, fill_value="extrapolate")(grid) + + # get isothermal Parker wind quantities + rho = interp1d(sim.par.prof.alt, sim.par.prof.rho, fill_value="extrapolate")(grid) + v = interp1d(sim.par.prof.alt, sim.par.prof.v, fill_value="extrapolate")(grid) + + # calculate bulk terms + expcool = -1 * calc_expansion( + grid, rho, v, Te, mu + ) # minus sign to get expansion cooling rates as positive values adv = calc_advection(grid, rho, v, Te, mu) - #apply very slight smoothing because the Cloudy .ovr quantities have mediocre reported numerical precision + # apply very slight smoothing because the Cloudy .ovr quantities have mediocre reported numerical precision expcool = tools.smooth_gaus_savgol(expcool, fraction=0.01) adv = tools.smooth_gaus_savgol(adv, fraction=0.01) advheat, advcool = np.copy(adv), -1 * np.copy(adv) - advheat[advheat < 0] = 0. - advcool[advcool < 0] = 0. + advheat[advheat < 0] = 0.0 + advcool[advcool < 0] = 0.0 return Te, mu, rho, v, radheat, radcool, expcool, advheat, advcool @@ -155,10 +163,12 @@ def calc_HCratio(radheat, radcool, expcool, advheat, advcool): """ totheat = radheat + advheat - totcool = radcool + expcool + advcool #all cooling rates are positive values - nettotal = (totheat - totcool) + totcool = radcool + expcool + advcool # all cooling rates are positive values + nettotal = totheat - totcool - HCratio = np.sign(nettotal) * np.maximum(totheat, totcool) / np.minimum(totheat,totcool) + HCratio = ( + np.sign(nettotal) * np.maximum(totheat, totcool) / np.minimum(totheat, totcool) + ) return HCratio @@ -183,13 +193,15 @@ def get_new_Tstruc(old_Te, HCratio, fac): New temperature profile. 
""" - deltaT = fac * np.sign(HCratio) * np.log10(np.abs(HCratio)) #take log-based approach to deltaT - fT = np.copy(deltaT) #the temperature multiplication fraction + deltaT = ( + fac * np.sign(HCratio) * np.log10(np.abs(HCratio)) + ) # take log-based approach to deltaT + fT = np.copy(deltaT) # the temperature multiplication fraction fT[deltaT < 0] = 1 + deltaT[deltaT < 0] - fT[deltaT > 0] = 1/(1 - deltaT[deltaT > 0]) - fT = np.clip(fT, 0.5, 2) #max change is a factor 2 up or down in temperature + fT[deltaT > 0] = 1 / (1 - deltaT[deltaT > 0]) + fT = np.clip(fT, 0.5, 2) # max change is a factor 2 up or down in temperature newTe = old_Te * fT - newTe = np.clip(newTe, 1e1, 1e6) #set minimum temperature to 10K + newTe = np.clip(newTe, 1e1, 1e6) # set minimum temperature to 10K return newTe @@ -245,30 +257,48 @@ def last_false_index(arr): """ return len(arr) - np.argmax(~arr[::-1]) - 1 - #check for advection dominated regime - adv_cloc = len(HCratio) #start by setting a 'too high' value - advheat_dominates = (advheat > radheat) #boolean array where advection heating dominates - bothrad_dominate = ((radheat > advheat) & (radcool > advcool) & (radcool > expcool)) #boolean array where radiative heating dominates AND radiative cooling dominates + # check for advection dominated regime + adv_cloc = len(HCratio) # start by setting a 'too high' value + advheat_dominates = ( + advheat > radheat + ) # boolean array where advection heating dominates + bothrad_dominate = ( + (radheat > advheat) & (radcool > advcool) & (radcool > expcool) + ) # boolean array where radiative heating dominates AND radiative cooling dominates highest_r_above_which_no_bothrad_dominate = last_true_index(bothrad_dominate) - advheat_dominates[:highest_r_above_which_no_bothrad_dominate] = False #now the boolean array stores where advection heating dominates AND where there is no point at higher altitudes that is rad. heat and rad. cool dominated - if True in advheat_dominates: #if there is no such point, adv_cloc stays default value - advdomloc = first_true_index(advheat_dominates) #get lowest altitude location where advection dominates - advheat_unimportant = (advheat < 0.25 * radheat) #boolean array where advection heating is relatively unimportant - advunimploc = last_true_index(advheat_unimportant[:advdomloc]) #first point at lower altitude where advection becomes unimportant (if no point exists, it will become advdomloc) - #then walk to higher altitude again to find converged point. We are more lax with H/C ratio if advection dominates more. - almost_converged = (np.abs(HCratio[advunimploc:]) < 1.3 * np.clip((advheat[advunimploc:] / radheat[advunimploc:])**(2./3.), 1, 10)) - if True in almost_converged: #otherwise it stays default value + advheat_dominates[:highest_r_above_which_no_bothrad_dominate] = ( + False # now the boolean array stores where advection heating dominates AND where there is no point at higher altitudes that is rad. heat and rad. 
cool dominated + ) + if ( + True in advheat_dominates + ): # if there is no such point, adv_cloc stays default value + advdomloc = first_true_index( + advheat_dominates + ) # get lowest altitude location where advection dominates + advheat_unimportant = ( + advheat < 0.25 * radheat + ) # boolean array where advection heating is relatively unimportant + advunimploc = last_true_index( + advheat_unimportant[:advdomloc] + ) # first point at lower altitude where advection becomes unimportant (if no point exists, it will become advdomloc) + # then walk to higher altitude again to find converged point. We are more lax with H/C ratio if advection dominates more. + almost_converged = np.abs(HCratio[advunimploc:]) < 1.3 * np.clip( + (advheat[advunimploc:] / radheat[advunimploc:]) ** (2.0 / 3.0), 1, 10 + ) + if True in almost_converged: # otherwise it stays default value adv_cloc = advunimploc + first_true_index(almost_converged) - #check for regime where radiative cooling is weak. Usually this means that expansion cooling dominates, but advection cooling can contribute in some cases - exp_cloc = len(HCratio) #start by setting a 'too high' value - expcool_dominates = (radcool / (radcool+expcool+advcool) < 0.2) + # check for regime where radiative cooling is weak. Usually this means that expansion cooling dominates, but advection cooling can contribute in some cases + exp_cloc = len(HCratio) # start by setting a 'too high' value + expcool_dominates = radcool / (radcool + expcool + advcool) < 0.2 if True and False in expcool_dominates: - exp_cloc = last_false_index(expcool_dominates) #this way of evaluating it guarantees that all entries after this one are True - elif False not in expcool_dominates: #if they are all True + exp_cloc = last_false_index( + expcool_dominates + ) # this way of evaluating it guarantees that all entries after this one are True + elif False not in expcool_dominates: # if they are all True exp_cloc = 0 - cloc = min(adv_cloc, exp_cloc) #use the lowest radius point + cloc = min(adv_cloc, exp_cloc) # use the lowest radius point return cloc @@ -297,37 +327,61 @@ def relaxTstruc(grid, path, itno, Te, HCratio): Adjusted temperature profile to use for the next iteration. """ - if itno == 2: #save for first time - np.savetxt(path+'iterations.txt', np.column_stack((grid, np.repeat(0.3, len(grid)), Te)), - header='grid fac1 Te1', comments='', delimiter=' ', fmt='%.7e') - - iterations_file = pd.read_csv(path+'iterations.txt', header=0, sep=' ') - fac = iterations_file['fac'+str(itno-1)].values - - newTe_relax = get_new_Tstruc(Te, HCratio, fac) #adjust the temperature profile - newTe_relax = tools.smooth_gaus_savgol(newTe_relax, fraction = 1./(20*itno)) #smooth it - newTe_relax = np.clip(newTe_relax, 1e1, 1e6) #smoothing may have pushed newTe_relax < 10K again. - - if itno >= 4: #check for fluctuations. 
If so, we decrease the deltaT factor - prev_prevTe = iterations_file['Te'+str(itno-2)] - previous_ratio = Te / prev_prevTe #compare itno-2 to itno-1 - this_ratio = newTe_relax / Te #compare itno-1 to the current itno (because of smoothing this ratio is not exactly the same as fT) - fl = (((previous_ratio < 1) & (this_ratio > 1)) | ((previous_ratio > 1) & (this_ratio < 1))) #boolean indicating where temperature fluctuates - fac[fl] = 2/3 * fac[fl] #take smaller changes in T in regions where the temperature fluctuates - fac = np.clip(tools.smooth_gaus_savgol(fac, size=10), 0.02, 0.3) #smooth the factor itself as well - newTe_relax = get_new_Tstruc(Te, HCratio, fac) #recalculate new temperature profile with updated fac - newTe_relax = tools.smooth_gaus_savgol(newTe_relax, fraction = 1/(20*itno)) #smooth it + if itno == 2: # save for first time + np.savetxt( + path + "iterations.txt", + np.column_stack((grid, np.repeat(0.3, len(grid)), Te)), + header="grid fac1 Te1", + comments="", + delimiter=" ", + fmt="%.7e", + ) + + iterations_file = pd.read_csv(path + "iterations.txt", header=0, sep=" ") + fac = iterations_file["fac" + str(itno - 1)].values + + newTe_relax = get_new_Tstruc(Te, HCratio, fac) # adjust the temperature profile + newTe_relax = tools.smooth_gaus_savgol( + newTe_relax, fraction=1.0 / (20 * itno) + ) # smooth it + newTe_relax = np.clip( + newTe_relax, 1e1, 1e6 + ) # smoothing may have pushed newTe_relax < 10K again. + + if itno >= 4: # check for fluctuations. If so, we decrease the deltaT factor + prev_prevTe = iterations_file["Te" + str(itno - 2)] + previous_ratio = Te / prev_prevTe # compare itno-2 to itno-1 + this_ratio = ( + newTe_relax / Te + ) # compare itno-1 to the current itno (because of smoothing this ratio is not exactly the same as fT) + fl = ((previous_ratio < 1) & (this_ratio > 1)) | ( + (previous_ratio > 1) & (this_ratio < 1) + ) # boolean indicating where temperature fluctuates + fac[fl] = ( + 2 / 3 * fac[fl] + ) # take smaller changes in T in regions where the temperature fluctuates + fac = np.clip( + tools.smooth_gaus_savgol(fac, size=10), 0.02, 0.3 + ) # smooth the factor itself as well + newTe_relax = get_new_Tstruc( + Te, HCratio, fac + ) # recalculate new temperature profile with updated fac + newTe_relax = tools.smooth_gaus_savgol( + newTe_relax, fraction=1 / (20 * itno) + ) # smooth it newTe_relax = np.clip(newTe_relax, 1e1, 1e6) - iterations_file['fac'+str(itno)] = fac - iterations_file.to_csv(path+'iterations.txt', sep=' ', float_format='%.7e', index=False) + iterations_file["fac" + str(itno)] = fac + iterations_file.to_csv( + path + "iterations.txt", sep=" ", float_format="%.7e", index=False + ) return newTe_relax def constructTstruc(grid, newTe_relax, cloc, v, rho, mu, radheat, radcool): """ - Proposes a new temperature profile based on a 'construction' algorithm, + Proposes a new temperature profile based on a 'construction' algorithm, starting at the cloc and at higher altitudes. Parameters @@ -355,50 +409,86 @@ def constructTstruc(grid, newTe_relax, cloc, v, rho, mu, radheat, radcool): Adjusted temperature profile to use for the next iteration. 
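
The oscillation damping in relaxTstruc() above can be seen in isolation with a few invented numbers: wherever the temperature overshot in one iteration and swung back in the next, the local step-size factor is multiplied by 2/3 so the following proposal is gentler.

import numpy as np

previous_ratio = np.array([1.10, 0.90, 1.20])   # Te(it-1) / Te(it-2), invented
this_ratio = np.array([1.05, 1.10, 0.80])       # Te(it)   / Te(it-1), invented
fac = np.full(3, 0.3)

fluct = ((previous_ratio < 1) & (this_ratio > 1)) | ((previous_ratio > 1) & (this_ratio < 1))
fac[fluct] = 2 / 3 * fac[fluct]
print(fac)                                       # -> [0.3 0.2 0.2]
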
""" - newTe_construct = np.copy(newTe_relax) #start with the temp struc from the relaxation function + newTe_construct = np.copy( + newTe_relax + ) # start with the temp struc from the relaxation function - expansion_Tdivmu = tools.k/tools.mH * v * np.gradient(rho, grid) #this is expansion except for the T/mu term (still negative values) - advection_gradTdivmu = -1 * tools.k/(tools.mH * 2/3) * rho * v #this is advection except for the d(T/mu)/dr term + expansion_Tdivmu = ( + tools.k / tools.mH * v * np.gradient(rho, grid) + ) # this is expansion except for the T/mu term (still negative values) + advection_gradTdivmu = ( + -1 * tools.k / (tools.mH * 2 / 3) * rho * v + ) # this is advection except for the d(T/mu)/dr term def one_cell_HCratio(T, index): expcool = expansion_Tdivmu[index] * T / mu[index] - adv = advection_gradTdivmu[index] * ((T/mu[index]) - (newTe_construct[index-1]/mu[index-1]))/(grid[index] - grid[index-1]) - - #instead of completely keeping the radiative heating and cooling rate the same while we are solving for T in this bin, - #we adjust it a little bit. This helps to prevent that the temperature changes are too drastic and go into a regime where - #radiation becomes important again. We guess a quadratic dependence of the rates on T. This is not the true dependence, - #but it does reduce to the original rate when T -> original T, which is important. - guess_radheat = radheat[index] * (newTe_construct[index] / T)**2 - guess_radcool = radcool[index] * (T / newTe_construct[index])**2 - - totheat = guess_radheat + max(adv, 0) #if adv is negative we don't add it here - totcool = guess_radcool - expcool - min(adv, 0) #if adv is positive we don't add it here, we subtract expcool and adv because they are negative - - HCratio = max(totheat, totcool) / min(totheat, totcool) #both entities are positive - - return HCratio - 1 #find root of this value to get H/C close to 1 - - - for i in range(cloc+1, len(grid)): #walk from cloc to higher altitudes - result = minimize_scalar(one_cell_HCratio, method='bounded', bounds=[1e1,1e6], args=(i)) + adv = ( + advection_gradTdivmu[index] + * ((T / mu[index]) - (newTe_construct[index - 1] / mu[index - 1])) + / (grid[index] - grid[index - 1]) + ) + + # instead of completely keeping the radiative heating and cooling rate the same while we are solving for T in this bin, + # we adjust it a little bit. This helps to prevent that the temperature changes are too drastic and go into a regime where + # radiation becomes important again. We guess a quadratic dependence of the rates on T. This is not the true dependence, + # but it does reduce to the original rate when T -> original T, which is important. 
+ guess_radheat = radheat[index] * (newTe_construct[index] / T) ** 2 + guess_radcool = radcool[index] * (T / newTe_construct[index]) ** 2 + + totheat = guess_radheat + max(adv, 0) # if adv is negative we don't add it here + totcool = ( + guess_radcool - expcool - min(adv, 0) + ) # if adv is positive we don't add it here, we subtract expcool and adv because they are negative + + HCratio = max(totheat, totcool) / min( + totheat, totcool + ) # both entities are positive + + return HCratio - 1 # find root of this value to get H/C close to 1 + + for i in range(cloc + 1, len(grid)): # walk from cloc to higher altitudes + result = minimize_scalar( + one_cell_HCratio, method="bounded", bounds=[1e1, 1e6], args=(i) + ) newTe_construct[i] = result.x - - #smooth around the abrupt edge where the constructed part sets in - smooth_newTe_construct = tools.smooth_gaus_savgol(newTe_construct, fraction=0.03) #first smooth the complete T(r) profile - smooth_newTe_construct = np.clip(smooth_newTe_construct, 1e1, 1e6) #after smoothing we might have ended up below 10K - #now combine the smoothed profile around 'cloc', and the non-smoothed version away from 'cloc' + # smooth around the abrupt edge where the constructed part sets in + smooth_newTe_construct = tools.smooth_gaus_savgol( + newTe_construct, fraction=0.03 + ) # first smooth the complete T(r) profile + smooth_newTe_construct = np.clip( + smooth_newTe_construct, 1e1, 1e6 + ) # after smoothing we might have ended up below 10K + # now combine the smoothed profile around 'cloc', and the non-smoothed version away from 'cloc' smooth_weight = np.zeros(len(grid)) - smooth_weight += sps.norm.pdf(range(len(grid)), cloc, int(len(grid)/30)) - smooth_weight /= np.max(smooth_weight) #normalize + smooth_weight += sps.norm.pdf(range(len(grid)), cloc, int(len(grid) / 30)) + smooth_weight /= np.max(smooth_weight) # normalize raw_weight = 1 - smooth_weight - newTe_construct = smooth_newTe_construct * smooth_weight + newTe_construct * raw_weight + newTe_construct = ( + smooth_newTe_construct * smooth_weight + newTe_construct * raw_weight + ) return newTe_construct -def make_rates_plot(altgrid, Te, newTe_relax, radheat, radcool, expcool, advheat, advcool, rho, HCratio, altmax, fc, - newTe_construct=None, cloc=None, title=None, savename=None): +def make_rates_plot( + altgrid, + Te, + newTe_relax, + radheat, + radcool, + expcool, + advheat, + advcool, + rho, + HCratio, + altmax, + fc, + newTe_construct=None, + cloc=None, + title=None, + savename=None, +): """ Makes a plot of the previous and newly proposed temperature profiles, as well as the different heating/cooling rates and their ratio based on the @@ -441,53 +531,77 @@ def make_rates_plot(altgrid, Te, newTe_relax, radheat, radcool, expcool, advheat """ HCratiopos, HCrationeg = np.copy(HCratio), -1 * np.copy(HCratio) - HCratiopos[HCratiopos < 0] = 0. - HCrationeg[HCrationeg < 0] = 0. 
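
The blend around 'cloc' above mixes the smoothed and raw temperature profiles with a normalized Gaussian weight, so only the neighbourhood of the transition point gets smoothed. A standalone sketch with an invented profile:

import numpy as np
import scipy.stats as sps

n, cloc = 100, 40
raw = np.linspace(5000.0, 9000.0, n)          # hypothetical temperature profile [K]
smoothed = raw + 200.0                        # stand-in for its smoothed version

weight = sps.norm.pdf(np.arange(n), cloc, n // 30)
weight /= weight.max()                        # weight is 1 at cloc, ~0 far away
blended = smoothed * weight + raw * (1 - weight)
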
+ HCratiopos[HCratiopos < 0] = 0.0 + HCrationeg[HCrationeg < 0] = 0.0 - fig, (ax1, ax2, ax3) = plt.subplots(3, figsize=(4,7)) + fig, (ax1, ax2, ax3) = plt.subplots(3, figsize=(4, 7)) if title != None: ax1.set_title(title) - ax1.plot(altgrid, Te, color='#4CAF50', label='previous') - ax1.plot(altgrid, newTe_relax, color='#FFA500', label='relaxation') + ax1.plot(altgrid, Te, color="#4CAF50", label="previous") + ax1.plot(altgrid, newTe_relax, color="#FFA500", label="relaxation") if newTe_construct is not None: - ax1.plot(altgrid, newTe_construct, color='#800080', label='construction') - ax1.scatter(altgrid[cloc], newTe_relax[cloc], color='#800080') - ax1.set_ylabel('Temperature [K]') - ax1.legend(loc='best', fontsize=8) - - ax2.plot(altgrid, radheat/rho, color='red', linewidth=2.) - ax2.plot(altgrid, radcool/rho, color='blue') - ax2.plot(altgrid, expcool/rho, color='blue', linestyle='dashed') - ax2.plot(altgrid, advheat/rho, color='red', linestyle='dotted') - ax2.plot(altgrid, advcool/rho, color='blue', linestyle='dotted') - ax2.set_yscale('log') - ax2.set_ylim(0.1*min(min(radheat/rho), min(radcool/rho)), 2*max(max(radheat/rho), max(radcool/rho), max(expcool/rho), max(advheat/rho), max(advcool/rho))) - ax2.set_ylabel('Rate [erg/s/g]') - ax2.legend(((Line2D([], [], color='red', linestyle=(0,(6,6))), Line2D([], [], color='blue', linestyle=(6,(6,6)))), - Line2D([], [], color='blue', linestyle='dashed'), - (Line2D([], [], color='red', linestyle=(0,(1,2,1,8))), Line2D([], [], color='blue', linestyle=(6,(1,2,1,8))))), - ('radiation', 'expansion', 'advection'), loc='best', fontsize=8) - - ax3.plot(altgrid, HCratiopos, color='red') - ax3.plot(altgrid, HCrationeg, color='blue') - ax3.axhline(fc, color='k', linestyle='dotted') - ax3.set_yscale('log') + ax1.plot(altgrid, newTe_construct, color="#800080", label="construction") + ax1.scatter(altgrid[cloc], newTe_relax[cloc], color="#800080") + ax1.set_ylabel("Temperature [K]") + ax1.legend(loc="best", fontsize=8) + + ax2.plot(altgrid, radheat / rho, color="red", linewidth=2.0) + ax2.plot(altgrid, radcool / rho, color="blue") + ax2.plot(altgrid, expcool / rho, color="blue", linestyle="dashed") + ax2.plot(altgrid, advheat / rho, color="red", linestyle="dotted") + ax2.plot(altgrid, advcool / rho, color="blue", linestyle="dotted") + ax2.set_yscale("log") + ax2.set_ylim( + 0.1 * min(min(radheat / rho), min(radcool / rho)), + 2 + * max( + max(radheat / rho), + max(radcool / rho), + max(expcool / rho), + max(advheat / rho), + max(advcool / rho), + ), + ) + ax2.set_ylabel("Rate [erg/s/g]") + ax2.legend( + ( + ( + Line2D([], [], color="red", linestyle=(0, (6, 6))), + Line2D([], [], color="blue", linestyle=(6, (6, 6))), + ), + Line2D([], [], color="blue", linestyle="dashed"), + ( + Line2D([], [], color="red", linestyle=(0, (1, 2, 1, 8))), + Line2D([], [], color="blue", linestyle=(6, (1, 2, 1, 8))), + ), + ), + ("radiation", "expansion", "advection"), + loc="best", + fontsize=8, + ) + + ax3.plot(altgrid, HCratiopos, color="red") + ax3.plot(altgrid, HCrationeg, color="blue") + ax3.axhline(fc, color="k", linestyle="dotted") + ax3.set_yscale("log") ax3.set_ylim(bottom=1) - ax3.set_ylabel('Ratio heat/cool') + ax3.set_ylabel("Ratio heat/cool") - #use these with the altgrid: + # use these with the altgrid: tools.set_alt_ax(ax1, altmax=altmax, labels=False) tools.set_alt_ax(ax2, altmax=altmax, labels=False) tools.set_alt_ax(ax3, altmax=altmax, labels=True) fig.tight_layout() if savename != None: - plt.savefig(savename, bbox_inches='tight', dpi=200) + plt.savefig(savename, 
bbox_inches="tight", dpi=200) plt.clf() plt.close() -def make_converged_plot(altgrid, altmax, path, Te, radheat, rho, radcool, expcool, advheat, advcool): +def make_converged_plot( + altgrid, altmax, path, Te, radheat, rho, radcool, expcool, advheat, advcool +): """ Makes a plot of the converged temperature profile, as well as the different heating/cooling rates. @@ -516,35 +630,56 @@ def make_converged_plot(altgrid, altmax, path, Te, radheat, rho, radcool, expcoo Advection cooling rate in units of erg s-1 cm-3, as positive values. """ - fig, (ax1, ax2) = plt.subplots(2, figsize=(4,5.5)) - ax1.plot(altgrid, Te, color='k') - ax1.set_ylabel('Temperature [K]') - - ax2.plot(altgrid, radheat/rho, color='red') - ax2.plot(altgrid, radcool/rho, color='blue') - ax2.plot(altgrid, expcool/rho, color='blue', linestyle='dashed') - ax2.plot(altgrid, advheat/rho, color='red', linestyle='dotted') - ax2.plot(altgrid, advcool/rho, color='blue', linestyle='dotted') - ax2.set_yscale('log') - ax2.set_ylim(0.1*min(min(radheat/rho), min(radcool/rho)), 2*max(max(radheat/rho), max(radcool/rho), max(expcool/rho), max(advheat/rho), max(advcool/rho))) - ax2.set_ylabel('Rate [erg/s/g]') - ax2.legend(((Line2D([], [], color='red', linestyle=(0,(6,6))), Line2D([], [], color='blue', linestyle=(6,(6,6)))), - Line2D([], [], color='blue', linestyle='dashed'), - (Line2D([], [], color='red', linestyle=(0,(1,2,1,8))), Line2D([], [], color='blue', linestyle=(6,(1,2,1,8))))), - ('radiation', 'expansion', 'advection'), loc='best', fontsize=8) - - - #use these with the altgrid: + fig, (ax1, ax2) = plt.subplots(2, figsize=(4, 5.5)) + ax1.plot(altgrid, Te, color="k") + ax1.set_ylabel("Temperature [K]") + + ax2.plot(altgrid, radheat / rho, color="red") + ax2.plot(altgrid, radcool / rho, color="blue") + ax2.plot(altgrid, expcool / rho, color="blue", linestyle="dashed") + ax2.plot(altgrid, advheat / rho, color="red", linestyle="dotted") + ax2.plot(altgrid, advcool / rho, color="blue", linestyle="dotted") + ax2.set_yscale("log") + ax2.set_ylim( + 0.1 * min(min(radheat / rho), min(radcool / rho)), + 2 + * max( + max(radheat / rho), + max(radcool / rho), + max(expcool / rho), + max(advheat / rho), + max(advcool / rho), + ), + ) + ax2.set_ylabel("Rate [erg/s/g]") + ax2.legend( + ( + ( + Line2D([], [], color="red", linestyle=(0, (6, 6))), + Line2D([], [], color="blue", linestyle=(6, (6, 6))), + ), + Line2D([], [], color="blue", linestyle="dashed"), + ( + Line2D([], [], color="red", linestyle=(0, (1, 2, 1, 8))), + Line2D([], [], color="blue", linestyle=(6, (1, 2, 1, 8))), + ), + ), + ("radiation", "expansion", "advection"), + loc="best", + fontsize=8, + ) + + # use these with the altgrid: tools.set_alt_ax(ax1, altmax=altmax, labels=False) tools.set_alt_ax(ax2, altmax=altmax) fig.tight_layout() - plt.savefig(path+'converged.png', bbox_inches='tight', dpi=200) + plt.savefig(path + "converged.png", bbox_inches="tight", dpi=200) plt.clf() plt.close() -def check_converged(fc, HCratio, newTe, prevTe, linthresh=50.): +def check_converged(fc, HCratio, newTe, prevTe, linthresh=50.0): """ Checks whether the temperature profile is converged. At every radial cell, it checks for three conditions, one of which must be satisfied: @@ -553,7 +688,7 @@ def check_converged(fc, HCratio, newTe, prevTe, linthresh=50.): that a H/C equal to fc would induce. In principle, we would expect that if this were the case, H/C itself would be < fc, but smoothing of the temperature profile can cause different behavior. 
For example, we can get stuck - in a loop where H/C > fc, we then propose a new temperature profile that is + in a loop where H/C > fc, we then propose a new temperature profile that is significantly different, but then after the smoothing step we end up with the profile that we had before. To break out of such a loop that never converges, we check if the temperature changes are less than we would expect for an @@ -584,10 +719,16 @@ def check_converged(fc, HCratio, newTe, prevTe, linthresh=50.): Whether the temperature profile is converged. """ - ratioTe = np.maximum(newTe, prevTe) / np.minimum(newTe, prevTe) #take element wise ratio - diffTe = np.abs(newTe - prevTe) #take element-wise absolute difference - - if np.all((np.abs(HCratio) < fc) | (ratioTe < (1 + 0.3 * np.log10(fc))) | (diffTe < linthresh)): + ratioTe = np.maximum(newTe, prevTe) / np.minimum( + newTe, prevTe + ) # take element wise ratio + diffTe = np.abs(newTe - prevTe) # take element-wise absolute difference + + if np.all( + (np.abs(HCratio) < fc) + | (ratioTe < (1 + 0.3 * np.log10(fc))) + | (diffTe < linthresh) + ): converged = True else: converged = False @@ -612,12 +753,14 @@ def clean_converged_folder(folder): if not os.path.isdir(folder): warnings.warn(f"This folder does not exist: {folder}") - elif not os.path.isfile(folder+'/converged.in'): + elif not os.path.isfile(folder + "/converged.in"): warnings.warn(f"This folder wasn't converged, I will not clean it: {folder}") else: for filename in os.listdir(folder): - if filename[:9] != 'converged' and os.path.isfile(os.path.join(folder, filename)): + if filename[:9] != "converged" and os.path.isfile( + os.path.join(folder, filename) + ): os.remove(os.path.join(folder, filename)) @@ -647,74 +790,150 @@ def run_loop(path, itno, fc, save_sp=[], maxit=16): Maximum number of iterations, by default 16. """ - if itno == 1: #iteration1 is just running Cloudy. Then, we move on to iteration2 - tools.run_Cloudy('iteration1', folder=path) + if itno == 1: # iteration1 is just running Cloudy. Then, we move on to iteration2 + tools.run_Cloudy("iteration1", folder=path) itno += 1 - #now, we have ran our iteration1 and can start the iterative scheme to find a new profile: + # now, we have ran our iteration1 and can start the iterative scheme to find a new profile: while itno <= maxit: - prev_sim = tools.Sim(path+f'iteration{itno-1}') #load Cloudy results from previous iteration - Rp = prev_sim.p.R #planet radius in cm - altmax = prev_sim.altmax #maximum radius of the simulation in units of Rp - - #make logspaced grid to use throughout the code, interpolate all quantities onto this grid. 
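(Aside, not part of the patch: the log-spaced grid mentioned in the comment above is plain numpy/scipy; a minimal sketch follows, with assumed values for Rp and altmax and a placeholder temperature profile standing in for the Cloudy output.)

    import numpy as np
    from scipy.interpolate import interp1d

    Rp = 1.5e10      # assumed planet radius [cm]
    altmax = 8.0     # assumed maximum altitude of the simulation [Rp]
    rgrid = np.logspace(np.log10(Rp), np.log10(altmax * Rp), num=1000)

    # stand-ins for the radius/temperature arrays read from the previous Cloudy iteration
    r_cloudy = np.linspace(Rp, altmax * Rp, num=200)
    Te_cloudy = 4000.0 + 6000.0 * np.exp(-(r_cloudy / Rp - 1.0))
    Te_on_grid = interp1d(r_cloudy, Te_cloudy, fill_value="extrapolate")(rgrid)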
- rgrid = np.logspace(np.log10(Rp), np.log10(altmax*Rp), num=1000) - - Te, mu, rho, v, radheat, radcool, expcool, advheat, advcool = simtogrid(prev_sim, rgrid) #get all needed Cloudy quantities on the grid - HCratio = calc_HCratio(radheat, radcool, expcool, advheat, advcool) #H/C or C/H ratio, depending on which is larger - - #now the procedure starts - we first produce a new temperature profile - newTe_relax = relaxTstruc(rgrid, path, itno, Te, HCratio) #apply the relaxation algorithm - cloc = calc_cloc(radheat, radcool, expcool, advheat, advcool, HCratio) #look for a point from where we could use construction + prev_sim = tools.Sim( + path + f"iteration{itno-1}" + ) # load Cloudy results from previous iteration + Rp = prev_sim.p.R # planet radius in cm + altmax = prev_sim.altmax # maximum radius of the simulation in units of Rp + + # make logspaced grid to use throughout the code, interpolate all quantities onto this grid. + rgrid = np.logspace(np.log10(Rp), np.log10(altmax * Rp), num=1000) + + Te, mu, rho, v, radheat, radcool, expcool, advheat, advcool = simtogrid( + prev_sim, rgrid + ) # get all needed Cloudy quantities on the grid + HCratio = calc_HCratio( + radheat, radcool, expcool, advheat, advcool + ) # H/C or C/H ratio, depending on which is larger + + # now the procedure starts - we first produce a new temperature profile + newTe_relax = relaxTstruc( + rgrid, path, itno, Te, HCratio + ) # apply the relaxation algorithm + cloc = calc_cloc( + radheat, radcool, expcool, advheat, advcool, HCratio + ) # look for a point from where we could use construction newTe_construct = None if cloc != len(rgrid): - newTe_construct = constructTstruc(rgrid, newTe_relax, int(cloc), v, rho, mu, radheat, radcool) #apply construction algorithm - - make_rates_plot(rgrid/Rp, Te, newTe_relax, radheat, radcool, expcool, advheat, advcool, - rho, HCratio, altmax, fc, title=f'iteration {itno}', - savename=path+f'iteration{itno}.png', newTe_construct=newTe_construct, cloc=cloc) - - #get the final new temperature profile, based on whether the construction algorithm was applied + newTe_construct = constructTstruc( + rgrid, newTe_relax, int(cloc), v, rho, mu, radheat, radcool + ) # apply construction algorithm + + make_rates_plot( + rgrid / Rp, + Te, + newTe_relax, + radheat, + radcool, + expcool, + advheat, + advcool, + rho, + HCratio, + altmax, + fc, + title=f"iteration {itno}", + savename=path + f"iteration{itno}.png", + newTe_construct=newTe_construct, + cloc=cloc, + ) + + # get the final new temperature profile, based on whether the construction algorithm was applied if newTe_construct is None: newTe = newTe_relax else: newTe = newTe_construct - #add this temperature profile to the 'iterations' file for future reference - iterations_file = pd.read_csv(path+'iterations.txt', header=0, sep=' ') - iterations_file['Te'+str(itno)] = newTe - iterations_file.to_csv(path+'iterations.txt', sep=' ', float_format='%.7e', index=False) - - #now we check if the profile is converged. - if itno <= 2: #always update the Te profile at least once - in case we start from a 'close' Parker wind profile that immediately satisfies fc + # add this temperature profile to the 'iterations' file for future reference + iterations_file = pd.read_csv(path + "iterations.txt", header=0, sep=" ") + iterations_file["Te" + str(itno)] = newTe + iterations_file.to_csv( + path + "iterations.txt", sep=" ", float_format="%.7e", index=False + ) + + # now we check if the profile is converged. 
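(Aside, not part of the patch: a toy illustration of the three element-wise criteria used by check_converged() above. A cell passes if any one criterion holds; the profile counts as converged only if every cell passes. All numbers are invented.)

    import numpy as np

    fc, linthresh = 1.1, 50.0
    HCratio = np.array([1.05, 3.0, -2.0])        # heating/cooling imbalance per cell
    newTe = np.array([5000.0, 5020.0, 9000.0])   # proposed temperatures [K]
    prevTe = np.array([4990.0, 5000.0, 8800.0])  # previous temperatures [K]

    ratioTe = np.maximum(newTe, prevTe) / np.minimum(newTe, prevTe)
    diffTe = np.abs(newTe - prevTe)
    converged = np.all(
        (np.abs(HCratio) < fc)                   # cell 1 passes this criterion
        | (ratioTe < (1 + 0.3 * np.log10(fc)))   # cell 2 passes this criterion
        | (diffTe < linthresh)                   # cell 3 fails all three
    )
    print(converged)  # False: the third cell still changes too much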
+ if ( + itno <= 2 + ): # always update the Te profile at least once - in case we start from a 'close' Parker wind profile that immediately satisfies fc converged = False - else: - prevTe = iterations_file['Te'+str(itno-1)].values #read out from file instead of Sim because the file has higher resolution - converged = check_converged(fc, HCratio, newTe, prevTe, linthresh=50.) #check convergence criteria - - if converged: #run once more with more output - make_converged_plot(rgrid/Rp, altmax, path, Te, radheat, rho, radcool, expcool, advheat, advcool) - #calculate these terms for the output converged.txt file - for fast access of some key parameters without loading in the Cloudy sim. - np.savetxt(path+'converged.txt', np.column_stack((rgrid/Rp, rho, Te, mu, radheat, radcool, expcool, advheat, advcool)), fmt='%1.5e', - header='R rho Te mu radheat radcool expcool advheat advcool', comments='') - - #we run the last simulation one more time but with all the output files - tools.copyadd_Cloudy_in(path+'iteration'+str(itno-1), path+'converged', - outfiles=['.heat', '.den', '.en'], denspecies=save_sp, - selected_den_levels=True, hcfrac=0.01) - tools.run_Cloudy('converged', folder=path) - tools.Sim(path+'converged') #read in the simulation, so we open the .en file (if it exists) and hence compress its size (see tools.process_energies()) - clean_converged_folder(path) #remove all non-converged files + else: + prevTe = iterations_file[ + "Te" + str(itno - 1) + ].values # read out from file instead of Sim because the file has higher resolution + converged = check_converged( + fc, HCratio, newTe, prevTe, linthresh=50.0 + ) # check convergence criteria + + if converged: # run once more with more output + make_converged_plot( + rgrid / Rp, + altmax, + path, + Te, + radheat, + rho, + radcool, + expcool, + advheat, + advcool, + ) + # calculate these terms for the output converged.txt file - for fast access of some key parameters without loading in the Cloudy sim. 
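(Aside, not part of the patch: converged.txt is written below as a plain space-separated table with a one-line header, so key quantities can later be inspected without loading the Cloudy output. The folder path in this sketch is hypothetical.)

    import pandas as pd

    df = pd.read_csv("/path/to/simulation/folder/converged.txt", sep=r"\s+")
    print(df.columns.tolist())
    # ['R', 'rho', 'Te', 'mu', 'radheat', 'radcool', 'expcool', 'advheat', 'advcool']
    print(df[["R", "Te", "mu"]].head())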
+ np.savetxt( + path + "converged.txt", + np.column_stack( + ( + rgrid / Rp, + rho, + Te, + mu, + radheat, + radcool, + expcool, + advheat, + advcool, + ) + ), + fmt="%1.5e", + header="R rho Te mu radheat radcool expcool advheat advcool", + comments="", + ) + + # we run the last simulation one more time but with all the output files + tools.copyadd_Cloudy_in( + path + "iteration" + str(itno - 1), + path + "converged", + outfiles=[".heat", ".den", ".en"], + denspecies=save_sp, + selected_den_levels=True, + hcfrac=0.01, + ) + tools.run_Cloudy("converged", folder=path) + tools.Sim( + path + "converged" + ) # read in the simulation, so we open the .en file (if it exists) and hence compress its size (see tools.process_energies()) + clean_converged_folder(path) # remove all non-converged files print(f"Temperature profile converged: {path}") - - break - else: #set up the next iteration - Cltlaw = tools.alt_array_to_Cloudy(rgrid, newTe, altmax, Rp, 1000) #convert the temperature profile to a table format accepted by Cloudy + break - tools.copyadd_Cloudy_in(path+'template', path+'iteration'+str(itno), tlaw=Cltlaw) #add temperature profile to the template input file - if itno != maxit: #no use running it if we are not entering the next while-loop iteration - tools.run_Cloudy(f'iteration{itno}', folder=path) + else: # set up the next iteration + Cltlaw = tools.alt_array_to_Cloudy( + rgrid, newTe, altmax, Rp, 1000 + ) # convert the temperature profile to a table format accepted by Cloudy + + tools.copyadd_Cloudy_in( + path + "template", path + "iteration" + str(itno), tlaw=Cltlaw + ) # add temperature profile to the template input file + if ( + itno != maxit + ): # no use running it if we are not entering the next while-loop iteration + tools.run_Cloudy(f"iteration{itno}", folder=path) else: print(f"Failed temperature convergence after {itno} iterations: {path}") diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index d59e5a5..0157116 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -829,9 +829,7 @@ def find_line_lowerstate_in_en_df(species, lineinfo, en_df, verbose=False): ] if len(matchedtermrow) == 1: - if ( - str(matchedtermrow.J.values[0]) == "nan" - ): + if str(matchedtermrow.J.values[0]) == "nan": # This can only happen if the Cloudy level is a term with # no J resolved. Then we use statistical weights to guess # how many of the atoms in this term state would be in the @@ -2794,9 +2792,7 @@ def __init__( if not isinstance(altmax, (float, int)): # can it actually be a float? I'm not sure if the code can handle it - # check and try. 
- raise TypeError( - "altmax must be set to a float or int" - ) + raise TypeError("altmax must be set to a float or int") if hasattr(self, "altmax"): if self.altmax != altmax: warnings.warn( @@ -2916,9 +2912,7 @@ def addv(self, alt, v, delete_negative=True): """ assert "ovr" in self.simfiles, "Simulation must have a 'save overview .ovr file" - assert ( - "alt" in self.ovr.columns - ), ( + assert "alt" in self.ovr.columns, ( "The .ovr file must have an altitude column (which in turn requires a " "known Rp and altmax)" ) From 38dbb34977899f063f5cd855a002652317a27e98 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:38:49 +0100 Subject: [PATCH 17/63] ignore a few checks --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index c73e032..df64f31 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -20,4 +20,4 @@ jobs: pip install pylint - name: Analysing the code with pylint run: | - pylint $(git ls-files '*.py') + pylint -d C0301,C0103,C0209 $(git ls-files '*.py') From c61567784ff127ac103f451daa887694b2efa0f8 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:39:26 +0100 Subject: [PATCH 18/63] update file --- src/sunbather/solveT.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index 3a4a9ba..2c692be 100644 --- a/src/sunbather/solveT.py +++ b/src/sunbather/solveT.py @@ -1,5 +1,5 @@ # sunbather imports -import sunbather.tools +import sunbather.tools as tools # other imports import pandas as pd From bb6a412065b4ca669c4c32c3e0bed2904d2d2222 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:41:58 +0100 Subject: [PATCH 19/63] Install dependencies --- .github/workflows/pylint.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index df64f31..99759ae 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -18,6 +18,7 @@ jobs: run: | python -m pip install --upgrade pip pip install pylint + pip install -e . 
- name: Analysing the code with pylint run: | pylint -d C0301,C0103,C0209 $(git ls-files '*.py') From dd14a5abc4d5c0d196d9e6e391f422ef0147aca5 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:45:32 +0100 Subject: [PATCH 20/63] relax astropy restrictions --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index fa231ce..5eb17de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ dependencies = [ "pandas >= 1.1.4, <3", "matplotlib >= 3.7.1, <4", "scipy >= 1.9.0, <1.14", - "astropy >= 5.3, <7", + "astropy >= 5.0, <7", "p-winds >= 1.3.4, <2", ] requires-python = ">= 3.9" From d0338f78e9ee932739404d44842884ac8f8fb3e8 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:47:56 +0100 Subject: [PATCH 21/63] Don't test python 3.8 - not supported --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 99759ae..76ff17f 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -7,7 +7,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10"] + python-version: ["3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} From 722a2984a396a7cc7e18e3d080701a9fcafae273 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 29 Oct 2024 15:58:28 +0100 Subject: [PATCH 22/63] Accept pylint score 7.4 or higher --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 76ff17f..47a45b7 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -21,4 +21,4 @@ jobs: pip install -e . - name: Analysing the code with pylint run: | - pylint -d C0301,C0103,C0209 $(git ls-files '*.py') + pylint -d C0301,C0103,C0209 --fail-under 7.4 $(git ls-files '*.py') From c31bf3c3d118dff671de40c025e24553358cf5bf Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 13:32:12 +0100 Subject: [PATCH 23/63] updates --- src/sunbather/RT.py | 650 +++++++++++++++++++++--------- src/sunbather/construct_parker.py | 22 +- src/sunbather/convergeT_parker.py | 7 +- src/sunbather/install_cloudy.py | 8 +- 4 files changed, 473 insertions(+), 214 deletions(-) diff --git a/src/sunbather/RT.py b/src/sunbather/RT.py index 565a155..28b2f42 100644 --- a/src/sunbather/RT.py +++ b/src/sunbather/RT.py @@ -1,4 +1,4 @@ -# other imports +import warnings import pandas as pd import numpy as np import numpy.ma as ma @@ -6,16 +6,23 @@ from scipy.special import voigt_profile from scipy.integrate import trapezoid from scipy.ndimage import gaussian_filter1d -import warnings -# sunbather imports import sunbather.tools as tools -sigt0 = 2.654e-2 #cm2 s-1 = cm2 Hz, from Axner et al. 2004 +sigt0 = 2.654e-2 # cm2 s-1 = cm2 Hz, from Axner et al. 2004 -def project_1D_to_2D(r1, q1, Rp, numb=101, x_projection=False, cut_at=None, - skip_alt_range=None, skip_alt_range_dayside=None, skip_alt_range_nightside=None): +def project_1D_to_2D( + r1, + q1, + Rp, + numb=101, + x_projection=False, + cut_at=None, + skip_alt_range=None, + skip_alt_range_dayside=None, + skip_alt_range_nightside=None, +): """ Projects a 1D sub-stellar solution onto a 2D grid. 
This function preserves the maximum altitude of the 1D ray, so that the 2D output looks like a half @@ -80,30 +87,48 @@ def project_1D_to_2D(r1, q1, Rp, numb=101, x_projection=False, cut_at=None, assert r1[1] > r1[0], "arrays must be in order of ascending altitude" - b_edges = np.logspace(np.log10(0.1*Rp), np.log10(r1[-1] - 0.9*Rp), num=numb) + 0.9*Rp #impact parameters for 2D rays - these are the boundaries of the 'rays' - b_centers = (b_edges[1:] + b_edges[:-1]) / 2. #these are the actual positions of the rays and this is where the quantity is calculated at - xhalf = np.logspace(np.log10(0.101*Rp), np.log10(r1[-1]+0.1*Rp), num=numb) - 0.1*Rp #positive x grid - x = np.concatenate((-xhalf[::-1], xhalf)) #total x grid with both negative and positive values (for day- and nightside) + b_edges = ( + np.logspace(np.log10(0.1 * Rp), np.log10(r1[-1] - 0.9 * Rp), num=numb) + + 0.9 * Rp + ) # impact parameters for 2D rays - these are the boundaries of the 'rays' + b_centers = ( + b_edges[1:] + b_edges[:-1] + ) / 2.0 # these are the actual positions of the rays and this is where the quantity is calculated at + xhalf = ( + np.logspace(np.log10(0.101 * Rp), np.log10(r1[-1] + 0.1 * Rp), num=numb) + - 0.1 * Rp + ) # positive x grid + x = np.concatenate( + (-xhalf[::-1], xhalf) + ) # total x grid with both negative and positive values (for day- and nightside) xx, bb = np.meshgrid(x, b_centers) - rr = np.sqrt(bb**2 + xx**2) #radii from planet core in 2D + rr = np.sqrt(bb**2 + xx**2) # radii from planet core in 2D - q2 = interp1d(r1, q1, fill_value=0., bounds_error=False)(rr) + q2 = interp1d(r1, q1, fill_value=0.0, bounds_error=False)(rr) if x_projection: - q2 = q2 * xx / rr #now q2 is the projection in the x-direction + q2 = q2 * xx / rr # now q2 is the projection in the x-direction - if cut_at != None: #set values to zero outside the cut_at boundary - q2[rr > cut_at] = 0. + if cut_at != None: # set values to zero outside the cut_at boundary + q2[rr > cut_at] = 0.0 - #some options that were used in Linssen&Oklopcic (2023) to find where the line contribution comes from: + # some options that were used in Linssen&Oklopcic (2023) to find where the line contribution comes from: if skip_alt_range is not None: assert skip_alt_range[0] < skip_alt_range[1] - q2[(rr > skip_alt_range[0]) & (rr < skip_alt_range[1])] = 0. + q2[(rr > skip_alt_range[0]) & (rr < skip_alt_range[1])] = 0.0 if skip_alt_range_dayside is not None: assert skip_alt_range_dayside[0] < skip_alt_range_dayside[1] - q2[(rr > skip_alt_range_dayside[0]) & (rr < skip_alt_range_dayside[1]) & (xx < 0.)] = 0. + q2[ + (rr > skip_alt_range_dayside[0]) + & (rr < skip_alt_range_dayside[1]) + & (xx < 0.0) + ] = 0.0 if skip_alt_range_nightside is not None: assert skip_alt_range_nightside[0] < skip_alt_range_nightside[1] - q2[(rr > skip_alt_range_nightside[0]) & (rr < skip_alt_range_nightside[1]) & (xx > 0.)] = 0. + q2[ + (rr > skip_alt_range_nightside[0]) + & (rr < skip_alt_range_nightside[1]) + & (xx > 0.0) + ] = 0.0 return b_edges, b_centers, x, q2 @@ -137,9 +162,13 @@ def limbdark_quad(mu, ab): limb darkening law. """ - a, b = ab[:,0], ab[:,1] - I = 1 - a[:,None,None]*(1-mu[None,:,:]) - b[:,None,None]*(1-mu[None,:,:])**2 - + a, b = ab[:, 0], ab[:, 1] + I = ( + 1 + - a[:, None, None] * (1 - mu[None, :, :]) + - b[:, None, None] * (1 - mu[None, :, :]) ** 2 + ) + return I @@ -164,19 +193,21 @@ def avg_limbdark_quad(ab): the quadratic limb darkening law. 
""" - a, b = ab[:,0], ab[:,1] - rf = np.linspace(0, 1, num=1000) #sample the stellar disk in 1000 rings - rfm = (rf[:-1] + rf[1:])/2 #midpoints - mu = np.sqrt(1 - rfm**2) #mu of each ring - I = 1 - a[:,None]*(1-mu[None,:]) - b[:,None]*(1-mu[None,:])**2 #I of each ring - projsurf = np.pi*(rf[1:]**2 - rf[:-1]**2) #area of each ring + a, b = ab[:, 0], ab[:, 1] + rf = np.linspace(0, 1, num=1000) # sample the stellar disk in 1000 rings + rfm = (rf[:-1] + rf[1:]) / 2 # midpoints + mu = np.sqrt(1 - rfm**2) # mu of each ring + I = ( + 1 - a[:, None] * (1 - mu[None, :]) - b[:, None] * (1 - mu[None, :]) ** 2 + ) # I of each ring + projsurf = np.pi * (rf[1:] ** 2 - rf[:-1] ** 2) # area of each ring - I_avg = np.sum(I * projsurf, axis=1) / np.pi #sum over the radial axis + I_avg = np.sum(I * projsurf, axis=1) / np.pi # sum over the radial axis return I_avg -def calc_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.): +def calc_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.0): """ Calculates optical depth using Eq. 19 from Oklopcic&Hirata 2018. Does this at once for all rays, lines and frequencies. When doing @@ -229,16 +260,29 @@ def calc_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.): if not isinstance(gamma, np.ndarray): gamma = np.array([gamma]) - gaus_sigma = np.sqrt(tools.k * Te[None,None,:] / m + 0.5*v_turb**2) * nu0[None,:,None,None] / tools.c - #the following has a minus sign like in Eq. 21 of Oklopcic&Hirata (2018) because their formula is only correct if you take v_LOS from star->planet i.e. vx - Delnu = (nu[:,None,None,None] - nu0[None,:,None,None]) - nu0[None,:,None,None] / tools.c * vx[None,None,:] - tau_cube = trapezoid(ndens[None,None,:] * sig0[None,:,None,None] * voigt_profile(Delnu, gaus_sigma, gamma[None,:,None,None]), x=x) - tau = np.sum(tau_cube, axis=1) #sum up the contributions of the different lines -> now tau has axis 0:freq, axis 1:rayno + gaus_sigma = ( + np.sqrt(tools.k * Te[None, None, :] / m + 0.5 * v_turb**2) + * nu0[None, :, None, None] + / tools.c + ) + # the following has a minus sign like in Eq. 21 of Oklopcic&Hirata (2018) because their formula is only correct if you take v_LOS from star->planet i.e. vx + Delnu = (nu[:, None, None, None] - nu0[None, :, None, None]) - nu0[ + None, :, None, None + ] / tools.c * vx[None, None, :] + tau_cube = trapezoid( + ndens[None, None, :] + * sig0[None, :, None, None] + * voigt_profile(Delnu, gaus_sigma, gamma[None, :, None, None]), + x=x, + ) + tau = np.sum( + tau_cube, axis=1 + ) # sum up the contributions of the different lines -> now tau has axis 0:freq, axis 1:rayno return tau -def calc_cum_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.): +def calc_cum_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.0): """ Calculates cumulative optical depth using Eq. 19 from Oklopcic&Hirata 2018, at one particular frequency. Does this at once for all rays and lines. @@ -290,19 +334,33 @@ def calc_cum_tau(x, ndens, Te, vx, nu, nu0, m, sig0, gamma, v_turb=0.): if not isinstance(gamma, np.ndarray): gamma = np.array([gamma]) - gaus_sigma = np.sqrt(tools.k * Te[None,None,:] / m + 0.5*v_turb**2) * nu0[None,:,None,None] / tools.c - #the following has a minus sign like in Eq. 21 of Oklopcic&Hirata (2018) because their formula is only correct if you take v_LOS from star->planet i.e. 
vx - Delnu = (nu - nu0[:,None,None]) - nu0[:,None,None] / tools.c * vx[None,:] - integrand = ndens[None,:] * sig0[:,None,None] * voigt_profile(Delnu, gaus_sigma, gamma[:,None,None]) + gaus_sigma = ( + np.sqrt(tools.k * Te[None, None, :] / m + 0.5 * v_turb**2) + * nu0[None, :, None, None] + / tools.c + ) + # the following has a minus sign like in Eq. 21 of Oklopcic&Hirata (2018) because their formula is only correct if you take v_LOS from star->planet i.e. vx + Delnu = (nu - nu0[:, None, None]) - nu0[:, None, None] / tools.c * vx[None, :] + integrand = ( + ndens[None, :] + * sig0[:, None, None] + * voigt_profile(Delnu, gaus_sigma, gamma[:, None, None]) + ) bin_tau = np.zeros_like(integrand) - bin_tau[:,:,1:] = (integrand[:,:,1:] + np.roll(integrand, 1, axis=2)[:,:,1:])/2. * np.diff(x)[None,None,:] - bin_tau = np.sum(bin_tau, axis=0) #sum up contribution of different lines, now bin_tau has same shape as Te - cum_tau = np.cumsum(bin_tau, axis=1) #do cumulative sum over the x-direction + bin_tau[:, :, 1:] = ( + (integrand[:, :, 1:] + np.roll(integrand, 1, axis=2)[:, :, 1:]) + / 2.0 + * np.diff(x)[None, None, :] + ) + bin_tau = np.sum( + bin_tau, axis=0 + ) # sum up contribution of different lines, now bin_tau has same shape as Te + cum_tau = np.cumsum(bin_tau, axis=1) # do cumulative sum over the x-direction return cum_tau, bin_tau -def tau_to_FinFout(b_edges, tau, Rs, bp=0., ab=np.zeros(2), a=0., phase=0.): +def tau_to_FinFout(b_edges, tau, Rs, bp=0.0, ab=np.zeros(2), a=0.0, phase=0.0): """ Takes in optical depth values and calculates the Fin/Fout transit spectrum, using the stellar radius and optional limb darkening and transit phase @@ -341,25 +399,47 @@ def tau_to_FinFout(b_edges, tau, Rs, bp=0., ab=np.zeros(2), a=0., phase=0.): """ if ab.ndim == 1: - ab = ab[None,:] - - #add some impact parameters and tau=inf bins that make up the planet core: - b_edges = np.concatenate((np.linspace(0, b_edges[0], num=50, endpoint=False), b_edges)) - b_centers = (b_edges[1:] + b_edges[:-1]) / 2 #calculate bin centers with the added planet core rays included - tau = np.concatenate((np.ones((np.shape(tau)[0], 50))*np.inf, tau), axis=1) - - projsurf = np.pi*(b_edges[1:]**2 - b_edges[:-1]**2) #ring surface of each ray (now has same length as b_centers) - phis = np.linspace(0, 2*np.pi, num=500, endpoint=False) #divide rings into different angles phi - #rc is the distance to stellar center. Axis 0: radial rings, axis 1: phi - rc = np.sqrt((bp*Rs + b_centers[:,None]*np.cos(phis[None,:]))**2 + (b_centers[:,None]*np.sin(phis[None,:]) + a*np.sin(2*np.pi*phase))**2) - rc = ma.masked_where(rc > Rs, rc) #will ensure I is masked (and later set to 0) outside stellar projected disk - mu = np.sqrt(1 - (rc/Rs)**2) #angle, see 'limbdark_quad' function + ab = ab[None, :] + + # add some impact parameters and tau=inf bins that make up the planet core: + b_edges = np.concatenate( + (np.linspace(0, b_edges[0], num=50, endpoint=False), b_edges) + ) + b_centers = ( + b_edges[1:] + b_edges[:-1] + ) / 2 # calculate bin centers with the added planet core rays included + tau = np.concatenate((np.ones((np.shape(tau)[0], 50)) * np.inf, tau), axis=1) + + projsurf = np.pi * ( + b_edges[1:] ** 2 - b_edges[:-1] ** 2 + ) # ring surface of each ray (now has same length as b_centers) + phis = np.linspace( + 0, 2 * np.pi, num=500, endpoint=False + ) # divide rings into different angles phi + # rc is the distance to stellar center. 
Axis 0: radial rings, axis 1: phi + rc = np.sqrt( + (bp * Rs + b_centers[:, None] * np.cos(phis[None, :])) ** 2 + + (b_centers[:, None] * np.sin(phis[None, :]) + a * np.sin(2 * np.pi * phase)) + ** 2 + ) + rc = ma.masked_where( + rc > Rs, rc + ) # will ensure I is masked (and later set to 0) outside stellar projected disk + mu = np.sqrt(1 - (rc / Rs) ** 2) # angle, see 'limbdark_quad' function I = limbdark_quad(mu, ab) - Ir_avg = np.sum(I, axis=2) / len(phis) #average I per ray - Ir_avg = Ir_avg.filled(fill_value=0.) #convert back to regular numpy array - Is_avg = avg_limbdark_quad(ab) #average I of the full stellar disk - - FinFout = np.ones_like(tau[:,0]) - np.sum(((1 - np.exp(-tau)) * Ir_avg*projsurf[None,:]/(Is_avg[:,None]*np.pi*Rs**2)), axis=1) + Ir_avg = np.sum(I, axis=2) / len(phis) # average I per ray + Ir_avg = Ir_avg.filled(fill_value=0.0) # convert back to regular numpy array + Is_avg = avg_limbdark_quad(ab) # average I of the full stellar disk + + FinFout = np.ones_like(tau[:, 0]) - np.sum( + ( + (1 - np.exp(-tau)) + * Ir_avg + * projsurf[None, :] + / (Is_avg[:, None] * np.pi * Rs**2) + ), + axis=1, + ) return FinFout @@ -382,28 +462,51 @@ def read_NIST_lines(species, wavlower=None, wavupper=None): Line coefficients needed for radiative transfer calculations. """ - spNIST = pd.read_table(tools.sunbatherpath+'/RT_tables/'+species+'_lines_NIST.txt') #line info - #remove lines with nan fik or Aik values. Note that lineno doesn't change (uses index instead of rowno.) + spNIST = pd.read_table( + tools.sunbatherpath + "/RT_tables/" + species + "_lines_NIST.txt" + ) # line info + # remove lines with nan fik or Aik values. Note that lineno doesn't change (uses index instead of rowno.) spNIST = spNIST[spNIST.fik.notna()] - spNIST = spNIST[spNIST['Aki(s^-1)'].notna()] + spNIST = spNIST[spNIST["Aki(s^-1)"].notna()] if spNIST.empty: warnings.warn(f"No lines with necessary coefficients found for {species}") return spNIST - if type(spNIST['Ei(Ry)'].iloc[0]) == str: #if there are no [](), the datatype will be float already - spNIST['Ei(Ry)'] = spNIST['Ei(Ry)'].str.extract('(\d+)', expand=False).astype(float) #remove non-numeric characters such as [] and () - spNIST['sig0'] = sigt0 * spNIST.fik - spNIST['nu0'] = tools.c*1e8 / (spNIST['ritz_wl_vac(A)']) #speed of light to AA/s - spNIST['lorgamma'] = spNIST['Aki(s^-1)'] / (4*np.pi) #lorentzian gamma is not function of depth or nu. Value in Hz + if ( + type(spNIST["Ei(Ry)"].iloc[0]) == str + ): # if there are no [](), the datatype will be float already + spNIST["Ei(Ry)"] = ( + spNIST["Ei(Ry)"].str.extract("(\d+)", expand=False).astype(float) + ) # remove non-numeric characters such as [] and () + spNIST["sig0"] = sigt0 * spNIST.fik + spNIST["nu0"] = tools.c * 1e8 / (spNIST["ritz_wl_vac(A)"]) # speed of light to AA/s + spNIST["lorgamma"] = spNIST["Aki(s^-1)"] / ( + 4 * np.pi + ) # lorentzian gamma is not function of depth or nu. 
Value in Hz if wavlower != None: - spNIST.drop(labels=spNIST.index[spNIST['ritz_wl_vac(A)'] <= wavlower], inplace=True) + spNIST.drop( + labels=spNIST.index[spNIST["ritz_wl_vac(A)"] <= wavlower], inplace=True + ) if wavupper != None: - spNIST.drop(labels=spNIST.index[spNIST['ritz_wl_vac(A)'] >= wavupper], inplace=True) + spNIST.drop( + labels=spNIST.index[spNIST["ritz_wl_vac(A)"] >= wavupper], inplace=True + ) return spNIST -def FinFout(sim, wavsAA, species, numrays=100, width_fac=1., ab=np.zeros(2), phase=0., phase_bulkshift=False, v_turb=0., cut_at=None): +def FinFout( + sim, + wavsAA, + species, + numrays=100, + width_fac=1.0, + ab=np.zeros(2), + phase=0.0, + phase_bulkshift=False, + v_turb=0.0, + cut_at=None, +): """ Calculates a transit spectrum in units of in-transit flux / out-of-transit flux (i.e., Fin/Fout). Only spectral lines originating from provided species will be calculated. @@ -411,7 +514,7 @@ def FinFout(sim, wavsAA, species, numrays=100, width_fac=1., ab=np.zeros(2), pha Parameters ---------- sim : tools.Sim - Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and + Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and tools.Parker class attributes. wavsAA : array-like Wavelengths to calculate transit spectrum on, in units of Å (1D array). @@ -465,19 +568,27 @@ def FinFout(sim, wavsAA, species, numrays=100, width_fac=1., ab=np.zeros(2), pha but which could not be calculated due to their excitation state not being reported by Cloudy. """ - assert hasattr(sim, 'p'), "The sim must have an attributed Planet object" - assert 'v' in sim.ovr.columns, "We need a velocity structure, such as that from adding a Parker object to the sim" - assert hasattr(sim, 'den'), "The sim must have a .den file that stores the densities of the atomic/ionic excitation states. " \ - "Please re-run your Cloudy simulation while saving these. Either re-run sunbather.convergeT_parker.py " \ - "with the -save_sp flag, or use the tools.insertden_Cloudy_in() function with rerun=True." - - ab = np.array(ab) #turn possible list into array + assert hasattr(sim, "p"), "The sim must have an attributed Planet object" + assert ( + "v" in sim.ovr.columns + ), "We need a velocity structure, such as that from adding a Parker object to the sim" + assert hasattr(sim, "den"), ( + "The sim must have a .den file that stores the densities of the atomic/ionic excitation states. " + "Please re-run your Cloudy simulation while saving these. Either re-run sunbather.convergeT_parker.py " + "with the -save_sp flag, or use the tools.insertden_Cloudy_in() function with rerun=True." 
+ ) + + ab = np.array(ab) # turn possible list into array if ab.ndim == 1: - ab = ab[None,:] #add frequency axis - assert ab.ndim == 2 and np.shape(ab)[1] == 2 and (np.shape(ab)[0] == 1 or np.shape(ab)[0] == len(wavsAA)), "Give ab as shape (1,2) or (2,) or (len(wavsAA),2)" + ab = ab[None, :] # add frequency axis + assert ( + ab.ndim == 2 + and np.shape(ab)[1] == 2 + and (np.shape(ab)[0] == 1 or np.shape(ab)[0] == len(wavsAA)) + ), "Give ab as shape (1,2) or (2,) or (len(wavsAA),2)" Rs, Rp = sim.p.Rstar, sim.p.R - nus = tools.c*1e8 / wavsAA #Hz, converted c to AA/s + nus = tools.c * 1e8 / wavsAA # Hz, converted c to AA/s r1 = sim.ovr.alt.values[::-1] Te1 = sim.ovr.Te.values[::-1] @@ -487,70 +598,119 @@ def FinFout(sim, wavsAA, species, numrays=100, width_fac=1., ab=np.zeros(2), pha be, _, x, vx = project_1D_to_2D(r1, v1, Rp, numb=numrays, x_projection=True) if phase_bulkshift: - assert hasattr(sim.p, 'Kp'), "The Planet object does not have a Kp attribute, likely because either a, Mp or Mstar is unknown" - vx = vx - sim.p.Kp * np.sin(phase * 2*np.pi) #negative sign because x is defined as positive towards the observer. + assert hasattr( + sim.p, "Kp" + ), "The Planet object does not have a Kp attribute, likely because either a, Mp or Mstar is unknown" + vx = vx - sim.p.Kp * np.sin( + phase * 2 * np.pi + ) # negative sign because x is defined as positive towards the observer. state_ndens = {} - tau = np.zeros((len(wavsAA), len(be)-1)) + tau = np.zeros((len(wavsAA), len(be) - 1)) if isinstance(species, str): species = [species] - found_lines = [] #will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) - notfound_lines = [] #will store nu0 of all lines that were not found + found_lines = ( + [] + ) # will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) + notfound_lines = [] # will store nu0 of all lines that were not found for spec in species: if spec in sim.den.columns: - warnings.warn(f"Your requested species {spec} is not resolved into multiple energy levels by Cloudy. " + \ - f"I will make the spectrum assuming all {spec} is in the ground-state.") - elif not any(spec+"[" in col for col in sim.den.columns): - warnings.warn(f"Your requested species {spec} is not present in Cloudy's output, so the spectrum will be flat. " + \ - "Please re-do your Cloudy simulation while saving this species. Either use the tools.insertden_Cloudy_in() " + \ - "function, or run convergeT_parker.py again with the correct -save_sp arguments.") + warnings.warn( + f"Your requested species {spec} is not resolved into multiple energy levels by Cloudy. " + + f"I will make the spectrum assuming all {spec} is in the ground-state." + ) + elif not any(spec + "[" in col for col in sim.den.columns): + warnings.warn( + f"Your requested species {spec} is not present in Cloudy's output, so the spectrum will be flat. " + + "Please re-do your Cloudy simulation while saving this species. Either use the tools.insertden_Cloudy_in() " + + "function, or run convergeT_parker.py again with the correct -save_sp arguments." + ) continue spNIST = read_NIST_lines(spec, wavlower=wavsAA[0], wavupper=wavsAA[-1]) - - if len(species) == 1 and len(spNIST) == 0: - warnings.warn(f"Your requested species {spec} does not have any lines in this wavelength range (according to the NIST database), " \ - "so the spectrum will be flat.") - - for lineno in spNIST.index.values: #loop over all lines in the spNIST table. 
- gaus_sigma_max = np.sqrt(tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5*v_turb**2) * spNIST.nu0.loc[lineno] / tools.c #maximum stddev of Gaussian part - max_voigt_width = 5*(gaus_sigma_max+spNIST['lorgamma'].loc[lineno]) * width_fac #the max offset of Voigt components (=natural+thermal broad.) - linenu_low = (1 + np.min(vx)/tools.c) * spNIST.nu0.loc[lineno] - max_voigt_width - linenu_hi = (1 + np.max(vx)/tools.c) * spNIST.nu0.loc[lineno] + max_voigt_width - nus_line = nus[(nus > linenu_low) & (nus < linenu_hi)] #the frequency values that make sense to calculate for this line - if nus_line.size == 0: #then this line is not in our wav range and we skip it - continue #to next spectral line - - #get all columns in .den file which energy corresponds to this Ei - colname, lineweight = tools.find_line_lowerstate_in_en_df(spec, spNIST.loc[lineno], sim.en) - if colname == None: #we skip this line if the line energy is not found. - notfound_lines.append(spNIST['ritz_wl_vac(A)'][lineno]) - continue #to next spectral line - - found_lines.append((spNIST['ritz_wl_vac(A)'].loc[lineno], colname)) #if we got to here, we did find the spectral line + if len(species) == 1 and len(spNIST) == 0: + warnings.warn( + f"Your requested species {spec} does not have any lines in this wavelength range (according to the NIST database), " + "so the spectrum will be flat." + ) + + for lineno in spNIST.index.values: # loop over all lines in the spNIST table. + gaus_sigma_max = ( + np.sqrt( + tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5 * v_turb**2 + ) + * spNIST.nu0.loc[lineno] + / tools.c + ) # maximum stddev of Gaussian part + max_voigt_width = ( + 5 * (gaus_sigma_max + spNIST["lorgamma"].loc[lineno]) * width_fac + ) # the max offset of Voigt components (=natural+thermal broad.) + linenu_low = (1 + np.min(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] - max_voigt_width + linenu_hi = (1 + np.max(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] + max_voigt_width + + nus_line = nus[ + (nus > linenu_low) & (nus < linenu_hi) + ] # the frequency values that make sense to calculate for this line + if ( + nus_line.size == 0 + ): # then this line is not in our wav range and we skip it + continue # to next spectral line + + # get all columns in .den file which energy corresponds to this Ei + colname, lineweight = tools.find_line_lowerstate_in_en_df( + spec, spNIST.loc[lineno], sim.en + ) + if colname == None: # we skip this line if the line energy is not found. + notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) + continue # to next spectral line + + found_lines.append( + (spNIST["ritz_wl_vac(A)"].loc[lineno], colname) + ) # if we got to here, we did find the spectral line if colname in state_ndens.keys(): ndens = state_ndens[colname] else: ndens1 = sim.den[colname].values[::-1] - be, _, x, ndens = project_1D_to_2D(r1, ndens1, Rp, numb=numrays, cut_at=cut_at) - state_ndens[colname] = ndens #add to dictionary for future reference - - ndens_lw = ndens*lineweight #important that we make this a new variable as otherwise state_ndens would change as well! 
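(Aside, not part of the patch: the remark above relies on the fact that multiplying a numpy array allocates a new array, so the entry cached in state_ndens stays unscaled; an in-place operation would have changed it for every later line that reuses the same level.)

    import numpy as np

    cached = np.array([1.0, 2.0, 3.0])
    scaled = cached * 0.5   # new array; `cached` keeps its values
    alias = cached
    alias *= 0.5            # in-place; `cached` is now halved as well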
- - tau_line = calc_tau(x, ndens_lw, Te, vx, nus_line, spNIST.nu0.loc[lineno], tools.get_mass(spec), spNIST.sig0.loc[lineno], spNIST['lorgamma'].loc[lineno], v_turb=v_turb) - tau[(nus > linenu_low) & (nus < linenu_hi), :] += tau_line #add the tau values to the correct nu bins + be, _, x, ndens = project_1D_to_2D( + r1, ndens1, Rp, numb=numrays, cut_at=cut_at + ) + state_ndens[colname] = ndens # add to dictionary for future reference + + ndens_lw = ( + ndens * lineweight + ) # important that we make this a new variable as otherwise state_ndens would change as well! + + tau_line = calc_tau( + x, + ndens_lw, + Te, + vx, + nus_line, + spNIST.nu0.loc[lineno], + tools.get_mass(spec), + spNIST.sig0.loc[lineno], + spNIST["lorgamma"].loc[lineno], + v_turb=v_turb, + ) + tau[ + (nus > linenu_low) & (nus < linenu_hi), : + ] += tau_line # add the tau values to the correct nu bins FinFout = tau_to_FinFout(be, tau, Rs, bp=sim.p.bp, ab=ab, phase=phase, a=sim.p.a) return FinFout, found_lines, notfound_lines -def tau_1D(sim, wavAA, species, width_fac=1., v_turb=0.): +def tau_1D(sim, wavAA, species, width_fac=1.0, v_turb=0.0): """ Maps out the optical depth at one specific wavelength. The running integral of the optical deph is calculated at each depth of the ray. @@ -563,7 +723,7 @@ def tau_1D(sim, wavAA, species, width_fac=1., v_turb=0.): Parameters ---------- sim : tools.Sim - Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and + Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and tools.Parker class attributes. wavAA : numeric Wavelength to calculate the optical depths at, in units of Å. @@ -596,56 +756,95 @@ def tau_1D(sim, wavAA, species, width_fac=1., v_turb=0.): but which could not be calculated due to their excitation state not being reported by Cloudy. """ - assert isinstance(wavAA, float) or isinstance(wavAA, int), "Pass one wavelength in Å as a float or int" - assert hasattr(sim, 'p'), "The sim must have an attributed Planet object" - assert 'v' in sim.ovr.columns, "We need a velocity structure, such as that from adding a Parker object to the sim." + assert isinstance(wavAA, float) or isinstance( + wavAA, int + ), "Pass one wavelength in Å as a float or int" + assert hasattr(sim, "p"), "The sim must have an attributed Planet object" + assert ( + "v" in sim.ovr.columns + ), "We need a velocity structure, such as that from adding a Parker object to the sim." Rs, Rp = sim.p.Rstar, sim.p.R - nu = tools.c*1e8 / wavAA #Hz, converted c to AA/s + nu = tools.c * 1e8 / wavAA # Hz, converted c to AA/s d = sim.ovr.depth.values Te = sim.ovr.Te.values - v = sim.ovr.v.values #radial velocity - vx = -v #because we do the substellar ray which is towards the -x direction + v = sim.ovr.v.values # radial velocity + vx = -v # because we do the substellar ray which is towards the -x direction tot_cum_tau, tot_bin_tau = np.zeros_like(d), np.zeros_like(d) if isinstance(species, str): species = [species] - found_lines = [] #will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) - notfound_lines = [] #will store nu0 of all lines that were not found + found_lines = ( + [] + ) # will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) + notfound_lines = [] # will store nu0 of all lines that were not found for spec in species: spNIST = read_NIST_lines(spec) - for lineno in spNIST.index.values: #loop over all lines in the spNIST table. 
- gaus_sigma_max = np.sqrt(tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5*v_turb**2) * spNIST.nu0.loc[lineno] / tools.c #maximum stddev of Gaussian part - max_voigt_width = 5*(gaus_sigma_max+spNIST['lorgamma'].loc[lineno]) * width_fac #the max offset of Voigt components (=natural+thermal broad.) - linenu_low = (1 + np.min(vx)/tools.c) * spNIST.nu0.loc[lineno] - max_voigt_width - linenu_hi = (1 + np.max(vx)/tools.c) * spNIST.nu0.loc[lineno] + max_voigt_width - - if (nu < linenu_low) | (nu > linenu_hi): #then this line does not probe our requested wav and we skip it - continue #to next spectral line - - #get all columns in .den file which energy corresponds to this Ei - colname, lineweight = tools.find_line_lowerstate_in_en_df(spec, spNIST.loc[lineno], sim.en) - if colname == None: #we skip this line if the line energy is not found. - notfound_lines.append(spNIST['ritz_wl_vac(A)'][lineno]) - continue #to next spectral line - - found_lines.append((spNIST['ritz_wl_vac(A)'].loc[lineno], colname)) #if we got to here, we did find the spectral line - - ndens = sim.den[colname].values * lineweight #see explanation in FinFout_2D function - - cum_tau, bin_tau = calc_cum_tau(d, ndens, Te, vx, nu, spNIST.nu0.loc[lineno], tools.get_mass(spec), spNIST.sig0.loc[lineno], spNIST['lorgamma'].loc[lineno], v_turb=v_turb) - tot_cum_tau += cum_tau[0] #add the tau values to the total (of all species & lines together) + for lineno in spNIST.index.values: # loop over all lines in the spNIST table. + gaus_sigma_max = ( + np.sqrt( + tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5 * v_turb**2 + ) + * spNIST.nu0.loc[lineno] + / tools.c + ) # maximum stddev of Gaussian part + max_voigt_width = ( + 5 * (gaus_sigma_max + spNIST["lorgamma"].loc[lineno]) * width_fac + ) # the max offset of Voigt components (=natural+thermal broad.) + linenu_low = (1 + np.min(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] - max_voigt_width + linenu_hi = (1 + np.max(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] + max_voigt_width + + if (nu < linenu_low) | ( + nu > linenu_hi + ): # then this line does not probe our requested wav and we skip it + continue # to next spectral line + + # get all columns in .den file which energy corresponds to this Ei + colname, lineweight = tools.find_line_lowerstate_in_en_df( + spec, spNIST.loc[lineno], sim.en + ) + if colname == None: # we skip this line if the line energy is not found. + notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) + continue # to next spectral line + + found_lines.append( + (spNIST["ritz_wl_vac(A)"].loc[lineno], colname) + ) # if we got to here, we did find the spectral line + + ndens = ( + sim.den[colname].values * lineweight + ) # see explanation in FinFout_2D function + + cum_tau, bin_tau = calc_cum_tau( + d, + ndens, + Te, + vx, + nu, + spNIST.nu0.loc[lineno], + tools.get_mass(spec), + spNIST.sig0.loc[lineno], + spNIST["lorgamma"].loc[lineno], + v_turb=v_turb, + ) + tot_cum_tau += cum_tau[ + 0 + ] # add the tau values to the total (of all species & lines together) tot_bin_tau += bin_tau[0] return tot_cum_tau, tot_bin_tau, found_lines, notfound_lines -def tau_12D(sim, wavAA, species, width_fac=1., v_turb=0., cut_at=None): +def tau_12D(sim, wavAA, species, width_fac=1.0, v_turb=0.0, cut_at=None): """ Maps out the optical depth at one specific wavelength. 
The running integral of the optical deph is calculated at each stellar light ray @@ -655,7 +854,7 @@ def tau_12D(sim, wavAA, species, width_fac=1., v_turb=0., cut_at=None): Parameters ---------- sim : tools.Sim - Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and + Cloudy simulation output of an upper atmosphere. Needs to have tools.Planet and tools.Parker class attributes. wavAA : numeric Wavelength to calculate the optical depths at, in units of Å. @@ -692,49 +891,93 @@ def tau_12D(sim, wavAA, species, width_fac=1., v_turb=0., cut_at=None): but which could not be calculated due to their excitation state not being reported by Cloudy. """ - assert isinstance(wavAA, float) or isinstance(wavAA, int), "Pass one wavelength in Å as a float or int" - assert hasattr(sim, 'p') - assert 'v' in sim.ovr.columns, "We need a velocity structure, such as that from adding a Parker object to the sim." + assert isinstance(wavAA, float) or isinstance( + wavAA, int + ), "Pass one wavelength in Å as a float or int" + assert hasattr(sim, "p") + assert ( + "v" in sim.ovr.columns + ), "We need a velocity structure, such as that from adding a Parker object to the sim." - nu = tools.c*1e8 / wavAA #Hz, converted c to AA/s + nu = tools.c * 1e8 / wavAA # Hz, converted c to AA/s - be, bc, x, Te = project_1D_to_2D(sim.ovr.alt.values[::-1], sim.ovr.Te.values[::-1], sim.p.R) - be, bc, x, vx = project_1D_to_2D(sim.ovr.alt.values[::-1], sim.ovr.v.values[::-1], sim.p.R, x_projection=True) + be, bc, x, Te = project_1D_to_2D( + sim.ovr.alt.values[::-1], sim.ovr.Te.values[::-1], sim.p.R + ) + be, bc, x, vx = project_1D_to_2D( + sim.ovr.alt.values[::-1], sim.ovr.v.values[::-1], sim.p.R, x_projection=True + ) tot_cum_tau, tot_bin_tau = np.zeros_like(vx), np.zeros_like(vx) if isinstance(species, str): species = [species] - found_lines = [] #will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) - notfound_lines = [] #will store nu0 of all lines that were not found + found_lines = ( + [] + ) # will store nu0 of all lines that were used (might be nice to make it a dict per species in future!) + notfound_lines = [] # will store nu0 of all lines that were not found for spec in species: spNIST = read_NIST_lines(spec) - for lineno in spNIST.index.values: #loop over all lines in the spNIST table. - gaus_sigma_max = np.sqrt(tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5*v_turb**2) * spNIST.nu0.loc[lineno] / tools.c #maximum stddev of Gaussian part - max_voigt_width = 5*(gaus_sigma_max+spNIST['lorgamma'].loc[lineno]) * width_fac #the max offset of Voigt components (=natural+thermal broad.) - linenu_low = (1 + np.min(vx)/tools.c) * spNIST.nu0.loc[lineno] - max_voigt_width - linenu_hi = (1 + np.max(vx)/tools.c) * spNIST.nu0.loc[lineno] + max_voigt_width - - if (nu < linenu_low) | (nu > linenu_hi): #then this line does not probe our requested wav and we skip it - continue #to next spectral line - - #get all columns in .den file which energy corresponds to this Ei - colname, lineweight = tools.find_line_lowerstate_in_en_df(spec, spNIST.loc[lineno], sim.en) - if colname == None: #we skip this line if the line energy is not found. - notfound_lines.append(spNIST['ritz_wl_vac(A)'][lineno]) - continue #to next spectral line - - found_lines.append((spNIST['ritz_wl_vac(A)'].loc[lineno], colname)) #if we got to here, we did find the spectral line - - #multiply with the lineweight! 
Such that for unresolved J, a line originating from J=1/2 does not also get density of J=3/2 state - _, _, _, ndens = project_1D_to_2D(sim.ovr.alt.values[::-1], sim.den[colname].values[::-1], sim.p.R, cut_at=cut_at) + for lineno in spNIST.index.values: # loop over all lines in the spNIST table. + gaus_sigma_max = ( + np.sqrt( + tools.k * np.nanmax(Te) / tools.get_mass(spec) + 0.5 * v_turb**2 + ) + * spNIST.nu0.loc[lineno] + / tools.c + ) # maximum stddev of Gaussian part + max_voigt_width = ( + 5 * (gaus_sigma_max + spNIST["lorgamma"].loc[lineno]) * width_fac + ) # the max offset of Voigt components (=natural+thermal broad.) + linenu_low = (1 + np.min(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] - max_voigt_width + linenu_hi = (1 + np.max(vx) / tools.c) * spNIST.nu0.loc[ + lineno + ] + max_voigt_width + + if (nu < linenu_low) | ( + nu > linenu_hi + ): # then this line does not probe our requested wav and we skip it + continue # to next spectral line + + # get all columns in .den file which energy corresponds to this Ei + colname, lineweight = tools.find_line_lowerstate_in_en_df( + spec, spNIST.loc[lineno], sim.en + ) + if colname == None: # we skip this line if the line energy is not found. + notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) + continue # to next spectral line + + found_lines.append( + (spNIST["ritz_wl_vac(A)"].loc[lineno], colname) + ) # if we got to here, we did find the spectral line + + # multiply with the lineweight! Such that for unresolved J, a line originating from J=1/2 does not also get density of J=3/2 state + _, _, _, ndens = project_1D_to_2D( + sim.ovr.alt.values[::-1], + sim.den[colname].values[::-1], + sim.p.R, + cut_at=cut_at, + ) ndens *= lineweight - cum_tau, bin_tau = calc_cum_tau(x, ndens, Te, vx, nu, spNIST.nu0.loc[lineno], tools.get_mass(spec), spNIST.sig0.loc[lineno], spNIST['lorgamma'].loc[lineno], v_turb=v_turb) - tot_cum_tau += cum_tau #add the tau values to the total (of all species & lines together) + cum_tau, bin_tau = calc_cum_tau( + x, + ndens, + Te, + vx, + nu, + spNIST.nu0.loc[lineno], + tools.get_mass(spec), + spNIST.sig0.loc[lineno], + spNIST["lorgamma"].loc[lineno], + v_turb=v_turb, + ) + tot_cum_tau += cum_tau # add the tau values to the total (of all species & lines together) tot_bin_tau += bin_tau return tot_cum_tau, tot_bin_tau, found_lines, notfound_lines @@ -758,7 +1001,7 @@ def FinFout2RpRs(FinFout): Transit spectrum in units of planet size / star size. """ - RpRs = np.sqrt(1-FinFout) + RpRs = np.sqrt(1 - FinFout) return RpRs @@ -781,7 +1024,7 @@ def RpRs2FinFout(RpRs): In-transit / out-transit flux values """ - FinFout = 1-RpRs**2 + FinFout = 1 - RpRs**2 return FinFout @@ -828,7 +1071,12 @@ def air2vac(wavs_air): """ s = 1e4 / wavs_air - n = 1 + 0.00008336624212083 + 0.02408926869968 / (130.1065924522 - s**2) + 0.0001599740894897 / (38.92568793293 - s**2) + n = ( + 1 + + 0.00008336624212083 + + 0.02408926869968 / (130.1065924522 - s**2) + + 0.0001599740894897 / (38.92568793293 - s**2) + ) wavs_vac = wavs_air * n return wavs_vac @@ -857,9 +1105,9 @@ def constantR_wavs(wav_lower, wav_upper, R): wavs = [] while wav < wav_upper: wavs.append(wav) - wav += wav/R + wav += wav / R wavs = np.array(wavs) - + return wavs @@ -868,7 +1116,7 @@ def convolve_spectrum_R(wavs, flux, R, verbose=False): Convolves a spectrum with a Gaussian filter down to a lower spectral resolution. This function uses a constant gaussian width that is calculated from the middle wavelength point. 
This means that it only works properly when the wavs array spans a relatively small bandwidth. - Since R = delta-lambda / lambda, if the bandwidth is too large, the assumption made here that + Since R = delta-lambda / lambda, if the bandwidth is too large, the assumption made here that delta-lambda is the same over the whole array will not be valid. Parameters @@ -889,16 +1137,24 @@ def convolve_spectrum_R(wavs, flux, R, verbose=False): """ assert wavs[1] > wavs[0], "Wavelengths must be in ascending order" - assert np.allclose(np.diff(wavs), np.diff(wavs)[0], atol=0., rtol=1e-5), "Wavelengths must be equidistant" + assert np.allclose( + np.diff(wavs), np.diff(wavs)[0], atol=0.0, rtol=1e-5 + ), "Wavelengths must be equidistant" if wavs[-1] / wavs[0] > 1.05: - warnings.warn("The wavelengths change by more than 5 percent in your array. Converting R into a constant delta-lambda becomes questionable.") + warnings.warn( + "The wavelengths change by more than 5 percent in your array. Converting R into a constant delta-lambda becomes questionable." + ) - delta_lambda = wavs[int(len(wavs)/2)] / R #width of the filter in wavelength - use middle wav point - FWHM = delta_lambda / np.diff(wavs)[0] #width of the filter in pixels - sigma = FWHM / (2*np.sqrt(2*np.log(2))) #std dev. of the gaussian in pixels + delta_lambda = ( + wavs[int(len(wavs) / 2)] / R + ) # width of the filter in wavelength - use middle wav point + FWHM = delta_lambda / np.diff(wavs)[0] # width of the filter in pixels + sigma = FWHM / (2 * np.sqrt(2 * np.log(2))) # std dev. of the gaussian in pixels if verbose: - print(f"R={R}, lamb={wavs[0]}, delta-lamb={delta_lambda}, FWHM={FWHM} pix, sigma={sigma} pix") + print( + f"R={R}, lamb={wavs[0]}, delta-lamb={delta_lambda}, FWHM={FWHM} pix, sigma={sigma} pix" + ) convolved_spectrum = gaussian_filter1d(flux, sigma) diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index 5ab5082..9ee5123 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -1,19 +1,19 @@ # other imports -import numpy as np import os import time +import argparse +import multiprocessing +import traceback +import warnings from shutil import copyfile +import numpy as np import matplotlib.pyplot as plt import astropy.units as u from p_winds import tools as pw_tools from p_winds import parker as pw_parker from p_winds import hydrogen as pw_hydrogen -from scipy.integrate import simpson, trapz +from scipy.integrate import simpson, trapezoid from scipy.interpolate import interp1d -import argparse -import multiprocessing -import traceback -import warnings # sunbather imports import sunbather.tools as tools @@ -530,11 +530,11 @@ def calc_mu_bar(sim): # Eq. A.3 of Lampón et al. 
2020 is a combination of several integrals, which # we calculate here - int_1 = simpson(mu_r / r**2, r) - int_2 = simpson(mu_r * v_r, v_r) - int_3 = trapz(mu_r, 1 / mu_r) - int_4 = simpson(1 / r**2, r) - int_5 = simpson(v_r, v_r) + int_1 = simpson(mu_r / r**2, x=r) + int_2 = simpson(mu_r * v_r, x=v_r) + int_3 = trapezoid(mu_r, 1 / mu_r) + int_4 = simpson(1 / r**2, x=r) + int_5 = simpson(v_r, x=v_r) int_6 = 1 / mu_r[-1] - 1 / mu_r[0] term_1 = grav * m_planet * int_1 + int_2 + k_b * temperature * int_3 term_2 = grav * m_planet * int_4 + int_5 + k_b * temperature * int_6 diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index ca14e4d..f0abf69 100644 --- a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -1,6 +1,4 @@ # other imports -import pandas as pd -import numpy as np import multiprocessing from shutil import copyfile import time @@ -8,10 +6,11 @@ import re import argparse import traceback +import pandas as pd +import numpy as np # sunbather imports -import sunbather.tools -import sunbather.solveT +from sunbather import tools, solveT def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index 9246f7c..29a4b02 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -36,14 +36,18 @@ def download(self): # Go to the v23 download page and download the "c23.01.tar.gz" file return - def compile(self): + def extract(self): """ - Extracts and builds Cloudy. + Extracts Cloudy. """ os.chdir(self.cloudypath) with tarfile.open(self.filename, "r:gz") as tar: tar.extractall(filter="data") + def compile(self): + """ + Compiles Cloudy. + """ os.chdir(f"{self.cloudypath}/c{self.version}/source/") subprocess.Popen( [ From 5a05e4c62596ed97c3b7b49473194ae2279fa6db Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 13:32:37 +0100 Subject: [PATCH 24/63] update test.py --- tests/test.py | 193 +++++++++++++++++++++++++++++++------------------- 1 file changed, 121 insertions(+), 72 deletions(-) diff --git a/tests/test.py b/tests/test.py index 720cbdd..a7bb374 100644 --- a/tests/test.py +++ b/tests/test.py @@ -1,106 +1,155 @@ import os import sys -#sunbather imports -import sunbather.tools as tools -import sunbather.RT as RT - -#other imports +# other imports import pandas as pd import numpy as np from scipy.interpolate import interp1d import shutil +# sunbather imports +from sunbather import tools, RT -print("\nWill perform installation check by running the three main sunbather modules and checking if the output is as expected. " \ - +"Expected total run-time: 10 to 60 minutes. Should print 'success' at the end.\n") +# the absolute path where this code lives +this_path = os.path.dirname(os.path.abspath(__file__)) +src_path = this_path.split('tests')[-2] + 'src/' -### SETUP CHECKS ### +print( + "\nWill perform installation check by running the three main sunbather modules and checking if the output is as expected. " + + "Expected total run-time: 10 to 60 minutes. 
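For context on the calc_mu_bar hunk above: newer SciPy releases drop scipy.integrate.trapz in favour of trapezoid, and simpson expects the sample points through the x= keyword rather than as a second positional argument, which is what the int_1 to int_5 changes reflect. A small illustrative check, with an arbitrary grid and integrand chosen only for this example:

    import numpy as np
    from scipy.integrate import simpson, trapezoid

    r = np.linspace(1.0, 8.0, 200)   # arbitrary radius grid
    y = 1.0 / r**2                   # integrand sampled on that grid

    int_simpson = simpson(y, x=r)    # sample points passed explicitly via x=
    int_trap = trapezoid(y, x=r)     # trapezoid replaces the removed trapz
    print(int_simpson, int_trap)     # both close to 1 - 1/8 = 0.875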
Should print 'success' at the end.\n" +) -#make sure projectpath exists -assert os.path.isdir(tools.projectpath), "Please create the projectpath folder on your machine" -#make sure the planets.txt file exists -assert os.path.isfile(tools.projectpath+'/planets.txt'), "Please make sure the 'planets.txt' file is present in $SUNBATHER_PROJECT_PATH" -#make sure the SED we need for this test has been copied to Cloudy -assert os.path.isfile(tools.cloudypath+'/data/SED/eps_Eri_binned.spec'), "Please copy /sunbather/stellar_SEDs/eps_Eri_binned.spec into $CLOUDY_PATH/data/SED/" +# SETUP CHECKS +# make sure projectpath exists +assert os.path.isdir( + tools.projectpath +), "Please create the projectpath folder on your machine" +# make sure the planets.txt file exists +assert os.path.isfile( + tools.projectpath + "/planets.txt" +), "Please make sure the 'planets.txt' file is present in $SUNBATHER_PROJECT_PATH" +# make sure the SED we need for this test has been copied to Cloudy +assert os.path.isfile( + tools.cloudypath + "/data/SED/eps_Eri_binned.spec" +), "Please copy /sunbather/stellar_SEDs/eps_Eri_binned.spec into $CLOUDY_PATH/data/SED/" -### CHECK IF test.py HAS BEEN RAN BEFORE ### +# ## CHECK IF test.py HAS BEEN RAN BEFORE ### -parker_profile_file = tools.projectpath+"/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt" -simulation_folder = tools.projectpath+"/sims/1D/WASP52b/test/parker_9000_11.000/" +parker_profile_file = ( + tools.projectpath + + "/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt" +) +simulation_folder = tools.projectpath + "/sims/1D/WASP52b/test/parker_9000_11.000/" if os.path.exists(parker_profile_file) or os.path.exists(simulation_folder): - confirmation = input(f"It looks like test.py has been ran before, as {parker_profile_file} and/or {simulation_folder} already exist. Do you want to delete the previous output before continuing (recommended)? (y/n): ") - if confirmation.lower() == "y": - if os.path.exists(parker_profile_file): - os.remove(parker_profile_file) - if os.path.exists(simulation_folder): - shutil.rmtree(simulation_folder) - print("\nFile(s) deleted successfully.") - else: - print("\nDeletion cancelled.") - - - -print("\nChecking construct_parker.py. A runtime for this module will follow when done...\n") + confirmation = input( + f"It looks like test.py has been ran before, as {parker_profile_file} and/or {simulation_folder} already exist. Do you want to delete the previous output before continuing (recommended)? (y/n): " + ) + if confirmation.lower() == "y": + if os.path.exists(parker_profile_file): + os.remove(parker_profile_file) + if os.path.exists(simulation_folder): + shutil.rmtree(simulation_folder) + print("\nFile(s) deleted successfully.") + else: + print("\nDeletion cancelled.") + + +print( + "\nChecking construct_parker.py. 
A runtime for this module will follow when done...\n" +) ### CREATING PARKER PROFILE ### -#create a parker profile - we use the p-winds/Cloudy hybrid scheme -os.system(f"cd {tools.sunbatherpath} && python construct_parker.py -plname WASP52b -pdir test -Mdot 11.0 -T 9000 -z 10 -zelem Ca=0 -overwrite") -#load the created profile -pprof_created = pd.read_table(tools.projectpath+'/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt', - names=['alt', 'rho', 'v', 'mu'], dtype=np.float64, comment='#') -#load the expected output -pprof_expected = pd.read_table(this_path+'/materials/pprof_WASP52b_T=9000_M=11.000.txt', - names=['alt', 'rho', 'v', 'mu'], dtype=np.float64, comment='#') -#check if they are equal to within 1% in altitude and mu and 10% in rho and v. -assert np.isclose(pprof_created[['alt', 'mu']], pprof_expected[['alt', 'mu']], rtol=0.01).all().all(), "The profile created with the construct_parker.py module is not as expected" -assert np.isclose(pprof_created[['rho', 'v']], pprof_expected[['rho', 'v']], rtol=0.1).all().all(), "The profile created with the construct_parker.py module is not as expected" - - - -print("\nChecking convergeT_parker.py. A runtime for this module will follow when done...\n") - -### CONVERGING TEMPERATURE STRUCTURE WITH CLOUDY ### - -#run the created profile through Cloudy -os.system(f"cd {tools.sunbatherpath} && python convergeT_parker.py -plname WASP52b -pdir test -dir test -Mdot 11.0 -T 9000 -z 10 -zelem Ca=0 -overwrite") -#load the created simulation -sim_created = tools.Sim(tools.projectpath+'/sims/1D/WASP52b/test/parker_9000_11.000/converged') -#load the expected simulation -sim_expected = tools.Sim(this_path+'/materials/converged') -#interpolate them to a common altitude grid as Cloudy's internal depth-grid may vary between simulations -alt_grid = np.logspace(np.log10(max(sim_created.ovr.alt.iloc[-1], sim_expected.ovr.alt.iloc[-1])+1e4), - np.log10(min(sim_created.ovr.alt.iloc[0], sim_expected.ovr.alt.iloc[0])-1e4), num=100) +# create a parker profile - we use the p-winds/Cloudy hybrid scheme +os.system( + f"cd {tools.sunbatherpath} && python construct_parker.py -plname WASP52b -pdir test -Mdot 11.0 -T 9000 -z 10 -zelem Ca=0 -overwrite" +) +# load the created profile +pprof_created = pd.read_table( + tools.projectpath + + "/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt", + names=["alt", "rho", "v", "mu"], + dtype=np.float64, + comment="#", +) +# load the expected output +pprof_expected = pd.read_table( + this_path + "/materials/pprof_WASP52b_T=9000_M=11.000.txt", + names=["alt", "rho", "v", "mu"], + dtype=np.float64, + comment="#", +) +# check if they are equal to within 1% in altitude and mu and 10% in rho and v. +assert ( + np.isclose(pprof_created[["alt", "mu"]], pprof_expected[["alt", "mu"]], rtol=0.01) + .all() + .all() +), "The profile created with the construct_parker.py module is not as expected" +assert ( + np.isclose(pprof_created[["rho", "v"]], pprof_expected[["rho", "v"]], rtol=0.1) + .all() + .all() +), "The profile created with the construct_parker.py module is not as expected" + + +print( + "\nChecking convergeT_parker.py. 
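The profile comparison above relies on numpy's relative tolerance: np.isclose(a, b, rtol=r, atol=t) tests |a - b| <= t + r*|b| element-wise, so rtol=0.01 and rtol=0.1 correspond to the quoted 1% and 10% margins. A tiny self-contained illustration, with made-up numbers that are not from the test data:

    import numpy as np

    a = np.array([1.00, 2.00, 3.00])
    b = np.array([1.005, 1.99, 3.20])

    # pure relative comparison: |a - b| <= 0.01 * |b| element-wise
    print(np.isclose(a, b, rtol=0.01, atol=0.0))   # [ True  True False]
    print(np.isclose(a, b, rtol=0.1).all())        # True at the looser 10% level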
A runtime for this module will follow when done...\n" +) + +# ## CONVERGING TEMPERATURE STRUCTURE WITH CLOUDY ### + +# run the created profile through Cloudy +os.system( + f"cd {tools.sunbatherpath} " + f"&& python convergeT_parker.py " + f"-plname WASP52b -pdir test -dir test " + f"-Mdot 11.0 -T 9000 -z 10 -zelem Ca=0 -overwrite" +) +# load the created simulation +sim_created = tools.Sim( + tools.projectpath + "/sims/1D/WASP52b/test/parker_9000_11.000/converged" +) +# load the expected simulation +sim_expected = tools.Sim(this_path + "/materials/converged") +# interpolate them to a common altitude grid as Cloudy's internal depth-grid may vary between simulations +alt_grid = np.logspace( + np.log10(max(sim_created.ovr.alt.iloc[-1], sim_expected.ovr.alt.iloc[-1]) + 1e4), + np.log10(min(sim_created.ovr.alt.iloc[0], sim_expected.ovr.alt.iloc[0]) - 1e4), + num=100, +) T_created = interp1d(sim_created.ovr.alt, sim_created.ovr.Te)(alt_grid) T_expected = interp1d(sim_expected.ovr.alt, sim_expected.ovr.Te)(alt_grid) -#check if they are equal to within 10% -assert np.isclose(T_created, T_expected, rtol=0.1).all(), "The converged temperature profile of Cloudy is not as expected" - +# check if they are equal to within 10% +assert np.isclose( + T_created, T_expected, rtol=0.1 +).all(), "The converged temperature profile of Cloudy is not as expected" print("\nChecking RT.py...\n") ### MAKING TRANSIT SPECTRA ### -#make a helium spectrum +# make a helium spectrum wavs = np.linspace(10830, 10836, num=300) -FinFout_created, found_lines, notfound_lines = RT.FinFout(sim_created, wavs, 'He') -#load the expected helium spectrum -FinFout_expected = np.genfromtxt(this_path+'/materials/FinFout_helium.txt')[:,1] -assert np.isclose(FinFout_created, FinFout_expected, rtol=0.05).all(), "The created helium spectrum is not as expected" -#make a magnesium+ spectrum +FinFout_created, found_lines, notfound_lines = RT.FinFout(sim_created, wavs, "He") +# load the expected helium spectrum +FinFout_expected = np.genfromtxt(this_path + "/materials/FinFout_helium.txt")[:, 1] +assert np.isclose( + FinFout_created, FinFout_expected, rtol=0.05 +).all(), "The created helium spectrum is not as expected" +# make a magnesium+ spectrum wavs = np.linspace(2795.5, 2797, num=300) -FinFout_created, found_lines, notfound_lines = RT.FinFout(sim_created, wavs, 'Mg+') -#load the expected magnesium+ spectrum -FinFout_expected = np.genfromtxt(this_path+'/materials/FinFout_magnesium+.txt')[:,1] -assert np.isclose(FinFout_created, FinFout_expected, rtol=0.05).all(), "The created magnesium+ spectrum is not as expected" - +FinFout_created, found_lines, notfound_lines = RT.FinFout(sim_created, wavs, "Mg+") +# load the expected magnesium+ spectrum +FinFout_expected = np.genfromtxt(this_path + "/materials/FinFout_magnesium+.txt")[:, 1] +assert np.isclose( + FinFout_created, FinFout_expected, rtol=0.05 +).all(), "The created magnesium+ spectrum is not as expected" -#if we made it past all the asserts, the code is correctly installed +# if we made it past all the asserts, the code is correctly installed print("\nSuccess.") From 7ec95bbf402070599e444fec0b6714d2549afb4f Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 14:37:37 +0100 Subject: [PATCH 25/63] update convergeT_parker.py --- src/sunbather/convergeT_parker.py | 220 +++++++++++++++++++----------- 1 file changed, 141 insertions(+), 79 deletions(-) diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index f0abf69..ebdc0c3 100644 --- 
a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -1,4 +1,3 @@ -# other imports import multiprocessing from shutil import copyfile import time @@ -16,15 +15,17 @@ def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): """ Takes a parent folder where multiple 1D parker profiles have been ran, - and for given T and Mdot it looks for another model that is already finished and closest - to the given model, so that we can start our new simulation from that converged temperature - structure. It returns the T and Mdot - of the close converged folder, or None if there aren't any (within the tolerance). + and for given T and Mdot it looks for another model that is already + finished and closest to the given model, so that we can start our new + simulation from that converged temperature structure. It returns the T and + Mdot of the close converged folder, or None if there aren't any (within the + tolerance). Parameters ---------- parentfolder : str - Parent folder containing sunbather simulations within folders with the parker_*T0*_*Mdot* name format. + Parent folder containing sunbather simulations within folders with the + parker_*T0*_*Mdot* name format. T : numeric Target isothermal temperature in units of K. Mdot : numeric @@ -32,12 +33,14 @@ def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): tolT : numeric, optional Maximum T0 difference with the target temperature, by default 2000 K tolMdot : numeric, optional - Maximum log10(Mdot) difference with the target mass-loss rate, by default 1 dex + Maximum log10(Mdot) difference with the target mass-loss rate, by + default 1 dex Returns ------- clconv : list - [T0, Mdot] of the closest found finished model, or [None, None] if none were found within the tolerance. + [T0, Mdot] of the closest found finished model, or [None, None] if none + were found within the tolerance. """ pattern = re.compile( @@ -64,7 +67,7 @@ def find_close_model(parentfolder, T, Mdot, tolT=2000, tolMdot=1.0): ] in convergedfolders: # if the current folder is found, remove it convergedfolders.remove([int(T), float(Mdot)]) - if convergedfolders == []: # then we default to constant starting value + if not convergedfolders: # then we default to constant starting value clconv = [None, None] else: # find closest converged profile dist = ( @@ -87,24 +90,26 @@ def run_s( T, itno, fc, - dir, + workingdir, SEDname, overwrite, startT, pdir, zdict=None, altmax=8, - save_sp=[], + save_sp=None, constantT=False, maxit=16, ): """ - Solves for a nonisothermal temperature profile of a single isothermal Parker wind (density and velocity) profile. + Solves for a nonisothermal temperature profile of a single isothermal + Parker wind (density and velocity) profile. Parameters ---------- plname : str - Planet name (must have parameters stored in $SUNBATHER_PROJECT_PATH/planets.txt). + Planet name (must have parameters stored in + $SUNBATHER_PROJECT_PATH/planets.txt). Mdot : str or numeric log of the mass-loss rate in units of g s-1. T : str or int @@ -116,8 +121,8 @@ def run_s( look for the highest iteration number to start from. fc : numeric H/C convergence factor, see Linssen et al. (2024). A sensible value is 1.1. - dir : str - Directory as $SUNBATHER_PROJECT_PATH/sims/1D/planetname/*dir*/ + workingdir : str + Directory as $SUNBATHER_PROJECT_PATH/sims/1D/planetname/*workingdir*/ where the temperature profile will be solved. A folder named parker_*T*_*Mdot*/ will be made there. 
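Regarding the dir to workingdir rename in this and the following hunks: dir shadows the Python built-in of the same name, which linters such as pylint report as redefined-builtin. Further down, the argument parser keeps the user-facing -dir flag but stores the parsed value under the new attribute name via dest. A minimal sketch of that argparse pattern; the parser and the example value are stand-ins, not the module's own code:

    import argparse

    parser = argparse.ArgumentParser()
    # keep the CLI flag "-dir" but expose the value as args.workingdir,
    # so the Python identifier no longer shadows the built-in dir()
    parser.add_argument("-dir", required=True, type=str, dest="workingdir")

    args = parser.parse_args(["-dir", "Tstruc_fH_0.9"])
    print(args.workingdir)   # -> Tstruc_fH_0.9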
SEDname : str @@ -130,15 +135,16 @@ def run_s( Either 'constant', 'free' or 'nearby'. Sets the initial temperature profile guessed/used for the first iteration. 'constant' sets it equal to the parker wind isothermal value. - 'free' lets Cloudy solve it, so you will get the radiative equilibrium structure. - 'nearby' looks in the dir folder for previously solved - Parker wind profiles and starts from a converged one. Then, if no converged - ones are available, uses 'free' instead. + 'free' lets Cloudy solve it, so you will get the radiative equilibrium + structure. 'nearby' looks in the workingdir folder for previously solved + Parker wind profiles and starts from a converged one. Then, if no + converged ones are available, uses 'free' instead. pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/planetname/*pdir*/ where we take the isothermal parker wind density and velocity profiles from. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. zdict : dict, optional Dictionary with the scale factors of all elements relative to the default solar composition. Can be easily created with tools.get_zdict(). @@ -156,8 +162,10 @@ def run_s( maxit : int, optional Maximum number of iterations, by default 16. """ + if save_sp is None: + save_sp = [] - Mdot = "%.3f" % float(Mdot) # enforce this format to get standard file names. + Mdot = f"{float(Mdot):.3f}" # enforce this format to get standard file names. T = str(T) # set up the planet object @@ -166,7 +174,7 @@ def run_s( planet.set_var(SEDname=SEDname) # set up the folder structure - pathTstruc = tools.projectpath + "/sims/1D/" + planet.name + "/" + dir + "/" + pathTstruc = tools.projectpath + "/sims/1D/" + planet.name + "/" + workingdir + "/" path = pathTstruc + "parker_" + T + "_" + Mdot + "/" # check if this parker profile exists in the given pdir @@ -194,9 +202,12 @@ def run_s( if os.path.isdir(path): # the simulation exists already if not overwrite: print( - "Simulation already exists and overwrite = False:", plname, dir, Mdot, T + "Simulation already exists and overwrite = False:", + plname, workingdir, Mdot, T ) - return # this quits the function but if we're running a grid, it doesn't quit the whole Python code + # this quits the function but if we're running a grid, it doesn't + # quit the whole Python code + return else: os.mkdir(path[:-1]) # make the folder @@ -223,9 +234,11 @@ def run_s( + str(altmax) ) + # this will run the profile at the isothermal T value instead of converging + # a nonisothermal profile if ( constantT - ): # this will run the profile at the isothermal T value instead of converging a nonisothermal profile + ): if save_sp == []: tools.write_Cloudy_in( path + "constantT", @@ -269,8 +282,10 @@ def run_s( tools.run_Cloudy("constantT", folder=path) # run the Cloudy simulation return - # if we got to here, we are not doing a constantT simulation, so we set up the convergence scheme files - # write Cloudy template input file - each iteration will add their current temperature structure to this template + # if we got to here, we are not doing a constantT simulation, so we set up + # the convergence scheme files + # write Cloudy template input file - each iteration will add their current + # temperature structure to this template 
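One recurring change in these refactors (run_s above, and later run_loop and several tools functions) is replacing a mutable default such as save_sp=[] with save_sp=None plus an in-body check. The reason is that a list default is created once at function-definition time and is then shared between calls. A self-contained illustration with throwaway function names:

    def broken(item, acc=[]):
        # the same list object is reused on every call
        acc.append(item)
        return acc

    def fixed(item, acc=None):
        if acc is None:
            acc = []     # a fresh list per call
        acc.append(item)
        return acc

    print(broken(1), broken(2))   # [1, 2] [1, 2]  <- state leaks between calls
    print(fixed(1), fixed(2))     # [1] [2]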
tools.write_Cloudy_in( path + "template", title=planet.name @@ -303,18 +318,17 @@ def run_s( iteration_number = int( re.search(pattern, filename).group(1) ) # extract the iteration number - if ( - iteration_number > max_iteration - ): # update highest found iteration number - max_iteration = iteration_number + max_iteration = max(max_iteration, iteration_number) if max_iteration == -1: # this means no files were found print( - f"This folder does not contain any iteration files {path}, so I cannot resume from the highest one. Will instead start at itno = 1." + f"This folder does not contain any iteration files {path}, so I cannot " + f"resume from the highest one. Will instead start at itno = 1." ) itno = 1 else: print( - f"Found the highest iteration {path}iteration{max_iteration}, will resume at that same itno." + f"Found the highest iteration {path}iteration{max_iteration}, will " + f"resume at that same itno." ) itno = max_iteration @@ -331,18 +345,21 @@ def run_s( ): # then we start in free (=radiative eq.) mode copyfile(path + "template.in", path + "iteration1.in") + # then clconv cannot be [None, None] and we start from a previous + # converged T(r) elif ( startT == "nearby" - ): # then clconv cannot be [None, None] and we start from a previous converged T(r) + ): print( - f"Model {path} starting from previously converged temperature profile: T0 = {clconv[0]}, Mdot = {clconv[1]}" + f"Model {path} starting from previously converged temperature profile: " + f"T0 = {clconv[0]}, Mdot = {clconv[1]}" ) prev_conv_T = pd.read_table( pathTstruc + "parker_" + str(clconv[0]) + "_" - + "{:.3f}".format(clconv[1]) + + f"{clconv[1]:.3f}" + "/converged.txt", delimiter=" ", ) @@ -357,7 +374,8 @@ def run_s( def catch_errors_run_s(*args): """ - Executes the run_s() function with provided arguments, while catching errors more gracefully. + Executes the run_s() function with provided arguments, while catching + errors more gracefully. """ try: @@ -376,7 +394,7 @@ def run_g( T_u, T_s, fc, - dir, + workingdir, SEDname, overwrite, startT, @@ -388,13 +406,14 @@ def run_g( maxit, ): """ - Solves for a nonisothermal temperature profile of a grid of isothermal Parker wind models, - by executing the run_s() function in parallel. + Solves for a nonisothermal temperature profile of a grid of isothermal + Parker wind models, by executing the run_s() function in parallel. Parameters ---------- plname : str - Planet name (must have parameters stored in $SUNBATHER_PROJECT_PATH/planets.txt). + Planet name (must have parameters stored in + $SUNBATHER_PROJECT_PATH/planets.txt). cores : int Number of parallel processes to spawn (i.e., number of CPU cores). Mdot_l : str or numeric @@ -411,8 +430,8 @@ def run_g( Step size of the temperature grid in units of K. fc : numeric H/C convergence factor, see Linssen et al. (2024). A sensible value is 1.1. - dir : str - Directory as $SUNBATHER_PROJECT_PATH/sims/1D/planetname/*dir*/ + workingdir : str + Directory as $SUNBATHER_PROJECT_PATH/sims/1D/planetname/*workingdir*/ where the temperature profile will be solved. A folder named parker_*T*_*Mdot*/ will be made there. SEDname : str @@ -425,15 +444,17 @@ def run_g( Either 'constant', 'free' or 'nearby'. Sets the initial temperature profile guessed/used for the first iteration. 'constant' sets it equal to the parker wind isothermal value. - 'free' lets Cloudy solve it, so you will get the radiative equilibrium structure. 
- 'nearby' looks in the dir folder for previously solved + 'free' lets Cloudy solve it, so you will get the radiative equilibrium + structure. + 'nearby' looks in the workingdir folder for previously solved Parker wind profiles and starts from a converged one. Then, if no converged ones are available, uses 'free' instead. pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/planetname/*pdir*/ where we take the isothermal parker wind density and velocity profiles from. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. zdict : dict, optional Dictionary with the scale factors of all elements relative to the default solar composition. Can be easily created with tools.get_zdict(). @@ -466,7 +487,7 @@ def run_g( T, 1, fc, - dir, + workingdir, SEDname, overwrite, startT, @@ -484,7 +505,10 @@ def run_g( p.join() -if __name__ == "__main__": +def main(): + """ + Main function + """ class OneOrThreeAction(argparse.Action): """ @@ -514,7 +538,8 @@ def __call__(self, parser, namespace, values, option_string=None): t0 = time.time() parser = argparse.ArgumentParser( - description="Runs the temperature convergence for 1D Parker profile(s)." + description="Runs the temperature convergence for 1D Parker profile(s).", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) parser.add_argument( @@ -524,7 +549,11 @@ def __call__(self, parser, namespace, values, option_string=None): "-dir", required=True, type=str, - help="folder where the temperature structures are solved. e.g. Tstruc_fH_0.9 or Tstruc_z_100_3xEUV etc.", + dest="workingdir", + help=( + "folder where the temperature structures are solved. e.g. Tstruc_fH_0.9 or " + "Tstruc_z_100_3xEUV etc." + ), ) parser.add_argument( "-pdir", @@ -538,8 +567,11 @@ def __call__(self, parser, namespace, values, option_string=None): type=float, nargs="+", action=OneOrThreeAction, - help="log10(mass-loss rate), or three values specifying a grid of " - "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.", + help=( + "log10(mass-loss rate), or three values specifying a grid of " + "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to " + "three decimal places." + ), ) parser.add_argument( "-T", @@ -547,89 +579,115 @@ def __call__(self, parser, namespace, values, option_string=None): type=int, nargs="+", action=OneOrThreeAction, - help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.", + help=( + "temperature, or three values specifying a grid of temperatures: lowest, " + "highest, stepsize." 
+ ), ) parser.add_argument( - "-cores", type=int, default=1, help="number of parallel runs [default=1]" + "-cores", type=int, default=1, help="number of parallel runs" ) parser.add_argument( "-fc", type=float, default=1.1, - help="convergence factor (heat/cool should be below this value) [default=1.1]", + help="convergence factor (heat/cool should be below this value)", ) parser.add_argument( "-startT", choices=["nearby", "free", "constant"], default="nearby", - help="initial T structure, either 'constant', 'free' or 'nearby' [default=nearby]", + help=( + "initial T structure, either 'constant', 'free' or 'nearby'" + ), ) parser.add_argument( "-itno", type=int, default=1, - help="starting iteration number (itno != 1 only works with -overwrite). As a special use, you can pass " - "-itno 0 which will automatically find the highest previously ran iteration number [default=1]", + help=( + "starting iteration number (itno != 1 only works with -overwrite). As a " + "special use, you can pass -itno 0 which will automatically find the " + "highest previously ran iteration number" + ), ) parser.add_argument( "-maxit", type=int, default=20, - help="maximum number of iterations [default = 20]", + help="maximum number of iterations", ) parser.add_argument( "-SEDname", type=str, default="real", - help="name of SED to use. Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]", + help=( + "name of SED to use. Must be in Cloudy's data/SED/ folder" + ), ) parser.add_argument( "-overwrite", action="store_true", - help="overwrite existing simulation if passed [default=False]", + help="overwrite existing simulation if passed", ) parser.add_argument( "-z", type=float, default=1.0, - help="metallicity (=scale factor relative to solar for all elements except H and He) [default=1.]", + help=( + "metallicity (=scale factor relative to solar for all elements except H " + "and He)" + ), ) parser.add_argument( "-zelem", action=AddDictAction, nargs="+", default={}, - help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " - "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. Using this " - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + help=( + "abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem " + "He=0.01. Can also be used to toggle elements off, e.g. -zelem Ca=0. " + "Combines with -z argument. Using this command results in running p_winds " + "in an an iterative scheme where Cloudy updates the mu parameter." + ), ) parser.add_argument( "-altmax", type=int, default=8, - help="maximum altitude of the simulation in units of Rp. [default=8]", + help="maximum altitude of the simulation in units of Rp.", ) parser.add_argument( "-save_sp", type=str, nargs="+", default=["all"], - help="atomic or ionic species to save densities for (needed for radiative transfer). " - "You can add multiple as e.g. -save_sp He Ca+ Fe3+ Passing 'all' includes all species that weren't turned off. In that case, you can " - "set the maximum degree of ionization with the -save_sp_max_ion flag. default=[] i.e. none.", + help=( + "atomic or ionic species to save densities for (needed for radiative " + "transfer). You can add multiple as e.g. -save_sp He Ca+ Fe3+ Passing " + "'all' includes all species that weren't turned off. In that case, you can " + "set the maximum degree of ionization with the -save_sp_max_ion flag. 
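The formatter_class=argparse.ArgumentDefaultsHelpFormatter passed to the parser above is why the hand-written "[default=...]" fragments are being dropped from the help strings: that formatter appends each option's default to its help text automatically. A minimal sketch, with an option mirroring one of the real flags but otherwise standalone:

    import argparse

    parser = argparse.ArgumentParser(
        description="example parser",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument("-fc", type=float, default=1.1,
                        help="convergence factor")
    parser.print_help()
    # the -fc entry now ends in "(default: 1.1)" without spelling it out by hand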
" + ), ) parser.add_argument( "-save_sp_max_ion", type=int, default=6, - help="only used when you set -save_sp all This command sets the maximum degree of ionization " - "that will be saved. [default=6] but using lower values saves significant file size if high ions are not needed. The maximum number is 12, " - "but such highly ionized species only occur at very high XUV flux, such as in young systems.", + help=( + "only used when you set -save_sp all This command sets the maximum " + "degree of ionization that will be saved. [default=6] but using lower " + "values saves significant file size if high ions are not needed. The " + "maximum number is 12, but such highly ionized species only occur at very " + "high XUV flux, such as in young systems." + ), ) parser.add_argument( "-constantT", action="store_true", - help="run the profile at the isothermal temperature instead of converging upon the temperature structure. [default=False]", + help=( + "run the profile at the isothermal temperature instead of converging upon " + "the temperature structure." + ), ) args = parser.parse_args() @@ -661,7 +719,7 @@ def __call__(self, parser, namespace, values, option_string=None): str(args.T[0]), args.itno, args.fc, - args.dir, + args.workingdir, args.SEDname, args.overwrite, args.startT, @@ -685,7 +743,7 @@ def __call__(self, parser, namespace, values, option_string=None): args.T[1], args.T[2], args.fc, - args.dir, + args.workingdir, args.SEDname, args.overwrite, args.startT, @@ -707,7 +765,7 @@ def __call__(self, parser, namespace, values, option_string=None): args.T[1], args.T[2], args.fc, - args.dir, + args.workingdir, args.SEDname, args.overwrite, args.startT, @@ -729,7 +787,7 @@ def __call__(self, parser, namespace, values, option_string=None): args.T[0], args.T[0], args.fc, - args.dir, + args.workingdir, args.SEDname, args.overwrite, args.startT, @@ -750,3 +808,7 @@ def __call__(self, parser, namespace, values, option_string=None): (int(time.time() - t0) % 60), "seconds.\n", ) + + +if __name__ == "__main__": + main() From afa97659fa2f9fce1552161297b9a240b590c343 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 15:48:25 +0100 Subject: [PATCH 26/63] Update pylint.yml - stricter score limit --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 47a45b7..3f89231 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -21,4 +21,4 @@ jobs: pip install -e . - name: Analysing the code with pylint run: | - pylint -d C0301,C0103,C0209 --fail-under 7.4 $(git ls-files '*.py') + pylint -d C0301,C0103,C0209 --fail-under 8.5 $(git ls-files '*.py') From 5b6691c14afad01af9f3945e3ffbf1eb35fad980 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 22:25:42 +0100 Subject: [PATCH 27/63] refactor RT.py --- src/sunbather/RT.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/src/sunbather/RT.py b/src/sunbather/RT.py index 28b2f42..12a6076 100644 --- a/src/sunbather/RT.py +++ b/src/sunbather/RT.py @@ -1,13 +1,13 @@ import warnings import pandas as pd import numpy as np -import numpy.ma as ma +from numpy import ma from scipy.interpolate import interp1d from scipy.special import voigt_profile from scipy.integrate import trapezoid from scipy.ndimage import gaussian_filter1d -import sunbather.tools as tools +from sunbather import tools sigt0 = 2.654e-2 # cm2 s-1 = cm2 Hz, from Axner et al. 
2004 @@ -108,7 +108,7 @@ def project_1D_to_2D( if x_projection: q2 = q2 * xx / rr # now q2 is the projection in the x-direction - if cut_at != None: # set values to zero outside the cut_at boundary + if cut_at is not None: # set values to zero outside the cut_at boundary q2[rr > cut_at] = 0.0 # some options that were used in Linssen&Oklopcic (2023) to find where the line contribution comes from: @@ -471,11 +471,9 @@ def read_NIST_lines(species, wavlower=None, wavupper=None): if spNIST.empty: warnings.warn(f"No lines with necessary coefficients found for {species}") return spNIST - if ( - type(spNIST["Ei(Ry)"].iloc[0]) == str - ): # if there are no [](), the datatype will be float already + if isinstance(spNIST["Ei(Ry)"].iloc[0], str): # if there are no [](), the datatype will be float already spNIST["Ei(Ry)"] = ( - spNIST["Ei(Ry)"].str.extract("(\d+)", expand=False).astype(float) + spNIST["Ei(Ry)"].str.extract(r"(\d+)", expand=False).astype(float) ) # remove non-numeric characters such as [] and () spNIST["sig0"] = sigt0 * spNIST.fik spNIST["nu0"] = tools.c * 1e8 / (spNIST["ritz_wl_vac(A)"]) # speed of light to AA/s @@ -483,11 +481,11 @@ def read_NIST_lines(species, wavlower=None, wavupper=None): 4 * np.pi ) # lorentzian gamma is not function of depth or nu. Value in Hz - if wavlower != None: + if wavlower is not None: spNIST.drop( labels=spNIST.index[spNIST["ritz_wl_vac(A)"] <= wavlower], inplace=True ) - if wavupper != None: + if wavupper is not None: spNIST.drop( labels=spNIST.index[spNIST["ritz_wl_vac(A)"] >= wavupper], inplace=True ) @@ -668,7 +666,7 @@ def FinFout( colname, lineweight = tools.find_line_lowerstate_in_en_df( spec, spNIST.loc[lineno], sim.en ) - if colname == None: # we skip this line if the line energy is not found. + if colname is None: # we skip this line if the line energy is not found. notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) continue # to next spectral line @@ -756,9 +754,7 @@ def tau_1D(sim, wavAA, species, width_fac=1.0, v_turb=0.0): but which could not be calculated due to their excitation state not being reported by Cloudy. """ - assert isinstance(wavAA, float) or isinstance( - wavAA, int - ), "Pass one wavelength in Å as a float or int" + assert isinstance(wavAA, (float, int)), "Pass one wavelength in Å as a float or int" assert hasattr(sim, "p"), "The sim must have an attributed Planet object" assert ( "v" in sim.ovr.columns @@ -812,7 +808,7 @@ def tau_1D(sim, wavAA, species, width_fac=1.0, v_turb=0.0): colname, lineweight = tools.find_line_lowerstate_in_en_df( spec, spNIST.loc[lineno], sim.en ) - if colname == None: # we skip this line if the line energy is not found. + if colname is None: # we skip this line if the line energy is not found. notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) continue # to next spectral line @@ -891,9 +887,7 @@ def tau_12D(sim, wavAA, species, width_fac=1.0, v_turb=0.0, cut_at=None): but which could not be calculated due to their excitation state not being reported by Cloudy. """ - assert isinstance(wavAA, float) or isinstance( - wavAA, int - ), "Pass one wavelength in Å as a float or int" + assert isinstance(wavAA, (float, int)), "Pass one wavelength in Å as a float or int" assert hasattr(sim, "p") assert ( "v" in sim.ovr.columns @@ -948,7 +942,7 @@ def tau_12D(sim, wavAA, species, width_fac=1.0, v_turb=0.0, cut_at=None): colname, lineweight = tools.find_line_lowerstate_in_en_df( spec, spNIST.loc[lineno], sim.en ) - if colname == None: # we skip this line if the line energy is not found. 
+ if colname is None: # we skip this line if the line energy is not found. notfound_lines.append(spNIST["ritz_wl_vac(A)"][lineno]) continue # to next spectral line From 008e7dab37da30ff63ba379cbbcc157f5bac29b8 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 22:29:51 +0100 Subject: [PATCH 28/63] refactor construct_parker.py --- src/sunbather/construct_parker.py | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index 9ee5123..d04154d 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -7,16 +7,16 @@ import warnings from shutil import copyfile import numpy as np -import matplotlib.pyplot as plt +# import matplotlib.pyplot as plt import astropy.units as u -from p_winds import tools as pw_tools +# from p_winds import tools as pw_tools from p_winds import parker as pw_parker from p_winds import hydrogen as pw_hydrogen from scipy.integrate import simpson, trapezoid from scipy.interpolate import interp1d # sunbather imports -import sunbather.tools as tools +from sunbather import tools def cloudy_spec_to_pwinds(SEDfilename, dist_SED, dist_planet): @@ -45,7 +45,7 @@ def cloudy_spec_to_pwinds(SEDfilename, dist_SED, dist_planet): SED at the planet distance in the dictionary format that p-winds expects. """ - with open(SEDfilename, "r") as f: + with open(SEDfilename, "r", encoding="utf-8") as f: for line in f: if not line.startswith("#"): # skip through the comments at the top assert ("angstrom" in line) or ("Angstrom" in line) # verify the units @@ -721,8 +721,7 @@ def save_cloudy_parker_profile( f"This Parker wind profile is supersonic already at Rp: {save_name}" ) break - else: - previous_mu_bar = mu_bar + previous_mu_bar = mu_bar copyfile(filename, filename.split("temp/")[0] + filename.split("temp/")[1]) tools.verbose_print( @@ -816,7 +815,7 @@ def run_s( (p.a - altmax * p.R) / tools.AU, ) # assumes SED is at 1 AU - if fH != None: # then run p_winds standalone + if fH is not None: # then run p_winds standalone save_plain_parker_profile( p, Mdot, @@ -970,7 +969,7 @@ def run_g( p.join() -if __name__ == "__main__": +def main(): class OneOrThreeAction(argparse.Action): """ @@ -1096,12 +1095,12 @@ def __call__(self, parser, namespace, values, option_string=None): ) args = parser.parse_args() - if args.z != None: + if args.z is not None: zdict = tools.get_zdict(z=args.z, zelem=args.zelem) else: # if z==None we should not pass that to the tools.get_zdict function zdict = tools.get_zdict(zelem=args.zelem) - if args.fH != None and ( + if args.fH is not None and ( args.zelem != {} or args.mu_conv != 0.01 or args.mu_maxit != 7 @@ -1127,7 +1126,7 @@ def __call__(self, parser, namespace, values, option_string=None): + args.pdir + "/" ) - if (args.fH == None) and ( + if (args.fH is None) and ( not os.path.isdir( tools.projectpath + "/parker_profiles/" @@ -1237,3 +1236,7 @@ def __call__(self, parser, namespace, values, option_string=None): (int(time.time() - t0) % 60), "seconds.\n", ) + + +if __name__ == "__main__": + main() From 013632c37fe7f352c6bf5e391e1a622773f14d69 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 22:37:00 +0100 Subject: [PATCH 29/63] refactor solveT.py --- src/sunbather/solveT.py | 80 ++++++++++++++++++++--------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index 2c692be..7cd94da 100644 --- a/src/sunbather/solveT.py +++ 
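The RT.py and construct_parker.py hunks above are mostly idiom-level cleanups: comparisons against None switch to identity checks, two isinstance calls collapse into one call with a tuple of types, regular expressions become raw strings so that \d is not treated as an invalid escape, and open() calls gain an explicit encoding. Roughly, the patterns being applied look like the following; the file name and values are placeholders:

    import re
    from pathlib import Path

    value = None
    if value is None:                        # instead of value == None
        pass

    wav = 10833
    if not isinstance(wav, (float, int)):    # one isinstance call instead of two
        raise TypeError("Pass one wavelength in Å as a float or int")

    pattern = re.compile(r"(\d+)")           # raw string avoids the \d escape warning

    Path("example.txt").write_text("# header\n1 2 3\n", encoding="utf-8")
    with open("example.txt", "r", encoding="utf-8") as f:   # explicit encoding
        header = f.readline()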
b/src/sunbather/solveT.py @@ -1,5 +1,5 @@ -# sunbather imports -import sunbather.tools as tools +import os +import warnings # other imports import pandas as pd @@ -9,8 +9,9 @@ from scipy.optimize import minimize_scalar from scipy.interpolate import interp1d import scipy.stats as sps -import os -import warnings + +# sunbather imports +from sunbather import tools def calc_expansion(r, rho, v, Te, mu): @@ -291,7 +292,7 @@ def last_false_index(arr): # check for regime where radiative cooling is weak. Usually this means that expansion cooling dominates, but advection cooling can contribute in some cases exp_cloc = len(HCratio) # start by setting a 'too high' value expcool_dominates = radcool / (radcool + expcool + advcool) < 0.2 - if True and False in expcool_dominates: + if True and False in expcool_dominates: # FIXME True in expcool_dominates and False in expcool_dominates?? -SR exp_cloc = last_false_index( expcool_dominates ) # this way of evaluating it guarantees that all entries after this one are True @@ -535,7 +536,7 @@ def make_rates_plot( HCrationeg[HCrationeg < 0] = 0.0 fig, (ax1, ax2, ax3) = plt.subplots(3, figsize=(4, 7)) - if title != None: + if title is not None: ax1.set_title(title) ax1.plot(altgrid, Te, color="#4CAF50", label="previous") ax1.plot(altgrid, newTe_relax, color="#FFA500", label="relaxation") @@ -552,14 +553,14 @@ def make_rates_plot( ax2.plot(altgrid, advcool / rho, color="blue", linestyle="dotted") ax2.set_yscale("log") ax2.set_ylim( - 0.1 * min(min(radheat / rho), min(radcool / rho)), + 0.1 * min(radheat / rho, radcool / rho), 2 * max( - max(radheat / rho), - max(radcool / rho), - max(expcool / rho), - max(advheat / rho), - max(advcool / rho), + radheat / rho, + radcool / rho, + expcool / rho, + advheat / rho, + advcool / rho, ), ) ax2.set_ylabel("Rate [erg/s/g]") @@ -593,7 +594,7 @@ def make_rates_plot( tools.set_alt_ax(ax3, altmax=altmax, labels=True) fig.tight_layout() - if savename != None: + if savename is not None: plt.savefig(savename, bbox_inches="tight", dpi=200) plt.clf() plt.close() @@ -641,14 +642,14 @@ def make_converged_plot( ax2.plot(altgrid, advcool / rho, color="blue", linestyle="dotted") ax2.set_yscale("log") ax2.set_ylim( - 0.1 * min(min(radheat / rho), min(radcool / rho)), + 0.1 * min(radheat / rho, radcool / rho), 2 * max( - max(radheat / rho), - max(radcool / rho), - max(expcool / rho), - max(advheat / rho), - max(advcool / rho), + radheat / rho, + radcool / rho, + expcool / rho, + advheat / rho, + advcool / rho, ), ) ax2.set_ylabel("Rate [erg/s/g]") @@ -724,14 +725,11 @@ def check_converged(fc, HCratio, newTe, prevTe, linthresh=50.0): ) # take element wise ratio diffTe = np.abs(newTe - prevTe) # take element-wise absolute difference - if np.all( + converged = np.all( (np.abs(HCratio) < fc) | (ratioTe < (1 + 0.3 * np.log10(fc))) | (diffTe < linthresh) - ): - converged = True - else: - converged = False + ) return converged @@ -764,7 +762,7 @@ def clean_converged_folder(folder): os.remove(os.path.join(folder, filename)) -def run_loop(path, itno, fc, save_sp=[], maxit=16): +def run_loop(path, itno, fc, save_sp=None, maxit=16): """ Solves for the nonisothermal temperature profile of a Parker wind profile through an iterative convergence scheme including Cloudy. @@ -789,6 +787,8 @@ def run_loop(path, itno, fc, save_sp=[], maxit=16): maxit : int, optional Maximum number of iterations, by default 16. """ + if save_sp is None: + save_sp = [] if itno == 1: # iteration1 is just running Cloudy. 
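In check_converged above, the if/else that set True or False by hand is collapsed into assigning the result of np.all() directly; the convergence condition itself is unchanged. The same pattern in isolation, with made-up input values:

    import numpy as np

    newTe = np.array([5000.0, 8000.0])
    prevTe = np.array([5010.0, 7995.0])
    HCratio = np.array([1.05, 0.98])
    fc, linthresh = 1.1, 50.0

    ratioTe = np.maximum(newTe, prevTe) / np.minimum(newTe, prevTe)
    diffTe = np.abs(newTe - prevTe)
    converged = np.all(
        (np.abs(HCratio) < fc)
        | (ratioTe < (1 + 0.3 * np.log10(fc)))
        | (diffTe < linthresh)
    )
    print(bool(converged))   # True for these numbers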
Then, we move on to iteration2 tools.run_Cloudy("iteration1", folder=path) @@ -922,19 +922,19 @@ def run_loop(path, itno, fc, save_sp=[], maxit=16): break - else: # set up the next iteration - Cltlaw = tools.alt_array_to_Cloudy( - rgrid, newTe, altmax, Rp, 1000 - ) # convert the temperature profile to a table format accepted by Cloudy + # set up the next iteration + Cltlaw = tools.alt_array_to_Cloudy( + rgrid, newTe, altmax, Rp, 1000 + ) # convert the temperature profile to a table format accepted by Cloudy - tools.copyadd_Cloudy_in( - path + "template", path + "iteration" + str(itno), tlaw=Cltlaw - ) # add temperature profile to the template input file - if ( - itno != maxit - ): # no use running it if we are not entering the next while-loop iteration - tools.run_Cloudy(f"iteration{itno}", folder=path) - else: - print(f"Failed temperature convergence after {itno} iterations: {path}") - - itno += 1 + tools.copyadd_Cloudy_in( + path + "template", path + "iteration" + str(itno), tlaw=Cltlaw + ) # add temperature profile to the template input file + if ( + itno != maxit + ): # no use running it if we are not entering the next while-loop iteration + tools.run_Cloudy(f"iteration{itno}", folder=path) + else: + print(f"Failed temperature convergence after {itno} iterations: {path}") + + itno += 1 From 289151ada523874284c5106579ef1ae069cc1e67 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 30 Oct 2024 22:38:44 +0100 Subject: [PATCH 30/63] refactor tools.py --- src/sunbather/tools.py | 78 ++++++++++++++++++++++++------------------ 1 file changed, 45 insertions(+), 33 deletions(-) diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index 0157116..a2df647 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -148,7 +148,7 @@ # ###################################### -def get_specieslist(max_ion=6, exclude_elements=[]): +def get_specieslist(max_ion=6, exclude_elements=None): """ Returns a list of atomic and ionic species names. Default returns all species up to 6+ ionization. Higher than 6+ ionization is rarely attained @@ -170,6 +170,8 @@ def get_specieslist(max_ion=6, exclude_elements=[]): List of atomic and ionic species names in the string format expected by Cloudy. 
""" + if exclude_elements is None: + exclude_elements = [] if max_ion > 12: warnings.warn( @@ -259,9 +261,9 @@ def get_mass(species): return mass -####################################### -########### CLOUDY FILES ########## -####################################### +# ###################################### +# ########## CLOUDY FILES ########## +# ###################################### def process_continuum(filename, nonzero=False): @@ -327,7 +329,7 @@ def process_heating(filename, Rp=None, altmax=None, cloudy_version="17"): # determine max number of columns (otherwise pd.read_table assumes it is the number # of the first row) max_columns = 0 - with open(filename, "r") as file: + with open(filename, "r", encoding="utf-8") as file: for line in file: num_columns = len(line.split("\t")) max_columns = max(max_columns, num_columns) @@ -435,7 +437,7 @@ def process_cooling(filename, Rp=None, altmax=None, cloudy_version="17"): # determine max number of columns (otherwise pd.read_table assumes it is # the number of the first row) max_columns = 0 - with open(filename, "r") as file: + with open(filename, "r", encoding="utf-8") as file: for line in file: num_columns = len(line.split("\t")) max_columns = max(max_columns, num_columns) @@ -672,7 +674,7 @@ def process_energies(filename, rewrite=True, cloudy_version="17"): en.columns.values[0][0] == "#" ): # condition checks whether it has already been rewritten, if not, we do all following stuff: - for col in range(len(en.columns)): # check if all rows are the same + for i, col in enumerate(en.columns): # check if all rows are the same if len(en.iloc[:, col].unique()) != 1: raise ValueError( "In reading .en file, found a column with not identical values!" @@ -743,12 +745,12 @@ def process_energies(filename, rewrite=True, cloudy_version="17"): 0 ] # iloc at which the species (e.g. He or Ca+3) starts. en_df.iloc[ - first_iloc : first_iloc + n_matching, en_df.columns.get_loc("configuration") + first_iloc: first_iloc + n_matching, en_df.columns.get_loc("configuration") ] = species_levels.configuration.iloc[:n_matching].values en_df.iloc[ - first_iloc : first_iloc + n_matching, en_df.columns.get_loc("term") + first_iloc: first_iloc + n_matching, en_df.columns.get_loc("term") ] = species_levels.term.iloc[:n_matching].values - en_df.iloc[first_iloc : first_iloc + n_matching, en_df.columns.get_loc("J")] = ( + en_df.iloc[first_iloc: first_iloc + n_matching, en_df.columns.get_loc("J")] = ( species_levels.J.iloc[:n_matching].values ) @@ -958,7 +960,7 @@ def get_SED_norm_1AU(SEDname): Energy where the monochromatic flux of the nuFnu output variable is specified. """ - with open(cloudypath + "/data/SED/" + SEDname, "r") as f: + with open(cloudypath + "/data/SED/" + SEDname, "r", encoding="utf-8") as f: for line in f: if not line.startswith("#"): # skip through the comments at the top assert ("angstrom" in line) or ("Angstrom" in line) # verify the units @@ -1134,7 +1136,7 @@ def calc_mu(rho, ne, abundances=None, mass=False): return mu -def get_zdict(z=1.0, zelem={}): +def get_zdict(z=1.0, zelem=None): """ Returns a dictionary of the scale factors of each element relative to solar. @@ -1152,6 +1154,8 @@ def get_zdict(z=1.0, zelem={}): Dictionary with the scale factors of all elements relative to the default solar composition. 
""" + if zelem is None: + zelem = {} assert ( "H" not in zelem.keys() @@ -1524,7 +1528,7 @@ def alt_array_to_Cloudy(alt, quantity, altmax, Rp, nmax, log=True): ) # reset these for potential log-numerical errors Clgridr1 = (Clgridr1[-1] - Clgridr1)[::-1] # sample the first 10 points better since Cloudy messes up with log-space interpolation there - Clgridr2 = np.logspace(-2, np.log10(Clgridr1[9]), num=(nmax - len(Clgridr1))) + Clgridr2 = np.logspace(-2, np.log10(Clgridr1[9]), num=nmax - len(Clgridr1)) Clgridr = np.concatenate((Clgridr2, Clgridr1[10:])) Clgridr[0] = 1e-35 @@ -1670,8 +1674,8 @@ def copyadd_Cloudy_in( cextra=None, hextra=None, othercommands=None, - outfiles=[], - denspecies=[], + outfiles=None, + denspecies=None, selected_den_levels=False, constantT=None, double_tau=False, @@ -1740,6 +1744,10 @@ def copyadd_Cloudy_in( Major Cloudy release version, used only in combination with the denspecies argument, by default "17". """ + if outfiles is None: + outfiles = [] + if denspecies is None: + denspecies = [] if denspecies != []: assert ".den" in outfiles and ".en" in outfiles @@ -1750,7 +1758,7 @@ def copyadd_Cloudy_in( copyfile(oldsimname + ".in", newsimname + ".in") - with open(newsimname + ".in", "a") as f: + with open(newsimname + ".in", "a", encoding="utf-8") as f: if set_thickness: f.write( "\nstop thickness " @@ -1864,8 +1872,8 @@ def write_Cloudy_in( overwrite=False, iterate="convergence", nend=3000, - outfiles=[".ovr", ".cool"], - denspecies=[], + outfiles=None, + denspecies=None, selected_den_levels=False, constantT=None, double_tau=False, @@ -1958,6 +1966,11 @@ def write_Cloudy_in( Major Cloudy release version, used only in combination with the denspecies argument, by default "17". """ + if outfiles is None: + outfiles = [".ovr", ".cool"] + + if denspecies is None: + denspecies = [] assert ( flux_scaling is not None @@ -1972,7 +1985,7 @@ def write_Cloudy_in( if constantT is not None: assert not np.any(tlaw is not None) - with open(simname + ".in", "w") as f: + with open(simname + ".in", "w", encoding="utf-8") as f: if comments is not None: f.write(comments + "\n") if title is not None: @@ -2146,7 +2159,7 @@ def insertden_Cloudy_in( If there are multiple 'save species densities' commands in the Cloudy input file. """ - with open(simname + ".in", "r") as f: + with open(simname + ".in", "r", encoding="utf-8") as f: oldcontent = f.readlines() newcontent = oldcontent @@ -2212,16 +2225,16 @@ def insertden_Cloudy_in( ) newcontent = "".join(newcontent) # turn list into string - with open(simname + ".in", "w") as f: # overwrite the old file + with open(simname + ".in", "w", encoding="utf-8") as f: # overwrite the old file f.write(newcontent) if rerun: run_Cloudy(simname) -####################################### -########### CLASSES ########### -####################################### +# ###################################### +# ########## CLASSES ########### +# ###################################### class Parker: @@ -2260,10 +2273,10 @@ def __init__( self.plname = plname self.T = int(T) - if type(Mdot) == str: + if isinstance(Mdot, str): self.Mdot = Mdot self.Mdotf = float(Mdot) - elif type(Mdot) == float or type(Mdot) == int: + elif isinstance(Mdot, (float, int)): self.Mdot = "%.3f" % Mdot self.Mdotf = Mdot if fH is not None: @@ -2635,7 +2648,7 @@ def __init__( simname, altmax=None, proceedFail=False, - files=["all"], + files="all", planet=None, parker=None, ): @@ -2675,6 +2688,8 @@ def __init__( TypeError If the altmax argument is not numeric. 
""" + if isinstance(files, str): + files = [files] if not isinstance(simname, str): raise TypeError("simname must be set to a string") @@ -2682,12 +2697,9 @@ def __init__( # check the Cloudy version, and if the simulation did not crash. _succesful = False - with open(simname + ".out", "r") as f: + with open(simname + ".out", "r", encoding="utf-8") as f: _outfile_content = f.read() - if "Cloudy exited OK" in _outfile_content: - _succesful = True - else: - _succesful = False + _succesful = "Cloudy exited OK" in _outfile_content if "Cloudy 17" in _outfile_content: self.cloudy_version = "17" @@ -2708,7 +2720,7 @@ def __init__( self.disabled_elements = [] zelem = {} _parker_T, _parker_Mdot, _parker_dir = None, None, None # temp variables - with open(simname + ".in", "r") as f: + with open(simname + ".in", "r", encoding="utf-8") as f: for line in f: if ( line[0] == "#" From f7f37aba8ba13ae30d5289831f2dbe70103ebe34 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 10:27:21 +0100 Subject: [PATCH 31/63] update gitignore, add .flake8 --- .flake8 | 6 ++++++ .gitignore | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 .flake8 diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..327f5fb --- /dev/null +++ b/.flake8 @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 88 +per-file-ignores = + # imported but unused + __init__.py: F401 + diff --git a/.gitignore b/.gitignore index 0a6e545..f1846e5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ .DS_Store __pycache__/ -.ipynb_checkpoints +*.ipynb_checkpoints +*.egg-info +*.swp examples/WASP52b_dT.csv examples/WASP52b_sigmaT.csv examples/WASP52b_nsig_fit.csv From c5e55175221d1acd4e98618fa87bcfffa0451a89 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 10:27:53 +0100 Subject: [PATCH 32/63] add test (initial) --- tests/test_sunbather.py | 52 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 tests/test_sunbather.py diff --git a/tests/test_sunbather.py b/tests/test_sunbather.py new file mode 100644 index 0000000..4d7f7df --- /dev/null +++ b/tests/test_sunbather.py @@ -0,0 +1,52 @@ +""" +Tests for the sunbather package +""" +import os +import pytest + + +def f(): + raise SystemExit(1) + + +def test_import(): + """ + Tests if sunbather can be imported. 
+ """ + try: + import sunbather + except ImportError: + f() + + +def test_projectdirs(): + """ + Make sure projectpath exists + """ + from sunbather import tools + assert os.path.isdir( + tools.projectpath + ), "Please create the projectpath folder on your machine" + + +def test_planetstxt(): + """ + Make sure the planets.txt file exists + """ + from sunbather import tools + assert os.path.isfile( + tools.projectpath + "/planets.txt" + ), "Please make sure the 'planets.txt' file is present in $SUNBATHER_PROJECT_PATH" + + +def test_seds(): + """ + Make sure the SED we need for this test has been copied to Cloudy + """ + from sunbather import tools + assert os.path.isfile( + tools.cloudypath + "/data/SED/eps_Eri_binned.spec" + ), ( + "Please copy /sunbather/stellar_SEDs/eps_Eri_binned.spec " + "into $CLOUDY_PATH/data/SED/" + ) From 28e3ed77a8a582d20ca1ae2d46b07d194780927a Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 10:28:53 +0100 Subject: [PATCH 33/63] ignore cloudy dir to prevent accidental inclusion --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index f1846e5..a87e2cf 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ examples/WASP52b_sigmaT.csv examples/WASP52b_nsig_fit.csv env/ dist/ +src/sunbather/cloudy From e93c4b0c9a05522f7bf15b0c8791a15b9ef632ac Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 10:29:46 +0100 Subject: [PATCH 34/63] move planets.txt --- planets.txt => src/sunbather/data/planets.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename planets.txt => src/sunbather/data/planets.txt (100%) diff --git a/planets.txt b/src/sunbather/data/planets.txt similarity index 100% rename from planets.txt rename to src/sunbather/data/planets.txt From fc9d46468cc3b1b2b6c2aa1c8df16fcbd41cadb6 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 10:53:34 +0100 Subject: [PATCH 35/63] move data files --- src/sunbather/{ => data}/stellar_SEDs/GJ1132_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ1214_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ15A_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ163_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ176_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ436_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ581_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ649_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ667C_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ674_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ676A_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ699_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ729_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ832_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ849_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/GJ876_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HATP12_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HATP26_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HD149026_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HD40307_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HD85512_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/HD97658_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/K4_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/L-678-39_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/L-98-59_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/L-980-5_binned.spec | 0 src/sunbather/{ => 
data}/stellar_SEDs/LHS-2686_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/LP-791-18_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/TOI193_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/TOI2134.spec | 0 src/sunbather/{ => data}/stellar_SEDs/TRAPPIST-1_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/WASP127_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/WASP17_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/WASP43_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/WASP77A_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/eps_Eri_binned.spec | 0 src/sunbather/{ => data}/stellar_SEDs/solar.spec | 0 37 files changed, 0 insertions(+), 0 deletions(-) rename src/sunbather/{ => data}/stellar_SEDs/GJ1132_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ1214_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ15A_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ163_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ176_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ436_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ581_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ649_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ667C_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ674_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ676A_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ699_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ729_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ832_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ849_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/GJ876_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HATP12_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HATP26_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HD149026_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HD40307_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HD85512_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/HD97658_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/K4_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/L-678-39_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/L-98-59_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/L-980-5_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/LHS-2686_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/LP-791-18_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/TOI193_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/TOI2134.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/TRAPPIST-1_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/WASP127_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/WASP17_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/WASP43_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/WASP77A_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/eps_Eri_binned.spec (100%) rename src/sunbather/{ => data}/stellar_SEDs/solar.spec (100%) diff --git a/src/sunbather/stellar_SEDs/GJ1132_binned.spec b/src/sunbather/data/stellar_SEDs/GJ1132_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ1132_binned.spec rename to 
src/sunbather/data/stellar_SEDs/GJ1132_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ1214_binned.spec b/src/sunbather/data/stellar_SEDs/GJ1214_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ1214_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ1214_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ15A_binned.spec b/src/sunbather/data/stellar_SEDs/GJ15A_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ15A_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ15A_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ163_binned.spec b/src/sunbather/data/stellar_SEDs/GJ163_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ163_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ163_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ176_binned.spec b/src/sunbather/data/stellar_SEDs/GJ176_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ176_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ176_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ436_binned.spec b/src/sunbather/data/stellar_SEDs/GJ436_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ436_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ436_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ581_binned.spec b/src/sunbather/data/stellar_SEDs/GJ581_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ581_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ581_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ649_binned.spec b/src/sunbather/data/stellar_SEDs/GJ649_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ649_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ649_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ667C_binned.spec b/src/sunbather/data/stellar_SEDs/GJ667C_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ667C_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ667C_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ674_binned.spec b/src/sunbather/data/stellar_SEDs/GJ674_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ674_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ674_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ676A_binned.spec b/src/sunbather/data/stellar_SEDs/GJ676A_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ676A_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ676A_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ699_binned.spec b/src/sunbather/data/stellar_SEDs/GJ699_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ699_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ699_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ729_binned.spec b/src/sunbather/data/stellar_SEDs/GJ729_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ729_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ729_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ832_binned.spec b/src/sunbather/data/stellar_SEDs/GJ832_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ832_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ832_binned.spec diff --git a/src/sunbather/stellar_SEDs/GJ849_binned.spec b/src/sunbather/data/stellar_SEDs/GJ849_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ849_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ849_binned.spec diff 
--git a/src/sunbather/stellar_SEDs/GJ876_binned.spec b/src/sunbather/data/stellar_SEDs/GJ876_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/GJ876_binned.spec rename to src/sunbather/data/stellar_SEDs/GJ876_binned.spec diff --git a/src/sunbather/stellar_SEDs/HATP12_binned.spec b/src/sunbather/data/stellar_SEDs/HATP12_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HATP12_binned.spec rename to src/sunbather/data/stellar_SEDs/HATP12_binned.spec diff --git a/src/sunbather/stellar_SEDs/HATP26_binned.spec b/src/sunbather/data/stellar_SEDs/HATP26_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HATP26_binned.spec rename to src/sunbather/data/stellar_SEDs/HATP26_binned.spec diff --git a/src/sunbather/stellar_SEDs/HD149026_binned.spec b/src/sunbather/data/stellar_SEDs/HD149026_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HD149026_binned.spec rename to src/sunbather/data/stellar_SEDs/HD149026_binned.spec diff --git a/src/sunbather/stellar_SEDs/HD40307_binned.spec b/src/sunbather/data/stellar_SEDs/HD40307_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HD40307_binned.spec rename to src/sunbather/data/stellar_SEDs/HD40307_binned.spec diff --git a/src/sunbather/stellar_SEDs/HD85512_binned.spec b/src/sunbather/data/stellar_SEDs/HD85512_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HD85512_binned.spec rename to src/sunbather/data/stellar_SEDs/HD85512_binned.spec diff --git a/src/sunbather/stellar_SEDs/HD97658_binned.spec b/src/sunbather/data/stellar_SEDs/HD97658_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/HD97658_binned.spec rename to src/sunbather/data/stellar_SEDs/HD97658_binned.spec diff --git a/src/sunbather/stellar_SEDs/K4_binned.spec b/src/sunbather/data/stellar_SEDs/K4_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/K4_binned.spec rename to src/sunbather/data/stellar_SEDs/K4_binned.spec diff --git a/src/sunbather/stellar_SEDs/L-678-39_binned.spec b/src/sunbather/data/stellar_SEDs/L-678-39_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/L-678-39_binned.spec rename to src/sunbather/data/stellar_SEDs/L-678-39_binned.spec diff --git a/src/sunbather/stellar_SEDs/L-98-59_binned.spec b/src/sunbather/data/stellar_SEDs/L-98-59_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/L-98-59_binned.spec rename to src/sunbather/data/stellar_SEDs/L-98-59_binned.spec diff --git a/src/sunbather/stellar_SEDs/L-980-5_binned.spec b/src/sunbather/data/stellar_SEDs/L-980-5_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/L-980-5_binned.spec rename to src/sunbather/data/stellar_SEDs/L-980-5_binned.spec diff --git a/src/sunbather/stellar_SEDs/LHS-2686_binned.spec b/src/sunbather/data/stellar_SEDs/LHS-2686_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/LHS-2686_binned.spec rename to src/sunbather/data/stellar_SEDs/LHS-2686_binned.spec diff --git a/src/sunbather/stellar_SEDs/LP-791-18_binned.spec b/src/sunbather/data/stellar_SEDs/LP-791-18_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/LP-791-18_binned.spec rename to src/sunbather/data/stellar_SEDs/LP-791-18_binned.spec diff --git a/src/sunbather/stellar_SEDs/TOI193_binned.spec b/src/sunbather/data/stellar_SEDs/TOI193_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/TOI193_binned.spec rename to 
src/sunbather/data/stellar_SEDs/TOI193_binned.spec diff --git a/src/sunbather/stellar_SEDs/TOI2134.spec b/src/sunbather/data/stellar_SEDs/TOI2134.spec similarity index 100% rename from src/sunbather/stellar_SEDs/TOI2134.spec rename to src/sunbather/data/stellar_SEDs/TOI2134.spec diff --git a/src/sunbather/stellar_SEDs/TRAPPIST-1_binned.spec b/src/sunbather/data/stellar_SEDs/TRAPPIST-1_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/TRAPPIST-1_binned.spec rename to src/sunbather/data/stellar_SEDs/TRAPPIST-1_binned.spec diff --git a/src/sunbather/stellar_SEDs/WASP127_binned.spec b/src/sunbather/data/stellar_SEDs/WASP127_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/WASP127_binned.spec rename to src/sunbather/data/stellar_SEDs/WASP127_binned.spec diff --git a/src/sunbather/stellar_SEDs/WASP17_binned.spec b/src/sunbather/data/stellar_SEDs/WASP17_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/WASP17_binned.spec rename to src/sunbather/data/stellar_SEDs/WASP17_binned.spec diff --git a/src/sunbather/stellar_SEDs/WASP43_binned.spec b/src/sunbather/data/stellar_SEDs/WASP43_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/WASP43_binned.spec rename to src/sunbather/data/stellar_SEDs/WASP43_binned.spec diff --git a/src/sunbather/stellar_SEDs/WASP77A_binned.spec b/src/sunbather/data/stellar_SEDs/WASP77A_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/WASP77A_binned.spec rename to src/sunbather/data/stellar_SEDs/WASP77A_binned.spec diff --git a/src/sunbather/stellar_SEDs/eps_Eri_binned.spec b/src/sunbather/data/stellar_SEDs/eps_Eri_binned.spec similarity index 100% rename from src/sunbather/stellar_SEDs/eps_Eri_binned.spec rename to src/sunbather/data/stellar_SEDs/eps_Eri_binned.spec diff --git a/src/sunbather/stellar_SEDs/solar.spec b/src/sunbather/data/stellar_SEDs/solar.spec similarity index 100% rename from src/sunbather/stellar_SEDs/solar.spec rename to src/sunbather/data/stellar_SEDs/solar.spec From bcbc7f18da0dc5c4cc57cc42606297332b09d5f5 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 14:25:33 +0100 Subject: [PATCH 36/63] fix install_cloudy.py --- src/sunbather/install_cloudy.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index 29a4b02..035eeb2 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -1,5 +1,6 @@ import os import pathlib +from urllib.error import HTTPError import urllib.request import tarfile import subprocess @@ -16,7 +17,7 @@ def __init__(self, version="23.01"): self.path = "./" major = version.split(".")[0] self.url = f"https://data.nublado.org/cloudy_releases/c{major}/" - self.filename = "c{version}.tar.gz" + self.filename = f"c{self.version}.tar.gz" self.sunbatherpath = f"{pathlib.Path(__file__).parent.resolve()}" self.cloudypath = f"{self.sunbatherpath}/cloudy/" @@ -27,12 +28,18 @@ def download(self): if not pathlib.Path(self.cloudypath).is_dir(): os.mkdir(self.cloudypath) else: - print("Directory already exists! Skipping download.") - return + print("Directory already exists! 
Checking if download is still needed...") + if os.path.exists(self.cloudypath + self.filename): + print("Already downloaded, skipping ahead.") + return os.chdir(self.cloudypath) - with urllib.request.urlopen(f"{self.url}{self.filename}") as g: - with open(self.filename, "b+w") as f: - f.write(g.read()) + try: + with urllib.request.urlopen(f"{self.url}{self.filename}") as g: + with open(self.filename, "b+w") as f: + f.write(g.read()) + except HTTPError as exc: + print(f"Could not download Cloudy from {self.url}{self.filename}...") + return # Go to the v23 download page and download the "c23.01.tar.gz" file return @@ -69,7 +76,8 @@ def test(self): ).wait() def copy_data(self): - shutil.copy2( - f"{self.sunbatherpath}/stellar_SEDs/*.spec", + shutil.copytree( + f"{self.sunbatherpath}/data/stellar_SEDs/", f"{self.cloudypath}/c{self.version}/data/SED/", + dirs_exist_ok=True ) From 73bd329c7de2ee4c1112014ec1da401748bb0fe6 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 31 Oct 2024 14:26:01 +0100 Subject: [PATCH 37/63] let __init__.py install cloudy if it does not yet exist --- src/sunbather/__init__.py | 40 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py index e69de29..48d42b0 100644 --- a/src/sunbather/__init__.py +++ b/src/sunbather/__init__.py @@ -0,0 +1,40 @@ +""" +Initialize sunbather +""" +import os + +# Ensure Cloudy exists +try: + CLOUDYVERSION = os.environ["CLOUDY_VERSION"] +except KeyError: + CLOUDYVERSION = "23.01" +SUNBATHERPATH = os.path.dirname( + os.path.abspath(__file__) +) # the absolute path where this code lives +try: + # the path where Cloudy is installed + CLOUDYPATH = os.environ["CLOUDY_PATH"] +except KeyError as exc: + CLOUDYPATH = f"{SUNBATHERPATH}/cloudy/c{CLOUDYVERSION}" +if not os.path.exists(f"{CLOUDYPATH}/source/cloudy.exe"): + q = input( + f"Cloudy not found and CLOUDY_PATH is not set. " + f"Do you want to install Cloudy {CLOUDYVERSION} now in the Sunbather path? " + f"(y/n) " + ) + while q.lower() not in ["y", "n"]: + q = input("Please enter 'y' or 'n'") + if q == "n": + raise KeyError( + "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not set. " + "Please set this variable in your .bashrc/.zshrc file " + "to the path where the Cloudy installation is located. " + "Do not point it to the /source/ subfolder, but to the main folder." + ) from exc + from sunbather.install_cloudy import GetCloudy + INSTALLER = GetCloudy(version=CLOUDYVERSION) + INSTALLER.download() + INSTALLER.extract() + INSTALLER.compile() + INSTALLER.test() + INSTALLER.copy_data() From 161077412f7d2d7a955f4b6237568c92e158922b Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 13 Nov 2024 11:14:15 +0100 Subject: [PATCH 38/63] Add docs/wiki --- wiki/FAQ.md | 90 ++++++++++++++++++++++++++++++++++++++++++++ wiki/Glossary.md | 45 ++++++++++++++++++++++ wiki/Home.md | 4 ++ wiki/Installation.md | 28 ++++++++++++++ 4 files changed, 167 insertions(+) create mode 100644 wiki/FAQ.md create mode 100644 wiki/Glossary.md create mode 100644 wiki/Home.md create mode 100644 wiki/Installation.md diff --git a/wiki/FAQ.md b/wiki/FAQ.md new file mode 100644 index 0000000..1619f39 --- /dev/null +++ b/wiki/FAQ.md @@ -0,0 +1,90 @@ +## How do I create Parker wind profiles? + +Add the parameters of the planet/star system to the *$SUNBATHER_PROJECT_PATH/planets.txt* file. Make sure the SED you specify in _planets.txt_ is present in the _$CLOUDY_PATH/data/SED/_ folder in the right format. 
Then run the `construct_parker.py` module in your terminal (use `-help` to see the arguments). + +## How do I choose the composition of the atmosphere? + +The composition usually has to be specified at two stages: + +1. When creating the Parker wind profiles with the `construct_parker.py` module: You can choose to use a pure H/He composition (which uses `p-winds` standalone) by specifying the hydrogen fraction by number with the `-fH` argument. You can also choose an arbitrary composition that includes metal species (which uses `p-winds` and _Cloudy_ in tandem) with the `-z` and `-zelem` arguments. In this case, `-z` specifies a metallicity relative to solar and is thus a scaling factor for all metal species. `-zelem` can be used to scale the abundance of individual elements, for example as `-zelem Na=3 Mg+=10 Fe+2=0.5 K=0`. Note that `-z` and `-zelem` can be used together and are **multiplicative**. In `construct_parker.py`, the composition only affects the wind structure through the mean molecular weight. Therefore, using `-z` and `-zelem` is only needed for (highly) supersolar metallicities; using `-fH 0.9` will usually suffice for a solar composition atmosphere. + +2. When simulating Parker wind profiles with _Cloudy_ with the `convergeT_parker.py` module: You can specify the composition with the `-z` and `-zelem` arguments as explained under point 1. The default is a solar composition, so `-z 1`. If you want to simulate a pure H/He composition with _Cloudy_, you can pass `-z 0` (and specify the He abundance through `-zelem He=...)`. Contrary to point 1 however, in `convergeT_parker.py`, the metal content directly affects the thermal structure and XUV absorption, so we recommend using `-z 1` even when you only make hydrogen and helium spectra. + +## How do I calculate the transmission spectrum? + +Create the Parker wind profile with `construct_parker.py` and simulate it with _Cloudy_ with `convergeT_parker.py` while making sure you specify for which species you want to save output with the `-save_sp` argument (if unsure, just pass `-save_sp all`). Then, load the _Cloudy_ output in your Python script with the `tools.Sim` class (see FAQ below), and use the `RT.FinFout()` function to make the transit spectrum. At minimum, `RT.FinFout()` expects the `Sim` object, a wavelength array, and a list of species for which to calculate the spectrum. See the _sunbather/examples/fit_helium.ipynb_ notebook for an example. + +## How do I simulate one planet with different stellar SEDs? + +The safest way is to add another entry in the *$SUNBATHER_PROJECT_PATH/planets.txt* file, with the same parameter values, but a different "name" and "SEDname" (the "full name" can be the same). + +Alternatively and more prone to mistakes, the `construct_parker.py` and `convergeT_parker.py` modules also has the `-SEDname` argument which allows you to specify a different name of the SED file without making a new entry in the _planets.txt_ file. In this case, it is **strongly advised** to use a different `-pdir` and `-dir` (that references the SED type) as well. + +## Why do I have to specify a `-pdir` and a `-dir`? + +Generally, for one planet you may want to create Parker wind profiles with different temperatures, mass-loss rates, but also different atmospheric compositions. The `-pdir` and `-dir` correspond to actual folders on your machine. Each folder groups together profiles with different $T$ and $\dot{M}$, so the `-pdir` and `-dir` effectively allow you to separate the profiles by composition. 
`-pdir` corresponds to the folder where the Parker wind **structure** (i.e. density and velocity as a function of radius) is stored: *$SUNBATHER_PROJECT_PATH/parker_profiles/planetname/pdir/*, and `-dir` corresponds to the folder where the _Cloudy_ simulations of the profiles are stored: *$SUNBATHER_PROJECT_PATH/sims/1D/planetname/dir/*. + +For example, you can make one `-pdir` which stores a grid of $T-\dot{M}$ profiles at a H/He ratio of 90/10, and another which stores a grid of profiles at a ratio of 99/01. The reason that the `-dir` argument is not the exact same as the `-pdir` argument, is that you may want to create your Parker wind structure profile only once (in one `-pdir` folder) but then run it multiple times with _Cloudy_ while changing the abundance of one particular trace element (in multiple `-dir` folders). The latter would usually not really change the atmospheric structure, but could produce a very different spectral feature. + +## How do I read / plot the output of Cloudy in Python? + +The `Sim` class in the `tools.py` module can be used to read in simulations by giving the full path to the simulation. _Cloudy_ output is separated into different output files, which all have the same name but a different extension. The bulk structure of the atmosphere (including temperature and density) is stored in the ".ovr" file. The radiative heating and cooling rates as a function of radius are stored in the ".heat" and ".cool" files. The densities of different energy levels of different atomic/ionic species are stored in the ".den" file. These files are all read in as a Pandas dataframe and can be accessed as follows: + +``` python +import sys +sys.path.append("/path/to/sunbather/src/") +import tools + +mysimulation = tools.Sim(tools.projectpath+"/sims/1D/planetname/dir/parker_T_Mdot/converged") + +#to get the planet parameters of this simulation: +mysimulation.p.R #radius +mysimulation.p.Mstar #mass of host star + +#to get Cloudy output +mysimulation.ovr.alt #radius grid of the following profiles: +mysimulation.ovr.rho #density profile +mysimulation.ovr.Te #temperature profile +mysimulation.ovr.v #velocity profile +mysimulation.cool.ctot #total radiative cooling +mysimulation.den['H[1]'] #density of ground-state atomic hydrogen +mysimulation.den['He[2]'] #density of metastable helium +mysimulation.den['Fe+2[10]'] #density of the tenth energy level of Fe 2+ +``` + +## Can I run a Parker wind profile through Cloudy while using the isothermal temperature profile? + +Yes, you can pass the `-constantT` flag to `convergeT_parker.py` to simulate the Parker wind profile without converging on a nonisothermal temperature structure. This will save a _Cloudy_ simulation called "constantT" and the folder structure works the same way as for converged simulations: you again need to pass a `-dir` where the simulation is saved, and you can in principle use the same directory that you use for converged profiles (but you will need to pass the `-overwrite` flag if the converged nonisothermal simulation already exists - nothing will be overwritten in this case though!). + +## I forgot to specify for which species I want Cloudy output with the `-save_sp` argument. Do I need to run `convergeT_parker.py` again from scratch? + +You can use the `tools.insertden_Cloudy_in()` function to add species to a (converged) Cloudy simulation file and run it again, without having to go through the temperature convergence scheme again. 
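+
+For a single simulation, the call looks roughly as follows. This is a minimal sketch: the exact keyword names of `tools.insertden_Cloudy_in()` should be checked against its docstring in `tools.py`, and the planet/folder/temperature/mass-loss names are placeholders.
+
+``` python
+from sunbather import tools
+
+# path to an existing converged simulation (placeholder planet, folder, T and Mdot)
+sim = tools.projectpath + "/sims/1D/planetname/dir/parker_T_Mdot/converged"
+
+# add Fe+ to the 'save species densities' output and rerun Cloudy once,
+# without redoing the temperature convergence scheme
+tools.insertden_Cloudy_in(sim, denspecies=["Fe+"], rerun=True)
+```
+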
If you want to do this for a grid of Parker wind models, you will have to set up a loop over the correct filepaths yourself. + +## Can I run an atmospheric profile other than an (isothermal) Parker wind? + +You can "trick" the code into running an arbitrary outflow profile by saving your density and velocity profile in the expected file format in the *$SUNBATHER_PROJECT_PATH/parker_profiles/* folder. For example, you can create a simple density and velocity profile in Python: + +``` python +p = tools.Planet('generic_planet') #make sure you add the parameters in planets.txt + +r = np.linspace(1, 10, num=1000) * p.R #in cm +rho = 1e-15 / np.linspace(1, 10, num=1000)**3 #falls with r^3 +v = 5e4 * np.linspace(1, 10, num=1000) #starts at 0.5km/s, increases linearly with r so that Mdot = 4 pi rho v r^2 is constant +mu = np.repeat(np.nan, 1000) #mu is not used by convergeT_parker.py + +print("log(Mdot) =", np.log10(4*np.pi*r[0]**2*rho[0]*v[0])) + +np.savetxt(tools.projectpath+'/parker_profiles/'+p.name+'/geometric/pprof_'+p.name+'_T=0_M=0.000.txt', np.column_stack((r, rho, v, mu)), delimiter='\t') +``` + +You can then solve the temperature structure of this profile with: `python convergeT_parker.py -plname generic_planet -pdir geometric -dir geometric -T 0 -Mdot 0` + +Similarly, you could for example postprocess the density and velocity profile of an _ATES_ simulation (Caldiroli et al. 2021) with _sunbather_ to produce a transmission spectrum. + +## How do I stop the simulation at the Roche radius / choose the maximum radius? + +The `construct_parker.py` module always creates a profile up until 20 $R_p$ and this can only be changed by editing the source code. + +The `convergeT_parker.py` module by default simulates the atmosphere with *Cloudy* up until 8 $R_p$ and this can be changed with the `-altmax` argument. + +The `RT.FinFout()` function by default makes a transit spectrum based on the full *Cloudy* simulation (so up until 8 $R_p$), but you can give an upper boundary in cm with the `cut_at` argument. For example, if you want to include only material up until the planet's Roche radius when making the transit spectrum, it generally doesn't hurt to leave `construct_parker.py` and `convergeT_parker.py` at the default values, and just pass `cut_at=mysimulation.p.Rroche` to `RT.FinFout()` (assuming `mysimulation` is the `tools.Sim` object of your *Cloudy* simulation). \ No newline at end of file diff --git a/wiki/Glossary.md b/wiki/Glossary.md new file mode 100644 index 0000000..ea5a2a1 --- /dev/null +++ b/wiki/Glossary.md @@ -0,0 +1,45 @@ +This wiki page is a glossary that provides additional information on various modules/classes/functionalities included in _sunbather_. We also refer to "Hazy", which is the official documentation of _Cloudy_ and can be found in your _$CLOUDY_PATH/docs/_ folder. + + +## The `tools.py` module +This module contains many basic functions and classes that are used by the other _sunbather_ modules, and can also be used when postprocessing/analyzing _sunbather_ output. + +This module is not intended to be run from the command line, but rather imported into other scripts in order to use its functions. + + +## The `RT.py` module +This module contains functions to perform radiative transfer calculations of the planet transmission spectrum. + +This module is not intended to be run from the command line, but rather imported into other scripts in order to use its functions. + + +## The `construct_parker.py` module +This module is used to create Parker wind profiles. 
The module can make pure H/He profiles, in which case it is basically a wrapper around the [`p-winds` code](https://github.com/ladsantos/p-winds) (dos Santos et al. 2022). The code can however also make Parker wind profiles for an arbitrary composition (e.g. at a given scaled solar metallicity), which is much more computationally expensive, because it then iteratively runs `p-winds` and _Cloudy_. In this mode, _Cloudy_ is used to obtain the mean molecular weight structure of the atmosphere for the given composition, which `p-winds` uses to calculate the density and velocity structure. + +This module is intended to be run from the command line while supplying arguments. Running `python construct_parker.py --help` will give an explanation of each argument. + +Example use: `python construct_parker.py -plname WASP52b -pdir z_10 -T 8000 -Mdot 11.0 -z 10`. This creates a Parker wind profile for the planet WASP52b (must be defined in *planets.txt*) for a temperature of 8000 K, mass-loss rate of 10^11 g s-1 and a 10x solar metallicity composition, and saves the atmospheric structure as a .txt file in *$SUNBATHER_PROJECT_PATH/parker_profiles/WASP52b/z_10/*. + + +## The `convergeT_parker.py` module +This module is used to run Parker wind profiles through _Cloudy_ to (iteratively) solve for a non-isothermal temperature structure. Additionally, the "converged" simulation can then be postprocessed with functionality of the `RT.py` module in order to make transmission spectra. This module is basically a convenience wrapper which sets up the necessary folder structure and input arguments for the `solveT.py` module that actually performs the iterative scheme described in Linssen et al. (2022). + +This module is intended to be run from the command line while supplying arguments. Running `python convergeT_parker.py --help` will give an explanation of each argument. + +Example use: `python convergeT_parker.py -plname HATP11b -pdir fH_0.99 -dir fiducial -T 5000 10000 200 -Mdot 9.0 11.0 0.1 -zelem He=0.1 -cores 4 -save_sp H He Ca+`. This simulates Parker wind models with Cloudy for the planet HATP11b (must be defined in *planets.txt*) for a grid of temperatures between 5000 K and 10000 K in steps of 200 K, mass-loss rates between 10^9 g s-1 and 10^11 g s-1 in steps of 0.1 dex. It looks for the density and velocity structure of these models in the folder *$SUNBATHER_PROJECT_PATH/parker_profiles/HATP11b/fH_0.99/* (so these models have to be created first in that folder using `construct_parker.py`) and saves the _Cloudy_ simulations in the folder *$SUNBATHER_PROJECT_PATH/sims/1D/HATP11b/fiducial/*. It scales the abundance of helium (which is solar by default in _Cloudy_, i.e. ~10% by number) by a factor 0.1 so that it becomes 1% by number. 4 different calculations of the $T$-$\dot{M}$-grid are done in parallel, and the atomic hydrogen, helium and singly ionized calcium output are saved by _Cloudy_, so that afterwards we can use `RT.FinFout()` to make Halpha, metastable helium and Ca II infrared triplet spectra. + + +## The `solveT.py` module +This module contains the iterative scheme described in Linssen et al. (2022) to solve for a non-isothermal temperature structure of a given atmospheric profile. It is called by `convergeT_parker.py`. As long as you're simulating Parker wind profiles (and not some other custom profile), you should be fine using `convergeT_parker.py` instead of this module. 
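+
+Once a profile has been converged with the modules above, the saved output can be post-processed into a transmission spectrum, as described in the FAQ. A minimal sketch follows; the planet, folder and $T$/$\dot{M}$ values are placeholders, package-style imports are assumed, and the exact return values of `RT.FinFout()` should be checked in `RT.py`.
+
+``` python
+import numpy as np
+from sunbather import tools, RT
+
+# load a converged Cloudy simulation of a Parker wind profile
+sim = tools.Sim(tools.projectpath + "/sims/1D/HATP11b/fiducial/parker_T_Mdot/converged")
+
+# wavelength grid (in Angstrom) around the metastable helium triplet
+wavs = np.linspace(10830, 10836, num=300)
+
+# transit spectrum of the helium lines (see RT.py for the full set of return values)
+result = RT.FinFout(sim, wavs, ["He"])
+```
+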
+ + +## The *\$SUNBATHER_PROJECT_PATH* (or internally: *tools.projectpath*) directory +This is the directory on your machine where all Parker wind profiles and _Cloudy_ simulations are saved. You can choose any location and name you like, as long as it doesn't contain any spaces. The full path to this directory must be set as your `$SUNBATHER_PROJECT_PATH` environmental variable (see installation instructions). The reason _sunbather_ uses a project path is to keep all output from simulations (i.e. user-specific files) separate from the source code. + + +## The _planets.txt_ file +This file stores the bulk parameters of the planets that are simulated. A template of this file is provided in the _sunbather_ base directory, but you must copy it to your _$SUNBATHER_PROJECT_PATH_ in order for it to work. Every time you want to simulate a new planet/star system, you must add a line to this file with its parameters. You can add comments at the end of the line with a # (for example referencing where the values are from). The first column specifies the "name", which is a tag for this system that cannot contain spaces and is used for the `-plname` argument of `construct_parker.py` and `convergeT_parker.py`, as well as for the `tools.Planet` class to access the system parameters in Python. The second column specifies the "full name", which can be any string you like and can be used e.g. when plotting results. The third column is the radius of the planet in Jupiter radii (7.1492e9 cm). The fourth column is the radius of the star in solar radii (6.9634e10 cm). The fifth column is the semi-major axis of the system in AU (1.49597871e13 cm). The sixth column is the mass of the planet in Jupiter masses (1.898e30 g). The seventh column is the mass of the star in solar masses (1.9891e33 g). The eighth column is the transit impact parameter (dimensionless, 0 is across the center of the stellar disk, 1 is grazing the stellar limb). The ninth column is the name of the stellar SED - see "Stellar SED handling" below in this glossary. + + +## Stellar SED handling +When running _sunbather_, the spectral energy distribution (SED) of the host star has to be available to _Cloudy_, which looks for it in its _$CLOUDY_PATH/data/SED/_ folder. Therefore, every SED you want to use has be **copied to that folder, and requires a specific format**: the first column must be wavelengths in units of Å and the second column must be the $\lambda F_{\lambda} = \nu F_{\nu}$ flux **at a distance of 1 AU** in units of erg s-1 cm-2. Additionally, on the first line, after the first flux value, the following keywords must appear: "units angstrom nuFnu". In the */sunbather/stellar_SEDs/* folder, we have provided a few example SEDs in the correct format. Even though _Cloudy_ in principle supports other units, _sunbather_ doesn't, so please stick to the units as described. Normalization of the flux to the planet orbital distance is done automatically by *sunbather* based on the semi-major axis value given in the *planets.txt* file. diff --git a/wiki/Home.md b/wiki/Home.md new file mode 100644 index 0000000..1605b5d --- /dev/null +++ b/wiki/Home.md @@ -0,0 +1,4 @@ +Welcome to the _sunbather_ wiki! On the right side, you'll find the table of contents. + +Logo + text + diff --git a/wiki/Installation.md b/wiki/Installation.md new file mode 100644 index 0000000..d03c940 --- /dev/null +++ b/wiki/Installation.md @@ -0,0 +1,28 @@ +# Installing _Cloudy_ + +_sunbather_ has been developed and tested with _Cloudy v17.02_ and _v23.01_. 
Newer versions of _Cloudy_ are likely also compatible with _sunbather_, but this has not been thoroughly tested. Therefore, we currently recommend using _v23.01_. Complete _Cloudy_ download and installation instructions can be found [here](https://gitlab.nublado.org/cloudy/cloudy/-/wikis/home). In short, for most Unix systems, the steps are as follows: + +1. Go to the [v23 download page](https://data.nublado.org/cloudy_releases/c23/) and download the "c23.01.tar.gz" file (or go to the [v17 download page](https://data.nublado.org/cloudy_releases/c17/old/) and download the "c17.02.tar.gz" file). +2. Extract it in a location where you want to install _Cloudy_. +3. `cd` into the _/c23.01/source/_ or _/c17.02/source/_ folder and compile the code by running `make`. +4. Quickly test the _Cloudy_ installation: in the source folder, run `./cloudy.exe`, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. + +If you have trouble installing _Cloudy_, we refer to the download instructions linked above, as well as the _Cloudy_ [help forum](https://cloudyastrophysics.groups.io/g/Main/topics). + +# Installing _sunbather_ + +1. Clone _sunbather_ from Github. The code runs entirely in Python. It was developed using Python 3.9.0 and the following packages are prerequisites: `numpy (v1.24.3), pandas (v1.1.4), matplotlib (v3.7.1), scipy (v1.8.0), astropy (v5.3), p-winds (v1.3.4)`. _sunbather_ also succesfully ran with the newest versions (as of Sep. 18, 2023) of these packages. We have however not yet thoroughly tested all of its functionality with these newer versions, so we currently cannot guarantee that it works, but feel free to try! In any case, we recommend making a Python [virtual environment](https://realpython.com/python-virtual-environments-a-primer/) to run _sunbather_ in. +2. Create a directory anywhere on your machine where the code will save all models/simulations/etc. This will be the "project" folder, and you can give it any name you like. This is to keep the output of _sunbather_ separate from the _sunbather_ source code. +3. Set an environmental variable `$CLOUDY_PATH` to your _Cloudy_ installation base directory, and set `$SUNBATHER_PROJECT_PATH` to the "project" folder. We recommend setting these in your _~/.bashrc_ or _~/.zshrc_ file: + ``` + export CLOUDY_PATH="/full/path/to/c23.01/" + export SUNBATHER_PROJECT_PATH="/full/path/to/project/folder/" + ``` +4. Copy the */sunbather/planets.txt* file to your project folder. +5. Copy the stellar spectra from _/sunbather/stellar_SEDs/_ to _$CLOUDY_PATH/data/SED/_ . These include the [MUSCLES](https://archive.stsci.edu/prepds/muscles/) spectra. +6. Test your _sunbather_ installation: run _/sunbather/tests/test.py_, which should print "Success". If the test fails, feel free to open an issue or contact d.c.linssen@uva.nl with your error. + +# Getting started + +1. To get familiar with _sunbather_, we recommend you go through the Jupyter notebooks in the _/sunbather/examples/_ folder, where example use cases (such as creating atmospheric profiles, calculating transmission spectra and fitting observational data) are worked out and explained. +2. For more details on how to use the code, check out the Glossary and FAQ pages on this wiki. We specifically recommend you read the glossary sections "The _planets.txt_ file" and "Stellar SED handling". 
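+
+As a quick manual check of your environment you can mirror the bundled tests; this sketch assumes the package-style import used by the test suite (with the `sys.path` approach from the FAQ, use `import tools` instead), and the eps_Eri SED is simply the file the tests look for.
+
+``` python
+import os
+from sunbather import tools
+
+# the project folder and planets.txt must exist (steps 2-4 above)
+assert os.path.isdir(tools.projectpath)
+assert os.path.isfile(tools.projectpath + "/planets.txt")
+
+# the SEDs you plan to use must be present in Cloudy's SED folder (step 5)
+assert os.path.isfile(tools.cloudypath + "/data/SED/eps_Eri_binned.spec")
+```
+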
\ No newline at end of file From 5d238196f6e6114cb6cf3c43a4426936741a23cb Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 13 Nov 2024 11:47:26 +0100 Subject: [PATCH 39/63] add MANIFEST.in --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) create mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..0629df0 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +recursive-include src/sunbather * From d36e17aeefdfff44a8cc446ab0f10f504a0a87a2 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 14 Nov 2024 13:41:04 +0100 Subject: [PATCH 40/63] update __init__.py --- src/sunbather/__init__.py | 103 ++++++++++++++++++++++++++------------ 1 file changed, 70 insertions(+), 33 deletions(-) diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py index 48d42b0..38da095 100644 --- a/src/sunbather/__init__.py +++ b/src/sunbather/__init__.py @@ -2,39 +2,76 @@ Initialize sunbather """ import os +import pathlib +import shutil -# Ensure Cloudy exists -try: - CLOUDYVERSION = os.environ["CLOUDY_VERSION"] -except KeyError: - CLOUDYVERSION = "23.01" -SUNBATHERPATH = os.path.dirname( - os.path.abspath(__file__) -) # the absolute path where this code lives -try: - # the path where Cloudy is installed - CLOUDYPATH = os.environ["CLOUDY_PATH"] -except KeyError as exc: - CLOUDYPATH = f"{SUNBATHERPATH}/cloudy/c{CLOUDYVERSION}" -if not os.path.exists(f"{CLOUDYPATH}/source/cloudy.exe"): - q = input( - f"Cloudy not found and CLOUDY_PATH is not set. " - f"Do you want to install Cloudy {CLOUDYVERSION} now in the Sunbather path? " - f"(y/n) " - ) +def install_cloudy(): + """ + Checks if Cloudy executable exists, and if not, prompts to download and build it. + """ + try: + CLOUDYVERSION = os.environ["CLOUDY_VERSION"] + except KeyError: + CLOUDYVERSION = "23.01" + SUNBATHERPATH = os.path.dirname( + os.path.abspath(__file__) + ) # the absolute path where this code lives + try: + # the path where Cloudy is installed + CLOUDYPATH = os.environ["CLOUDY_PATH"] + except KeyError as exc: + CLOUDYPATH = f"{SUNBATHERPATH}/cloudy/c{CLOUDYVERSION}" + if not os.path.exists(f"{CLOUDYPATH}/source/cloudy.exe"): + q = input( + f"Cloudy not found and CLOUDY_PATH is not set. " + f"Do you want to install Cloudy {CLOUDYVERSION} now in the Sunbather path? " + f"(y/n) " + ) + while q.lower() not in ["y", "n"]: + q = input("Please enter 'y' or 'n'") + if q == "n": + raise KeyError( + "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not set. " + "Please set this variable in your .bashrc/.zshrc file " + "to the path where the Cloudy installation is located. " + "Do not point it to the /source/ subfolder, but to the main folder." + ) from exc + from sunbather.install_cloudy import GetCloudy + INSTALLER = GetCloudy(version=CLOUDYVERSION) + INSTALLER.download() + INSTALLER.extract() + INSTALLER.compile() + INSTALLER.test() + INSTALLER.copy_data() + + +def make_workingdir(): + """ + Checks if the SUNBATHER_PROJECT_PATH environment variable has been set and + asks for input if not. Also asks to copy the default files to the working dir. + """ + try: + workingdir = os.environ["SUNBATHER_PROJECT_PATH"] + except KeyError: + workingdir = input("Enter the working dir for Sunbather: ") + q = input(f"Copy default files to the working dir ({workingdir})? (y/n) ") while q.lower() not in ["y", "n"]: - q = input("Please enter 'y' or 'n'") + q = input("Please enter 'y' or 'n': ") if q == "n": - raise KeyError( - "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not set. 
" - "Please set this variable in your .bashrc/.zshrc file " - "to the path where the Cloudy installation is located. " - "Do not point it to the /source/ subfolder, but to the main folder." - ) from exc - from sunbather.install_cloudy import GetCloudy - INSTALLER = GetCloudy(version=CLOUDYVERSION) - INSTALLER.download() - INSTALLER.extract() - INSTALLER.compile() - INSTALLER.test() - INSTALLER.copy_data() + return + + sunbatherpath = f"{pathlib.Path(__file__).parent.resolve()}" + shutil.copytree( + sunbatherpath + "/data/workingdir", + workingdir, + ) + + +def firstrun(): + """ + Runs 'install_cloudy()' and 'make_workingdir()'. + """ + install_cloudy() + make_workingdir() + +firstrun() From c1266ea372a70c93441f3e2e8d213ba23df91f53 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 14 Nov 2024 13:41:32 +0100 Subject: [PATCH 41/63] Add module docstring --- src/sunbather/construct_parker.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index d04154d..807b1fd 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -1,3 +1,6 @@ +""" +Functions to construct parker +""" # other imports import os import time From 178dac268404861531daba23ba56b5e9d94bb4b2 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 14 Nov 2024 13:41:53 +0100 Subject: [PATCH 42/63] update tools.py --- src/sunbather/tools.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index a2df647..1794307 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -1,3 +1,6 @@ +""" +Tools for sunbather +""" import os import glob import re @@ -38,18 +41,21 @@ "SUNBATHER_PROJECT_PATH" ] # the path where you save your simulations and do analysis except KeyError as exc: - raise KeyError( - "The environment variable 'SUNBATHER_PROJECT_PATH' is not set. " - "Please set this variable in your .bashrc/.zshrc file " - "to the path where you want the sunbather models to be saved. " - "Make sure that the 'planets.txt' file is present in that folder." - ) from exc + projectpath = "./" + if not os.path.exists(f"{projectpath}/planets.txt"): + raise FileNotFoundError( + "The environment variable 'SUNBATHER_PROJECT_PATH' is not set, and no " + "planets.txt file found in current directory. Please set the " + "'SUNBATHER_PROJECT_PATH' variable in your .bashrc/.zshrc file " + "to the path where you want the sunbather models to be saved, " + "and make sure that the 'planets.txt' file is present in that folder." + ) from exc -try: +if os.path.exists(f"{projectpath}/planets.txt"): # read planet parameters globally instead of in the Planets class (so we do it only # once) planets_file = pd.read_csv( - projectpath + "/planets.txt", + f"{projectpath}/planets.txt", dtype={ "name": str, "full name": str, @@ -63,12 +69,12 @@ }, comment="#", ) -except FileNotFoundError as exc: +else: raise FileNotFoundError( "The $SUNBATHER_PROJECT_PATH/planets.txt file cannot be found. " "Please check if your $SUNBATHER_PROJECT_PATH actually exists on your machine. " "Then, copy /sunbather/planets.txt to your project path." 
- ) from exc + ) # define constants: c = 2.99792458e10 # cm/s From e1acd702d0bef0bcd947e394672a53fc720e9980 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 14 Nov 2024 13:45:52 +0100 Subject: [PATCH 43/63] Add initial Sphinx API documentation --- docs/Makefile | 20 ++++++++++++++++++++ docs/api.rst | 12 ++++++++++++ docs/conf.py | 36 ++++++++++++++++++++++++++++++++++++ docs/index.rst | 18 ++++++++++++++++++ docs/requirements.txt | 2 ++ 5 files changed, 88 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/api.rst create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/requirements.txt diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 0000000..07bdabf --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,12 @@ +API +=== + +.. autosummary:: + :toctree: generated + + sunbather + sunbather.construct_parker + sunbather.convergeT_parker + sunbather.install_cloudy + sunbather.solveT + sunbather.tools diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..8ed865e --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,36 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +import sys +from pathlib import Path + +sys.path.insert(0, str(Path('..', 'src').resolve())) + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "sunbather" +copyright = "2024, Dion Linssen" +author = "Dion Linssen" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "myst_parser", +] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +# html_theme = 'alabaster' +html_theme = "sphinx_rtd_theme" +html_static_path = ["_static"] diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..cde744e --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,18 @@ +.. sunbather documentation master file, created by + sphinx-quickstart on Wed Nov 13 11:48:00 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +sunbather documentation +======================= + +Add your content using ``reStructuredText`` syntax. 
See the +`reStructuredText `_ +documentation for details. + + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + api diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..bfbfd38 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +Sphinx==8.1.3 +sphinx-rtd-theme==3.0.1 From a8281f7e09e9ece70164e6754c1525ceb52b6454 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 12:08:50 +0100 Subject: [PATCH 44/63] Update conf.py for Read the Docs --- docs/conf.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index 8ed865e..73ac00e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,6 +19,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ + "sphinx_rtd_theme", "sphinx.ext.autodoc", "sphinx.ext.autosummary", "myst_parser", @@ -34,3 +35,7 @@ # html_theme = 'alabaster' html_theme = "sphinx_rtd_theme" html_static_path = ["_static"] + + +# Configuration for Read the Docs +html_baseurl = os.environ.get("READTHEDOCS_CANONICAL_URL", "/") From fb0c0fa61a6e9c1ef4fecaf3db7507e82ec8f1d3 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 12:11:09 +0100 Subject: [PATCH 45/63] Create .readthedocs.yaml --- .readthedocs.yaml | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..1235053 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,35 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.8" + # You can also specify other tool versions: + # nodejs: "20" + # rust: "1.70" + # golang: "1.20" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs + # builder: "dirhtml" + # Fail on all warnings to avoid broken references + # fail_on_warning: true + +# Optionally build your docs in additional formats such as PDF and ePub +# formats: +# - pdf +# - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +# python: +# install: +# - requirements: docs/requirements.txt From 6dd07d3172e66a7c39fe231ecd5f8ff7dd446cc9 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 13:53:25 +0100 Subject: [PATCH 46/63] Update conf.py --- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index 73ac00e..827410d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -4,6 +4,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html import sys +import os from pathlib import Path sys.path.insert(0, str(Path('..', 'src').resolve())) From 03e38fc6cabf7f3bf5c45dc5edff88572e9afa34 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 13:55:38 +0100 Subject: [PATCH 47/63] Update .readthedocs.yaml --- .readthedocs.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 1235053..5216efc 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -30,6 +30,6 @@ sphinx: # Optional but recommended, declare the 
Python requirements required # to build your documentation # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html -# python: -# install: -# - requirements: docs/requirements.txt +python: + install: + - requirements: docs/requirements.txt From c0ac2441aa00a751d594fb356fbd0f999f297f65 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 13:56:52 +0100 Subject: [PATCH 48/63] Update requirements.txt --- docs/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index bfbfd38..6b104fb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -Sphinx==8.1.3 -sphinx-rtd-theme==3.0.1 +Sphinx +sphinx-rtd-theme From 6b639064c8d58ca9b70bf7432afcbfac45ac45f4 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 14:29:16 +0100 Subject: [PATCH 49/63] Update conf.py --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 827410d..2a6d44a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -23,7 +23,7 @@ "sphinx_rtd_theme", "sphinx.ext.autodoc", "sphinx.ext.autosummary", - "myst_parser", + # "myst_parser", ] templates_path = ["_templates"] From c1ff57bdef9e2244d3e6521e22807f6273d3cfdf Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 14:56:11 +0100 Subject: [PATCH 50/63] Do not automatically ask to install Cloudy on first run --- src/sunbather/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py index 38da095..6a125ca 100644 --- a/src/sunbather/__init__.py +++ b/src/sunbather/__init__.py @@ -73,5 +73,3 @@ def firstrun(): """ install_cloudy() make_workingdir() - -firstrun() From 5f70718fd6a30039be772ec73ec7d4ef55abc941 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 14:59:58 +0100 Subject: [PATCH 51/63] prevent name clash, import tools --- src/sunbather/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py index 6a125ca..b05c199 100644 --- a/src/sunbather/__init__.py +++ b/src/sunbather/__init__.py @@ -5,7 +5,9 @@ import pathlib import shutil -def install_cloudy(): +import sunbather.tools + +def check_cloudy(): """ Checks if Cloudy executable exists, and if not, prompts to download and build it. """ @@ -69,7 +71,7 @@ def make_workingdir(): def firstrun(): """ - Runs 'install_cloudy()' and 'make_workingdir()'. + Runs 'check_cloudy()' and 'make_workingdir()'. 
""" - install_cloudy() + check_cloudy() make_workingdir() From 3215ba5618b9dcbd690bd45fa235419cd9aa5b70 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 15:06:53 +0100 Subject: [PATCH 52/63] update doc requirements --- docs/requirements.in | 2 + docs/requirements.txt | 119 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 docs/requirements.in diff --git a/docs/requirements.in b/docs/requirements.in new file mode 100644 index 0000000..256bba1 --- /dev/null +++ b/docs/requirements.in @@ -0,0 +1,2 @@ +sphinx == 7.1.2 +sphinx-rtd-theme == 3.0.2 diff --git a/docs/requirements.txt b/docs/requirements.txt index 6b104fb..e4470a3 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,117 @@ -Sphinx -sphinx-rtd-theme +# +# This file is autogenerated by pip-compile with Python 3.12 +# by the following command: +# +# pip-compile --output-file=requirements.txt --strip-extras ../pyproject.toml requirements.in +# +alabaster==0.7.16 + # via sphinx +astropy==6.1.6 + # via + # p-winds + # sunbather (../pyproject.toml) +astropy-iers-data==0.2024.11.18.0.35.2 + # via astropy +babel==2.16.0 + # via sphinx +certifi==2024.8.30 + # via requests +charset-normalizer==3.4.0 + # via requests +contourpy==1.3.1 + # via matplotlib +cycler==0.12.1 + # via matplotlib +docutils==0.20.1 + # via + # sphinx + # sphinx-rtd-theme +flatstar==0.2.1a0 + # via p-winds +fonttools==4.55.0 + # via matplotlib +idna==3.10 + # via requests +imagesize==1.4.1 + # via sphinx +jinja2==3.1.4 + # via sphinx +kiwisolver==1.4.7 + # via matplotlib +markupsafe==3.0.2 + # via jinja2 +matplotlib==3.9.2 + # via sunbather (../pyproject.toml) +numpy==2.1.3 + # via + # astropy + # contourpy + # flatstar + # matplotlib + # p-winds + # pandas + # pyerfa + # scipy + # sunbather (../pyproject.toml) +p-winds==1.4.7 + # via sunbather (../pyproject.toml) +packaging==24.2 + # via + # astropy + # matplotlib + # sphinx +pandas==2.2.3 + # via sunbather (../pyproject.toml) +pillow==11.0.0 + # via + # flatstar + # matplotlib +pyerfa==2.0.1.5 + # via astropy +pygments==2.18.0 + # via sphinx +pyparsing==3.2.0 + # via matplotlib +python-dateutil==2.9.0.post0 + # via + # matplotlib + # pandas +pytz==2024.2 + # via pandas +pyyaml==6.0.2 + # via astropy +requests==2.32.3 + # via sphinx +scipy==1.13.1 + # via + # p-winds + # sunbather (../pyproject.toml) +six==1.16.0 + # via python-dateutil +snowballstemmer==2.2.0 + # via sphinx +sphinx==7.1.2 + # via + # -r requirements.in + # sphinx-rtd-theme + # sphinxcontrib-jquery +sphinx-rtd-theme==3.0.2 + # via -r requirements.in +sphinxcontrib-applehelp==2.0.0 + # via sphinx +sphinxcontrib-devhelp==2.0.0 + # via sphinx +sphinxcontrib-htmlhelp==2.1.0 + # via sphinx +sphinxcontrib-jquery==4.1 + # via sphinx-rtd-theme +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==2.0.0 + # via sphinx +sphinxcontrib-serializinghtml==2.0.0 + # via sphinx +tzdata==2024.2 + # via pandas +urllib3==2.2.3 + # via requests From 3c64651f527fc8dc7f87b182138a0630382d6807 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 15:11:26 +0100 Subject: [PATCH 53/63] make the docs with Python 3.12 --- .readthedocs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 5216efc..31dbf0d 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -8,7 +8,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.8" + python: "3.12" # You can also specify other tool versions: # 
nodejs: "20" # rust: "1.70" From 24262c90e6ed5143ad8ff395881e63b32c6ebffe Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 15:50:59 +0100 Subject: [PATCH 54/63] Fewer globals, use getters instead --- src/sunbather/tools.py | 123 +++++++++++++++++++++-------------------- 1 file changed, 62 insertions(+), 61 deletions(-) diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index 1794307..e402f16 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -10,6 +10,7 @@ import numpy as np import pandas as pd import matplotlib.pyplot as plt + from scipy.interpolate import interp1d from scipy.signal import savgol_filter import scipy.stats as sps @@ -23,53 +24,62 @@ sunbatherpath = os.path.dirname( os.path.abspath(__file__) ) # the absolute path where this code lives -try: - # the path where Cloudy is installed - cloudypath = os.environ["CLOUDY_PATH"] -except KeyError as exc: - cloudypath = f"{sunbatherpath}/cloudy/c23.01" - if not os.path.exists(f"{cloudypath}/source/cloudy.exe"): - raise KeyError( - "The environment variable 'CLOUDY_PATH' is not set. " - "Please set this variable in your .bashrc/.zshrc file " - "to the path where the Cloudy installation is located. " - "Do not point it to the /source/ subfolder, but to the main folder." - ) from exc - -try: - projectpath = os.environ[ - "SUNBATHER_PROJECT_PATH" - ] # the path where you save your simulations and do analysis -except KeyError as exc: - projectpath = "./" - if not os.path.exists(f"{projectpath}/planets.txt"): - raise FileNotFoundError( - "The environment variable 'SUNBATHER_PROJECT_PATH' is not set, and no " - "planets.txt file found in current directory. Please set the " - "'SUNBATHER_PROJECT_PATH' variable in your .bashrc/.zshrc file " - "to the path where you want the sunbather models to be saved, " - "and make sure that the 'planets.txt' file is present in that folder." - ) from exc - -if os.path.exists(f"{projectpath}/planets.txt"): - # read planet parameters globally instead of in the Planets class (so we do it only - # once) - planets_file = pd.read_csv( - f"{projectpath}/planets.txt", - dtype={ - "name": str, - "full name": str, - "R [RJ]": np.float64, - "Rstar [Rsun]": np.float64, - "a [AU]": np.float64, - "M [MJ]": np.float64, - "Mstar [Msun]": np.float64, - "transit impact parameter": np.float64, - "SEDname": str, - }, - comment="#", - ) -else: + + +def get_cloudy_path(): + try: + # the path where Cloudy is installed + cloudypath = os.environ["CLOUDY_PATH"] + except KeyError as exc: + cloudypath = f"{sunbatherpath}/cloudy/c23.01" + if not os.path.exists(f"{cloudypath}/source/cloudy.exe"): + raise KeyError( + "The environment variable 'CLOUDY_PATH' is not set. " + "Please set this variable in your .bashrc/.zshrc file " + "to the path where the Cloudy installation is located. " + "Do not point it to the /source/ subfolder, but to the main folder." + ) from exc + return cloudypath + + +def get_sunbather_project_path(): + try: + projectpath = os.environ[ + "SUNBATHER_PROJECT_PATH" + ] # the path where you save your simulations and do analysis + except KeyError as exc: + projectpath = "./" + if not os.path.exists(f"{projectpath}/planets.txt"): + raise FileNotFoundError( + "The environment variable 'SUNBATHER_PROJECT_PATH' is not set, and no " + "planets.txt file found in current directory. 
Please set the " + "'SUNBATHER_PROJECT_PATH' variable in your .bashrc/.zshrc file " + "to the path where you want the sunbather models to be saved, " + "and make sure that the 'planets.txt' file is present in that folder." + ) from exc + return projectpath + + +def get_planets_file(): + if os.path.exists(f"{get_sunbather_project_path()}/planets.txt"): + # read planet parameters globally instead of in the Planets class (so we do it only + # once) + planets_file = pd.read_csv( + f"{get_sunbather_project_path()}/planets.txt", + dtype={ + "name": str, + "full name": str, + "R [RJ]": np.float64, + "Rstar [Rsun]": np.float64, + "a [AU]": np.float64, + "M [MJ]": np.float64, + "Mstar [Msun]": np.float64, + "transit impact parameter": np.float64, + "SEDname": str, + }, + comment="#", + ) + return planets_file raise FileNotFoundError( "The $SUNBATHER_PROJECT_PATH/planets.txt file cannot be found. " "Please check if your $SUNBATHER_PROJECT_PATH actually exists on your machine. " @@ -966,7 +976,7 @@ def get_SED_norm_1AU(SEDname): Energy where the monochromatic flux of the nuFnu output variable is specified. """ - with open(cloudypath + "/data/SED/" + SEDname, "r", encoding="utf-8") as f: + with open(f"{get_cloudy_path()}/data/SED/{SEDname}", "r", encoding="utf-8") as f: for line in f: if not line.startswith("#"): # skip through the comments at the top assert ("angstrom" in line) or ("Angstrom" in line) # verify the units @@ -1072,18 +1082,8 @@ def read_parker(plname, T, Mdot, pdir, filename=None): Mdot = f"{float(Mdot):.3f}" T = str(int(T)) filename = ( - projectpath - + "/parker_profiles/" - + plname - + "/" - + pdir - + "/pprof_" - + plname - + "_T=" - + T - + "_M=" - + Mdot - + ".txt" + f"{get_sunbather_project_path()}/parker_profiles/{plname}/" + f"{pdir}/pprof_{plname}_T={T}_M={Mdot}.txt" ) pprof = pd.read_table( @@ -1630,7 +1630,7 @@ def run_Cloudy(filename, folder=None): filename = filename[:-3] # filename should not contain the extension os.system( - "cd " + folder + " && " + cloudypath + "/source/cloudy.exe -p " + filename + f"cd {folder} && {get_cloudy_path()}/source/cloudy.exe -p {filename}" ) @@ -2340,6 +2340,7 @@ def __init__( """ # check if we can fetch planet parameters from planets.txt: + planets_file = get_planets_file() if ( name in planets_file["name"].values or name in planets_file["full name"].values From 04f41caf5e93aa96397ca51e4108a28ece8eaa0a Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 15:57:00 +0100 Subject: [PATCH 55/63] use getter rather than global --- src/sunbather/convergeT_parker.py | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index ebdc0c3..3516f05 100644 --- a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -174,7 +174,8 @@ def run_s( planet.set_var(SEDname=SEDname) # set up the folder structure - pathTstruc = tools.projectpath + "/sims/1D/" + planet.name + "/" + workingdir + "/" + projectpath = tools.get_sunbather_project_path() + pathTstruc = projectpath + "/sims/1D/" + planet.name + "/" + workingdir + "/" path = pathTstruc + "parker_" + T + "_" + Mdot + "/" # check if this parker profile exists in the given pdir @@ -183,7 +184,7 @@ def run_s( except FileNotFoundError: print( "This parker profile does not exist:", - tools.projectpath + projectpath + "/parker_profiles/" + planet.name + "/" @@ -701,16 +702,17 @@ def __call__(self, parser, namespace, values, option_string=None): ) # set up the folder 
structure if it doesn't exist yet - if not os.path.isdir(tools.projectpath + "/sims/"): - os.mkdir(tools.projectpath + "/sims") - if not os.path.isdir(tools.projectpath + "/sims/1D/"): - os.mkdir(tools.projectpath + "/sims/1D") - if not os.path.isdir(tools.projectpath + "/sims/1D/" + args.plname + "/"): - os.mkdir(tools.projectpath + "/sims/1D/" + args.plname) + projectpath = tools.get_sunbather_project_path() + if not os.path.isdir(projectpath + "/sims/"): + os.mkdir(projectpath + "/sims") + if not os.path.isdir(projectpath + "/sims/1D/"): + os.mkdir(projectpath + "/sims/1D") + if not os.path.isdir(projectpath + "/sims/1D/" + args.plname + "/"): + os.mkdir(projectpath + "/sims/1D/" + args.plname) if not os.path.isdir( - tools.projectpath + "/sims/1D/" + args.plname + "/" + args.dir + "/" + projectpath + "/sims/1D/" + args.plname + "/" + args.dir + "/" ): - os.mkdir(tools.projectpath + "/sims/1D/" + args.plname + "/" + args.dir) + os.mkdir(projectpath + "/sims/1D/" + args.plname + "/" + args.dir) if len(args.T) == 1 and len(args.Mdot) == 1: # then we run a single model run_s( From bb9e173bb6dc1766393a37b03bf438322635af29 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 16:04:57 +0100 Subject: [PATCH 56/63] fix globals --- src/sunbather/construct_parker.py | 27 +++++++++++++++------------ tests/test.py | 16 +++++++++------- tests/test_sunbather.py | 8 +++++--- 3 files changed, 29 insertions(+), 22 deletions(-) diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index 807b1fd..242a011 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -155,8 +155,9 @@ def save_plain_parker_profile( Mdot = float(Mdot) T = int(T) + projectpath = tools.get_sunbather_project_path() save_name = ( - tools.projectpath + projectpath + "/parker_profiles/" + planet.name + "/" @@ -418,7 +419,7 @@ def save_temp_parker_profile( (r * planet.R, rho_array * rhos, v_array * vs * 1e5, mu_array) ) save_name = ( - tools.projectpath + projectpath + "/parker_profiles/" + planet.name + "/" @@ -614,8 +615,9 @@ def save_cloudy_parker_profile( Maximum altitude of the profile in units of the planet radius. By default 20. 
""" + projectpath = tools.get_sunbather_project_path() save_name = ( - tools.projectpath + projectpath + "/parker_profiles/" + planet.name + "/" @@ -813,7 +815,7 @@ def run_s( 20, int((p.a - p.Rstar) / p.R) ) # solve profile up to 20 Rp, unless the star is closer than that spectrum = cloudy_spec_to_pwinds( - tools.cloudypath + "/data/SED/" + p.SEDname, + tools.get_cloudy_path() + "/data/SED/" + p.SEDname, 1.0, (p.a - altmax * p.R) / tools.AU, ) # assumes SED is at 1 AU @@ -1114,15 +1116,16 @@ def __call__(self, parser, namespace, values, option_string=None): ) # set up the folder structure if it doesn't exist yet - if not os.path.isdir(tools.projectpath + "/parker_profiles/"): - os.mkdir(tools.projectpath + "/parker_profiles") - if not os.path.isdir(tools.projectpath + "/parker_profiles/" + args.plname + "/"): - os.mkdir(tools.projectpath + "/parker_profiles/" + args.plname) + projectpath = tools.get_sunbather_project_path() + if not os.path.isdir(projectpath + "/parker_profiles/"): + os.mkdir(projectpath + "/parker_profiles") + if not os.path.isdir(projectpath + "/parker_profiles/" + args.plname + "/"): + os.mkdir(projectpath + "/parker_profiles/" + args.plname) if not os.path.isdir( - tools.projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" + projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" ): os.mkdir( - tools.projectpath + projectpath + "/parker_profiles/" + args.plname + "/" @@ -1131,7 +1134,7 @@ def __call__(self, parser, namespace, values, option_string=None): ) if (args.fH is None) and ( not os.path.isdir( - tools.projectpath + projectpath + "/parker_profiles/" + args.plname + "/" @@ -1140,7 +1143,7 @@ def __call__(self, parser, namespace, values, option_string=None): ) ): os.mkdir( - tools.projectpath + projectpath + "/parker_profiles/" + args.plname + "/" diff --git a/tests/test.py b/tests/test.py index a7bb374..11071c3 100644 --- a/tests/test.py +++ b/tests/test.py @@ -23,26 +23,28 @@ # SETUP CHECKS # make sure projectpath exists +projectpath = tools.get_sunbather_project_path() + assert os.path.isdir( - tools.projectpath + projectpath ), "Please create the projectpath folder on your machine" # make sure the planets.txt file exists assert os.path.isfile( - tools.projectpath + "/planets.txt" + projectpath + "/planets.txt" ), "Please make sure the 'planets.txt' file is present in $SUNBATHER_PROJECT_PATH" # make sure the SED we need for this test has been copied to Cloudy assert os.path.isfile( - tools.cloudypath + "/data/SED/eps_Eri_binned.spec" + tools.get_cloudy_path() + "/data/SED/eps_Eri_binned.spec" ), "Please copy /sunbather/stellar_SEDs/eps_Eri_binned.spec into $CLOUDY_PATH/data/SED/" # ## CHECK IF test.py HAS BEEN RAN BEFORE ### parker_profile_file = ( - tools.projectpath + projectpath + "/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt" ) -simulation_folder = tools.projectpath + "/sims/1D/WASP52b/test/parker_9000_11.000/" +simulation_folder = projectpath + "/sims/1D/WASP52b/test/parker_9000_11.000/" if os.path.exists(parker_profile_file) or os.path.exists(simulation_folder): confirmation = input( @@ -70,7 +72,7 @@ ) # load the created profile pprof_created = pd.read_table( - tools.projectpath + projectpath + "/parker_profiles/WASP52b/test/pprof_WASP52b_T=9000_M=11.000.txt", names=["alt", "rho", "v", "mu"], dtype=np.float64, @@ -111,7 +113,7 @@ ) # load the created simulation sim_created = tools.Sim( - tools.projectpath + "/sims/1D/WASP52b/test/parker_9000_11.000/converged" + projectpath + 
"/sims/1D/WASP52b/test/parker_9000_11.000/converged" ) # load the expected simulation sim_expected = tools.Sim(this_path + "/materials/converged") diff --git a/tests/test_sunbather.py b/tests/test_sunbather.py index 4d7f7df..8790393 100644 --- a/tests/test_sunbather.py +++ b/tests/test_sunbather.py @@ -24,8 +24,9 @@ def test_projectdirs(): Make sure projectpath exists """ from sunbather import tools + projectpath = tools.get_sunbather_project_path() assert os.path.isdir( - tools.projectpath + projectpath ), "Please create the projectpath folder on your machine" @@ -34,8 +35,9 @@ def test_planetstxt(): Make sure the planets.txt file exists """ from sunbather import tools + projectpath = tools.get_sunbather_project_path() assert os.path.isfile( - tools.projectpath + "/planets.txt" + projectpath + "/planets.txt" ), "Please make sure the 'planets.txt' file is present in $SUNBATHER_PROJECT_PATH" @@ -45,7 +47,7 @@ def test_seds(): """ from sunbather import tools assert os.path.isfile( - tools.cloudypath + "/data/SED/eps_Eri_binned.spec" + tools.get_cloudy_path() + "/data/SED/eps_Eri_binned.spec" ), ( "Please copy /sunbather/stellar_SEDs/eps_Eri_binned.spec " "into $CLOUDY_PATH/data/SED/" From 247d9e5785c05d12a1bdfbd722c3747d5b951e90 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Mon, 18 Nov 2024 16:15:09 +0100 Subject: [PATCH 57/63] Update pylint.yml --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 3f89231..fccf9ac 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies From 140d8f4242ae14547c86f0f0b66a7b43cf90a9ba Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Tue, 19 Nov 2024 15:15:26 +0100 Subject: [PATCH 58/63] update construct_parker --- src/sunbather/construct_parker.py | 347 +++++++++++++++++------------- 1 file changed, 192 insertions(+), 155 deletions(-) diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index 242a011..a95d7c2 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -1,7 +1,9 @@ """ Functions to construct parker """ + # other imports +import sys import os import time import argparse @@ -10,8 +12,10 @@ import warnings from shutil import copyfile import numpy as np + # import matplotlib.pyplot as plt import astropy.units as u + # from p_winds import tools as pw_tools from p_winds import parker as pw_parker from p_winds import hydrogen as pw_hydrogen @@ -27,7 +31,8 @@ def cloudy_spec_to_pwinds(SEDfilename, dist_SED, dist_planet): Reads a spectrum file in the format that we give it to Cloudy, namely angstroms and monochromatic flux (i.e., nu*F_nu or lambda*F_lambda) units. and converts it to a spectrum dictionary that p-winds uses. - This is basically an equivalent of the p_winds.parker.make_spectrum_from_file() function. + This is basically an equivalent of the + p_winds.parker.make_spectrum_from_file() function. Parameters ---------- @@ -140,14 +145,15 @@ def save_plain_parker_profile( pdir : str, optional Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/planetname/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. 
- Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. By default 'fH_0.9'. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. By default 'fH_0.9'. overwrite : bool, optional Whether to overwrite existing models, by default False. notidal : bool, optional - Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et al. (2024). - See also Appendix D of Vissapragada et al. (2022) for the p-winds implementation. - Default is False, i.e. tidal gravity incluced. + Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et + al. (2024). See also Appendix D of Vissapragada et al. (2022) for the + p-winds implementation. Default is False, i.e. tidal gravity included. altmax : int, optional Maximum altitude of the profile in units of the planet radius. By default 20. """ @@ -157,28 +163,20 @@ def save_plain_parker_profile( projectpath = tools.get_sunbather_project_path() save_name = ( - projectpath - + "/parker_profiles/" - + planet.name - + "/" - + pdir - + "/pprof_" - + planet.name - + "_T=" - + str(T) - + "_M=" - + "%.3f" % Mdot - + ".txt" + f"{projectpath}/parker_profiles/{planet.name}/{pdir}/" + f"pprof_{planet.name}_T={str(T)}_M={Mdot:.3f}.txt" ) if os.path.exists(save_name) and not overwrite: print( "Parker profile already exists and overwrite = False:", planet.name, pdir, - "%.3f" % Mdot, + f"{Mdot:.3f}", T, ) - return # this quits the function but if we're running a grid, it doesn't quit the whole Python code + # this quits the function but if we're running a grid, it doesn't quit + # the whole Python code + return R_pl = planet.R / tools.RJ # convert from cm to Rjup M_pl = planet.M / tools.MJ # convert from g to Mjup @@ -195,7 +193,8 @@ def save_plain_parker_profile( 0.0 # Mean ionization fraction (will be self-consistently calculated later) ) mu_0 = (1 + 4 * he_h_fraction) / (1 + he_h_fraction + mean_f_ion) - # mu_0 is the constant mean molecular weight (assumed for now, will be updated later) + # mu_0 is the constant mean molecular weight (assumed for now, will be + # updated later) initial_f_ion = 0.0 f_r, mu_bar = pw_hydrogen.ion_fraction( @@ -299,18 +298,20 @@ def save_temp_parker_profile( pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/planetname/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. mu_bar : float, optional - Weighted mean of the mean particle mass. Based on Eq. A.3 of Lampon et al. (2020). - If None, p-winds will calculate mu(r) and the associated mu_bar. By default None. + Weighted mean of the mean particle mass. Based on Eq. A.3 of Lampon et + al. (2020). If None, p-winds will calculate mu(r) and the associated + mu_bar. By default None. mu_struc : numpy.ndarray, optional Mean particle mass profile, must be provided if mu_bar is None. Typically, this is a mu(r)-profile as given by Cloudy. By default None. no_tidal : bool, optional - Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et al. (2024). 
- See also Appendix D of Vissapragada et al. (2022) for the p-winds implementation. - Default is False, i.e. tidal gravity included. + Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et + al. (2024). See also Appendix D of Vissapragada et al. (2022) for the + p-winds implementation. Default is False, i.e. tidal gravity included. altmax : int, optional Maximum altitude of the profile in units of the planet radius. By default 20. @@ -319,20 +320,23 @@ def save_temp_parker_profile( save_name : str Full path + filename of the saved Parker wind profile file. mu_bar : float - Weighted mean of the mean particle mass. Based on Eq. A.3 of Lampon et al. (2020). - If the input mu_bar was None, this will return the value as calculated by p-winds. - If the input mu_bar was not None, this will return that same value. + Weighted mean of the mean particle mass. Based on Eq. A.3 of Lampon et + al. (2020). If the input mu_bar was None, this will return the value + as calculated by p-winds. If the input mu_bar was not None, this will + return that same value. launch_velocity : float - Velocity at the planet radius in units of the sonic speed. If it is larger than 1, - the wind is "launched" already supersonic, and hence the assumption of a transonic - wind is not valid anymore. + Velocity at the planet radius in units of the sonic speed. If it is + larger than 1, the wind is "launched" already supersonic, and hence the + assumption of a transonic wind is not valid anymore. """ Mdot = float(Mdot) T = int(T) - R_pl = planet.R / tools.RJ # convert from cm to Rjup - M_pl = planet.M / tools.MJ # convert from g to Mjup + # convert from cm to Rjup + R_pl = planet.R / tools.RJ + # convert from g to Mjup + M_pl = planet.M / tools.MJ m_dot = 10**Mdot # Total atmospheric escape rate in g / s r = np.logspace( @@ -341,8 +345,10 @@ def save_temp_parker_profile( if ( mu_bar is None - ): # if not given by a Cloudy run, let p-winds calculate it (used the first iteration) - # pretend that the metals don't exist and just calculate the h_fraction with only H and He abundances + ): + # if not given by a Cloudy run, let p-winds calculate it (used the + # first iteration) pretend that the metals don't exist and just + # calculate the h_fraction with only H and He abundances abundances = tools.get_abundances(zdict) # solar abundances h_fraction = abundances["H"] / ( abundances["H"] + abundances["He"] @@ -355,7 +361,8 @@ def save_temp_parker_profile( 0.0 # Mean ionization fraction (will be self-consistently calculated later) ) mu_0 = (1 + 4 * he_h_fraction) / (1 + he_h_fraction + mean_f_ion) - # mu_0 is the constant mean molecular weight (assumed for now, will be updated later) + # mu_0 is the constant mean molecular weight (assumed for now, will be + # updated later) initial_f_ion = 0.0 @@ -419,22 +426,12 @@ def save_temp_parker_profile( (r * planet.R, rho_array * rhos, v_array * vs * 1e5, mu_array) ) save_name = ( - projectpath - + "/parker_profiles/" - + planet.name - + "/" - + pdir - + "/temp/pprof_" - + planet.name - + "_T=" - + str(T) - + "_M=" - + "%.3f" % Mdot - + ".txt" + f"{projectpath}/parker_profiles/{planet.name}/{pdir}/temp/" + f"pprof_{planet.name}_T={str(T)}_M={Mdot:.3f}.txt" ) zdictstr = "abundance scale factors relative to solar:" for sp in zdict.keys(): - zdictstr += " " + sp + "=" + "%.1f" % zdict[sp] + zdictstr += f" {sp}={zdict[sp]:.1f}" np.savetxt( save_name, save_array, delimiter="\t", header=zdictstr + "\nalt rho v mu" ) @@ -445,7 +442,9 @@ def save_temp_parker_profile( def 
run_parker_with_cloudy(filename, T, planet, zdict): - """Runs an isothermal Parker wind profile through Cloudy, using the isothermal temperature profile. + """ + Runs an isothermal Parker wind profile through Cloudy, using the isothermal + temperature profile. Parameters ---------- @@ -465,7 +464,8 @@ def run_parker_with_cloudy(filename, T, planet, zdict): simname : str Full path + name of the Cloudy simulation file without file extension. pprof : pandas.DataFrame - Radial density, velocity and mean particle mass profiles of the isothermal Parker wind profile. + Radial density, velocity and mean particle mass profiles of the + isothermal Parker wind profile. """ pprof = tools.read_parker("", "", "", "", filename=filename) @@ -565,12 +565,13 @@ def save_cloudy_parker_profile( ): """ Calculates an isothermal Parker wind profile with any composition by iteratively - running the p-winds code (dos Santos et al. 2022) and Cloudy (Ferland et al. 1998; 2017, - Chatziokos et al. 2023). This function works iteratively as follows: - p_winds calculates a density profile, Cloudy calculates the mean particle mass profile, - we calculate the associated mu_bar value, which is passed to p-winds to calculate a new - density profile, until mu_bar has converged to a stable value. - Saves a 'pprof' txt file with the r, rho, v, mu structure. + running the p-winds code (dos Santos et al. 2022) and Cloudy (Ferland et + al. 1998; 2017, Chatziokos et al. 2023). This function works iteratively as + follows: + p_winds calculates a density profile, Cloudy calculates the mean particle + mass profile, we calculate the associated mu_bar value, which is passed to + p-winds to calculate a new density profile, until mu_bar has converged to a + stable value. Saves a 'pprof' txt file with the r, rho, v, mu structure. Parameters ---------- @@ -589,33 +590,37 @@ def save_cloudy_parker_profile( pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/planetname/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. convergence : float, optional - Convergence threshold expressed as the relative change in mu_bar between iterations, by default 0.01 + Convergence threshold expressed as the relative change in mu_bar + between iterations, by default 0.01 maxit : int, optional Maximum number of iterations, by default 7 cleantemp : bool, optional Whether to remove the temporary files in the /temp folder. These files store - the intermediate profiles during the iterative process to find mu_bar. By default False. + the intermediate profiles during the iterative process to find mu_bar. + By default False. overwrite : bool, optional Whether to overwrite existing models, by default False. verbose : bool, optional Whether to print diagnostics about the convergence of mu_bar, by default False avoid_pwinds_mubar : bool, optional - Whether to avoid using p-winds to calculate mu_bar during the first iteration. - If True, we guess the mu_bar of the first iteration based on a completely neutral - atmosphere. This can be helpful in cases where p-winds solver cannot find a solution, - but Cloudy typically can. By default False. 
+ Whether to avoid using p-winds to calculate mu_bar during the first + iteration. If True, we guess the mu_bar of the first iteration based + on a completely neutral atmosphere. This can be helpful in cases where + p-winds solver cannot find a solution, but Cloudy typically can. By + default False. no_tidal : bool, optional - Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et al. (2024). - See also Appendix D of Vissapragada et al. (2022) for the p-winds implementation. - Default is False, i.e. tidal gravity included. + Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et + al. (2024). See also Appendix D of Vissapragada et al. (2022) for the + p-winds implementation. Default is False, i.e. tidal gravity included. altmax : int, optional Maximum altitude of the profile in units of the planet radius. By default 20. """ - projectpath = tools.get_sunbather_project_path() + projectpath = tools.get_sunbather_project_path() save_name = ( projectpath + "/parker_profiles/" @@ -699,7 +704,8 @@ def save_cloudy_parker_profile( mu_bar = calc_mu_bar(sim) tools.verbose_print( - f"Making new parker profile with p-winds based on Cloudy's reported mu_bar: {mu_bar}", + f"Making new parker profile with p-winds based on Cloudy's reported " + f"mu_bar: {mu_bar}", verbose=verbose, ) mu_struc = np.column_stack( @@ -762,12 +768,14 @@ def run_s( Parameters ---------- plname : str - Planet name (must have parameters stored in $SUNBATHER_PROJECT_PATH/planets.txt). + Planet name (must have parameters stored in + $SUNBATHER_PROJECT_PATH/planets.txt). pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/*plname*/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. Mdot : str or numeric log of the mass-loss rate in units of g s-1. T : str or numeric @@ -779,17 +787,18 @@ def run_s( fH : float or None Hydrogen abundance expressed as a fraction of the total. If a value is given, Parker wind profiles will be calculated using p-winds standalone with a H/He - composition. If None is given, Parker wind profiles will be calculated using the - p-winds/Cloudy iterative method and the composition is specified via the zdict argument. + composition. If None is given, Parker wind profiles will be calculated + using the p-winds/Cloudy iterative method and the composition is + specified via the zdict argument. zdict : dict Dictionary with the scale factors of all elements relative to the default solar composition. Can be easily created with tools.get_zdict(). - Will only be used if fH is None, in which case the p-winds/Cloudy iterative method - is applied. + Will only be used if fH is None, in which case the p-winds/Cloudy + iterative method is applied. mu_conv : float - Convergence threshold expressed as the relative change in mu_bar between iterations. - Will only be used if fH is None, in which case the p-winds/Cloudy iterative method - is applied. + Convergence threshold expressed as the relative change in mu_bar + between iterations. Will only be used if fH is None, in which case the + p-winds/Cloudy iterative method is applied. mu_maxit : int Maximum number of iterations for the p-winds/Cloudy iterative method. 
Will only be used if fH is None. @@ -799,13 +808,14 @@ def run_s( Whether to print diagnostics about the convergence of mu_bar. avoid_pwinds_mubar : bool Whether to avoid using p-winds to calculate mu_bar during the first iteration, - when using the p-winds/Cloudy iterative method. Will only be used if fH is None. - If True, we guess the mu_bar of the first iteration based on a completely neutral - atmosphere. This can be helpful in cases where p-winds solver cannot find a solution, - but Cloudy typically can. + when using the p-winds/Cloudy iterative method. Will only be used if fH + is None. If True, we guess the mu_bar of the first iteration based on + a completely neutral atmosphere. This can be helpful in cases where + p-winds solver cannot find a solution, but Cloudy typically can. no_tidal : bool - Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et al. (2024). - See also Appendix D of Vissapragada et al. (2022) for the p-winds implementation. + Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et + al. (2024). See also Appendix D of Vissapragada et al. (2022) for the + p-winds implementation. """ p = tools.Planet(plname) @@ -853,7 +863,8 @@ def run_s( def catch_errors_run_s(*args): """ - Executes the run_s() function with provided arguments, while catching errors more gracefully. + Executes the run_s() function with provided arguments, while catching + errors more gracefully. """ try: @@ -883,17 +894,20 @@ def run_g( no_tidal, ): """ - Calculates a grid of isothermal Parker wind models, by executing the run_s() function in parallel. + Calculates a grid of isothermal Parker wind models, by executing the + run_s() function in parallel. Parameters ---------- plname : str - Planet name (must have parameters stored in $SUNBATHER_PROJECT_PATH/planets.txt). + Planet name (must have parameters stored in + $SUNBATHER_PROJECT_PATH/planets.txt). pdir : str Directory as $SUNBATHER_PROJECT_PATH/parker_profiles/*plname*/*pdir*/ where the isothermal parker wind density and velocity profiles are saved. - Different folders may exist there for a given planet, to separate for example profiles - with different assumptions such as stellar SED/semi-major axis/composition. + Different folders may exist there for a given planet, to separate for + example profiles with different assumptions such as stellar + SED/semi-major axis/composition. cores : int Number of parallel processes to spawn (i.e., number of CPU cores). Mdot_l : str or numeric @@ -915,17 +929,18 @@ def run_g( fH : float or None Hydrogen abundance expressed as a fraction of the total. If a value is given, Parker wind profiles will be calculated using p-winds standalone with a H/He - composition. If None is given, Parker wind profiles will be calculated using the - p-winds/Cloudy iterative method and the composition is specified via the zdict argument. + composition. If None is given, Parker wind profiles will be calculated + using the p-winds/Cloudy iterative method and the composition is + specified via the zdict argument. zdict : dict Dictionary with the scale factors of all elements relative to the default solar composition. Can be easily created with tools.get_zdict(). - Will only be used if fH is None, in which case the p-winds/Cloudy iterative method - is applied. + Will only be used if fH is None, in which case the p-winds/Cloudy + iterative method is applied. mu_conv : float - Convergence threshold expressed as the relative change in mu_bar between iterations. 
- Will only be used if fH is None, in which case the p-winds/Cloudy iterative method - is applied. + Convergence threshold expressed as the relative change in mu_bar + between iterations. Will only be used if fH is None, in which case the + p-winds/Cloudy iterative method is applied. mu_maxit : int Maximum number of iterations for the p-winds/Cloudy iterative method. Will only be used if fH is None. @@ -936,12 +951,13 @@ def run_g( avoid_pwinds_mubar : bool Whether to avoid using p-winds to calculate mu_bar during the first iteration, when using the p-winds/Cloudy iterative method. Will only be used if fH is None. - If True, we guess the mu_bar of the first iteration based on a completely neutral - atmosphere. This can be helpful in cases where p-winds solver cannot find a solution, - but Cloudy typically can. + If True, we guess the mu_bar of the first iteration based on a + completely neutral atmosphere. This can be helpful in cases where + p-winds solver cannot find a solution, but Cloudy typically can. no_tidal : bool - Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et al. (2024). - See also Appendix D of Vissapragada et al. (2022) for the p-winds implementation. + Whether to neglect tidal gravity - fourth term of Eq. 4 of Linssen et + al. (2024). See also Appendix D of Vissapragada et al. (2022) for the + p-winds implementation. """ p = multiprocessing.Pool(cores) @@ -974,7 +990,11 @@ def run_g( p.join() -def main(): +def new_argument_parser(args, **kwargs): + parser = argparse.ArgumentParser( + description="Creates 1D Parker profile(s) using the p_winds code and Cloudy.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) class OneOrThreeAction(argparse.Action): """ @@ -1001,20 +1021,17 @@ def __call__(self, parser, namespace, values, option_string=None): key, val = value.split("=") getattr(namespace, self.dest)[key] = float(val) - t0 = time.time() - - parser = argparse.ArgumentParser( - description="Creates 1D Parker profile(s) using the p_winds code and Cloudy." - ) - parser.add_argument( "-plname", required=True, help="planet name (must be in planets.txt)" ) parser.add_argument( "-pdir", required=True, - help="directory where the profiles are saved. It is adviced to choose a name that " - "somehow represents the chosen parameters, e.g. 'fH_0.9' or 'z=10'. The path will be $SUNBATHER_PROJECT_PATH/parker_profiles/pdir/", + help=( + "directory where the profiles are saved. It is advised to choose a name " + "that somehow represents the chosen parameters, e.g. 'fH_0.9' or 'z=10'. " + "The path will be $SUNBATHER_PROJECT_PATH/parker_profiles/pdir/" + ), ) parser.add_argument( "-Mdot", @@ -1022,8 +1039,11 @@ def __call__(self, parser, namespace, values, option_string=None): type=float, nargs="+", action=OneOrThreeAction, - help="log10(mass-loss rate), or three values specifying a grid of " - "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to three decimal places.", + help=( + "log10(mass-loss rate), or three values specifying a grid of " + "mass-loss rates: lowest, highest, stepsize. -Mdot will be rounded to " + "three decimal places." + ), ) parser.add_argument( "-T", @@ -1031,13 +1051,19 @@ def __call__(self, parser, namespace, values, option_string=None): type=int, nargs="+", action=OneOrThreeAction, - help="temperature, or three values specifying a grid of temperatures: lowest, highest, stepsize.", + help=( + "temperature, or three values specifying a grid of temperatures: lowest, " + "highest, stepsize." 
+ ), ) parser.add_argument( "-SEDname", type=str, default="real", - help="name of SED to use. Must be in Cloudy's data/SED/ folder [default=SEDname set in planet.txt file]", + help=( + "name of SED to use. Must be in Cloudy's data/SED/ folder " + "[default=SEDname set in planet.txt file]" + ), ) parser.add_argument( "-overwrite", @@ -1048,22 +1074,31 @@ def __call__(self, parser, namespace, values, option_string=None): composition_group.add_argument( "-fH", type=float, - help="hydrogen fraction by number. Using this command results in running standalone p_winds without invoking Cloudy.", + help=( + "hydrogen fraction by number. Using this command results in running " + "standalone p_winds without invoking Cloudy." + ), ) composition_group.add_argument( "-z", type=float, - help="metallicity (=scale factor relative to solar for all elements except H and He). Using this " - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + help=( + "metallicity (=scale factor relative to solar for all elements except H " + "and He). Using this command results in running p_winds in an iterative " + "scheme where Cloudy updates the mu parameter." + ), ) parser.add_argument( "-zelem", action=AddDictAction, nargs="+", default={}, - help="abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem He=0.01. " - "Can also be used to toggle elements off, e.g. -zelem Ca=0. Combines with -z argument. Using this " - "command results in running p_winds in an an iterative scheme where Cloudy updates the mu parameter.", + help=( + "abundance scale factor for specific elements, e.g. -zelem Fe=10 -zelem " + "He=0.01. Can also be used to toggle elements off, e.g. -zelem Ca=0. " + "Combines with -z argument. Using this command results in running p_winds " + "in an iterative scheme where Cloudy updates the mu parameter." + ), ) parser.add_argument( "-cores", type=int, default=1, help="number of parallel runs [default=1]" @@ -1072,33 +1107,49 @@ def __call__(self, parser, namespace, values, option_string=None): "-mu_conv", type=float, default=0.01, - help="relative change in mu allowed for convergence, when using p_winds/Cloudy iterative scheme [default=0.01]", + help=( + "relative change in mu allowed for convergence, when using p_winds/Cloudy " + "iterative scheme" + ), ) parser.add_argument( "-mu_maxit", type=int, default=7, - help="maximum number of iterations the p_winds/Cloudy iterative scheme is ran " - "if convergence is not reached [default =7]", + help=( + "maximum number of iterations the p_winds/Cloudy iterative scheme is ran " + "if convergence is not reached" + ), ) parser.add_argument( "-verbose", action="store_true", - help="print out mu-bar values of each iteration [default=False]", + help="print out mu-bar values of each iteration", ) parser.add_argument( "-avoid_pwinds_mubar", action="store_true", - help="avoid using the mu-bar value predicted by p-winds for the first iteration. Instead, " - "start with a mu_bar of a completely neutral atmosphere. Helps to avoid the p-winds 'solve_ivp' errors. You may need to " - "use a -mu_maxit higher than 7 when toggling this on. [default=False]", + help=( + "avoid using the mu-bar value predicted by p-winds for the first " + "iteration. Instead, start with a mu_bar of a completely neutral " + "atmosphere. Helps to avoid the p-winds 'solve_ivp' errors. You may need " + "to use a -mu_maxit higher than 7 when toggling this on." 
+ ), ) parser.add_argument( "-no_tidal", action="store_true", - help="neglect the stellar tidal gravity term [default=False, i.e. tidal term included]", + help="neglect the stellar tidal gravity term", ) - args = parser.parse_args() + args = parser.parse_args(args, **kwargs) + + return args + + +def main(args, **kwargs): + t0 = time.time() + + args = new_argument_parser(args, **kwargs) if args.z is not None: zdict = tools.get_zdict(z=args.z, zelem=args.zelem) @@ -1112,11 +1163,12 @@ def __call__(self, parser, namespace, values, option_string=None): or args.avoid_pwinds_mubar ): warnings.warn( - "The -zelem, -mu_conv -mu_maxit, and -avoid_pwinds_mubar commands only combine with -z, not with -fH, so I will ignore their input." + "The 'zelem', 'mu_conv', 'mu_maxit', and 'avoid_pwinds_mubar' arguments " + "only combine with 'z', not with 'fH', so I will ignore their input." ) # set up the folder structure if it doesn't exist yet - projectpath = tools.get_sunbather_project_path() + projectpath = tools.get_sunbather_project_path() if not os.path.isdir(projectpath + "/parker_profiles/"): os.mkdir(projectpath + "/parker_profiles") if not os.path.isdir(projectpath + "/parker_profiles/" + args.plname + "/"): @@ -1125,30 +1177,15 @@ def __call__(self, parser, namespace, values, option_string=None): projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" ): os.mkdir( - projectpath - + "/parker_profiles/" - + args.plname - + "/" - + args.pdir - + "/" + projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" ) if (args.fH is None) and ( not os.path.isdir( - projectpath - + "/parker_profiles/" - + args.plname - + "/" - + args.pdir - + "/temp/" + projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/temp/" ) ): os.mkdir( - projectpath - + "/parker_profiles/" - + args.plname - + "/" - + args.pdir - + "/temp" + projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/temp" ) if len(args.T) == 1 and len(args.Mdot) == 1: # then we run a single model @@ -1245,4 +1282,4 @@ def __call__(self, parser, namespace, values, option_string=None): if __name__ == "__main__": - main() + main(sys.argv[1:]) From 864d9632b09b7a39acbf625df5473d1bc6fbbb75 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Thu, 21 Nov 2024 11:48:35 +0100 Subject: [PATCH 59/63] use units/constants from astropy - though only the values for now --- src/sunbather/tools.py | 41 ++++++++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/src/sunbather/tools.py b/src/sunbather/tools.py index e402f16..c098500 100644 --- a/src/sunbather/tools.py +++ b/src/sunbather/tools.py @@ -16,6 +16,8 @@ import scipy.stats as sps from scipy.ndimage import gaussian_filter1d +import astropy.units +import astropy.constants # ###################################### # ########## GLOBAL CONSTANTS ########## @@ -87,19 +89,32 @@ def get_planets_file(): ) # define constants: -c = 2.99792458e10 # cm/s -h = 4.135667696e-15 # eV s, used to plot wavelengths in keV units -mH = 1.674e-24 # g -k = 1.381e-16 # erg/K -AU = 1.49597871e13 # cm -pc = 3.08567758e18 # cm -RJ = 7.1492e9 # cm -RE = 6.371e8 # cm -Rsun = 69634000000 # cm -Msun = 1.9891e33 # g -MJ = 1.898e30 # g -ME = 5.9722e27 # g -G = 6.6743e-8 # cm3/g/s2 +# c = 2.99792458e10 # cm/s +c = astropy.constants.c.to("cm/s").value +# h = 4.135667696e-15 # eV s, used to plot wavelengths in keV units +h = astropy.constants.h.to("eV*s").value +# mH = 1.674e-24 # g - intended: atomic mass unit +mH = (1 * 
astropy.units.u).to("g").value
+# k = 1.381e-16  # erg/K
+k = astropy.constants.k_B.to("erg/K").value
+# AU = 1.49597871e13  # cm
+AU = astropy.units.au.to("cm")
+# pc = 3.08567758e18  # cm
+pc = astropy.units.pc.to("cm")
+# RJ = 7.1492e9  # cm
+RJ = astropy.units.R_jup.to("cm")
+# RE = 6.371e8  # cm
+RE = astropy.units.R_earth.to("cm")
+# Rsun = 69634000000  # cm
+Rsun = astropy.units.R_sun.to("cm")
+# Msun = 1.9891e33  # g
+Msun = astropy.constants.M_sun.to("g").value
+# MJ = 1.898e30  # g
+MJ = astropy.constants.M_jup.to("g").value
+# ME = 5.9722e27  # g
+ME = astropy.constants.M_earth.to("g").value
+# G = 6.6743e-8  # cm3/g/s2
+G = astropy.constants.G.to("cm**3 * g**-1 * s**-2").value
 Ldict = {
     "S": 0,
     "P": 1,

From 27a16d069fe56b49acb7964f265bff6cdd796846 Mon Sep 17 00:00:00 2001
From: Steven Rieder
Date: Thu, 21 Nov 2024 11:49:16 +0100
Subject: [PATCH 60/63] style improvements, mostly

---
 src/sunbather/__init__.py         | 41 +++++++-------
 src/sunbather/convergeT_parker.py | 90 +++++++++++++++++--------------
 src/sunbather/install_cloudy.py   | 24 ++++++---
 src/sunbather/solveT.py           | 79 +++++++++++++++++++--------
 4 files changed, 146 insertions(+), 88 deletions(-)

diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py
index b05c199..4542efc 100644
--- a/src/sunbather/__init__.py
+++ b/src/sunbather/__init__.py
@@ -6,45 +6,46 @@
 import shutil
 
 import sunbather.tools
+from sunbather.install_cloudy import GetCloudy
+
 
 def check_cloudy():
     """
     Checks if Cloudy executable exists, and if not, prompts to download and build it.
     """
     try:
-        CLOUDYVERSION = os.environ["CLOUDY_VERSION"]
+        cloudyversion = os.environ["CLOUDY_VERSION"]
     except KeyError:
-        CLOUDYVERSION = "23.01"
-    SUNBATHERPATH = os.path.dirname(
+        cloudyversion = "23.01"
+    sunbatherpath = os.path.dirname(
         os.path.abspath(__file__)
     )  # the absolute path where this code lives
    try:
         # the path where Cloudy is installed
-        CLOUDYPATH = os.environ["CLOUDY_PATH"]
-    except KeyError as exc:
-        CLOUDYPATH = f"{SUNBATHERPATH}/cloudy/c{CLOUDYVERSION}"
-        if not os.path.exists(f"{CLOUDYPATH}/source/cloudy.exe"):
+        cloudypath = os.environ["CLOUDY_PATH"]
+    except KeyError:
+        cloudypath = f"{sunbatherpath}/cloudy/c{cloudyversion}"
+        if not os.path.exists(f"{cloudypath}/source/cloudy.exe"):
             q = input(
                 f"Cloudy not found and CLOUDY_PATH is not set. "
-                f"Do you want to install Cloudy {CLOUDYVERSION} now in the Sunbather path? "
+                f"Do you want to install Cloudy {cloudyversion} now in the sunbather path? "
                 f"(y/n) "
             )
             while q.lower() not in ["y", "n"]:
                 q = input("Please enter 'y' or 'n'")
             if q == "n":
                 raise KeyError(
-                    "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not set. "
-                    "Please set this variable in your .bashrc/.zshrc file "
+                    "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not "
+                    "set. Please set this variable in your .bashrc/.zshrc file "
                     "to the path where the Cloudy installation is located. "
                     "Do not point it to the /source/ subfolder, but to the main folder."
- ) from exc - from sunbather.install_cloudy import GetCloudy - INSTALLER = GetCloudy(version=CLOUDYVERSION) - INSTALLER.download() - INSTALLER.extract() - INSTALLER.compile() - INSTALLER.test() - INSTALLER.copy_data() + ) + installer = GetCloudy(version=cloudyversion) + installer.download() + installer.extract() + installer.compile() + installer.test() + installer.copy_data() def make_workingdir(): @@ -64,7 +65,7 @@ def make_workingdir(): sunbatherpath = f"{pathlib.Path(__file__).parent.resolve()}" shutil.copytree( - sunbatherpath + "/data/workingdir", + f"{sunbatherpath}/data/workingdir", workingdir, ) @@ -75,3 +76,5 @@ def firstrun(): """ check_cloudy() make_workingdir() + + print("Sunbather is ready to go!") diff --git a/src/sunbather/convergeT_parker.py b/src/sunbather/convergeT_parker.py index 3516f05..f30fc8c 100644 --- a/src/sunbather/convergeT_parker.py +++ b/src/sunbather/convergeT_parker.py @@ -1,3 +1,7 @@ +""" +ConvergeT_parker module of sunbather +""" +import sys import multiprocessing from shutil import copyfile import time @@ -474,42 +478,44 @@ def run_g( Maximum number of iterations, by default 16. """ - p = multiprocessing.Pool(cores) - - pars = [] - for Mdot in np.arange( - float(Mdot_l), float(Mdot_u) + 1e-6, float(Mdot_s) - ): # 1e-6 so that upper bound is inclusive - for T in np.arange(int(T_l), int(T_u) + 1e-6, int(T_s)).astype(int): - pars.append( - ( - plname, - Mdot, - T, - 1, - fc, - workingdir, - SEDname, - overwrite, - startT, - pdir, - zdict, - altmax, - save_sp, - constantT, - maxit, + with multiprocessing.Pool(processes=cores) as pool: + pars = [] + for Mdot in np.arange( + float(Mdot_l), float(Mdot_u) + 1e-6, float(Mdot_s) + ): # 1e-6 so that upper bound is inclusive + for T in np.arange(int(T_l), int(T_u) + 1e-6, int(T_s)).astype(int): + pars.append( + ( + plname, + Mdot, + T, + 1, + fc, + workingdir, + SEDname, + overwrite, + startT, + pdir, + zdict, + altmax, + save_sp, + constantT, + maxit, + ) ) - ) - - p.starmap(catch_errors_run_s, pars) - p.close() - p.join() + pool.starmap(catch_errors_run_s, pars) + pool.close() + pool.join() -def main(): +def new_argument_parser(): """ - Main function + Creates a new argument parser. 
""" + parser = argparse.ArgumentParser( + description="Runs the temperature convergence for 1D Parker profile(s).", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) class OneOrThreeAction(argparse.Action): """ @@ -536,13 +542,6 @@ def __call__(self, parser, namespace, values, option_string=None): key, val = value.split("=") getattr(namespace, self.dest)[key] = float(val) - t0 = time.time() - - parser = argparse.ArgumentParser( - description="Runs the temperature convergence for 1D Parker profile(s).", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, - ) - parser.add_argument( "-plname", required=True, help="planet name (must be in planets.txt)" ) @@ -691,7 +690,18 @@ def __call__(self, parser, namespace, values, option_string=None): ), ) - args = parser.parse_args() + return parser + + +def main(*args, **kwargs): + """ + Main function + """ + + t0 = time.time() + + parser = new_argument_parser() + args = parser.parse_args(*args, **kwargs) zdict = tools.get_zdict(z=args.z, zelem=args.zelem) @@ -813,4 +823,4 @@ def __call__(self, parser, namespace, values, option_string=None): if __name__ == "__main__": - main() + main(sys.argv[1:]) diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index 035eeb2..d80e951 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -1,3 +1,6 @@ +""" +Functions to download and compile Cloudy +""" import os import pathlib from urllib.error import HTTPError @@ -37,7 +40,7 @@ def download(self): with urllib.request.urlopen(f"{self.url}{self.filename}") as g: with open(self.filename, "b+w") as f: f.write(g.read()) - except HTTPError as exc: + except HTTPError: print(f"Could not download Cloudy from {self.url}{self.filename}...") return # Go to the v23 download page and download the "c23.01.tar.gz" file @@ -56,26 +59,35 @@ def compile(self): Compiles Cloudy. """ os.chdir(f"{self.cloudypath}/c{self.version}/source/") - subprocess.Popen( + with subprocess.Popen( [ "make", ] - ).wait() + ) as p: + p.wait() def test(self): - # Quickly test the Cloudy installation: in the source folder, run ./cloudy.exe, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. + """ + Quickly test the Cloudy installation: in the source folder, run + ./cloudy.exe, type "test" and hit return twice. It should print "Cloudy + exited OK" at the end. + """ os.chdir(f"{self.cloudypath}/c{self.version}/source/") print( 'Type "test" and hit return twice. ' 'It should print "Cloudy exited OK" at the end.' 
) - subprocess.Popen( + with subprocess.Popen( [ "./cloudy.exe", ] - ).wait() + ) as p: + p.wait() def copy_data(self): + """ + Copy the stellar SEDs to the Cloudy data folder + """ shutil.copytree( f"{self.sunbatherpath}/data/stellar_SEDs/", f"{self.cloudypath}/c{self.version}/data/SED/", diff --git a/src/sunbather/solveT.py b/src/sunbather/solveT.py index 7cd94da..fd410f8 100644 --- a/src/sunbather/solveT.py +++ b/src/sunbather/solveT.py @@ -127,7 +127,8 @@ def simtogrid(sim, grid): ) # minus sign to get expansion cooling rates as positive values adv = calc_advection(grid, rho, v, Te, mu) - # apply very slight smoothing because the Cloudy .ovr quantities have mediocre reported numerical precision + # apply very slight smoothing because the Cloudy .ovr quantities have + # mediocre reported numerical precision expcool = tools.smooth_gaus_savgol(expcool, fraction=0.01) adv = tools.smooth_gaus_savgol(adv, fraction=0.01) @@ -268,7 +269,10 @@ def last_false_index(arr): ) # boolean array where radiative heating dominates AND radiative cooling dominates highest_r_above_which_no_bothrad_dominate = last_true_index(bothrad_dominate) advheat_dominates[:highest_r_above_which_no_bothrad_dominate] = ( - False # now the boolean array stores where advection heating dominates AND where there is no point at higher altitudes that is rad. heat and rad. cool dominated + False + # now the boolean array stores where advection heating dominates AND + # where there is no point at higher altitudes that is rad. heat and + # rad. cool dominated ) if ( True in advheat_dominates @@ -281,21 +285,28 @@ def last_false_index(arr): ) # boolean array where advection heating is relatively unimportant advunimploc = last_true_index( advheat_unimportant[:advdomloc] - ) # first point at lower altitude where advection becomes unimportant (if no point exists, it will become advdomloc) - # then walk to higher altitude again to find converged point. We are more lax with H/C ratio if advection dominates more. + ) + # first point at lower altitude where advection becomes unimportant (if + # no point exists, it will become advdomloc) then walk to higher + # altitude again to find converged point. We are more lax with H/C + # ratio if advection dominates more. almost_converged = np.abs(HCratio[advunimploc:]) < 1.3 * np.clip( (advheat[advunimploc:] / radheat[advunimploc:]) ** (2.0 / 3.0), 1, 10 ) if True in almost_converged: # otherwise it stays default value adv_cloc = advunimploc + first_true_index(almost_converged) - # check for regime where radiative cooling is weak. Usually this means that expansion cooling dominates, but advection cooling can contribute in some cases + # check for regime where radiative cooling is weak. Usually this means that + # expansion cooling dominates, but advection cooling can contribute in some + # cases exp_cloc = len(HCratio) # start by setting a 'too high' value expcool_dominates = radcool / (radcool + expcool + advcool) < 0.2 - if True and False in expcool_dominates: # FIXME True in expcool_dominates and False in expcool_dominates?? -SR + if True in expcool_dominates and False in expcool_dominates: exp_cloc = last_false_index( expcool_dominates - ) # this way of evaluating it guarantees that all entries after this one are True + ) + # this way of evaluating it guarantees that all entries after this one + # are True elif False not in expcool_dominates: # if they are all True exp_cloc = 0 @@ -352,9 +363,12 @@ def relaxTstruc(grid, path, itno, Te, HCratio): if itno >= 4: # check for fluctuations. 
If so, we decrease the deltaT factor prev_prevTe = iterations_file["Te" + str(itno - 2)] previous_ratio = Te / prev_prevTe # compare itno-2 to itno-1 + + # compare itno-1 to the current itno (because of smoothing this ratio + # is not exactly the same as fT) this_ratio = ( newTe_relax / Te - ) # compare itno-1 to the current itno (because of smoothing this ratio is not exactly the same as fT) + ) fl = ((previous_ratio < 1) & (this_ratio > 1)) | ( (previous_ratio > 1) & (this_ratio < 1) ) # boolean indicating where temperature fluctuates @@ -402,7 +416,8 @@ def constructTstruc(grid, newTe_relax, cloc, v, rho, mu, radheat, radcool): radheat : numpy.ndarray Radiative heating rate in units of erg s-1 cm-3, at the 'grid' radii. radcool : numpy.ndarray - Radiative cooling rate in units of erg s-1 cm-3, as positive values, at the 'grid' radii. + Radiative cooling rate in units of erg s-1 cm-3, as positive values, at + the 'grid' radii. Returns ------- @@ -429,17 +444,22 @@ def one_cell_HCratio(T, index): / (grid[index] - grid[index - 1]) ) - # instead of completely keeping the radiative heating and cooling rate the same while we are solving for T in this bin, - # we adjust it a little bit. This helps to prevent that the temperature changes are too drastic and go into a regime where - # radiation becomes important again. We guess a quadratic dependence of the rates on T. This is not the true dependence, - # but it does reduce to the original rate when T -> original T, which is important. + # instead of completely keeping the radiative heating and cooling rate + # the same while we are solving for T in this bin, we adjust it a + # little bit. This helps to prevent that the temperature changes are + # too drastic and go into a regime where radiation becomes important + # again. We guess a quadratic dependence of the rates on T. This is not + # the true dependence, but it does reduce to the original rate when T + # -> original T, which is important. guess_radheat = radheat[index] * (newTe_construct[index] / T) ** 2 guess_radcool = radcool[index] * (T / newTe_construct[index]) ** 2 totheat = guess_radheat + max(adv, 0) # if adv is negative we don't add it here + # if adv is positive we don't add it here, we subtract expcool and adv + # because they are negative totcool = ( guess_radcool - expcool - min(adv, 0) - ) # if adv is positive we don't add it here, we subtract expcool and adv because they are negative + ) HCratio = max(totheat, totcool) / min( totheat, totcool @@ -460,7 +480,8 @@ def one_cell_HCratio(T, index): smooth_newTe_construct = np.clip( smooth_newTe_construct, 1e1, 1e6 ) # after smoothing we might have ended up below 10K - # now combine the smoothed profile around 'cloc', and the non-smoothed version away from 'cloc' + # now combine the smoothed profile around 'cloc', and the non-smoothed + # version away from 'cloc' smooth_weight = np.zeros(len(grid)) smooth_weight += sps.norm.pdf(range(len(grid)), cloc, int(len(grid) / 30)) smooth_weight /= np.max(smooth_weight) # normalize @@ -522,7 +543,8 @@ def make_rates_plot( fc : numeric Convergence threshold for H/C. 
newTe_construct : numpy.ndarray, optional - Proposed temperature profile based on the construction algorithm, by default None + Proposed temperature profile based on the construction algorithm, by + default None cloc : int, optional Index of the grid from where the construction algorithm was ran, by default None title : str, optional @@ -794,7 +816,8 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): tools.run_Cloudy("iteration1", folder=path) itno += 1 - # now, we have ran our iteration1 and can start the iterative scheme to find a new profile: + # now, we have ran our iteration1 and can start the iterative scheme to + # find a new profile: while itno <= maxit: prev_sim = tools.Sim( path + f"iteration{itno-1}" @@ -802,7 +825,8 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): Rp = prev_sim.p.R # planet radius in cm altmax = prev_sim.altmax # maximum radius of the simulation in units of Rp - # make logspaced grid to use throughout the code, interpolate all quantities onto this grid. + # make logspaced grid to use throughout the code, interpolate all + # quantities onto this grid. rgrid = np.logspace(np.log10(Rp), np.log10(altmax * Rp), num=1000) Te, mu, rho, v, radheat, radcool, expcool, advheat, advcool = simtogrid( @@ -844,7 +868,8 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): cloc=cloc, ) - # get the final new temperature profile, based on whether the construction algorithm was applied + # get the final new temperature profile, based on whether the + # construction algorithm was applied if newTe_construct is None: newTe = newTe_relax else: @@ -860,12 +885,16 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): # now we check if the profile is converged. if ( itno <= 2 - ): # always update the Te profile at least once - in case we start from a 'close' Parker wind profile that immediately satisfies fc + ): + # always update the Te profile at least once - in case we start + # from a 'close' Parker wind profile that immediately satisfies fc converged = False else: prevTe = iterations_file[ "Te" + str(itno - 1) - ].values # read out from file instead of Sim because the file has higher resolution + ].values + # read out from file instead of Sim because the file has higher + # resolution converged = check_converged( fc, HCratio, newTe, prevTe, linthresh=50.0 ) # check convergence criteria @@ -883,7 +912,9 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): advheat, advcool, ) - # calculate these terms for the output converged.txt file - for fast access of some key parameters without loading in the Cloudy sim. + # calculate these terms for the output converged.txt file - for + # fast access of some key parameters without loading in the Cloudy + # sim. 
np.savetxt( path + "converged.txt", np.column_stack( @@ -916,7 +947,9 @@ def run_loop(path, itno, fc, save_sp=None, maxit=16): tools.run_Cloudy("converged", folder=path) tools.Sim( path + "converged" - ) # read in the simulation, so we open the .en file (if it exists) and hence compress its size (see tools.process_energies()) + ) + # read in the simulation, so we open the .en file (if it exists) + # and hence compress its size (see tools.process_energies()) clean_converged_folder(path) # remove all non-converged files print(f"Temperature profile converged: {path}") From 6c4e3f7156146af121168c01a2d63823aae19497 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 27 Nov 2024 15:34:14 +0100 Subject: [PATCH 61/63] prepare documentation --- docs/conf.py | 2 +- wiki/FAQ.md => docs/faq.md | 4 ++- wiki/Glossary.md => docs/glossary.md | 1 + docs/index.md | 16 ++++++++++++ docs/index.rst | 18 ------------- wiki/Installation.md => docs/installation.md | 27 ++++++++++---------- docs/logo_text.png | 1 + wiki/Home.md | 4 --- 8 files changed, 36 insertions(+), 37 deletions(-) rename wiki/FAQ.md => docs/faq.md (99%) rename wiki/Glossary.md => docs/glossary.md (99%) create mode 100644 docs/index.md delete mode 100644 docs/index.rst rename wiki/Installation.md => docs/installation.md (98%) create mode 120000 docs/logo_text.png delete mode 100644 wiki/Home.md diff --git a/docs/conf.py b/docs/conf.py index 2a6d44a..827410d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -23,7 +23,7 @@ "sphinx_rtd_theme", "sphinx.ext.autodoc", "sphinx.ext.autosummary", - # "myst_parser", + "myst_parser", ] templates_path = ["_templates"] diff --git a/wiki/FAQ.md b/docs/faq.md similarity index 99% rename from wiki/FAQ.md rename to docs/faq.md index 1619f39..ee8a284 100644 --- a/wiki/FAQ.md +++ b/docs/faq.md @@ -1,3 +1,5 @@ +# FAQ + ## How do I create Parker wind profiles? Add the parameters of the planet/star system to the *$SUNBATHER_PROJECT_PATH/planets.txt* file. Make sure the SED you specify in _planets.txt_ is present in the _$CLOUDY_PATH/data/SED/_ folder in the right format. Then run the `construct_parker.py` module in your terminal (use `-help` to see the arguments). @@ -87,4 +89,4 @@ The `construct_parker.py` module always creates a profile up until 20 $R_p$ and The `convergeT_parker.py` module by default simulates the atmosphere with *Cloudy* up until 8 $R_p$ and this can be changed with the `-altmax` argument. -The `RT.FinFout()` function by default makes a transit spectrum based on the full *Cloudy* simulation (so up until 8 $R_p$), but you can give an upper boundary in cm with the `cut_at` argument. For example, if you want to include only material up until the planet's Roche radius when making the transit spectrum, it generally doesn't hurt to leave `construct_parker.py` and `convergeT_parker.py` at the default values, and just pass `cut_at=mysimulation.p.Rroche` to `RT.FinFout()` (assuming `mysimulation` is the `tools.Sim` object of your *Cloudy* simulation). \ No newline at end of file +The `RT.FinFout()` function by default makes a transit spectrum based on the full *Cloudy* simulation (so up until 8 $R_p$), but you can give an upper boundary in cm with the `cut_at` argument. 
For example, if you want to include only material up until the planet's Roche radius when making the transit spectrum, it generally doesn't hurt to leave `construct_parker.py` and `convergeT_parker.py` at the default values, and just pass `cut_at=mysimulation.p.Rroche` to `RT.FinFout()` (assuming `mysimulation` is the `tools.Sim` object of your *Cloudy* simulation). diff --git a/wiki/Glossary.md b/docs/glossary.md similarity index 99% rename from wiki/Glossary.md rename to docs/glossary.md index ea5a2a1..070a9e3 100644 --- a/wiki/Glossary.md +++ b/docs/glossary.md @@ -1,3 +1,4 @@ +# Glossary This wiki page is a glossary that provides additional information on various modules/classes/functionalities included in _sunbather_. We also refer to "Hazy", which is the official documentation of _Cloudy_ and can be found in your _$CLOUDY_PATH/docs/_ folder. diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000..52db9b6 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,16 @@ +# sunbather documentation + +Welcome to the _sunbather_ docs! On the left side, you\'ll find the +table of contents. + +![Sunbather logo](logo_text.png) + +```{toctree} Table of Contents +:depth: 2 +installation +glossary +faq +api +Notebook: Fit Helium \ +Notebook: Predict UV \ +``` diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index cde744e..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. sunbather documentation master file, created by - sphinx-quickstart on Wed Nov 13 11:48:00 2024. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -sunbather documentation -======================= - -Add your content using ``reStructuredText`` syntax. See the -`reStructuredText `_ -documentation for details. - - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - - api diff --git a/wiki/Installation.md b/docs/installation.md similarity index 98% rename from wiki/Installation.md rename to docs/installation.md index d03c940..3e00f25 100644 --- a/wiki/Installation.md +++ b/docs/installation.md @@ -1,14 +1,3 @@ -# Installing _Cloudy_ - -_sunbather_ has been developed and tested with _Cloudy v17.02_ and _v23.01_. Newer versions of _Cloudy_ are likely also compatible with _sunbather_, but this has not been thoroughly tested. Therefore, we currently recommend using _v23.01_. Complete _Cloudy_ download and installation instructions can be found [here](https://gitlab.nublado.org/cloudy/cloudy/-/wikis/home). In short, for most Unix systems, the steps are as follows: - -1. Go to the [v23 download page](https://data.nublado.org/cloudy_releases/c23/) and download the "c23.01.tar.gz" file (or go to the [v17 download page](https://data.nublado.org/cloudy_releases/c17/old/) and download the "c17.02.tar.gz" file). -2. Extract it in a location where you want to install _Cloudy_. -3. `cd` into the _/c23.01/source/_ or _/c17.02/source/_ folder and compile the code by running `make`. -4. Quickly test the _Cloudy_ installation: in the source folder, run `./cloudy.exe`, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. - -If you have trouble installing _Cloudy_, we refer to the download instructions linked above, as well as the _Cloudy_ [help forum](https://cloudyastrophysics.groups.io/g/Main/topics). - # Installing _sunbather_ 1. Clone _sunbather_ from Github. The code runs entirely in Python. 
It was developed using Python 3.9.0 and the following packages are prerequisites: `numpy (v1.24.3), pandas (v1.1.4), matplotlib (v3.7.1), scipy (v1.8.0), astropy (v5.3), p-winds (v1.3.4)`. _sunbather_ also succesfully ran with the newest versions (as of Sep. 18, 2023) of these packages. We have however not yet thoroughly tested all of its functionality with these newer versions, so we currently cannot guarantee that it works, but feel free to try! In any case, we recommend making a Python [virtual environment](https://realpython.com/python-virtual-environments-a-primer/) to run _sunbather_ in. @@ -22,7 +11,19 @@ If you have trouble installing _Cloudy_, we refer to the download instructions l 5. Copy the stellar spectra from _/sunbather/stellar_SEDs/_ to _$CLOUDY_PATH/data/SED/_ . These include the [MUSCLES](https://archive.stsci.edu/prepds/muscles/) spectra. 6. Test your _sunbather_ installation: run _/sunbather/tests/test.py_, which should print "Success". If the test fails, feel free to open an issue or contact d.c.linssen@uva.nl with your error. -# Getting started +## Installing _Cloudy_ + +_sunbather_ has been developed and tested with _Cloudy v17.02_ and _v23.01_. Newer versions of _Cloudy_ are likely also compatible with _sunbather_, but this has not been thoroughly tested. Therefore, we currently recommend using _v23.01_. Complete _Cloudy_ download and installation instructions can be found [here](https://gitlab.nublado.org/cloudy/cloudy/-/wikis/home). In short, for most Unix systems, the steps are as follows: + +1. Go to the [v23 download page](https://data.nublado.org/cloudy_releases/c23/) and download the "c23.01.tar.gz" file (or go to the [v17 download page](https://data.nublado.org/cloudy_releases/c17/old/) and download the "c17.02.tar.gz" file). +2. Extract it in a location where you want to install _Cloudy_. +3. `cd` into the _/c23.01/source/_ or _/c17.02/source/_ folder and compile the code by running `make`. +4. Quickly test the _Cloudy_ installation: in the source folder, run `./cloudy.exe`, type "test" and hit return twice. It should print "Cloudy exited OK" at the end. + +If you have trouble installing _Cloudy_, we refer to the download instructions linked above, as well as the _Cloudy_ [help forum](https://cloudyastrophysics.groups.io/g/Main/topics). + + +## Getting started 1. To get familiar with _sunbather_, we recommend you go through the Jupyter notebooks in the _/sunbather/examples/_ folder, where example use cases (such as creating atmospheric profiles, calculating transmission spectra and fitting observational data) are worked out and explained. -2. For more details on how to use the code, check out the Glossary and FAQ pages on this wiki. We specifically recommend you read the glossary sections "The _planets.txt_ file" and "Stellar SED handling". \ No newline at end of file +2. For more details on how to use the code, check out the Glossary and FAQ pages on this wiki. We specifically recommend you read the glossary sections "The _planets.txt_ file" and "Stellar SED handling". diff --git a/docs/logo_text.png b/docs/logo_text.png new file mode 120000 index 0000000..7e00e04 --- /dev/null +++ b/docs/logo_text.png @@ -0,0 +1 @@ +../logo/Logo + text.png \ No newline at end of file diff --git a/wiki/Home.md b/wiki/Home.md deleted file mode 100644 index 1605b5d..0000000 --- a/wiki/Home.md +++ /dev/null @@ -1,4 +0,0 @@ -Welcome to the _sunbather_ wiki! On the right side, you'll find the table of contents. 
- -Logo + text - From 67c32176975eb506333e2b3c2a501a16c86b9dab Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Wed, 27 Nov 2024 15:37:16 +0100 Subject: [PATCH 62/63] update requirements --- docs/requirements.in | 1 + docs/requirements.txt | 20 ++++++++++++++++++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/docs/requirements.in b/docs/requirements.in index 256bba1..3e815de 100644 --- a/docs/requirements.in +++ b/docs/requirements.in @@ -1,2 +1,3 @@ sphinx == 7.1.2 sphinx-rtd-theme == 3.0.2 +myst_parser == 4.0.0 diff --git a/docs/requirements.txt b/docs/requirements.txt index e4470a3..86716b2 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -24,6 +24,7 @@ cycler==0.12.1 # via matplotlib docutils==0.20.1 # via + # myst-parser # sphinx # sphinx-rtd-theme flatstar==0.2.1a0 @@ -35,13 +36,25 @@ idna==3.10 imagesize==1.4.1 # via sphinx jinja2==3.1.4 - # via sphinx + # via + # myst-parser + # sphinx kiwisolver==1.4.7 # via matplotlib +markdown-it-py==3.0.0 + # via + # mdit-py-plugins + # myst-parser markupsafe==3.0.2 # via jinja2 matplotlib==3.9.2 # via sunbather (../pyproject.toml) +mdit-py-plugins==0.4.2 + # via myst-parser +mdurl==0.1.2 + # via markdown-it-py +myst-parser==4.0.0 + # via -r requirements.in numpy==2.1.3 # via # astropy @@ -79,7 +92,9 @@ python-dateutil==2.9.0.post0 pytz==2024.2 # via pandas pyyaml==6.0.2 - # via astropy + # via + # astropy + # myst-parser requests==2.32.3 # via sphinx scipy==1.13.1 @@ -93,6 +108,7 @@ snowballstemmer==2.2.0 sphinx==7.1.2 # via # -r requirements.in + # myst-parser # sphinx-rtd-theme # sphinxcontrib-jquery sphinx-rtd-theme==3.0.2 From 86575292ddc32247bbc7eb16a851b70739643926 Mon Sep 17 00:00:00 2001 From: Steven Rieder Date: Fri, 29 Nov 2024 19:14:26 +0100 Subject: [PATCH 63/63] Updates --- src/sunbather/__init__.py | 102 ++++++++++++------ src/sunbather/construct_parker.py | 29 +++-- .../data/{ => workingdir}/planets.txt | 0 src/sunbather/install_cloudy.py | 16 ++- 4 files changed, 96 insertions(+), 51 deletions(-) rename src/sunbather/data/{ => workingdir}/planets.txt (100%) diff --git a/src/sunbather/__init__.py b/src/sunbather/__init__.py index 4542efc..dae299e 100644 --- a/src/sunbather/__init__.py +++ b/src/sunbather/__init__.py @@ -9,14 +9,17 @@ from sunbather.install_cloudy import GetCloudy -def check_cloudy(): +def check_cloudy(quiet=False, cloudy_version="23.01"): """ Checks if Cloudy executable exists, and if not, prompts to download and build it. + :quiet: bool, if True, does not ask for input + :cloudy_version: str, Cloudy version (default: "23.01", environment variable + CLOUDY_VERSION overrides this) """ try: - cloudyversion = os.environ["CLOUDY_VERSION"] + cloudy_version = os.environ["CLOUDY_VERSION"] except KeyError: - cloudyversion = "23.01" + pass sunbatherpath = os.path.dirname( os.path.abspath(__file__) ) # the absolute path where this code lives @@ -24,23 +27,23 @@ def check_cloudy(): # the path where Cloudy is installed cloudypath = os.environ["cloudy_path"] except KeyError: - cloudypath = f"{sunbatherpath}/cloudy/c{cloudyversion}" + cloudypath = f"{sunbatherpath}/cloudy/c{cloudy_version}" if not os.path.exists(f"{cloudypath}/source/cloudy.exe"): - q = input( - f"Cloudy not found and CLOUDY_PATH is not set. " - f"Do you want to install Cloudy {cloudyversion} now in the sunbather path? " - f"(y/n) " - ) - while q.lower() not in ["y", "n"]: - q = input("Please enter 'y' or 'n'") - if q == "n": - raise KeyError( - "Cloudy not found, and the environment variable 'CLOUDY_PATH' is not " - "set. 
Please set this variable in your .bashrc/.zshrc file " - "to the path where the Cloudy installation is located. " - "Do not point it to the /source/ subfolder, but to the main folder." + if not quiet: + q = input( + f"Cloudy not found and CLOUDY_PATH is not set. Do you want to install " + f"Cloudy {cloudy_version} now in the sunbather path? (y/n) " ) - installer = GetCloudy(version=cloudyversion) + while q.lower() not in ["y", "n"]: + q = input("Please enter 'y' or 'n'") + if q == "n": + raise KeyError( + "Cloudy not found, and the environment variable 'CLOUDY_PATH' is " + "not set. Please set this variable in your .bashrc/.zshrc file " + "to the path where the Cloudy installation is located. " + "Do not point it to the /source/ subfolder, but to the main folder." + ) + installer = GetCloudy(version=cloudy_version) installer.download() installer.extract() installer.compile() @@ -48,33 +51,62 @@ def check_cloudy(): installer.copy_data() -def make_workingdir(): +def make_workingdir(workingdir=None, quiet=False): """ Checks if the SUNBATHER_PROJECT_PATH environment variable has been set and asks for input if not. Also asks to copy the default files to the working dir. + + :workingdir: str, path to the working dir. If None, checks the + SUNBATHER_PROJECT_PATH environment variable, and asks for input if this is + not set. (default: None) + :quiet: bool, if True, does not ask for input (default: False) """ - try: - workingdir = os.environ["SUNBATHER_PROJECT_PATH"] - except KeyError: - workingdir = input("Enter the working dir for Sunbather: ") - q = input(f"Copy default files to the working dir ({workingdir})? (y/n) ") - while q.lower() not in ["y", "n"]: - q = input("Please enter 'y' or 'n': ") - if q == "n": - return + if workingdir is None: + try: + workingdir = os.environ["SUNBATHER_PROJECT_PATH"] + except KeyError: + if not quiet: + workingdir = input("Enter the working dir for Sunbather: ") + else: + # if quiet, use the current dir + workingdir = "./" + if not quiet: + q = input(f"Copy default files to the working dir ({workingdir})? (y/n) ") + while q.lower() not in ["y", "n"]: + q = input("Please enter 'y' or 'n': ") + if q == "n": + return sunbatherpath = f"{pathlib.Path(__file__).parent.resolve()}" - shutil.copytree( - f"{sunbatherpath}/data/workingdir", - workingdir, - ) + for file in os.listdir(f"{sunbatherpath}/data/workingdir"): + if not os.path.exists(f"{workingdir}/{file}"): + shutil.copyfile( + f"{sunbatherpath}/data/workingdir/{file}", + f"{workingdir}/{file}", + ) + else: + if not quiet: + print("File already exists! Overwrite?") + q = input("(y/n) ") + while q.lower() not in ["y", "n"]: + q = input("Please enter 'y' or 'n': ") + if q == "n": + continue + else: + continue + shutil.copyfile( + f"{sunbatherpath}/data/workingdir/{file}", + f"{workingdir}/{file}", + ) + + return -def firstrun(): +def firstrun(quiet=False, workingdir=None, cloudy_version="23.01"): """ Runs 'check_cloudy()' and 'make_workingdir()'. 
""" - check_cloudy() - make_workingdir() + check_cloudy(quiet=quiet, cloudy_version=cloudy_version) + make_workingdir(quiet=quiet, workingdir=workingdir) print("Sunbather is ready to go!") diff --git a/src/sunbather/construct_parker.py b/src/sunbather/construct_parker.py index a95d7c2..2c60c72 100644 --- a/src/sunbather/construct_parker.py +++ b/src/sunbather/construct_parker.py @@ -990,7 +990,7 @@ def run_g( p.join() -def new_argument_parser(args, **kwargs): +def new_argument_parser(): parser = argparse.ArgumentParser( description="Creates 1D Parker profile(s) using the p_winds code and Cloudy.", formatter_class=argparse.ArgumentDefaultsHelpFormatter, @@ -1141,15 +1141,22 @@ def __call__(self, parser, namespace, values, option_string=None): action="store_true", help="neglect the stellar tidal gravity term", ) - args = parser.parse_args(args, **kwargs) + return parser - return args - -def main(args, **kwargs): +def main(**kwargs): + """ + Main function to construct a Parker profile. + """ t0 = time.time() - - args = new_argument_parser(args, **kwargs) + parser = new_argument_parser() + if not kwargs: + args = parser.parse_args(sys.argv[1:]) + else: + # This is a bit ugly, but it allows us to either call main directly or + # to call the script with command line arguments + print(f"-{key}={value}" for key, value in kwargs.items()) + args = parser.parse_args([f'-{key}={value}' for key, value in kwargs.items()]) if args.z is not None: zdict = tools.get_zdict(z=args.z, zelem=args.zelem) @@ -1174,18 +1181,18 @@ def main(args, **kwargs): if not os.path.isdir(projectpath + "/parker_profiles/" + args.plname + "/"): os.mkdir(projectpath + "/parker_profiles/" + args.plname) if not os.path.isdir( - projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" + f"{projectpath}/parker_profiles/{args.plname}/{args.pdir}/" ): os.mkdir( - projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/" + f"{projectpath}/parker_profiles/{args.plname}/{args.pdir}/" ) if (args.fH is None) and ( not os.path.isdir( - projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/temp/" + f"{projectpath}/parker_profiles/{args.plname}/{args.pdir}/temp/" ) ): os.mkdir( - projectpath + "/parker_profiles/" + args.plname + "/" + args.pdir + "/temp" + f"{projectpath}/parker_profiles/{args.plname}/{args.pdir}/temp" ) if len(args.T) == 1 and len(args.Mdot) == 1: # then we run a single model diff --git a/src/sunbather/data/planets.txt b/src/sunbather/data/workingdir/planets.txt similarity index 100% rename from src/sunbather/data/planets.txt rename to src/sunbather/data/workingdir/planets.txt diff --git a/src/sunbather/install_cloudy.py b/src/sunbather/install_cloudy.py index d80e951..18e974e 100644 --- a/src/sunbather/install_cloudy.py +++ b/src/sunbather/install_cloudy.py @@ -73,16 +73,22 @@ def test(self): exited OK" at the end. """ os.chdir(f"{self.cloudypath}/c{self.version}/source/") - print( - 'Type "test" and hit return twice. ' - 'It should print "Cloudy exited OK" at the end.' - ) with subprocess.Popen( [ "./cloudy.exe", - ] + ], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, ) as p: + cloudy_output = p.communicate(input=b"test\n\n")[0] p.wait() + try: + assert b"Cloudy exited OK" in cloudy_output + except AssertionError: + print("Cloudy did not test OK...") + else: + print("Cloudy tested OK.") def copy_data(self): """
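
A minimal usage sketch of how the refactored entry points from the patches above could fit together. It assumes the `sunbather.firstrun()` helper and the keyword-argument form of `construct_parker.main()` introduced in these diffs; the planet name, profile directory, and parameter values below are placeholders, not values taken from the patches, and exact argument handling should be checked against the final module code.

```python
# Sketch only (placeholder values): combines the entry points shown in the
# patches above. firstrun() locates/installs Cloudy and copies the default
# files (e.g. planets.txt) into a working directory; construct_parker.main()
# forwards keyword arguments as CLI-style "-key=value" flags to its parser.
import sunbather
from sunbather import construct_parker

# Non-interactive first-time setup (quiet=True skips the y/n prompts).
sunbather.firstrun(
    quiet=True,
    workingdir="./sunbather_project",  # placeholder working directory
    cloudy_version="23.01",
)

# Build a single Parker wind profile for a planet that is assumed to be
# listed in planets.txt of the working directory (placeholder values).
construct_parker.main(plname="examplePlanet", pdir="fH_0.9", T=8000, Mdot=11.0)
```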