
Commit

Merge pull request #20 from lukasmerten/CI
CI / CD
JulienDoerner authored Nov 15, 2023
2 parents 11a10eb + fa1f342 commit d4e0001
Showing 74 changed files with 331 additions and 76,632 deletions.
39 changes: 39 additions & 0 deletions .github/workflows/createTarBall.yml
@@ -0,0 +1,39 @@
name: Create test archive

# Controls when the action will run. The workflow runs on every push to the master branch.
on:
  push:
    branches: [ "master" ]

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "Create_CRPropa_default"
  Create_CRPropa_default:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Check out the repository
      - name: Checkout repository
        uses: actions/checkout@v3
      # Python set-up
      - name: Preinstall
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'
          cache: 'pip'  # caching pip dependencies
      - name: pip install
        run: pip install -r requirements.txt
      # Create the default CRPropa data, compress it and calculate the checksum
      - name: Create tabulated data
        run: |
          python calc_all.py
      - name: Archive data
        uses: actions/upload-artifact@v3
        with:
          name: "crpropa-data"
          path: |
            data-*.tar.gz
            data-*.tar.gz-CHECKSUM
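
For reference, the CHECKSUM artifact produced above is the plain output of md5sum. A minimal sketch of how a downloaded archive could be verified locally; the file name is illustrative and not part of the workflow:

import hashlib

archive = "data-2023-11-15.tar.gz"          # illustrative file name
with open(archive + "-CHECKSUM") as f:
    expected = f.read().split()[0]          # md5sum format: "<hash>  <filename>"

md5 = hashlib.md5()
with open(archive, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        md5.update(chunk)

print("OK" if md5.hexdigest() == expected else "checksum mismatch")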
77 changes: 77 additions & 0 deletions .github/workflows/upload.yml
@@ -0,0 +1,77 @@
# Workflow to test automatic uploading to Sciebo

name: Upload data

# Controls when the action will run. The workflow runs when manually triggered using the UI
# or API.
on:
  workflow_dispatch:
    # Inputs the workflow accepts.
    inputs:
      name:
        # Description to be shown in the UI
        description: 'Filename'
        # Default value if no value is explicitly provided
        default: 'data-YYYY-MM-DD'
        # Input has to be provided for the workflow to run
        required: true

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "upload"
  upload:
    # The type of runner that the job will run on (ubuntu-latest currently maps to Ubuntu 22.04)
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Download the archive created by createTarBall.yml
      - name: Download artifact
        id: download-artifact
        uses: dawidd6/action-download-artifact@v2
        with:
          # Optional, workflow file name or ID.
          # If not specified, it will be inferred from run_id (if specified) or default to the current workflow.
          workflow: createTarBall.yml
          # Optional, the status or conclusion of a completed workflow to search for.
          # Can be a workflow conclusion:
          #   "failure", "success", "neutral", "cancelled", "skipped", "timed_out", "action_required"
          # or a workflow status:
          #   "completed", "in_progress", "queued"
          # Use the empty string ("") to ignore status or conclusion in the search.
          workflow_conclusion: success
          # Optional, name of the uploaded artifact;
          # if not specified, all artifacts are downloaded
          # and extracted into respective subdirectories.
          # https://github.com/actions/download-artifact#download-all-artifacts
          name: crpropa-data
          # Optional, check whether the workflow run has an artifact and otherwise
          # fall back to the last available artifact from a previous run.
          # Default false: just try to download from the last run.
          check_artifacts: false
          # Optional, search for the last workflow run that stored an artifact named as in the `name` input.
          # Default false.
          search_artifacts: false
          # Optional, skip unpacking the downloaded artifact(s).
          # Default false.
          skip_unpack: false
          # Optional, how to exit the action if no artifact is found.
          # Can be one of "fail", "warn", "ignore". Default "fail".
          if_no_artifact_found: fail

      - name: Display structure of downloaded files
        run: ls -R

      # Upload the archive and its checksum file to Sciebo
      - name: Upload to Sciebo
        shell: bash
        run: |
          curl -u "$SCIEBO_USR:$SCIEBO_PWD" -T "${{ github.event.inputs.name }}.tar.gz" "https://ruhr-uni-bochum.sciebo.de/public.php/webdav/${{ github.event.inputs.name }}.tar.gz"
          curl -u "$SCIEBO_USR:$SCIEBO_PWD" -T "${{ github.event.inputs.name }}.tar.gz-CHECKSUM" "https://ruhr-uni-bochum.sciebo.de/public.php/webdav/${{ github.event.inputs.name }}.tar.gz-CHECKSUM"
        env:
          # Login credentials are stored as encrypted secrets in the repository settings on GitHub.
          SCIEBO_USR: ${{ secrets.SCIEBO_CRPDATA_USR }}
          SCIEBO_PWD: ${{ secrets.SCIEBO_CRPDATA_PWD }}
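
The upload step is a plain HTTP PUT against the public WebDAV endpoint of the Sciebo share, authenticated with the two repository secrets. A rough Python equivalent of the two curl calls above, shown only as a sketch; the input name is illustrative:

import os
import requests

name = "data-2023-11-15"                     # value of the 'name' workflow input (illustrative)
base = "https://ruhr-uni-bochum.sciebo.de/public.php/webdav/"
auth = (os.environ["SCIEBO_USR"], os.environ["SCIEBO_PWD"])

for suffix in (".tar.gz", ".tar.gz-CHECKSUM"):
    with open(name + suffix, "rb") as f:
        response = requests.put(base + name + suffix, data=f, auth=auth)
    response.raise_for_status()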

5 changes: 5 additions & 0 deletions .gitignore
@@ -1,4 +1,9 @@
*.pyc
*.tar.gz
*.tar.gz*

plots/
temp/

data/
!data/CustomPhotonField/
10 changes: 6 additions & 4 deletions calc_all.py
@@ -41,6 +41,11 @@
import calc_photopionproduction as ppp
import calc_synchrotron as syn

# Fixing the date string at the beginning of a run
# datestr format YYYY-MM-DD
datestr = "-".join([str(x).zfill(2) for x in time.localtime()[:3]])


def nuclear_decay():
"""Creates nuclear decay tables"""

@@ -201,7 +206,6 @@ def compress():
print("#"*50)
print("Compressing the ./data directory.")
t1 = time.time()
datestr = "-".join([str(x) for x in time.localtime()[:3]])
subprocess.run(["tar", "-czf", "data-"+datestr+".tar.gz", "./data"])
t2 = time.time()
print("\nCompressed files generated in {} seconds.".format(round(t2-t1, 2)))
@@ -217,7 +221,6 @@ def calc_checksum():
print("#"*50)
print("Calculating the checksum.")
t1 = time.time()
datestr = "-".join([str(x) for x in time.localtime()[:3]])
checksum = subprocess.run(["md5sum", "data-"+datestr+".tar.gz"],
capture_output=True, text=True).stdout
with open("data-"+datestr+".tar.gz-CHECKSUM", 'w') as f:
@@ -250,9 +253,8 @@ def createCRPropaDefault():
    to reduce the amount of data that needs to be shipped
    with the code.
    """

    nuclear_decay()
    nuclear_mass()
    nuclear_decay()
    elastic_scattering(reduced_fields)
    EM_processes(fields_cmbebl+fields_urb)
    BH_pair_production(fields_cmbebl)
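
The new module-level datestr fixes the date once per run, so the archive created by compress() and the file hashed by calc_checksum() always carry the same name; zfill(2) pads month and day to two digits. A small sketch of the resulting format, with an illustrative date:

import time

# time.localtime()[:3] returns (year, month, day), e.g. (2023, 11, 5)
example = (2023, 11, 5)
datestr = "-".join([str(x).zfill(2) for x in example])
print(datestr)   # 2023-11-05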
70 changes: 55 additions & 15 deletions calc_decay.py
@@ -1,15 +1,53 @@
from numpy import *
import crpropa as crp
import numpy as np
from scipy.integrate import quad
import gitHelp as gh
import os
from units import mass_electron, c_squared, c_light, h_planck, keV, amu

cdir = os.path.split(__file__)[0]

# Script to preprocess the nuclear decay data table from the BNL NuDat2 database
# Decay Search: http://www.nndc.bnl.gov/nudat2/indx_sigma.jsp, output: formatted file --> decay_NuDat2.txt
# Decay Radiation Search: http://www.nndc.bnl.gov/nudat2/indx_dec.jsp, output: formatted file --> gamma_NuDat2.txt

class NuclearMassTable(object):
    """Class to provide tabulated nuclear masses.
    This class reimplements the basic functionality of
    CRPropa's ParticleMass module, see:
    https://github.com/CRPropa/CRPropa3/blob/master/include/crpropa/ParticleMass.h
    """

    def __init__(self):
        try:
            datapath = os.path.join(cdir, 'data/nuclear_mass.txt')
            self.massTable = np.loadtxt(datapath, usecols=(2))
        except FileNotFoundError:
            print("The file 'data/nuclear_mass.txt' was not found.")
            print("Run the script calc_mass.py and try again.")

    def getMass(self, id: int) -> float:
        """Helper function to return tabulated nuclear masses.
        id is not the usual CRPropa PID but id = Z * 31 + N,
        with Z the charge number and N the neutron number.
        """
        return self.massTable[id]

    def nuclearMass(self, A: int, Z: int) -> float:
        """Nuclear mass for given mass (A) and charge (Z) number.
        Particle masses that are not tabulated are approximated by
        A * amu - Z * mass_electron.
        """

        if ((A < 1) or (A > 56) or (Z < 0) or (Z > 26) or (Z > A)):
            print("nuclearMass: nuclear mass not found in the mass table for A = {}, Z = {}. Using the approximation A * amu - Z * m_e instead.".format(A, Z))
            return A * amu - Z * mass_electron
        N = A - Z

        return self.getMass(Z * 31 + N)

class Decay:
    def load(self, s):
        """ Extract decay parameters from a given line of the data file. """
@@ -24,11 +62,11 @@ def load(self, s):
# decay time
s = l[9].strip()
if s == 'infinity':
self.tau = inf
self.tau = np.inf
elif s == '':
self.tau = 0
else:
self.tau = float(s) / log(2) # half-life --> life time
self.tau = float(s) / np.log(2) # half-life --> life time

# branching ratio
s = ''.join(c for c in l[13] if c not in ('>','<','=','~','%',' ','?','\n'))
@@ -43,7 +81,7 @@ def __str__(self):

    def isStable(self):
        """ Returns whether the nucleus is stable. """
        return self.tau == inf
        return self.tau == np.inf

    def isBetaPlus(self):
        """ Returns whether the nucleus has a beta+ decay mode. """
@@ -283,9 +321,11 @@ def __str__(self):
# see Basdevant, Fundamentals in Nuclear Physics, 4.3.2 and 4.3.3
print ('\nBeta+ correction')
print ('-------------------------------------')
Qe = crp.mass_electron * crp.c_squared  # electron rest energy [J]
Qe = mass_electron * c_squared  # electron rest energy [J]
a0 = 5.29177e-11  # Bohr radius [m]
hbar_c = crp.c_light * (crp.h_planck / 2 / pi)  # [J m]
hbar_c = c_light * (h_planck / 2 / np.pi)  # [J m]

nucMass = NuclearMassTable()

for Z in range(27):
    for N in range(31):
@@ -294,24 +334,24 @@ def __str__(self):
                continue

            A = Z+N
            m1 = crp.nuclearMass(A, Z)
            m2 = crp.nuclearMass(A, Z-1)
            dm = (m1 - m2) * crp.c_squared
            m1 = nucMass.nuclearMass(A, Z)
            m2 = nucMass.nuclearMass(A, Z - 1)
            dm = (m1 - m2) * c_squared

            Qec = (dm + Qe)
            Qbeta = (dm - Qe)

            # check if energetically possible
            if Qbeta < 0:
                print (d, ' <- make stable (beta+ decay not possible)')
                d.tau = inf
                d.tau = np.inf
                continue

            f = lambda E: E * sqrt(E**2 - Qe**2) * (dm - E)**2
            f = lambda E: E * np.sqrt(E**2 - Qe**2) * (dm - E)**2
            I, Ierr = quad(f, Qe, dm)

            # ratio tau_beta+ / tau_ec
            f = pi**2 / 2 * (Z/a0*hbar_c)**3 * Qec**2 / I
            f = np.pi**2 / 2 * (Z / a0 * hbar_c)**3 * Qec**2 / I
            if f < 0:
                print (Qec)
            print (d, ' <- beta+ correction %.1e'%f)
@@ -323,7 +363,7 @@ def __str__(self):
            except:
                continue  # no gamma entry
            for i, Egamma in enumerate(g.energy):
                Egamma *= crp.keV
                Egamma *= keV
                if Egamma > Qbeta:
                    print (d, ' <- remove gamma decay (Egamma %g > %g = Q)' % (Egamma, Qbeta))
                    g.energy.pop(i)
@@ -463,7 +503,7 @@ def __str__(self):
        if (z + n)==0:
            continue
        for d in decayTable[z][n]:
            if d.tau != inf:
            if d.tau != np.inf:
                continue
            fout.write('%i\t%i\t%i\n' % (z, n, z+n))
fout.close()
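
The NuclearMassTable class added above lets calc_decay.py look up nuclear masses from the locally generated data/nuclear_mass.txt (indexed as id = Z * 31 + N) instead of calling CRPropa's nuclearMass binding. A minimal usage sketch, assuming calc_mass.py has been run first; note that importing calc_decay executes the whole preprocessing script, so in practice the class is used inside calc_decay.py itself:

from calc_decay import NuclearMassTable
from units import amu, mass_electron

nucMass = NuclearMassTable()
m_fe56 = nucMass.nuclearMass(56, 26)   # tabulated mass of iron-56 (Z = 26, N = 30)
m_out = nucMass.nuclearMass(60, 27)    # outside the table -> falls back to A*amu - Z*m_e
print(m_fe56, m_out, 60 * amu - 27 * mass_electron)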
2 changes: 1 addition & 1 deletion calc_elasticscattering.py
@@ -3,7 +3,7 @@
import interactionRate as iR
import gitHelp as gh
from calc_all import reduced_fields
from crpropa import eV
from units import eV

cdir = os.path.split(__file__)[0]

11 changes: 4 additions & 7 deletions calc_electromagnetic.py
@@ -3,13 +3,10 @@
import interactionRate
import os
import gitHelp as gh
from crpropa import eV, mass_electron, c_light
from calc_all import fields_cmbebl, fields_urb
from units import eV, mass_electron, c_light, sigma_thomson, alpha_finestructure

me2 = (mass_electron*c_light**2.) ** 2 # squared electron mass [J^2/c^4]
sigmaThomson = 6.6524e-29 # Thomson cross section [m^2]
alpha = 1 / 137.035999074 # fine structure constant


def sigmaPP(s):
""" Pair production cross section (Breit-Wheeler), see Lee 1996 """
@@ -18,7 +15,7 @@ def sigmaPP(s):
        return 0.

    b = np.sqrt(1 - smin / s)
    return sigmaThomson * 3 / 16 * (1 - b**2) * ((3 - b**4) * (np.log1p(b) - np.log1p(-b)) - 2 * b * (2 - b**2))
    return sigma_thomson * 3 / 16 * (1 - b**2) * ((3 - b**4) * (np.log1p(b) - np.log1p(-b)) - 2 * b * (2 - b**2))


def sigmaDPP(s):
@@ -40,7 +37,7 @@ def sigmaICS(s):
    b = (s - smin) / (s + smin)
    A = 2 / b / (1 + b) * (2 + 2 * b - b**2 - 2 * b**3)
    B = (2 - 3 * b**2 - b**3) / b**2 * (np.log1p(b) - np.log1p(-b))
    return sigmaThomson * 3 / 8 * smin / s / b * (A - B)
    return sigma_thomson * 3 / 8 * smin / s / b * (A - B)


def sigmaTPP(s):
@@ -49,7 +46,7 @@ def sigmaTPP(s):
    if beta < 0:
        return 0

    return sigmaThomson * 3 / 8 / np.pi * alpha * beta
    return sigma_thomson * 3 / 8 / np.pi * alpha_finestructure * beta


def getTabulatedXS(sigma, skin):
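
As a quick sanity check of the pair-production cross section above: sigmaPP must vanish below the standard Breit-Wheeler threshold s_min = 4 (m_e c^2)^2 and be positive above it. A small self-contained sketch, with the constants inlined here purely for illustration:

import numpy as np

mass_electron, c_light = 9.1093837015e-31, 2.99792458e8
sigma_thomson = 6.6524587321e-29
me2 = (mass_electron * c_light**2) ** 2       # squared electron rest energy [J^2]

def sigmaPP(s):
    """ Breit-Wheeler pair production cross section (same form as above) """
    smin = 4 * me2
    if s < smin:
        return 0.
    b = np.sqrt(1 - smin / s)
    return sigma_thomson * 3 / 16 * (1 - b**2) * ((3 - b**4) * (np.log1p(b) - np.log1p(-b)) - 2 * b * (2 - b**2))

print(sigmaPP(2 * me2))   # 0.0, below threshold
print(sigmaPP(8 * me2))   # positive, a fraction of sigma_thomson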
5 changes: 3 additions & 2 deletions calc_mass.py
@@ -1,7 +1,8 @@
import numpy as np
import gitHelp as gh
import os
from crpropa import amu, mass_electron, mass_proton, mass_neutron

from units import amu, mass_electron, mass_proton, mass_neutron

cdir = os.path.split(__file__)[0]

@@ -76,4 +77,4 @@
    for n in range(31):
        fout.write(str(z) + ' ' + str(n) + ' ' + str(D[z, n]) + '\n')

fout.close()
fout.close()
2 changes: 1 addition & 1 deletion calc_pairproduction.py
@@ -9,7 +9,7 @@
from scipy import integrate
import gitHelp as gh
from calc_all import fields_cmbebl
from crpropa import Mpc, c_squared, mass_electron, mass_proton, radius_electron, alpha_finestructure
from units import Mpc, c_squared, mass_electron, mass_proton, radius_electron, alpha_finestructure

cdir = os.path.split(__file__)[0]

3 changes: 1 addition & 2 deletions calc_photodisintegration.py
@@ -1,10 +1,9 @@
from genericpath import isdir
import numpy as np
import os
import interactionRate
import gitHelp as gh
from crpropa import eV
from calc_all import fields_cmbebl, fields_urb, reduced_fields
from units import eV

cdir = os.path.split(__file__)[0]

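
A recurring change across the calc_* scripts above is replacing from crpropa import ... with from units import ..., which removes the CRPropa Python bindings as a dependency of the data generation. The units module itself is not shown in this diff; a minimal sketch of what it could provide, with SI values listed purely for illustration (the repository's actual units.py may define more constants or different precision):

# units.py (sketch) -- physical constants in SI units used by the calc_* scripts
eV = 1.602176634e-19                  # J
keV = 1e3 * eV
Mpc = 3.0856775814913673e22           # m
c_light = 2.99792458e8                # m / s
c_squared = c_light ** 2
h_planck = 6.62607015e-34             # J s
amu = 1.66053906660e-27               # kg
mass_electron = 9.1093837015e-31      # kg
mass_proton = 1.67262192369e-27       # kg
mass_neutron = 1.67492749804e-27      # kg
radius_electron = 2.8179403262e-15    # m (classical electron radius)
sigma_thomson = 6.6524587321e-29      # m^2
alpha_finestructure = 1 / 137.035999084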