Merge w GKropf Fork #5

Open
wants to merge 9 commits into base: master
3 changes: 2 additions & 1 deletion Examples/Ex_Params1.txt
@@ -1,5 +1,6 @@
 NumMinerals,4
-CoolingType,Linear
+CoolingFile,/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/cooling_file_GELP.txt
+CoolingType,Custom
 WholeRock,12.8
 ModelDuration,4.0
 StartingTemp,700
3 changes: 2 additions & 1 deletion Examples/Ex_Params4.txt
@@ -1,5 +1,6 @@
 NumMinerals,4
-CoolingType,Linear
+CoolingFile,/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/Inverse_InitialSol/Initial00.txt
+CoolingType,Custom
 WholeRock,12.8
 ModelDuration,4.0
 StartingTemp,700
Empty file added eiler_SA/__init__.py
Empty file.
53 changes: 53 additions & 0 deletions eiler_SA/add_cooling_file_to_params.py
@@ -0,0 +1,53 @@
import os


"""Once you've made a ton of config files changing mode and length based off a config template add a cooling file to
those parameter files with this script. Once you've run this script, run frwrd_bulk_run.py"""
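# For example (assuming the template already contains a CoolingFile, key, since this script only rewrites
# existing lines), a template with
#   CoolingType,Linear
#   CoolingFile,
# is written out as
#   CoolingType,Custom
#   CoolingFile,/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/cooling_file_GELP.txt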

config_file_locations = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/mode_length_chloe/blockD/'
config_output = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/mode_length_chloe_cooling/blockD/'
if not os.path.exists(config_output):
    os.mkdir(config_output)
cooling_file_location = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/cooling_file_GELP.txt'
# cooling_file_startemp = 650.0
# cooling_file_endtemp = 200.0
# cooling_file_timestep = 0.0005

for file in os.listdir(config_file_locations):

    print('adding a cooling file to config file: {}'.format(file))

    path = os.path.join(config_file_locations, file)

    lines_to_write = []
    with open(path, 'r') as rfile:

        for line in rfile:
            if line.startswith('CoolingFile,'):
                line = 'CoolingFile,'
                line = '{}{}\n'.format(line, cooling_file_location)
                lines_to_write.append(line)
            elif line.startswith('CoolingType,'):
                type_str = line.split(',')[0]
                line = '{},Custom\n'.format(type_str)
                lines_to_write.append(line)
            # elif line.startswith('ModelDuration'):
            #     pass
            # elif line.startswith('EndTemp'):
            #     lines_to_write.append('EndTemp,{}\n'.format(cooling_file_endtemp))
            # elif line.startswith('StartingTemp'):
            #     lines_to_write.append('StartingTemp,{}\n'.format(cooling_file_startemp))
            # elif line.startswith('TimeStep'):
            #     lines_to_write.append('TimeStep,{}\n'.format(cooling_file_timestep))
            else:
                lines_to_write.append(line)

    outpath = os.path.join(config_output, file)
    print(lines_to_write)
    with open(outpath, 'w') as wfile:

        for line in lines_to_write:
            wfile.write(line)


print('done!!!')
93 changes: 93 additions & 0 deletions eiler_SA/array_scratch.py
@@ -0,0 +1,93 @@
# ===============================================================================
# Copyright 2019 Jan Hendrickx and Gabriel Parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import numpy as np
import matplotlib.pyplot as plt
# ============= standard library imports ========================
from eiler_SA.forwardmodeling_sa import make_params_dict, generate_synthetic_data, write_params_file, sample_locator

#root = '/Users/dcadol/Desktop/academic_docs_II/FGB_model/JohnEiler/plag_hornblende_sensitivity'
root = r'/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/original_eiler/'

# year in MA we want to see
year_ma = 44
time_step = 0.0005

# convert the year into the index for the y array. Subtract one bc the y array starts at index 0
year_index = int(round(((year_ma / time_step) - 1), 1))
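# e.g., year_ma = 44 with time_step = 0.0005 Ma per step gives 44 / 0.0005 = 88000 steps, so year_index = 87999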

print('year index {}'.format(year_index))

x_path = os.path.join(root, 'Eiler94_Amphibolite_x.npy')
y_path = os.path.join(root, 'Eiler94_Amphibolite_y.npy')
time_path = os.path.join(root, 'Eiler94_Amphibolite_time.npy')

param_path = os.path.join(root, 'Eiler94_Amphibolite.txt')
param_dict = make_params_dict(param_path)

x_arr = np.load(x_path)
y_arr = np.load(y_path)
t_arr = np.load(time_path)

print('x shape: {}, y shape {}, t shape {}'.format(x_arr.shape, y_arr.shape, t_arr.shape))

# quartz
plt.plot(x_arr[:, 0], y_arr[0, year_index, :])
#plt.show()

# TODO - construct the inverse-model workflow (steps below)

# 1) Start with a known time-temp history - in our case linear
# 2) Run the forward model
# 3) Keep a sample of forward model outputs at late-time
# 4) Add noise to the data (100% - full noise & 20% - low noise)
# 5) Run the inverse solver on the noisy data with an initial time-temp solution


# From the last time slice of the forward model, save the data as a txt file, keeping only some points:
# | Distance | D18O | Uncertainty |
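# A minimal sketch of the inputs for steps 4) and 5) above (illustrative only; the sample indices, noise
# model, and output filename here are assumptions, and this script's actual sampling is done by
# generate_synthetic_data further down): add proportional Gaussian noise to the late-time quartz profile
# and write a | Distance | D18O | Uncertainty | table.
example_idx = [1500, 1600, 1700, 1800, 1900, 2000]        # assumed sample indices along the profile
example_d18o = y_arr[0, year_index, example_idx]          # late-time quartz values at those points
example_sigma = 0.2 * np.abs(example_d18o)                # 20% "low noise" one-sigma uncertainty
example_noisy = example_d18o + np.random.normal(0.0, example_sigma)
example_table = np.column_stack([x_arr[example_idx, 0], example_noisy, example_sigma])
np.savetxt(os.path.join(root, 'example_noisy_quartz.txt'), example_table,
           header='Distance D18O Uncertainty')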

x_quartz = x_arr[:, 0]
# print(x_quartz)
y_quartz = y_arr[0, year_index, :]

x_plag = x_arr[:, 1]
y_plag = y_arr[1, year_index, :]

x_horn = x_arr[:, 2]
y_horn = y_arr[2, year_index, :]

x_mag = x_arr[:, 3]
y_mag = y_arr[3, year_index, :]

quartz_sample_locations = [1500, 1600, 1700, 1800, 1900, 2000]
plag_sample_locations = [2000, 2100, 2200, 2300, 2400, 2500, 2600, 2700, 2800, 2900, 3000]
horn_sample_locations = [3500, 3600, 3700, 3800, 3900, 4000]
mag_sample_locations = [400, 500, 600]

output_location = root

sample_locs = sample_locator(x_plag)
print('sample locations \n', sample_locs)

name = 'Eiler94_Amphibolite_plag_lownoise'

print('x plagioclase', x_plag)
generate_synthetic_data(x_arr=x_plag, y_arr=y_plag, noise=0.01, sample_locations=sample_locs,
output_location=output_location, output_name=name)



114 changes: 114 additions & 0 deletions eiler_SA/bulk_sampler.py
@@ -0,0 +1,114 @@
# ===============================================================================
# Copyright 2019 Gabriel Kropf and Gabriel Parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
import os
import numpy as np
# ============= standard library imports ========================
from eiler_SA.forwardmodeling_sa import make_params_dict, generate_synthetic_data, write_params_file

if __name__ == "__main__":

    file_root = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/modality/modality_fwd_results/'
    # # HORNBLENDE
    # output_location = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/modality/sa_sampled/hornblende'
    # sample_mineral = 'horn'
    # PLAG
    output_location = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/modality/sa_sampled/plagioclase'
    sample_mineral = 'plag'
    xfile_lst = []
    yfile_lst = []
    xname_lst = []
    yname_lst = []
    ### ================ MODALITY ================
    # build filenames for modality SA
    plagioclase_mode_start = 0.1
    hornblende_mode_start = 0.9
    step = 0.1
    div = int(max(hornblende_mode_start, plagioclase_mode_start) / step)
    print(div)
    for i in range(div):

        # subtract from plag and add to hornblende to change the relative concentration
        delta_mode = float(i * step)
        # delta_mode = round(delta_mode, 1)
        plag_mode = plagioclase_mode_start + delta_mode
        plag_mode = round(plag_mode, 1)
        hornb_mode = hornblende_mode_start - delta_mode
        hornb_mode = round(hornb_mode, 1)
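        # e.g., i = 0 gives plag_mode 0.1 / hornb_mode 0.9 and (below) files named like 'eiler94_p1_h9_x.npy';
        # the final step gives plag_mode 0.9 / hornb_mode 0.1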

        vars = ['x', 'y']

        for v in vars:
            print('plag mode: {}, hornb mode {}'.format(plag_mode, hornb_mode))
            filename = 'eiler94_p{}_h{}_{}.npy'.format(str(plag_mode)[-1], str(hornb_mode)[-1], v)
            name = 'eiler94_p{}_h{}'.format(str(plag_mode)[-1], str(hornb_mode)[-1])
            # name_lst.append(name)

            if v == 'x':
                xarrpath = os.path.join(file_root, filename)
                xfile_lst.append(xarrpath)
                xname_lst.append(name)
            if v == 'y':
                yarrpath = os.path.join(file_root, filename)
                yfile_lst.append(yarrpath)
                yname_lst.append(name)

    print(xfile_lst, '\n', yfile_lst)

    ###=========== SAMPLING ================

    # year in MA we want to see
    year_ma = 44
    time_step = 0.0005

    # convert the year into the index for the y array. Subtract one bc the y array starts at index 0
    year_index = int(round(((year_ma / time_step) - 1), 1))

    print(len(xfile_lst), len(yfile_lst), len(xname_lst), len(yname_lst))

    for x_arr_file, y_arr_file, xn, yn in zip(xfile_lst, yfile_lst, xname_lst, yname_lst):

        x_arr = np.load(x_arr_file)
        y_arr = np.load(y_arr_file)

        x_quartz = x_arr[:, 0]
        # print(x_quartz)
        y_quartz = y_arr[0, year_index, :]

        x_plag = x_arr[:, 1]
        y_plag = y_arr[1, year_index, :]

        x_horn = x_arr[:, 2]
        y_horn = y_arr[2, year_index, :]

        x_mag = x_arr[:, 3]
        y_mag = y_arr[3, year_index, :]

        # TODO - Function to generate automatic and consistent plag hornblende sample locations

        plag_sample_locations = [2000, 2100, 2200, 2300, 2400, 2500, 2600, 2700, 2800, 2900, 3000]
        horn_sample_locations = [3500, 3600, 3700, 3800, 3900, 4000]

        outname = '{}_{}'.format(sample_mineral, xn)

        if sample_mineral == 'plag':

            generate_synthetic_data(x_arr=x_plag, y_arr=y_plag, noise=0.14, sample_locations=plag_sample_locations,
                                    output_location=output_location, output_name=outname)

        elif sample_mineral == 'horn':
            generate_synthetic_data(x_arr=x_horn, y_arr=y_horn, noise=0.14, sample_locations=horn_sample_locations,
                                    output_location=output_location, output_name=outname)
79 changes: 79 additions & 0 deletions eiler_SA/bulk_sampler_chloe_AGU.py
@@ -0,0 +1,79 @@
import os
import numpy as np
from eiler_SA.forwardmodeling_sa import make_params_dict, write_params_file, sample_locator, generate_synthetic_data

file_root = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/modality/modality_len_results_fwd/blockA'

output_root = '/home/gabriel/PycharmProjects/FastGrainBoundary-DiffusionSolver/sensitivity_analysis/modality/modality_len_results_fwd_sampled/blockA/'
if not os.path.exists(output_root):
    os.mkdir(output_root)

y_files = []
x_files = []
time_files = []
xname_lst = []
yname_lst = []
for i in os.listdir(file_root):

    p = os.path.join(file_root, i)
    n = i.split('.')[0]
    if i.endswith('_x.npy'):
        x_files.append(p)
        xname_lst.append(n)
    elif i.endswith('_y.npy'):
        y_files.append(p)
        yname_lst.append(n)
    elif i.endswith('_time.npy'):
        time_files.append(p)
        # time_name_lsit

yfile_lst = sorted(y_files)
yname_lst = sorted(yname_lst)
xfile_lst = sorted(x_files)
xname_lst = sorted(xname_lst)
time_files = sorted(time_files)
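# sorting each list independently keeps the x, y, and name lists aligned for the zip below, since paired
# files share the same filename prefix and differ only in the '_x' / '_y' / '_time' suffix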

print(yfile_lst)
print(yname_lst)
print(xfile_lst)
print(xname_lst)


###=========== SAMPLING ================

# year in MA we want to see
year_ma = 44
time_step = 0.0005

# convert the year into the index for the y array. Subtract one bc the y array starts at index 0
year_index = int(round(((year_ma / time_step) - 1), 1))

print(len(xfile_lst), len(yfile_lst), len(xname_lst), len(yname_lst))

for x_arr_file, y_arr_file, xn, yn in zip(xfile_lst, yfile_lst, xname_lst, yname_lst):

    x_arr = np.load(x_arr_file)
    y_arr = np.load(y_arr_file)

    x_quartz = x_arr[:, 0]
    # print(x_quartz)
    y_quartz = y_arr[0, year_index, :]

    x_plag = x_arr[:, 1]
    y_plag = y_arr[1, year_index, :]

    x_horn = x_arr[:, 2]
    y_horn = y_arr[2, year_index, :]

    x_mag = x_arr[:, 3]
    y_mag = y_arr[3, year_index, :]

    plag_sample_locations = sample_locator(x_plag)
    horn_sample_locations = sample_locator(x_horn)

    outname = '{}_{}'.format('plag', xn)
    generate_synthetic_data(x_arr=x_plag, y_arr=y_plag, noise=0.14, sample_locations=plag_sample_locations,
                            output_location=output_root, output_name=outname)
    outname = '{}_{}'.format('horn', xn)
    generate_synthetic_data(x_arr=x_horn, y_arr=y_horn, noise=0.14, sample_locations=horn_sample_locations,
                            output_location=output_root, output_name=outname)