-
Notifications
You must be signed in to change notification settings - Fork 11
/
Copy pathconfig.py
executable file
·290 lines (245 loc) · 11.2 KB
/
config.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
MuSA configuration file.

Every option is a plain module-level constant, presumably imported and read
by name elsewhere in the package (``config.<name>``) — do not rename options,
only change their values. Not all options are used in every experimental
setup.
"""
# Note: It is possible to implement any other model,
# not necessarily limited to snow. See modules.*_tools.py for examples.
numerical_model = 'FSM2'  # model to use: one of 'FSM2', 'dIm' or 'snow17'
dt = 3600  # model/forcing timestep in seconds
# -----------------------------------
# Directories
# -----------------------------------
nc_obs_path = "./DATA/Obs/"          # observation netCDF files
nc_forcing_path = "./DATA/Forcing/"  # forcing netCDF files
# NOTE(review): "maks" is a historical typo for "mask"; keep the name as-is,
# it is presumably referenced elsewhere by this exact name — confirm before
# ever renaming it.
nc_maks_path = "./DATA/mask/mask.nc"
dem_path = "./DATA/DEM/DEM.nc"       # digital elevation model (netCDF)
fsm_src_path = "./FSM2/"             # FSM2 source code location
intermediate_path = "./DATA/INTERMEDIATE/"
save_ensemble_path = "./DATA/ENSEMBLES/"
output_path = "./DATA/RESULTS/"
spatial_propagation_storage_path = "./DATA/SPATIAL_PROP/"
real_time_restart_path = "./DATA/REAL_TIME_RESTART/"
tmp_path = None  # None presumably selects a default temp location — TODO confirm
# -----------------------------------
# Restart options
# -----------------------------------
# If restart_run is enabled, the outputs will not be overwritten. MuSA will
# try to restart the simulation from the incomplete outputs.
restart_run = False
# If save_int_forcing, an intermediate file is generated to speed up
# other simulations that use the same forcing.
save_int_forcing = True
# If restart_forcing, the forcing will be read from the intermediate files of
# a previous run.
restart_forcing = True
# If real_time_restart, it will be possible to run MuSA from initial-condition
# files. These files are ensemble objects generated by a previous run (or by a
# very advanced user). Init files are saved in real_time_restart_path,
# to be used in the next iteration as initial conditions.
# Note: restart_forcing and restart_run should be False if real_time_restart,
# unless the user really knows what they are doing.
real_time_restart = False
# -----------------------------------
# Data Assim
# -----------------------------------
# load_prev_run allows reconstructing a simulation from the posterior
# parameters of a previous simulation.
load_prev_run = False
# da_algorithm from PF, EnKF, IEnKF, PBS, ES, IES, deterministic_OL,
# IES-MCMC_AI, IES-MCMC, AdaMuPBS, AdaPBS or PIES
da_algorithm = 'PBS'
redraw_prior = False  # PF and PBS only
max_iterations = 4  # IEnKF, IES, IES-MCMC and AdaPBS
# resampling_algorithm from "bootstrapping", residual_resample,
# stratified_resample, systematic_resample, no_resampling
resampling_algorithm = "no_resampling"
ensemble_members = 100  # ensemble size
Neffthrs = 0.1  # low effective-ensemble-size (Neff) threshold
# MCMC parameters
chain_len = 20000  # length of the MCMC chain
adaptive = True  # update proposal covariance for next step
histcov = True  # use posterior IES covariance as proposal covariance
burn_in = 0.1  # discard the first x proportion of samples
# r_cov can be a list of scalars of length equal to var_to_assim or the string
# 'dynamic_error'. If 'dynamic_error' is selected, errors may change in space
# and time. If this option is selected, the errors will be stored in a new
# variable in the observation files, and will have the same dimensions as
# the observations.
r_cov = [0.04]
add_dynamic_noise = False
# var_to_assim from "snd", "SWE", "Tsrf", "fSCA", "SCA", "alb", "LE", "H"
var_to_assim = ["snd"]
# DA second-order variables and/or statistics (experimental)
DAsord = False
DAord_names = ["Ampli"]
# vars_to_perturbate from "SW", "LW", "Prec", "Ta", "RH", "Ua", "PS"
vars_to_perturbate = ["Ta", "Prec"]
# In smoothers, re-draw new parameters for each season
# (presumably one flag per entry in vars_to_perturbate — confirm).
season_rejuvenation = [True, True]
# Seed to initialise the random number generator
# (None presumably leaves it unseeded — confirm).
seed = None
# perturbation_strategy from "normal", "lognormal",
# "logitnormal_adi" or "logitnormal_mult"
# (presumably one entry per vars_to_perturbate — confirm).
perturbation_strategy = ["logitnormal_adi", "logitnormal_mult"]
# precipitation_phase from "Harder" or "temp_thld"
precipitation_phase = "Harder"
# Save ensembles as a pkl object
save_ensemble = False
# -----------------------------------
# Domain
# -----------------------------------
# implementation from "point_scale", "distributed" or "Spatial_propagation"
implementation = "distributed"
# If implementation = "Spatial_propagation": specify which observation
# variables are spatially propagated, in a list.
# var_to_prop = var_to_assim -> all the variables are spatially propagated
# var_to_prop = []           -> no variable is spatially propagated
# NOTE: this assignment aliases the var_to_assim list object itself (it is
# not a copy), so mutating one list in place would affect both names.
var_to_prop = var_to_assim
# parallelization from "sequential", "multiprocessing" or "HPC.array"
parallelization = "multiprocessing"
MPI = False
# Note: if nprocess = None, the number of processors will be
# estimated (max(n)-1). In HPC.array nprocess is an argument
# (see e.g. run_slurm.sh), and this variable is ignored.
nprocess = 8
# Number of cells to be solved per processor at each iteration.
cells_per_process = None  # None means 1
# Maximum number of seconds before each pool is killed and retried.
timeout = None  # None means no limit (infinite)
# NOTE(review): the values below look like projected coordinates (metres),
# not geographic degrees — confirm the expected CRS.
aws_lat = 4735225.54  # latitude in case of point_scale
aws_lon = 710701.28  # longitude in case of point_scale
date_ini = "2018-09-01 00:00"  # simulation start, format "%Y-%m-%d %H:%M"
date_end = "2020-08-30 23:00"  # simulation end, format "%Y-%m-%d %H:%M"
season_ini_month = 9  # in smoothers, beginning of DA window (month)
season_ini_day = 1  # in smoothers, beginning of DA window (day)
# -----------------------------------
# Spatial propagation configuration
# -----------------------------------
# Cut-off distance for the Gaspari and Cohn localisation function.
c = [5, 5]
# Calculate the distances internally (topo_dict_external = None) or read an
# external file with the dimensions.
topo_dict_external = None
dist_algo = 'euclidean'  # distance metric
# distance_mat_calc enables calculating the distance matrix iteratively
# as a sparse distance matrix, using a KD-tree, or in the regular dense way
# (memory consuming).
distance_mat_calc = 'Regular'  # Regular, Sparse, KDtree
# Optionally perform dimension reduction to try to avoid non-PD matrices.
dimension_reduction = 'None'  # LMDS, PCA or None
dim_num = 3  # number of dimensions if dimension_reduction is enabled
# jitter regularizes the covariance matrix by adding a value to the diagonal
# elements to make it PD. Typical value 1e-6, but it is possible
# to experiment by increasing it.
jitter = 1e-6
# Try to find the closest PD matrix, or raise an exception
# (closePDmethod = None). This can be very slow and memory consuming.
closePDmethod = None  # 'clipped' (faster but less accurate) or 'nearest'
# Topographical dimensions used to compute the distances.
topographic_features = {'Ys': True,  # Latitude
                        'Xs': True,  # Longitude
                        'Zs': False,  # Elevation
                        'slope': False,  # Slope
                        'DAH': False,  # Diurnal Anisotropic Heat
                        'TPI': False,  # Topographic Position Index
                        'Sx': False}  # Upwind Slope index (Winstral)
# Topographical hyperparameters
DEM_res = 5  # DEM resolution
TPI_size = 25  # TPI window size
Sx_dmax = 15  # Sx search distance
Sx_angle = 315  # Sx main wind direction angle
nc_dem_varname = "DEM"  # name of the elevation variable in the DEM file
# -----------------------------------
# Observations
# -----------------------------------
# Note: Dates and obs files will be sorted internally. Ensure the alphabetical
# order of the obs files fits the list of dates (dates_obs).
# Note 2: dates_obs can be generated programmatically, to avoid writing many
# dates by hand in very long runs. Example generating a list of daily strings:
# =============================================================================
# import datetime as dt
#
# start = dt.datetime.strptime(date_ini, "%Y-%m-%d %H:%M")
# end = dt.datetime.strptime(date_end, "%Y-%m-%d %H:%M")
# dates_obs = [(start + dt.timedelta(days=x) + dt.timedelta(hours=12)).
#              strftime('%Y-%m-%d %H:%M') for x in range(0, (end-start).days+1)]
#
# =============================================================================
# Note 3: A single-column .csv without headers, with the dates in the
# format "%Y-%m-%d %H:%M", is also accepted, substituting:
# dates_obs = '/path/to/file/dates.csv'
dates_obs = ["2019-02-21 12:00",
             "2019-03-26 12:00",
             "2019-05-05 12:00",
             "2019-05-09 12:00",
             "2019-05-23 12:00",
             "2019-05-30 12:00",
             "2020-01-14 12:00",
             "2020-02-03 12:00",
             "2020-02-24 12:00",
             "2020-03-11 12:00",
             "2020-04-29 12:00",
             "2020-05-03 12:00",
             "2020-05-12 12:00",
             "2020-05-19 12:00",
             "2020-05-26 12:00",
             "2020-06-02 12:00",
             "2020-06-10 12:00",
             "2020-06-21 12:00"]
obs_var_names = ["HS"]  # variable name(s) inside the observation files
obs_error_var_names = ['sdError']  # in case of r_cov = 'dynamic_error'
lat_obs_var_name = "northing"  # y/latitude dimension name in the obs files
lon_obs_var_name = "easting"  # x/longitude dimension name in the obs files
# -----------------------------------
# Forcing and some parameters
# -----------------------------------
# Note: not all parameters/forcing variables are needed for all models.
# Note II: param_var_names is optional. It can be used to change some of the
# model parameters, including vegetation ones. If they are not included as
# part of the forcing, those defined in constants.py will be used.
# These parameters can be included within the assimilation.
# Press_var_name can be "Press_var_name": "from_DEM". With this option, a
# stationary pressure value is estimated from the DEM (if provided)
# assuming standard atmosphere.
# NOTE(review): "frocing" is a historical typo for "forcing"; keep the name
# as-is, it is presumably referenced elsewhere by this exact name — confirm
# before ever renaming it.
frocing_var_names = {"SW_var_name": "SW",
                     "LW_var_name": "LW",
                     "Precip_var_name": "PRECC",
                     "Press_var_name": "PRESS",
                     "RH_var_name": "RH",
                     "Temp_var_name": "TEMP",
                     "Wind_var_name": "UA"}
forcing_dim_names = {"lat_forz_var_name": "northing",
                     "lon_forz_var_name": "easting",
                     "time_forz_var_name": "time"}
# NOTE(review): " XLAT" carries a leading space — verify this matches the
# actual variable name in the forcing files before "fixing" it.
param_var_names = {"RealLat_var_name": " XLAT",
                   "vegh_var_name": "vegh",
                   "VAI_var_name": "VAI",
                   "hbas_var_name": "hbas"}
# -----------------------------------
# FSM configuration (Namelist)
# -----------------------------------
# Number and thickness of snow layers (presumably metres — confirm against
# the FSM2 namelist documentation).
Dzsnow = [0.1, 0.2, 0.4]
# -----------------------------------
# FSM configuration (Compilation)
# -----------------------------------
# Optimization flag. Choose from -O (no optimization), -O1, -O2, -O3 or -Ofast.
# Note: -O3 is recommended. -Ofast may be slightly faster (~10%), but its
# numerical accuracy is lower.
# Note II: Can be used to pass any other flag(s) to gfortran if you know
# what you are doing.
OPTIMIZATION = '-O3'
# Parameterization switches, see the FSM2 documentation.
ALBEDO = 2  # snow albedo : 1, 2
CANINT = 2  # canopy interception of snow : 1, 2
CANMOD = 2  # forest canopy layers : 1, 2
CANRAD = 2  # canopy radiative properties : 1, 2
CANUNL = 2  # unloading of canopy : 1, 2
CONDCT = 1  # snow thermal conductivity : 0, 1
DENSTY = 2  # snow density : 0, 1, 2
EXCHNG = 1  # turbulent exchange : 0, 1
HYDROL = 2  # snow hydraulics : 0, 1, 2
SGRAIN = 2  # snow grain growth : 1, 2
SNFRAC = 3  # snow cover fraction : 1, 2, 3