Commit

added reader for noresm runs

bohlinger committed Jan 2, 2024
1 parent b1a58eb commit 3978ffe
Showing 6 changed files with 110 additions and 9 deletions.
11 changes: 7 additions & 4 deletions wavy/collocation_module.py
@@ -75,6 +75,7 @@ def find_valid_fc_dates_for_model_and_leadtime(fc_dates, model, leadtime):
of complete hours
'''
fc_dates_new = hour_rounder_pd(fc_dates)
+fc_dates_new = np.unique(fc_dates_new)
#if (leadtime is None or leadtime == 'best'):
# pass
#else:
@@ -149,7 +150,7 @@ def __init__(self, oco=None, model=None, poi=None,
self.distlim = kwargs.get('distlim', 6)
print(" ")
print(" ## Collocate ... ")
-# for i in range(1):
+#for i in range(1):
try:
t0 = time.time()
results_dict = self.collocate(**kwargs)
@@ -329,10 +330,11 @@ def _collocate_track(self, **kwargs):
'collocation_idx_y': [],
}
for i in tqdm(range(len(fc_date))):
+print(fc_date[i])
#for i in range(len(fc_date)):
try:
-#for j in range(1):
-with NoStdStreams():
+for j in range(1):
+#with NoStdStreams():
# filter needed obs within time period
target_date = [parse_date(str(fc_date[i]))]
idx = collocate_times(ndt_datetime,
@@ -376,7 +378,8 @@ def _collocate_track(self, **kwargs):
results_dict_tmp['collocation_idx_x'])
results_dict['collocation_idx_y'].append(
results_dict_tmp['collocation_idx_y'])
-else: pass
+else:
+    pass
if 'results_dict_tmp' in locals():
del results_dict_tmp
except (ValueError, FileNotFoundError, OSError) as e:
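The functional change in find_valid_fc_dates_for_model_and_leadtime is the added np.unique call: once observation times are rounded to full hours, several of them can collapse onto the same forecast date, and deduplicating them avoids fetching and collocating the same model field more than once. A minimal stand-alone sketch of the effect (pandas rounding is used here as a stand-in for wavy's hour_rounder_pd):

import numpy as np
import pandas as pd

# two observation times within the same hour round to the same timestamp
obs_times = pd.to_datetime(["2011-12-02 00:10", "2011-12-02 00:20",
                            "2011-12-02 03:05"])
rounded = obs_times.round("h")    # stand-in for hour_rounder_pd
unique = np.unique(rounded)       # drops the duplicate 00:00 entry
print(len(rounded), len(unique))  # 3 2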
23 changes: 23 additions & 0 deletions wavy/config/model_cfg.yaml.default
@@ -330,3 +330,26 @@ cmems_MY_L4:
remoteHostName: my.cmems-du.eu
# optional, to ease grouping
tags:

+# interpolated grid 1 degree by 1 degree
+noresmf19:
+    name:
+    download:
+    vardef:
+        Hs: HS
+        time: time
+        lons: longitude
+        lats: latitude
+    coords:
+    wavy_input:
+        src_tmplt: "/lustre/storeB/users/anac/TMP/wavy/NorESMdata/2011/12/"
+        fl_tmplt: "f19_tn14_wtn14n_DecemberBETAmax1.43_1X1deg_every3hours_2011.nc"
+    reader: read_noresm_making_waves
+    collector:
+    misc:
+        init_times: [0]
+        init_step: 24
+        grid_date: 2011-12-02 00:00:00
+        date_incr_unit: h
+        date_incr: 3
+        proj4: "+proj=longlat +a=6367470 +e=0 +no_defs"
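Assuming init_times, init_step and date_incr keep their usual wavy meaning, the block above describes a run initialized once per day at 00 UTC with 3-hourly fields on a regular 1x1 degree grid. A quick sketch of the implied time axis, starting from the grid_date in the config:

import pandas as pd

init = pd.Timestamp("2011-12-02 00:00")             # grid_date above
steps = pd.date_range(init, periods=8, freq="3h")   # date_incr: 3, unit: h
print(steps[:3])
# DatetimeIndex(['2011-12-02 00:00:00', '2011-12-02 03:00:00',
#                '2011-12-02 06:00:00'], ...)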
6 changes: 3 additions & 3 deletions wavy/config/satellite_cfg.yaml.default
@@ -82,7 +82,7 @@ cmems_L3_MY:
ftp: # downloading method
src_tmplt: "/Core/WAVE_GLO_PHY_SWH_L3_MY_014_005\
/cci_obs-wave_glo_phy-swh_my_name-l3_PT1S/%Y/%m/"
-trgt_tmplt: /home/patrikb/tmp_altimeter/L3_MY/name/%Y/%m
+trgt_tmplt: /home/patrikb/tmp_altimeter/L3/MY/name/%Y/%m
path_date_incr_unit: 'm'
path_date_incr: 1
search_str: '%Y%m%dT'
@@ -91,7 +91,7 @@ cmems_L3_MY:
# optional: where to read from
# can be defined directly when calling wavy
wavy_input:
-src_tmplt: /home/patrikb/tmp_altimeter/L3_MY/name/%Y/%m
+src_tmplt: /home/patrikb/tmp_altimeter/L3/MY/name/%Y/%m
fl_tmplt: "varalias_name_region_\
%Y%m%d%H%M%S_%Y%m%d%H%M%S.nc"
strsub: ['name']
@@ -100,7 +100,7 @@ cmems_L3_MY:
# optional: where to write to
# can be defined directly when calling wavy
wavy_output:
-trgt_tmplt: /home/patrikb/tmp_altimeter/L3_MY/name/%Y/%m
+trgt_tmplt: /home/patrikb/tmp_altimeter/L3/MY/name/%Y/%m
fl_tmplt: "varalias_name_region_\
%Y%m%d%H%M%S_%Y%m%d%H%M%S.nc"
strsub: ['varalias','name','region']
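The three path changes only move the multi-year L3 products from a flat L3_MY directory into an L3/MY hierarchy; the strftime placeholders and the 'name' substitution stay the same. A hypothetical illustration of how such a template expands for one satellite and month (the substitution step is simplified here and the satellite name is made up):

from datetime import datetime

src_tmplt = "/home/patrikb/tmp_altimeter/L3/MY/name/%Y/%m"
sat, month = "s3a", datetime(2023, 5, 1)              # hypothetical inputs
path = month.strftime(src_tmplt).replace("name", sat)
print(path)   # /home/patrikb/tmp_altimeter/L3/MY/s3a/2023/05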
32 changes: 32 additions & 0 deletions wavy/grid_readers.py
@@ -158,6 +158,38 @@ def build_xr_ds_grid(var_means, lon_grid, lat_grid, t, **kwargs):
)
return ds

+def build_xr_ds_grid_2D(var_means, lon_grid, lat_grid, t, **kwargs):
+    print(" building xarray dataset from grid")
+    varstr = kwargs.get('varstr')
+    lon_grid_coord = kwargs.get('lon_grid_coord')
+    lat_grid_coord = kwargs.get('lat_grid_coord')
+
+    ds = xr.Dataset({
+            varstr: xr.DataArray(
+                    data=var_means,
+                    dims=['time', 'latitude', 'longitude'],
+                    coords={'latitude': lat_grid_coord,
+                            'longitude': lon_grid_coord,
+                            'time': t},
+                    attrs=variable_def[varstr],
+                    ),
+            'lons': xr.DataArray(
+                    data=lon_grid,
+                    dims=['latitude', 'longitude'],
+                    coords={'longitude': lon_grid_coord},
+                    attrs=variable_def['lons'],
+                    ),
+            'lats': xr.DataArray(
+                    data=lat_grid,
+                    dims=['latitude', 'longitude'],
+                    coords={'latitude': lat_grid_coord},
+                    attrs=variable_def['lats'],
+                    ),
+            },
+        attrs={'title': 'wavy dataset'}
+        )
+    return ds
+

def grid_point_cloud_ds(values, lons, lats, t, **kwargs):
print(' gridding point cloud')
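The new build_xr_ds_grid_2D packs a (time, latitude, longitude) field plus the matching 2-D longitude/latitude arrays into one xarray.Dataset, keyed by 1-D coordinate vectors. A rough usage sketch with made-up shapes (the variable_def attribute entries for 'Hs', 'lons' and 'lats' come from wavy's variable definitions and must be available for this to run):

import numpy as np
import pandas as pd

lons = np.arange(0., 360., 1.)                 # 1-degree grid as in noresmf19
lats = np.arange(-89.5, 90.5, 1.)
Mlons, Mlats = np.meshgrid(lons, lats)
t = pd.to_datetime(["2011-12-02 00:00"]).values
hs = np.random.rand(1, len(lats), len(lons))   # one significant wave height field

ds = build_xr_ds_grid_2D(hs, Mlons, Mlats, t,
                         lon_grid_coord=lons,
                         lat_grid_coord=lats,
                         varstr='Hs')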
45 changes: 44 additions & 1 deletion wavy/model_readers.py
@@ -17,7 +17,7 @@
from wavy.wconfig import load_or_default
from wavy.utils import build_xr_ds
from wavy.grid_readers import get_gridded_dataset
-from wavy.grid_readers import build_xr_ds_grid
+from wavy.grid_readers import build_xr_ds_grid, build_xr_ds_grid_2D
from wavy.ncmod import ncdumpMeta, get_filevarname
from wavy.ncmod import read_netcdfs_with_credentials_aggregated
from wavy.utils import parse_date
@@ -63,6 +63,49 @@ def read_ww3_4km(**kwargs):

return combined

+def read_noresm_making_waves(**kwargs):
+    pathlst = kwargs.get('pathlst')
+    nID = kwargs.get('nID')
+    fc_dates = kwargs.get('fc_dates')
+    varname = kwargs.get('varname')
+    varalias = kwargs.get('varalias')
+    timename = model_dict[nID]['vardef']['time']
+    lonsname = model_dict[nID]['vardef']['lons']
+    latsname = model_dict[nID]['vardef']['lats']
+
+    ds_lst = []
+    for i in range(len(pathlst)):
+        ds = xr.open_dataset(pathlst[i])
+        ds2 = ds.convert_calendar("all_leap")
+        datetimeindex = ds2.indexes[timename].to_datetimeindex()
+        ds[timename] = datetimeindex
+        del ds2
+        var = ds[varname].values
+        lons = ds[lonsname].values
+        lats = ds[latsname].values
+        Mlons, Mlats = np.meshgrid(lons, lats)
+        timedt = ds[timename].values
+        tlst = [parse_date(str(d)) for d in timedt]
+        idx = collocate_times(tlst, target_t=[fc_dates[i]])
+        time = np.array([np.array(tlst)[idx[0]]]).reshape((1,))
+        varin = var[idx[0], :, :].reshape((1, len(lats), len(lons)))
+        ds = build_xr_ds_grid_2D(varin, Mlons, Mlats, time,
+                                 lon_grid_coord=lons,
+                                 lat_grid_coord=lats,
+                                 varstr=varalias)
+        ds_lst.append(ds)
+
+    print(" Concatenate ...")
+    combined = xr.concat(ds_lst, timename,
+                         coords='minimal',
+                         data_vars='minimal',
+                         compat='override',
+                         combine_attrs='override',
+                         join='override')
+    print(" ... done concatenating")
+
+    return combined
+
def read_remote_ncfiles_aggregated_credentials(**kwargs):
"""
Wrapping function to read remote opendap satellite netcdf files
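The core of read_noresm_making_waves is the time-axis handling: convert_calendar plus to_datetimeindex() turn the cftime-based NorESM time coordinate into plain pandas timestamps before collocate_times matches the requested forecast dates. A self-contained sketch of just that step on a synthetic dataset (the 'noleap' source calendar and the variable name are assumptions for illustration):

import numpy as np
import xarray as xr

# synthetic 3-hourly time axis on a non-standard ('noleap') calendar
time = xr.cftime_range("2011-12-01", periods=8, freq="3H", calendar="noleap")
ds = xr.Dataset({"HS": ("time", np.random.rand(8))}, coords={"time": time})

ds2 = ds.convert_calendar("all_leap")              # as in the reader above
dtindex = ds2.indexes["time"].to_datetimeindex()   # plain pandas timestamps
ds["time"] = dtindex
print(ds.time.dtype)                               # datetime64[ns]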
2 changes: 1 addition & 1 deletion wavy/quicklookmod.py
@@ -157,7 +157,7 @@ def quicklook(self, a=False, projection=None, **kwargs):
s=.2, c='b', marker='.',
edgecolor='face',
transform=ccrs.PlateCarree())
-if len(self.vars[self.varalias].shape) > 1:
+if len(plot_var.shape) > 1:
sc = ax.contourf(plot_lons.squeeze(),
plot_lats.squeeze(),
plot_var.squeeze(),
