Skip to content

Commit

Permalink
fix NORKYST800 old and new model set up
Browse files Browse the repository at this point in the history
  • Loading branch information
KonstantinChri committed Jul 13, 2024
1 parent 9a33c9c commit 459c026
Show file tree
Hide file tree
Showing 5 changed files with 48 additions and 19 deletions.
2 changes: 1 addition & 1 deletion docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ Several options for **product** are available. Please check the data catalog for
* For coastal wave NORA3 data: product='NORAC_wave'

Dataset: https://thredds.met.no/thredds/catalog/norac_wave/field/catalog.html
* For ocean data (temperature, currents, salinity etc) Norkyst800 data: product='NORKYST800'
* For ocean data (sea level, temperature, currents, and salinity over depth) Norkyst800 data (available from 2016-09-14 to today): product='NORKYST800'

Dataset: https://thredds.met.no/thredds/fou-hi/norkyst800v2.html

Expand Down
2 changes: 1 addition & 1 deletion examples/example_import_NORA3.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@
#df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2000-01-01', end_time='2001-03-31' , product='NORA3_stormsurge')
#df_ts = ts.TimeSeries(lon=1.320, lat=53.324,start_time='2021-01-01', end_time='2021-03-31' , product='NORA3_atm_sub')
#df_ts = ts.TimeSeries(lon=3.7, lat=61.8, start_time='2023-01-01', end_time='2023-02-01', product='NORA3_atm3hr_sub')
#df_ts = ts.TimeSeries(lon=3.73, lat=64.60,start_time='2019-02-26', end_time='2019-02-27' , product='NORKYST800')


# Import data from thredds.met.no and save it as csv
df_ts.import_data(save_csv=True)

#print(df_ts.data)
# Load data from a local csv-file
#df_ts.load_data(local_file=df_ts.datafile)

Expand Down
46 changes: 36 additions & 10 deletions metocean_api/ts/aux_funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,9 +77,9 @@ def get_url_info(product, date):
x_coor_str = 'eta_rho'
y_coor_str = 'xi_rho'
elif product == 'NORKYST800':
if date<pd.Timestamp('2017-02-20 00:00:00'):
if date>=pd.Timestamp('2016-09-14 00:00:00') and date<=pd.Timestamp('2019-02-26 00:00:00'):
infile = 'https://thredds.met.no/thredds/dodsC/sea/norkyst800mv0_1h/NorKyst-800m_ZDEPTHS_his.an.'+date.strftime('%Y%m%d%H')+'.nc'
else:
elif date>pd.Timestamp('2019-02-26 00:00:00'):
infile = 'https://thredds.met.no/thredds/dodsC/fou-hi/norkyst800m-1h/NorKyst-800m_ZDEPTHS_his.an.'+date.strftime('%Y%m%d%H')+'.nc'
x_coor_str = 'X'
y_coor_str = 'Y'
Expand Down Expand Up @@ -197,19 +197,28 @@ def create_dataframe(product,ds, lon_near, lat_near,outfile,variable, start_time
ds = ds.drop_vars('longitude')
elif product=='NORKYST800':
ds0 = ds
breakpoint()
if 'depth' in ds['zeta'].dims:
ds['zeta'] = ds.zeta.sel(depth=0)

var_list = []
for var_name in variable:
# Check if 'depth' is not in the dimensions of the variable
if 'depth' in ds[var_name].dims:
# Append variable name to the list
var_list.append(var_name)

for i in range(len(height)):
if 'depth' in ds0[variable].dims: # Check if 'depth' is a dimension of the variable
variable_height = [k + '_'+str(height[i])+'m' for k in variable]
ds[variable_height] = ds0[variable].sel(depth=height[i])
ds = ds.drop_vars(variable)
variable_height = [k + '_'+str(height[i])+'m' for k in var_list]
ds[variable_height] = ds0[var_list].sel(depth=height[i],method='nearest')
ds = ds.drop_vars(var_list)
ds = ds.drop_vars('depth')
ds = ds.drop_vars('lat')
ds = ds.drop_vars('lon')
ds = ds.drop_vars('X')
ds = ds.drop_vars('Y')
ds = ds.drop_vars('projection_stere')
ds = ds.squeeze(drop=True)

else:
drop_var = drop_variables(product=product)
ds = ds.drop_vars(drop_var)
Expand All @@ -218,7 +227,7 @@ def create_dataframe(product,ds, lon_near, lat_near,outfile,variable, start_time
df = ds.to_dataframe()
df = df.astype(float).round(2)
df.index = pd.DatetimeIndex(data=ds.time.values)

top_header = '#'+product + ';LONGITUDE:'+str(lon_near.round(4))+';LATITUDE:' + str(lat_near.round(4))
list_vars = [i for i in ds.data_vars]
vars_info = ['#Variable_name;standard_name;long_name;units']
Expand Down Expand Up @@ -272,5 +281,22 @@ def read_commented_lines(datafile):
commented_lines = np.append(commented_lines,line)
return commented_lines



def remove_dimensions_from_netcdf(input_file, dimensions_to_remove=('X', 'Y')):
    """
    Remove specified singleton dimensions from a NetCDF file, overwriting it in place.

    Parameters:
        input_file (str): Path to the NetCDF file; it is rewritten in place.
        dimensions_to_remove (sequence of str): Dimensions to squeeze out of the
            dataset. Each one is removed only if it is present and has length 1.
    """
    # Load the dataset fully into memory and close the file handle before
    # rewriting the same path: xarray opens NetCDF files lazily, and writing
    # to a file that is still open as the read source can fail or corrupt it.
    with xr.open_dataset(input_file) as ds:
        ds = ds.load()

    # Squeeze out each requested dimension if it is a singleton dimension
    for dim in dimensions_to_remove:
        if dim in ds.dims and ds.sizes[dim] == 1:
            ds = ds.squeeze(dim=dim)

    # Overwrite the original file with the squeezed dataset
    ds.to_netcdf(input_file)
11 changes: 6 additions & 5 deletions metocean_api/ts/read_metno.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@
from nco import Nco
from pathlib import Path


from .aux_funcs import get_date_list, get_url_info, get_near_coord, create_dataframe, check_datafile_exists, read_commented_lines
from .aux_funcs import *

def NORAC_ts(self, save_csv = False, save_nc = False):
"""
Expand Down Expand Up @@ -218,17 +217,19 @@ def NORKYST800_ts(self, save_csv = False, save_nc = False):
# extract point and create temp files
for i in range(len(date_list)):
x_coor_str, y_coor_str, infile = get_url_info(product=self.product, date=date_list[i])
if i==0:

if i==0 or date_list[i].strftime('%Y-%m-%d %H:%M:%S') == '2019-02-27 00:00:00': # '2019-02-27' change to new model set up
x_coor, y_coor, lon_near, lat_near = get_near_coord(infile=infile, lon=self.lon, lat=self.lat, product=self.product)

opt = ['-O -v '+",".join(self.variable)+' -d '+x_coor_str+','+str(x_coor.values[0])+' -d '+y_coor_str+','+str(y_coor.values[0])]

apply_nco(infile,tempfile[i],opt)
remove_dimensions_from_netcdf(tempfile[i], dimensions_to_remove=['X', 'Y'])

check_datafile_exists(self.datafile)
#merge temp files
ds = xr.open_mfdataset(paths=tempfile[:])
existing_files = [f for f in tempfile if os.path.exists(f)]
ds = xr.open_mfdataset(paths=existing_files[:])
#Save in csv format
df = create_dataframe(product=self.product,ds=ds, lon_near=lon_near, lat_near=lat_near, outfile=self.datafile, variable=self.variable[:-2], start_time = self.start_time, end_time = self.end_time, save_csv=save_csv,save_nc = save_nc, height=self.height)
ds.close()
Expand Down
6 changes: 4 additions & 2 deletions tests/test_extract_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,12 @@ def test_extract_OBS():

def test_NORKYST800():
    """Extract a one-day NORKYST800 point time series and verify the data shape."""
    # Define TimeSeries-object for a point inside the NORKYST800 domain
    df_ts = ts.TimeSeries(lon=3.73, lat=64.60, start_time='2020-09-14', end_time='2020-09-14', product='NORKYST800')
    # Import data from thredds.met.no
    df_ts.import_data(save_csv=False, save_nc=False)
    # One day of hourly data: 24 time steps, 65 columns (variables over depth levels)
    assert df_ts.data.shape == (24, 65), f"Shape is not correct: {df_ts.data.shape}"


0 comments on commit 459c026

Please sign in to comment.