
Commit

Remove files
Cayo Lopes committed Jul 11, 2020
1 parent bdd0ca1 commit 84ccac2
Showing 1 changed file with 3 additions and 162 deletions.
165 changes: 3 additions & 162 deletions ecodatatk/Spatial.py
@@ -425,6 +425,8 @@ def ptsTime2Raster(self, out_name, var_list = None, outputBounds = None, outCRS
# Initialize a NetCDFSpatial instance with the file path:
file = r'D:\\OneDrive\\Arquivos OLD PC\\Cafofo\\Balanco de Calor\\ERA-Interim\\Mangueira2000_01_01to2018_01_01.nc'
file = r'D:\OneDrive\Arquivos OLD PC\Cafofo\interim_1979-01-01to2019-09-31_lakes.nc'

file = r'D:\OneDrive\Arquivos OLD PC\Cafofo\Gabriela_interim_2003-01-01to2006-12-31.nc'
dataset = NetCDFSpatial(file)

# We can inspect the data and the variables read from the file:
@@ -487,165 +489,4 @@ def ptsTime2Raster(self, out_name, var_list = None, outputBounds = None, outCRS
dataset.ptsTime2Raster('Examples/Example Files/raster-output/raster_base_name', var_list = ['sp','t2m'], outputBounds = None, outCRS = 'WGS84', mask_shp = mask_path, buffer_mask = 0)
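# mask_path used in the call above is assumed to point to a boundary shapefile
# used to clip the output rasters; a purely illustrative definition, not from
# the original file:
# mask_path = r'Examples/Example Files/shapefiles/lake_boundary.shp'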


##########################################################


class RasterTemporalSeries():

    def __init__(self, datadir, temporal_standard = None, crs = {'init': 'epsg:4326'}):
        '''
        Class constructor: builds a table of per-raster statistics from the
        files in datadir and attaches geometry and CRS information.
        '''
        self._data = self._readFiles(datadir)

        ds = xr.open_dataset(datapath)
        self._data = ds.to_dataframe().reset_index()
        time_cols = self._data.select_dtypes(include = [np.datetime64]).columns
        if len(time_cols) > 0:
            self._data = self._data.set_index(time_cols[0])

        self._data['geometry'] = self._getgeom()
        self._crs = crs

        self._data = self._df2geodf(self._data['geometry'], crs = self._crs)
        self._variables = list(self._data.columns.drop(['geometry']))


    @property
    def data(self):
        return self._data


    # Read files: extract time, mean, median, std, max, min, path
    # Crop all rasters
    # Mean raster over periods (resample - temporal upscale) by sum, min or mean
    # Silhouette analysis (see the sketch below)
    # KMeans clustering (see the sketch below)
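    # A minimal sketch of the planned silhouette / KMeans step, assuming
    # scikit-learn is available and that self._data holds the statistics
    # columns built by _readFiles; the method name _clusterStatistics and the
    # feature columns chosen are illustrative, not part of the original class.
    def _clusterStatistics(self, k_range = range(2, 8)):
        from sklearn.cluster import KMeans
        from sklearn.metrics import silhouette_score

        # Cluster the per-raster statistics and keep the k with the best
        # silhouette score.
        features = self._data[['Mean', 'Median', 'STD', 'Max', 'Min']].values
        scores = {}
        for k in k_range:
            labels = KMeans(n_clusters = k, n_init = 10, random_state = 0).fit_predict(features)
            scores[k] = silhouette_score(features, labels)
        best_k = max(scores, key = scores.get)
        self._data['Cluster'] = KMeans(n_clusters = best_k, n_init = 10, random_state = 0).fit_predict(features)
        return scores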

    def _getstatistic(self, file):
        # Read the first band and compute summary statistics for the raster.
        with rasterio.open(file) as src:
            data = src.read(1)
        return [data.mean(), np.median(data), data.std(), data.max(), data.min(), file]

    def _readFiles(self, datadir):
        # Build one row of summary statistics per raster file in the directory.
        statistics = []
        for file in os.listdir(datadir):
            file = os.path.join(datadir, file)
            statistics.append(self._getstatistic(file))
        return pd.DataFrame(statistics, columns = ['Mean', 'Median', 'STD', 'Max', 'Min', 'FilePath'])


    # Earlier draft of _readFiles, kept commented out:
    #def _readFiles(self, datadir):
    #    means, medians, maxv, minv, stdv, filep = [], [], [], [], [], []
    #    for file in os.listdir(datadir):
    #        file = os.path.join(datadir, file)
    #        with rasterio.open(file) as src:
    #            data = src.read(1)
    #        means.append(data.mean())
    #        medians.append(np.median(data))
    #        maxv.append(data.max())
    #        minv.append(data.min())
    #        stdv.append(data.std())
    #        filep.append(file)
    #    return pd.DataFrame(zip(means, medians, stdv, maxv, minv, filep), columns = ['Mean', 'Median', 'STD', 'Max', 'Min', 'FilePath'])
    #

    ## Cropping relies on the BaseSpatial helpers _cropRst and _maskCheck (defined below)

    def cropAllRst(self, mask_shp, out_dir = None, remove = False, buffer_mask = 0):
        # Crop every raster listed in the statistics table and recompute the
        # statistics on the cropped files.
        statistics = []
        for raster in tqdm(self._data['FilePath']):
            out_tif = None
            if out_dir is not None:
                out_tif = os.path.join(out_dir, os.path.basename(raster).replace('.tif', '_crop.tif'))
            file = self._cropRst(raster, mask_shp, out_tif, remove = remove, buffer_mask = buffer_mask)
            statistics.append(self._getstatistic(file))
        self._data = pd.DataFrame(statistics, columns = ['Mean', 'Median', 'STD', 'Max', 'Min', 'FilePath'])
        return
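    # A minimal usage sketch (the directory and shapefile paths are
    # illustrative, not from the original code):
    #
    #     series = RasterTemporalSeries(r'D:\rasters\lake_temperature')
    #     series.cropAllRst('lake_boundary.shp', buffer_mask = 0.01)
    #     print(series.data[['Mean', 'STD']].describe())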






# Merge rasters: fragment of a processing script; it assumes rasters,
# directories, out_dir, base_name, dateGreg, hh, mm, out_meta, statistics and
# dic were defined earlier in the original script.
out_name_merge = []
for r in range(0, int(len(rasters) / 2)):

    out_name = os.path.join(out_dir, directories[r], base_name.split('_')[0] + '_' + '_'.join([s.zfill(2) for s in [str(dateGreg.year), str(dateGreg.day), str(dateGreg.month), str(hh), str(mm), directories[r]]]) + '.tif')
    out_name_merge.append(out_name)

    # Overlay raster r onto raster r + 4 wherever raster r has valid (non-NaN) cells.
    mergeRst = [rasterio.open(rasters[r]).read(1), rasterio.open(rasters[r + 4]).read(1)]

    rst1 = mergeRst[0]
    Pos1 = np.argwhere(~np.isnan(rst1))
    rows, cols = zip(*Pos1)

    outRst = mergeRst[1]
    outRst[rows, cols] = rst1[rows, cols]

    with rasterio.open(out_name, "w", **out_meta) as dest:
        dest.write(outRst, 1)

dic['mergeFiles'] = out_name_merge
statistics.append(dic)

def arraysMean(files):
    # Compute the mean value of each raster, ignoring NaN cells.
    # Based on https://gis.stackexchange.com/questions/244376/computing-mean-of-all-rasters-in-a-directory-using-python
    array_list = [read_file(x) for x in files]

    meanValues = []
    for array in tqdm(array_list):
        array = array[~np.isnan(array)]
        meanValues.append(np.mean(array))
    return np.array(meanValues)

def MeanArray(files):
    # Cell-wise mean across all rasters, ignoring NaN cells.
    array_list = np.array([read_file(x) for x in files])
    return np.nanmean(array_list, axis = 0)
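# read_file is called by arraysMean and MeanArray but is not defined in this
# fragment; a minimal sketch, assuming each file is a single-band GeoTIFF
# opened with rasterio (the float cast keeps NaN handling consistent):
def read_file(file):
    with rasterio.open(file) as src:
        return src.read(1).astype('float64')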



class BaseSpatial:

    def _maskCheck(self, mask_shape, buffer_mask):
        # Accept either a GeoDataFrame or a path to a shapefile and apply an
        # optional buffer to the first geometry.
        if not isinstance(mask_shape, gpd.GeoDataFrame):
            mask_shape = gpd.GeoDataFrame.from_file(mask_shape)
        mask_shape.geometry[0] = mask_shape.geometry[0].buffer(buffer_mask)
        return mask_shape

    def _cropRst(self, raster, mask_shp, out_tif = None, remove = False, buffer_mask = 0):

        mask_shp = self._maskCheck(mask_shp, buffer_mask)

        if out_tif is None:
            out_tif = raster.replace('.tif', '_crop.tif')

        # Clip the raster to the mask geometry and write the result to disk.
        with rxy.open_rasterio(raster, masked = True, chunks = True) as ds:
            clipped = ds.rio.clip(mask_shp.geometry.apply(mapping), mask_shp.crs, drop = False, invert = False)
            clipped.rio.to_raster(out_tif)

        if remove:
            os.remove(raster)
        return out_tif
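    # A minimal usage sketch (file names are illustrative, not from the
    # original code):
    #
    #     base = BaseSpatial()
    #     cropped = base._cropRst('lake_temperature.tif', 'lake_boundary.shp', buffer_mask = 0.01)
    #     print(cropped)  # 'lake_temperature_crop.tif'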


######### Mean of rasters ############

os.chdir(r"D:\OneDrive\Mestrado\Plano Dissertacao\Temperatura")
df = pd.read_pickle('VariblesTemp02.pkl')

out_dir = r"D:\OneDrive\Mestrado\Plano Dissertacao\Temperatura\Modis\Lagoas"

# Build a DateTime index from the observation date and time columns.
df['DateTime'] = pd.to_datetime(df['Date'].astype(str) + ' ' + df['TimeObs_Mirim'].astype(str))
df = df.set_index('DateTime')
##########################################################
