diff --git a/.env b/.env new file mode 100755 index 0000000..eb52fc0 --- /dev/null +++ b/.env @@ -0,0 +1,73 @@ +# Text file containing email addresses to send logs to +MAIL_TO_ADDRS_FILE=mail_to.txt + +DKR_USER=nappl_fswms +DKR_GROUP=nappl + +DOCKER=/usr/bin/docker + +DKR_IMAGE_TAG=gdal:2.4.2 + +DKR_BUILD_DIR_HOST=./gdal_docker + +# Name of the running container +DKR_CONTAINER_NAME=fw2_build + +# Default current directory for a running container +DKR_BUILD_DIR=/build + +# Path to precursor archive (symlink) +PRECURSORS_DIR=./precursors + +# Path to graph data archive (symlink) +ALL_YEAR_MAXES_DIR=./graph_data + +# Path template for log files +LOG_PATH_TEMPLATE=./log/FW2_{}.txt +# Time format to insert into the log tempate above +LOG_FILE_TIMESTAMP_FORMAT=%Y%m%d_%I_%M_%S%p + +# Email address to send "from" +EMAIL_FROM_ADDRESS=nemacmailer@gmail.com + +# Meta product types: normal and muted (square root of normal) +FW2_ARCHIVE_DIR_NORMAL=ForWarn2 +FW2_ARCHIVE_DIR_MUTED=ForWarn2_Sqrt + +# Strings to search for in a filename to determine if a file +# output by the dodate script is either "normal" or "muted" +# (comma-separated for multiple values) +FW2_NORMAL_DODATE_FILENAME_CHECK=ForWarnLAEA,ALCLAEA +FW2_MUTED_DOATE_FILENAME_CHECK=ForWarn2LAEA,ALC2LAEA + +# FW2 archive directories for all product types +FW2_PRODUCT_DIR_1YR=X_LC_1YEAR +FW2_PRODUCT_DIR_3YR=X_LC_3YEAR +FW2_PRODUCT_DIR_5YR=X_LC_5YEAR +FW2_PRODUCT_DIR_ALC=X_LC_ALC_1YR +FW2_PRODUCT_DIR_MEDIAN=X_LC_MEDIAN_ALL_YR +FW2_PRODUCT_DIR_10YR=X_LC_90_10_YR +FW2_PRODUCT_DIR_PCTPROGRESS=X_LC_PCTPROGRESS + +# Temporary directories used by dodate +FW2_TMP_DIR_1YR=1-yr-max +FW2_TMP_DIR_3YR=3-yr-max +FW2_TMP_DIR_5YR=5-yr-90 +FW2_TMP_DIR_ALC=ALC +FW2_TMP_DIR_MEDIAN=median-all-yr-max +FW2_TMP_DIR_PCTPROGRESS=pctprogress +FW2_TMP_DIR_10YR=10-yr-90 + +# maxMODIS.YYYY.DOY.[std|nrt].[img] +MAX_8DAY_PRECURSOR_FILENAME_TEMPLATE=maxMODIS.{}.{}.{}.{} +MAX_8DAY_PRECURSOR_FILENAME_EXT=img + +# maxMODIS.YYYY.std.[tif] 
+ALL_YEAR_MAXES_PRECURSOR_FILENAME_TEMPLATE=maxMODIS.{}.std.{} +ALL_YEAR_MAXES_PRECURSOR_FILE_EXT=tif + +# Path to the fw2 build bash script +DODATE_PATH=./dodate + +# Earliest year for which NDVI data is available on GIMMS +MODIS_DATA_YEAR_START=2003 diff --git a/.gitignore b/.gitignore old mode 100644 new mode 100755 index c0e0a3d..54f014b --- a/.gitignore +++ b/.gitignore @@ -1,15 +1,14 @@ -mail_to_addrs.txt -RUNNING -*.nc +mail_to.txt *.img -todo_product_days 1-yr-max/* 3-yr-max/* 5-yr-max/* ALC/* median-all-yr-max/* pctprogress/* -logs/* +log/* +*.gz +*.vrt # vim swap files *.swp @@ -119,3 +118,7 @@ venv.bak/ # mypy .mypy_cache/ +ForWarn2 +ForWarn2_Sqrt +precursors +graph_data diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..f7d7c27 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "raster-helpers"] + path = raster-helpers + url = git@github.com:nemac/raster-helpers diff --git a/Config.py b/Config.py deleted file mode 100644 index ec178e2..0000000 --- a/Config.py +++ /dev/null @@ -1,34 +0,0 @@ -import os.path - -# Dev -#PRODUCTS_BASE_PATH = '/fsdata4/forwarn2_products/forwarn2_build_dev' -#MAIN_PATH = '/fsdata4/forwarn2_products/forwarn2_build_dev' - -# Prod -PRODUCTS_BASE_PATH = '/fsdata4/forwarn2_products' -MAIN_PATH = '/fsdata4/forwarn2_products/forwarn2_build_prod' - -DODATE_PATH = os.path.join(MAIN_PATH, 'dodate') - -TODO_DAYS_PATH = os.path.join(MAIN_PATH, 'todo_product_days') - -ALL_DAYS_PATH = os.path.join(MAIN_PATH, 'all_product_days') - -# keys are where dodate dumps them -# values are product directories in ForWarn2, ForWarn2_Sqrt -# (this is where the products are moved to) -PRODUCT_DIRS = { - '1-yr-max' : 'X_LC_1YEAR', - '10-yr-90' : 'X_LC_90_10_YR', - '3-yr-max' : 'X_LC_3YEAR', - '5-yr-90' : 'X_LC_5YEAR', - 'ALC' : 'X_LC_ALC_1YR', - 'median-all-yr-max' : 'X_LC_MEDIAN_ALL_YR', - 'pctprogress' : 'X_LC_PCTPROGRESS' -} - -SOURCE_DIRS = [ 'ForWarn2', 'ForWarn2_Sqrt' ] - -MAIL_TO_ADDRS_FILE = 'mail_to_addrs.txt' - 
-EMAIL_TEMPLATE_FILE = 'mime_email_template.txt' diff --git a/README.md b/README.md old mode 100644 new mode 100755 index 78b24d0..0496e3d --- a/README.md +++ b/README.md @@ -1,12 +1,21 @@ -# forwarn2_build -ForWarn 2 product build scripts +## ForWarn 2 Production System -Note: the directories `ForWarn2` and `ForWarn2_Sqrt` mirror the directory structure of the final resting place of generated products. They are used for testing purposes only! Do not put products here for production sites! +Requirements: + +- Docker Setup: -- Make a text file called `todo_product_days`. This file contains a list of julian days (see `all_product_days`) that still need products. An automated run of `make_products` with no `-d` argument will automatically write a new `todo_product_days` file and remove days that were completed successfully. +- Setup the precursor archive structure (make a script for this) +- Build gdal docker container +- `mail_to_addrs.txt` +- Setup SMTP server + - Abstract config for non-localhost case + + -- Make another text file called `mail_to_addrs.txt`. This file contains a list of email addresses that the system will send logs to. (Use the `--no-email` flag to suppress this function. 
+Run: +- Cron job +- Single date diff --git a/all_product_days b/all_product_days deleted file mode 100644 index 2b98b49..0000000 --- a/all_product_days +++ /dev/null @@ -1,46 +0,0 @@ -001 -009 -017 -025 -033 -041 -049 -057 -065 -073 -081 -089 -097 -105 -113 -121 -129 -137 -145 -153 -161 -169 -177 -185 -193 -201 -209 -217 -225 -233 -241 -249 -257 -265 -273 -281 -289 -297 -305 -313 -321 -329 -337 -345 -353 -361 \ No newline at end of file diff --git a/bulk_maxes.py b/bulk_maxes.py new file mode 100644 index 0000000..56547c1 --- /dev/null +++ b/bulk_maxes.py @@ -0,0 +1,171 @@ + + +import rasterio as rio +import xml.etree.ElementTree as ET + +from util import * +from precursor_archive import PrecursorArchive + + +class YearMaxesArchive: + + _file_tpl = 'maxMODIS.{}.std.{}' + + _root_dir = './graph_data' + + def __init__(self, precursors=None, root_dir=None, dryrun=False): + load_env() + self._root_dir = root_dir or self._root_dir + self.precursors = precursors or PrecursorArchive() + + + def update(self, dryrun=False, update_precursors=False): + all_updated = [] if not update_precursors else self.precursors.update() + std_updated = [ d for d in all_updated if d[-1] == 'std' ] + years_updated = sorted(set([ d[0] for d in std_updated ])) + years_missing = self._get_missing_years() + todo = years_updated + years_missing + for year in todo: + self.build_tif(year, dryrun=dryrun) + + + def build_tif(self, year, dryrun=False): + '''Build a new 46-band tif where each band represents an 8-day NDVI maximum.''' + vrt_filename = self._build_year_vrt(year, dryrun) + tif_filename = 'maxMODIS.{}.std.tif'.format(year) + new_tif_path_tmp = os.path.join(self._root_dir, '{}.tmp'.format(tif_filename)) + self._gdal_translate_vrt(vrt_filename, new_tif_path_tmp, dryrun=dryrun) + try: + os.remove(os.path.join(self._root_dir, tif_filename)) + except: + pass + os.rename(new_tif_path_tmp, os.path.join(self._root_dir, tif_filename)) + os.remove(vrt_filename) + + + def _get_missing_years(self): + 
'''Returns a list of years (strings) with missing all-year maxes tifs''' + tpl = ALL_YEAR_MAXES_PRECURSOR_FILENAME_TEMPLATE + ext = ALL_YEAR_MAXES_PRECURSOR_FILE_EXT + all_years = get_all_modis_data_years() + return list(filter(lambda y: not os.path.exists(os.path.join(self._root_dir, tpl.format(y, ext))), all_years)) + + + def _gdal_translate_vrt(self, vrt_path, tif_path, dryrun=False): + '''Use gdal_translate to convert a VRT to a GeoTIFF. Used for creating all-year maxes files. + ''' + print(f'Converting VRT to TIF: {vrt_path} {tif_path}') + print(f'Here is the VRT:\n') + with open(vrt_path) as f: + for line in f.readlines(): + print(line) + + c = f'''gdal_translate + -of GTiff + -co TILED=YES + -co COMPRESS=DEFLATE + -co BIGTIFF=YES + {vrt_path} + {tif_path} + ''' + if not dryrun: + run_process(c) + + + def _build_year_vrt(self, year, dryrun=False): + paths = self._get_vrt_bands(year) + bounds = self._get_extent(paths, dryrun=dryrun) + big_vrt_name = 'maxMODIS.{}.std.vrt'.format(year) + print("Generating VRT {}...".format(big_vrt_name)) + if dryrun: + return big_vrt_name + for i in range(0, len(paths)): + band_num = str(i+1) + path = paths[i] + temp_vrt = self._build_8day_vrt(path, bounds=bounds, dryrun=dryrun) + if band_num == '1': + main_tree = ET.parse(temp_vrt) + main_root = main_tree.getroot() + else: + tree = ET.parse(temp_vrt) + root = tree.getroot() + bandElement = root.find('VRTRasterBand') + bandElement.attrib['band'] = band_num + main_root.append(bandElement) + try: os.remove(temp_vrt) + except: pass + main_tree.write(big_vrt_name) + return big_vrt_name + + + def _get_vrt_bands(self, year): + '''Get a list of paths to the 8-day max files for some year.''' + f_tpl = MAX_8DAY_PRECURSOR_FILENAME_TEMPLATE + ext = MAX_8DAY_PRECURSOR_FILENAME_EXT + bands = [] + for jd in ALL_MODIS_JULIAN_DAYS: + jd_dir = os.path.join(PRECURSORS_DIR, jd) + f = f_tpl.format(year, jd, 'std', ext) + path = os.path.join(jd_dir, f) + if os.path.exists(path): + p = 
os.path.join(jd_dir, f) + bands.append(p) + else: + continue + return bands + + + def _build_8day_vrt(self, path, bounds=None, vrtnodata=255, band_num=1, dryrun=False): + '''Wrapper for gdalbuildvrt. Build a 1-band VRT. + + Arguments: + path: path to the source file + bounds: a python list of the form [ xmin, ymin, xmax, ymax ]. + These values are joined into a string that is passed to the -te flag. + vrtnodata: Value to use for the -vrtnodata flag. + band_num: Band number in the source dataset to use. + ''' + vrtnodata = str(vrtnodata) + band_num = str(band_num) + temp_vrt = os.path.basename(path) + '.vrt' + c = f'''gdalbuildvrt + -vrtnodata {vrtnodata} + -b {band_num} + -overwrite + ''' + if bounds: + bounds_string = ' '.join([ str(num) for num in bounds ]) + c += f'-te {bounds_string} \n' + c += f'{temp_vrt} {path}' + if not dryrun: + run_process(c) + return temp_vrt + + + def _get_extent(self, paths, dryrun=False): + '''Returns the maximum value for each extent parameter for a list of rasters.''' + if dryrun: + return [] + self._check_same_proj(paths) + def max_by_key(iterable, key): + return max([ getattr(obj, key) for obj in iterable ]) + bounds = [] + for p in paths: + with rio.Env(): + with rio.open(p) as src: + bounds.append(src.bounds) + max_bounds = [ max_by_key(bounds, key) for key in ('left', 'bottom', 'right', 'top') ] + return max_bounds + + + def _check_same_proj(self, paths): + proj_strings = [] + for p in paths: + with rio.Env(): + with rio.open(p) as src: + proj_strings.append(src.profile['crs'].to_proj4()) + proj_last = proj_strings[0] + for proj in proj_strings: + if proj_last != proj: + raise TypeError('All datasets must have the exact same projection!') + diff --git a/cli.py b/cli.py new file mode 100755 index 0000000..880c924 --- /dev/null +++ b/cli.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python3 + +import os.path + +from volumes import vols +from util import load_env, clean_all, chown_all +from dkr import run_gdal + +load_env(ns=globals()) + 
+try: + run_gdal(os.path.join(DKR_BUILD_DIR, 'dkr_update'), volumes=vols) +finally: + clean_all() + diff --git a/dkr.py b/dkr.py new file mode 100644 index 0000000..196a5c4 --- /dev/null +++ b/dkr.py @@ -0,0 +1,52 @@ + +import os, os.path, pwd, grp +import docker +import logging as log + +from util import load_env + + +load_env(ns=globals()) + +def build_gdal(): + client = docker.from_env() + user = pwd.getpwnam(DKR_USER) + group = grp.getgrnam(DKR_GROUP) + user_id = user[2] + group_id = group[2] + gdal_image = client.images.build( + path=DKR_BUILD_DIR_HOST, + tag=DKR_IMAGE_TAG, + buildargs={ + 'DKR_BUILD_DIR': DKR_BUILD_DIR, + 'DKR_USER': DKR_USER, + 'DKR_GROUP': DKR_GROUP, + 'DKR_USER_ID': str(user_id), + 'DKR_GROUP_ID': str(group_id) + } + ) + + +def run_gdal(cmd, volumes=None): + client = docker.from_env() + containers = client.containers.list() + if DKR_CONTAINER_NAME in [ c.name for c in containers ]: + log.info("Already running!") + return + try: + container = client.containers.run(DKR_IMAGE_TAG, + command=cmd, + name=DKR_CONTAINER_NAME, + network_mode='host', + auto_remove=True, + volumes=volumes, + tty=True, + detach=True, + ) + for chunk in container.logs(stream=True): + print(chunk.decode('UTF-8'), end='') + container.wait() + except Exception as e: + log.error(e) + + diff --git a/dkr_update b/dkr_update new file mode 100755 index 0000000..fd5bc8d --- /dev/null +++ b/dkr_update @@ -0,0 +1,26 @@ +#!/usr/bin/env python3 + +import sys, os, os.path +import logging as log + +sys.path.append('/build') +from bulk_maxes import YearMaxesArchive +from fw2_archive import ForWarn2Archive +from util import init_log, mail_results, chown_all + +init_log() + +bulk_archive = YearMaxesArchive() +bulk_archive.update(update_precursors=True) + +archive = ForWarn2Archive() +dates = archive.update() + +chown_all() + +if len(dates): + log.info('Emailing results...') + mail_results(dates) +else: + log.info('No new products...') + diff --git a/do_max b/do_max deleted file mode 
100755 index feb0d44..0000000 --- a/do_max +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/bash - -set -e - -YEAR=$1 -DOY=$2 - -################################################################# -# remove existing Terra and Aqua tile tifs - rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif - rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# download 26 tiles for conus each from Aqua and from Terra - -echo "Now downloading DOY " $DOY " of TYPE std for YEAR " $YEAR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -# download 26 Aqua tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/std/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# - -# did 
we get 26 each? -let numtiles=`ls -1 *.gz | wc -l` - -if [ "$numtiles" == 52 ]; then - echo "Got 52 tifs for " $YEAR $DOY - else - echo "ERROR: Tiles MISSING for " $YEAR $DOY " only got " $numtiles -fi - -gunzip *.gz - -################################################################# -# mosaic together Terra for this DOY - -rm -f Terra.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Terra.img GMOD09Q1.A$YEAR$DOY*.tif -echo "done mosaicking Terra images together" -#rm -f Terra.tif -#gdal_translate Terra.img -of GTiff Terra.tif -#xv Terra.tif - -# mosaic together Aqua for this DOY - -rm -f Aqua.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Aqua.img GMYD09Q1.A$YEAR$DOY.*.tif -echo "done mosaicking Aqua images together" -#rm -f Aqua.tif -#gdal_translate Aqua.img -of GTiff Aqua.tif -#xv Aqua.tif - -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# take the maximum NDVI from the Terra and Aqua 8-day composites - -# if both are 253 or if both are 255 or if either are 253 with the other 255, then 255, but this is NOT nodata -# if either are 254, then 254, water - -# 252 is nodata coming out - -echo "taking the maximum NDVI from the Terra and Aqua 8-day composites" - -#find maxval composite of Terra and Aqua -# and propagate two mask values -gdal_calc.py -A Terra.img -B Aqua.img --outfile=maxMODIS.$YEAR.$DOY.std.img --calc="\ -maximum((A<251)*A,(B<251)*B)\ -+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\ -+((A==254)|(B==254))*254\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite - -rm -f Aqua.img Terra.img - -################################################################# - - diff --git 
a/do_max_max b/do_max_max deleted file mode 100755 index 3140e73..0000000 --- a/do_max_max +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash - -DOY3=$1 -YR3=$2 - -DOY2=$3 -YR2=$4 - -DOY1=$5 -YR1=$6 - -gdal_calc.py --debug --calc="\ -maximum( maximum( (A<251)*A,(B<251)*B ),(C<251)*C )\ -+((A==254)|(B==254)|(C==254))*254\ -+((A==255)&(B==255)&(C==255))*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" \ - --outfile=maxMODISmax.$YR3.$DOY3.std.img \ - -A maxMODIS.$YR1.$DOY1.std.img \ - -B maxMODIS.$YR2.$DOY2.std.img \ - -C maxMODIS.$YR3.$DOY3.std.img \ - --type=Byte --overwrite - diff --git a/do_max_modis_max.py b/do_max_modis_max.py deleted file mode 100755 index 85ca8e1..0000000 --- a/do_max_modis_max.py +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env python - -import os, os.path, argparse -import sys - - -intervals = [ ("361","353","345"), ("353","345","337"), ("345","337","329"), ("337","329","321"), ("329","321","313"), ("321","313","305"), ("313","305","297"), ("305","297","289"), ("297","289","281"), ("289","281","273"), ("281","273","265"), ("273","265","257"), ("265","257","249"), ("257","249","241"), ("249","241","233"), ("241","233","225"), ("233","225","217"), ("225","217","209"), ("217","209","201"), ("209","201","193"), ("201","193","185"), ("193","185","177"), ("185","177","169"), ("177","169","161"), ("169","161","153"), ("161","153","145"), ("153","145","137"), ("145","137","129"), ("137","129","121"), ("129","121","113"), ("121","113","105"), ("113","105","097"), ("105","097","089"), ("097","089","081"), ("089","081","073"), ("081","073","065"), ("073","065","057"), ("065","057","049"), ("057","049","041"), ("049","041","033"), ("041","033","025"), ("033","025","017"), ("025","017","009"), ("017","009","001"), ("009","001","361"), ("001","361","353") ] - -def get_three_dates(year, doy): - days = list(filter(lambda trip: trip[0] == doy, intervals))[0] - if not len(days): - print "Invalid day of year" - sys.exit() - days = 
list(map(lambda d: (d, year), days)) - # doy == 009 or doy == 001 is a special case where the year is not consistent - if doy == '009': - days[2] = ( '361', str(int(year)-1) ) - if doy == '001': - days[1] = ( '361', str(int(year)-1) ) - days[2] = ( '353', str(int(year)-1) ) - print days - return days - - -def setup_arg_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('-y', '--year', help='Year') - parser.add_argument('-d', '--doy', help='Day of year') - return parser - - -def main(): - parser = setup_arg_parser() - args = parser.parse_args() - year = args.year - doy = args.doy - dates = get_three_dates(year, doy) - max_modis_file_template = "maxMODIS.{}.{}.std.img" - for d in dates: - doy = d[0] - year = d[1] - max_filename = max_modis_file_template.format(year, doy) - if not os.path.exists(max_filename): - print "Missing {}. Generating now...".format(max_filename) - os.system("./do_max {} {}".format(year, doy)) - max_modis_max_filename = "maxMODISmax.{}.{}.std.img".format(year, doy) - print "Generating {}...".format(max_modis_max_filename) - os.system("./do_max_max {} {} {} {} {} {}".format( - dates[0][0], dates[0][1], - dates[1][0], dates[1][1], - dates[2][0], dates[2][1] - )) - -if __name__ == '__main__': - main() diff --git a/dodate b/dodate index e77207b..0dff9d1 100755 --- a/dodate +++ b/dodate @@ -1,28 +1,54 @@ #!/bin/bash -# Usage: ./dodate 2018153 -# First command line argument is the desired date for product generation - - -# Types of Intermediate Files, by filename +################################################################## +################################################################## +# +# ForWarn 2 Production Script +# +################################################################## +################################################################## +# +# Low-level bash script for building an entire set of products +# for a single 24-day period (3 MODIS periods). 
+# +# Author: William Hargrove (EFETAC / USDA) +# +# Maintained By: UNCA's NEMAC (nemac@unca.edu) +# +# Arguments: +# datestring - a string of the form YYYYJJJ, where +# YYYY is a year and JJJ is a zero-padded julian day (day of the year). +# +# Example: ./dodate 2019009 +# +# Data Source: https://gimms.gsfc.nasa.gov/MODIS/ +# +################################################################# +################################################################# +# +# Summary of All Precursors # # from distal to proximal # in order of dependency, from the bottom up # -# meanallpriormax.* mean over all prior years, over 3 dates, over 2 sensors -# sumallpriormax.* sum over all prior years, over 3 dates, over 2 sensors -# maxallpriormax.* max over all prior years, over 3 dates, over 2 sensors -# above is for all prior year products -# maxMODISmaxmax.* max over all prior years, over 3 dates, over 2 sensors -# above is for 1-, 3-, and 5-year products -# .std all 3 dates are from std products -# .nrt first 2 dates are from std, third date is nrt -# maxMODISalc.* max over 3 dates, recent priority, over 2 sensors -# maxMODISmax.* max over 3 dates, over 2 sensors -# maxMODIS.* max over 2 sensors, for one date - - +# Precursors for Prior Year Products: +# +# meanallpriormax.* mean over all prior years, over 3 dates, over 2 sensors +# sumallpriormax.* sum over all prior years, over 3 dates, over 2 sensors +# maxallpriormax.* max over all prior years, over 3 dates, over 2 sensors +# +# Precursors for 1, 3, and 5-year Products +# +# maxMODISmaxmax.* max over all prior years, over 3 dates, over 2 sensors +# +# .std all 3 dates are from std products +# .nrt first 2 dates are from std, third date is nrt +# maxMODISalc.* max over 3 dates, recent priority, over 2 sensors +# maxMODISmax.* max over 3 dates, over 2 sensors +# maxMODIS.* max over 2 sensors, for one date +# ################################################################# +# # Precursor Intermediate Files Needed for making 
1-, 3-, and 5-year Products # # maxMODIS nrt DOY3 @@ -36,15 +62,16 @@ # maxMODIS std max over each of 3 DOYs in the prior year # # * = files should already exist - +# # use cp command something like this -# cp /media/disk/fullwrapper2/maxMODISmaxmax.[135]-yr-baseline.201[76543].DOY.std.img . -# cp /media/disk/fullwrapper2/maxMODISmax.201[76543].DOY.std.img . -# cp /media/disk/fullwrapper2/maxMODIS.2017.0[876][135].std.img . +# cp /precursors/maxMODISmaxmax.[135]-yr-baseline.201[76543].DOY.std.img . +# cp /precursors/maxMODISmax.201[76543].DOY.std.img . +# cp /precursors/maxMODIS.2017.0[876][135].std.img . # # harvest back the ? - +# ################################################################# +# # Precursor Intermediate Files Needed for making all prior year Products # # maxallpriormax.*.DOY.std.img for this DOY for as recent a year as possible* @@ -55,14 +82,15 @@ # the year of the latest available maxallpriormax file* # # * = files should already exist - +# # use cp command something like this # cp /media/disk/fullwrapper2/maxallpriormax.*.DOY.std.img . # cp /media/disk/fullwrapper2/maxMODISmax.201[76543].081.std.img . # # harvest back the maxallpriormax std file that is made for this DOY - +# ################################################################# +# # Precursor Intermediate Files Needed for making Mean all prior year Products # # countallpriormax std for this DOY for as recent a year as possible* @@ -74,1547 +102,829 @@ # the year of the latest available countallpriormax and sumallpriormax files* # # * = files should already exist - +# # use cp command something like this # cp /media/disk/fullwrapper2/countallpriormax.*.DOY.std.img . # cp /media/disk/fullwrapper2/sumallpriormax.*.DOY.std.img . # cp /media/disk/fullwrapper2/maxMODISmax.201[76543].081.std.img . 
# # harvest back the countallpriormax and sumallpriormax files that are made for this DOY - - -################################################################# -################################################################# - -set -b - -ALCpath=. - -medianallyrmaxpath=. - -#junk=`date | awk '{print "date -d\""$1" "$2" "$3"\" +%Y%m%d"}' | bash` - -#datedayofyear=`echo $junk | cut -c5-8` -#YEAR=`echo $junk | cut -c1-4` - -#echo "Current date is " $YEAR $datedayofyear - -# this code finds the earliest just-passed interval DOY from the current date -#for INTERVALs in 361/353/345 353/345/337 345/337/329 337/329/321 329/321/313 321/313/305 313/305/297 305/297/289 297/289/281 289/281/273 281/273/265 273/265/257 265/257/249 257/249/241 249/241/233 241/233/225 233/225/217 225/217/209 217/209/201 209/201/193 201/193/185 193/185/177 185/177/169 177/169/161 169/161/153 161/153/145 153/145/137 145/137/129 137/129/121 129/121/113 121/113/105 113/105/097 105/097/089 097/089/081 089/081/073 081/073/065 073/065/057 065/057/049 057/049/041 049/041/033 041/033/025 033/025/017 025/017/009 017/009/001 009/001/361 001/361/353 -#do - -#DOY=`echo $INTERVALs | awk -F/ '{print $1}'` -##echo $DOY $datedayofyear -#if [ $DOY -le $datedayofyear ] - #then break -#fi - -#done -################################################################# -# command line argument should be desired date in the format 2018001 - -YEAR=`echo $1 | cut -c1-4` -DOYWANTED=`echo $1 | cut -c5-7` - -# this code finds the three dates involved in this interval -for INTERVALs in 361/353/345 353/345/337 345/337/329 337/329/321 329/321/313 321/313/305 313/305/297 305/297/289 297/289/281 289/281/273 281/273/265 273/265/257 265/257/249 257/249/241 249/241/233 241/233/225 233/225/217 225/217/209 217/209/201 209/201/193 201/193/185 193/185/177 185/177/169 177/169/161 169/161/153 161/153/145 153/145/137 145/137/129 137/129/121 129/121/113 121/113/105 113/105/097 105/097/089 097/089/081 089/081/073 081/073/065 073/065/057 
065/057/049 057/049/041 049/041/033 041/033/025 033/025/017 025/017/009 017/009/001 009/001/361 001/361/353 -do - -DOY=`echo $INTERVALs | awk -F/ '{print $1}'` -#echo $DOY $DOYWANTED -if [ $DOY -eq $DOYWANTED ]; then - break -fi - -done # finding the three dates -################################################################# - -echo "NRT DOY to download is " $DOY -# this is the day the 8-day period BEGINS -echo "The three INTERVALs needed are " $INTERVALs -echo "these are the days the 8-day periods BEGIN" - - - -# check if all subdirectories exist first -for dirs in 1-yr-max 3-yr-max 5-yr-90 10-yr-90 median-all-yr-max ALC pctprogress -do - - if [ ! -d "${dirs}" ];then - echo "Need to make subdirectory " $dirs - mkdir $dirs - fi - -done # checking all subdirs -################################################################# - - -# test to see if 1-yr product already exists for this date -# if so, assume all products already exist and exit -##echo "got to test" -##if [ ! -f ./1-yr-max/ForWarn2LAEA.$YEAR.$DOY.1-yr-baseline.img ]; then - ##echo "Products do not already exist for ./1-yr-max/ForWarnLAEA."$YEAR"."$DOY".1-yr-baseline.img" - ##echo "Assuming that NO products already exist for " $YEAR $DOY - ##echo "Generating all products for " $YEAR $DOY - - - - - -# get rid of any existing or partial gz downloads - rm -f *.gz* - - - let LASTYEAR=$YEAR-1 - - DOY1=`echo $INTERVALs | awk -F"/" '{print $1}'` - DOY2=`echo $INTERVALs | awk -F"/" '{print $2}'` - DOY3=`echo $INTERVALs | awk -F"/" '{print $3}'` - - echo $DOY1 $DOY2 $DOY3 - - - case $DOY1 in - 001) - YRDOY3=$YEAR/$DOY1 - YRDOY2=$LASTYEAR/$DOY2 - YRDOY1=$LASTYEAR/$DOY3;; - 009) - YRDOY3=$YEAR/$DOY1 - YRDOY2=$YEAR/$DOY2 - YRDOY1=$LASTYEAR/$DOY3;; - *) - YRDOY3=$YEAR/$DOY1 - YRDOY2=$YEAR/$DOY2 - YRDOY1=$YEAR/$DOY3;; - esac - - echo $YEAR $INTERVALs $YRDOY3 $YRDOY2 $YRDOY1 - - # parse YRDOY3 YRDOY2 YRDOY1 into paired years and dates - YR3=`echo $YRDOY3 | awk -F/ '{print $1}'` - DOY3=`echo $YRDOY3 | awk -F/ '{print 
$2}'` - YR2=`echo $YRDOY2 | awk -F/ '{print $1}'` - DOY2=`echo $YRDOY2 | awk -F/ '{print $2}'` - YR1=`echo $YRDOY1 | awk -F/ '{print $1}'` - DOY1=`echo $YRDOY1 | awk -F/ '{print $2}'` - - -# make the numerator for all products (except ALC) -# first make the new maxMODIS.nrt for the current date $YRDOY3 - -# try to load this DOY as nrt first - THIRDDATETYPE=nrt - YEAR=$YR3 - DOY=$DOY3 -################################################################# -################################################################# -# remove existing Terra and Aqua tile tifs - rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif - rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# download 26 tiles for conus each from Aqua and from Terra - -echo "Now downloading DOY " $DOY " of TYPE " $THIRDDATETYPE " for YEAR " $YEAR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -# download 26 Aqua tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np 
https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# -# unzip all tiles - -# did we get 26 each? -let numtiles=`ls -1 *.gz | wc -l` - -echo "numtiles was " $numtiles - -# were tiles available as nrt? -if [ "$numtiles" == 0 ]; then - echo "Tiles for DOY "$YEAR $DOY " are not available as nrt!" - echo "Attempting to download tiles for this DOY as std!" - THIRDDATETYPE=std - - # try downloading again as std -################################################################# -# remove existing Terra and Aqua tile tifs - rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif - rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# download 26 tiles for conus each from Aqua and from Terra - -echo "Now downloading DOY " $DOY " of TYPE " $THIRDDATETYPE " for YEAR " $YEAR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np 
https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -# download 26 Aqua tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$THIRDDATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# - -fi # tried downloading as std instead - -# did we get 26 each? 
-let numtiles=`ls -1 *.gz | wc -l` - -if [ "$numtiles" == 52 ]; then - echo "Got 52 tifs for " $YEAR $DOY - else - echo "ERROR: Tiles MISSING for " $YEAR $DOY " only got " $numtiles -fi - -gunzip *.gz - -################################################################# -# mosaic together Terra for this DOY - -rm -f Terra.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o "Terra.img" GMOD09Q1.A$YEAR$DOY*.tif -echo "done mosaicking Terra images together" -#rm -f Terra.tif -#gdal_translate Terra.img -of GTiff Terra.tif -#xv Terra.tif - -# mosaic together Aqua for this DOY - -rm -f Aqua.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Aqua.img GMYD09Q1.A$YEAR$DOY.*.tif -echo "done mosaicking Aqua images together" -#rm -f Aqua.tif -#gdal_translate Aqua.img -of GTiff Aqua.tif -#xv Aqua.tif - -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# take the maximum NDVI from the Terra and Aqua 8-day composites - -# if both are 253 or if both are 255 or if either are 253 with the other 255, then 255, but this is NOT nodata -# if either are 254, then 254, water - -# 252 is nodata coming out - -echo "taking the maximum NDVI from the Terra and Aqua 8-day composites" - -#find maxval composite of Terra and Aqua -# and propagate two mask values -gdal_calc.py --debug -A Terra.img -B Aqua.img --outfile=maxMODIS.$YEAR.$DOY.$THIRDDATETYPE.img --calc="\ -maximum((A<251)*A,(B<251)*B)\ -+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\ -+((A==254)|(B==254))*254\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite -rm -f maxMODIS.$YEAR.$DOY.$THIRDDATETYPE.tif - -#gdal_translate maxMODIS.$YEAR.$DOY.$THIRDDATETYPE.img -of 
GTiff maxMODIS.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv maxMODIS.$YEAR.$DOY.$THIRDDATETYPE.tif -################################################################# - -# over the two earlier dates in this interval -# see whether maxMODIS std files exist already -# if not, make them - -# try std TYPE download first - FIRSTDATETYPE=std - SECONDDATETYPE=std - - for YRDOY in $YRDOY2 $YRDOY1 - do - - # try std TYPE download first - TYPE=std - - PASTYR=`echo $YRDOY | awk -F/ '{print $1}'` - PASTDOY=`echo $YRDOY | awk -F/ '{print $2}'` - - - - # does this maxMODIS PASTDOY double-max std image already exist? - # if not, make it - if [ ! -f maxMODIS.$PASTYR.$PASTDOY.$TYPE.img ]; then - echo "maxMODIS file does not exist for maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img" - # get tiles and make it - echo "Fetching tiles and generating double max for maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img" - - -################################################################# -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# download 26 tiles for conus each from Aqua and from Terra - -echo "Now downloading DOY " $PASTDOY " of TYPE " $TYPE " for YEAR " $PASTYR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np 
https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -# download 26 Aqua tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# - -# did we get 26 std tiles each? -let numtiles=`ls -1 *.gz | wc -l` -if [ "$numtiles" == 52 ]; then - echo "Got 52 std tiles for " $PASTYR $PASTDOY $TYPE -else - echo "ERROR: std Tiles MISSING for " $PASTYR $PASTDOY $TYPE " only got " $numtiles - # did we get 26 std Terra tiles? - let numterratiles=`ls -1 GMOD09Q1.*.gz | wc -l` - if [ "$numterratiles" == 26 ]; then - echo "Got 26 Terra std tifs for " $PASTYR $PASTDOY $TYPE - else - echo "Only got " $numterratiles " std Terra tiles for " $PASTYR $PASTDOY $TYPE - echo "Trying nrt for Terra " $PASTYR $PASTDOY $TYPE " instead now ..." 
- TYPE=nrt - -################################################################# -# download 26 nrt tiles for conus from Terra - -echo "Now downloading Terra DOY " $PASTDOY " of TYPE " $TYPE " for YEAR " $PASTYR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz -################################################################# - - # did we get 26 nrt Terra tiles this time? - let numterratiles=`ls -1 GMOD09Q1.*.gz | wc -l` - if [ "$numterratiles" == 26 ]; then - echo "Got 26 Terra nrt tiles for " $PASTYR $PASTDOY $TYPE - # set nrt TYPE for the right filename in upcoming gdal_calc - echo YRDOY is $YRDOY - echo YRDOY1 is $YRDOY1 - if [ $YRDOY == $YRDOY1 ]; then - FIRSTDATETYPE=nrt - echo FIRSTDATETYPE is $FIRSTDATETYPE - else - SECONDDATETYPE=nrt - echo SECONDDATETYPE is $SECONDDATETYPE - fi - else - echo "ERROR: std AND nrt Terra Tiles are still MISSING for " $PASTYR $PASTDOY $TYPE " only got " $numterratiles - fi # 26 Terra tiles std - fi # 26 Terra tiles nrt - - - - # did we get 26 std Aqua tiles? 
- let numaquatiles=`ls -1 GMYD09Q1.*.gz | wc -l` - if [ "$numaquatiles" == 26 ]; then - echo "Got 26 Aqua std tiles for " $PASTYR $PASTDOY - else - echo "Only got " $numaquatiles " std Aqua tiles for " $PASTYR $PASTDOY $TYPE - echo "Trying nrt for Aqua " $PASTYR $PASTDOY $TYPE " instead now ..." - TYPE=nrt - -################################################################# -# download 26 nrt tiles for conus from Aqua - -echo "Now downloading Aqua DOY " $PASTDOY " of TYPE " $TYPE " for YEAR " $PASTYR " via https from NASA GLAM" - -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# - - # did we get 26 nrt Aqua tiles this time? - let numaquatiles=`ls -1 GMYD09Q1.*.gz | wc -l` - if [ "$numaquatiles" == 26 ]; then - echo "Got 26 Aqua nrt tiles for " $PASTYR $PASTDOY $TYPE - # set nrt TYPE for the right filename in upcoming gdal_calc - echo YRDOY is $YRDOY - echo YRDOY1 is $YRDOY1 - if [ $YRDOY == $YRDOY1 ]; then - FIRSTDATETYPE=nrt - echo FIRSTDATETYPE is $FIRSTDATETYPE - else - SECONDDATETYPE=nrt - echo SECONDDATETYPE is $SECONDDATETYPE - fi - else - echo "ERROR: std AND nrt Aqua Tiles still MISSING for " $PASTYR $PASTDOY $TYPE " only got " $numaquatiles - fi # 26 Aqua tiles std - fi # 26 Aqua tiles nrt - - # did we finally get 26 each? 
- let numtiles=`ls -1 *.gz | wc -l` - if [ "$numtiles" == 52 ]; then - echo "Finally got 52 tiles for " $PASTYR $PASTDOY - else - echo "This is NOT GONNA WORK!" - echo "Could not get all tiles for Terra or Aqua " $PASTYR $PASTDOY - echo "Tried both std and nrt for EACH!" - exit 1 - fi - -fi # 52 std Tiles - -# unzip all tiles -gunzip *.gz - -################################################################# -# mosaic together Terra for this DOY - -rm -f Terra.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Terra.img GMOD09Q1.A$PASTYR$PASTDOY*.tif -echo "done mosaicking Terra images together" -#rm -f Terra.tif -#gdal_translate Terra.img -of GTiff Terra.tif -#xv Terra.tif - -# mosaic together Aqua for this DOY - -rm -f Aqua.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Aqua.img GMYD09Q1.A$PASTYR$PASTDOY.*.tif -echo "done mosaicking Aqua images together" -#rm -f Aqua.tif -#gdal_translate Aqua.img -of GTiff Aqua.tif -#xv Aqua.tif - -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# take the maximum NDVI from the Terra and Aqua 8-day composites - -# if both are 253 or if both are 255 or if either are 253 with the other 255, then 255, but this is NOT nodata -# if either are 254, then 254, water - -# 252 is nodata coming out -# TYPE is still set from above, std by default, nrt if necessary - -echo "taking the maximum NDVI from the Terra and Aqua 8-day composites" - -#find maxval composite of Terra and Aqua -# and propagate two mask values -gdal_calc.py --debug -A Terra.img -B Aqua.img --outfile=maxMODIS.$PASTYR.$PASTDOY.$TYPE.img --calc="\ -maximum((A<251)*A,(B<251)*B)\ -+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\ 
-+((A==254)|(B==254))*254\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite -rm -f maxMODIS.$PASTYR.$PASTDOY.$TYPE.tif -#gdal_translate maxMODIS.$PASTYR.$PASTDOY.$TYPE.img -of GTiff maxMODIS.$PASTYR.$PASTDOY.$TYPE.tif -################################################################# - - else - echo "maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img file already exists" - echo "No need to make maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img file" - fi - - done # next date over the 2 earlier dates in this interval - - -################################################################# -# make the maxMODISmax.nrt numerator across the three dates for all products -# consisting of two std dates and one nrt date -################################################################# -# find 24-day max NDVI triple-max - -echo "FIRSTDATETYPE is " $FIRSTDATETYPE -echo "SECONDDATETYPE is " $SECONDDATETYPE -echo "THIRDDATETYPE is " $THIRDDATETYPE -echo "TYPE is " $TYPE -echo "DOY1 is " $DOY1 -echo "YR1 is " $YR1 -echo "DOY2 is " $DOY2 -echo "YR2 is " $YR2 -echo "DOY3 is " $DOY3 -echo "YR3 is " $YR3 - -# must make this nrt triple-max product every time, -# since this is adding a new nrt date - -# make triple DOY maximum value composite -# if ANY of three are 254, then 254 = water -# if ALL three are 255, then 255 = no data - -# 252 is nodata going in -# 252 is nodata coming out - -gdal_calc.py --debug --calc="\ -maximum( maximum( (A<251)*A,(B<251)*B ),(C<251)*C )\ -+((A==254)|(B==254)|(C==254))*254\ -+((A==255)&(B==255)&(C==255))*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax.$YR3.$DOY3.$THIRDDATETYPE.img -A maxMODIS.$YR1.$DOY1.$FIRSTDATETYPE.img -B maxMODIS.$YR2.$DOY2.$SECONDDATETYPE.img -C maxMODIS.$YR3.$DOY3.$THIRDDATETYPE.img --type=Byte --overwrite - -rm -f maxMODISmax.$YR3.$DOY3.$THIRDDATETYPE.tif -#gdal_translate maxMODISmax.$YR3.$DOY3.$THIRDDATETYPE.img -of GTiff 
maxMODISmax.$YR3.$DOY3.$THIRDDATETYPE.tif -#xv maxMODISmax.$YR3.$DOY3.$THIRDDATETYPE.tif -################################################################# -################################################################# - -# make ALC numerator, maxMODISalc nrt -# consisting of two std dates and one nrt date -################################################################# -# find 24-day Adaptive Length Compositing (ALC) current image -# for 1-year PRODUCT only - -# make triple DOY recent priority composite -# if ANY of three are 254, then 254 = water -# if ALL three are 255, then 255 = no data - -# 252 is nodata going in -# 252 is nodata coming out - -gdal_calc.py --debug --calc="\ -(C<251)*C\ -+((B<251)&(C>250))*B\ -+((A<251)&(B>250)&(C>250))*A\ -+((A==254)|(B==254)|(C==254))*254\ -+((A==255)&(B==255)&(C==255))*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.img -A maxMODIS.$YR1.$DOY1.$FIRSTDATETYPE.img -B maxMODIS.$YR2.$DOY2.$SECONDDATETYPE.img -C maxMODIS.$YR3.$DOY3.$THIRDDATETYPE.img --type=Byte --overwrite - -rm -f $ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.tif -#gdal_translate $ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.img -of GTiff $ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.tif -#xv $ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.tif - -echo "Current and ALC/ALC2 current views are now computed for " $YEAR $DOY -################################################################# -# nrt current and ALC current views are now computed -# for the numerator of all products -################################################################# -################################################################# - -# loop to make 1-, 3-, 5-, and 10-year products - for PRODUCT in 1 3 5 10 - do - echo "Making " $PRODUCT"-yr product now ..." 
- TYPE=std - - - let MINYEAR=$PRODUCT+2003 - echo "MINYEAR is " $MINYEAR - - if [ $YEAR -ge $MINYEAR ]; then - echo $PRODUCT "-year max baseline is possible for year " $YEAR - - - # set denominator file and output subdirectory acc to PRODUCT - if [ $PRODUCT -lt 5 ]; then - subdir=max - denomfile=max - else - subdir=90 - denomfile=90 - fi - - - # test whether maxMODISmaxmax std max or maxMODISmax90 over all prior years, over 3 dates, - # and over 2 sensors already exists for denominator baseline - if [ ! -f maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img ]; then - echo "File does not exist for maxMODISmax"$denomfile"."$PRODUCT"-yr-baseline."$YEAR"."$DOY"."$TYPE".img" - - - # check for intermediate ingredients (maxMODISmax each year) to make it -################################################################# - # walk through all past years, EXCLUDING current year - # Do we have current and all back-year maxMODIS maps - # for this YR and DOY? - yrsback=$PRODUCT - # alk forward in time - # stop at $LASTYEAR - # over all prior years - while [ $yrsback -gt 0 ] - do - - # if maxMODISmax std file for this date - # for every prior year does not exist, make it - let PASTYEAR=$YEAR-$yrsback - if [ ! -f maxMODISmax.$PASTYEAR.$DOY.$TYPE.img ]; then - echo "maxMODISmax file does not exist for maxMODISmax."$PASTYEAR"."$DOY"."$TYPE".img" - # going forward in time thru 3 dates this prior year - for YRDOY in $YRDOY1 $YRDOY2 $YRDOY3 - do - - # parse apart this YR and DOY with awk - YR=`echo $YRDOY | awk -F"/" '{print $1}'` - PASTDOY=`echo $YRDOY | awk -F"/" '{print $2}'` - let PASTYR=$YR-$yrsback - echo "PASTYR is " $PASTYR - echo "PASTDOY is " $PASTDOY - - - # does this maxMODIS DOY double-max image already exist? - # if not, make it - if [ ! 
-f maxMODIS.$PASTYR.$PASTDOY.$TYPE.img ]; then - echo "maxMODIS file does not exist for maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img" - # get tiles and make it - echo "Fetching and generating double max for maxMODIS."$PASTYR"."$PASTDOY"."$TYPE".img" - - -################################################################# -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# download 26 tiles for conus each from Aqua and from Terra - -echo "Now downloading DOY " $PASTDOY " of TYPE " $TYPE " for YEAR " $PASTYR " via https from NASA GLAM" - -# download 26 Terra tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMOD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMOD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -# download 26 Aqua tiles for conus -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np 
https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz -wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$TYPE/GMYD09Q1/tif/NDVI/$PASTYR/$PASTDOY/GMYD09Q1.A$PASTYR$PASTDOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz - -################################################################# -# unzip all tiles - -# did we get 26 each? -let numtiles=`ls -1 *.gz | wc -l` -if [ "$numtiles" == 52 ]; then - echo "Got 52 tifs for " $PASTYR $PASTDOY - else - echo "Tiles MISSING for " $PASTYR $PASTDOY " only got " $numtiles -fi - -gunzip *.gz +# +# ################################################################# -# mosaic together Terra for this DOY - -rm -f Terra.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Terra.img GMOD09Q1.A$PASTYR$PASTDOY*.tif -echo "done mosaicking Terra images together" -#rm -f Terra.tif -#gdal_translate Terra.img -of GTiff Terra.tif -#xv Terra.tif - -# mosaic together Aqua for this DOY - -rm -f Aqua.img -gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Aqua.img GMYD09Q1.A$PASTYR$PASTDOY.*.tif -echo "done mosaicking Aqua images together" -#rm -f Aqua.tif -#gdal_translate Aqua.img -of GTiff Aqua.tif -#xv Aqua.tif - -################################################################# -# remove existing Terra and Aqua tile tifs -rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif -rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif - -################################################################# -# take the maximum NDVI from the Terra and Aqua 8-day composites - -# if both are 253 or if both are 255 or if either are 253 with the other 255, then 255, but this is NOT nodata -# if either are 254, then 254, water - -# 252 is nodata coming out - -echo "taking the maximum NDVI from the Terra and Aqua 8-day composites" - -#find maxval composite of Terra and Aqua -# and propagate two mask values -gdal_calc.py 
--debug -A Terra.img -B Aqua.img --outfile=maxMODIS.$PASTYR.$PASTDOY.$TYPE.img --calc="\ -maximum((A<251)*A,(B<251)*B)\ -+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\ -+((A==254)|(B==254))*254\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite -rm -f maxMODIS.$PASTYR.$PASTDOY.$TYPE.tif -#gdal_translate maxMODIS.$PASTYR.$PASTDOY.$TYPE.img -of GTiff maxMODIS.$PASTYR.$PASTDOY.$TYPE.tif -#xv maxMODIS.$PASTYR.$PASTDOY.$TYPE.tif -################################################################# - - else - echo "sensor maxMODIS file already exists for maxMODIS." $PASTYR"."$PASTDOY"."$TYPE".img" - fi - - - done # over all 3 YRDOYS - # all 3 maxMODIS double-maxes now exist for this prior year - - -################################################################# - # generate the maxMODISmax 3-way max over the 3 YRINTERVALs - # for this prior year -################################################################# -# find 24-day max NDVI maxMODISmax triple-max for this PASTYR - -# parse THISYRs for each of the three DOYs - -THISYR3=`echo $YRDOY3 | awk -F/ '{print $1-"'$yrsback'"}'` -THISYR2=`echo $YRDOY2 | awk -F/ '{print $1-"'$yrsback'"}'` -THISYR1=`echo $YRDOY1 | awk -F/ '{print $1-"'$yrsback'"}'` - - -# make triple DOY maximum value composite -# if ANY of three are 254, then 254 = water -# if ALL three are 255, then 255 = no data - -# 252 is nodata going in -# 252 is nodata coming out - -gdal_calc.py --debug --calc="\ -maximum( maximum( (A<251)*A,(B<251)*B ),(C<251)*C )\ -+((A==254)|(B==254)|(C==254))*254\ -+((A==255)&(B==255)&(C==255))*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax.$THISYR3.$DOY3.$TYPE.img -A maxMODIS.$THISYR1.$DOY1.$TYPE.img -B maxMODIS.$THISYR2.$DOY2.$TYPE.img -C maxMODIS.$THISYR3.$DOY3.$TYPE.img --type=Byte --overwrite - -rm -f maxMODISmax.$THISYR3.$DOY3.$TYPE.tif -#gdal_translate 
maxMODISmax.$THISYR3.$DOY3.$TYPE.img -of GTiff maxMODISmax.$THISYR3.$DOY3.$TYPE.tif -#xv maxMODISmax.$THISYR3.$DOY3.$TYPE.tif -################################################################# - - else - echo "maxMODISmax file already exists for maxMODISmax."$PASTYEAR"."$DOY"."$TYPE".img" - fi - - -################################################################# - - # advance one year forward and repeat - (( yrsback-- )) - - - done # over all prior years - -################################################################# - # all maxMODISmax files now present - # a triple-max file for all three intervals every year - # for all 3 YRINTERVALs for current and all yrsback for this $PRODUCT - # Use $DOY3 set to third (last) date for naming files - - -######## make $PRODUCT-year product ######################################### - - # generate $PRODUCT-year maxMODISmaxmax maximum baseline or maxMODISmax90 percentile baseline over the $PRODUCT prior yrsback years - let BACK10=$YEAR-10 - let BACK9=$YEAR-9 - let BACK8=$YEAR-8 - let BACK7=$YEAR-7 - let BACK6=$YEAR-6 - let BACK5=$YEAR-5 - let BACK4=$YEAR-4 - let BACK3=$YEAR-3 - let BACK2=$YEAR-2 - let BACK1=$YEAR-1 - - - case $PRODUCT in - 10) - # generate 90th percentile over prior 10 years - gdal_calc.py --debug --calc="\ -percentile([\ -(A<251)*A,(B<251)*B,(C<251)*C,\ -(D<251)*D,(E<251)*E,(F<251)*F,\ -(G<251)*G,(H<251)*H,(I<251)*I,\ -(J<251)*J\ -],90,axis=0)\ -+((A==254)&(B==254)&(C==254)&(D==254)&(E==254)&(F==254)&(G==254)&(H==254)&(I==254)&(J==254))*254\ -+( (\ -(A!=254)|(B!=254)|(C!=254)|(D!=254)|(E!=254)|(F!=254)|(G!=254)|(H!=254)|(I!=254)|(J!=254)\ - )&\ -( \ -(A>250)&(B>250)&(C>250)&(D>250)&(E>250)&(F>250)&(G>250)&(H>250)&(I>250)&(J>250)\ - ) )*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax90.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK10.$DOY3.$TYPE.img -B maxMODISmax.$BACK9.$DOY3.$TYPE.img -C maxMODISmax.$BACK8.$DOY3.$TYPE.img -D 
maxMODISmax.$BACK7.$DOY3.$TYPE.img -E maxMODISmax.$BACK6.$DOY3.$TYPE.img -F maxMODISmax.$BACK5.$DOY3.$TYPE.img -G maxMODISmax.$BACK4.$DOY3.$TYPE.img -H maxMODISmax.$BACK3.$DOY3.$TYPE.img -I maxMODISmax.$BACK2.$DOY3.$TYPE.img -J maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite - ;; - - 5) - # generate 90th percentile over prior 5 years - gdal_calc.py --debug --calc="\ -percentile([\ -(A<251)*A,(B<251)*B,(C<251)*C,\ -(D<251)*D,(E<251)*E\ -],90,axis=0)\ -+((A==254)&(B==254)&(C==254)&(D==254)&(E==254))*254\ -+( (\ -(A!=254)|(B!=254)|(C!=254)|(D!=254)|(E!=254)\ - )&\ -( \ -(A>250)&(B>250)&(C>250)&(D>250)&(E>250)\ - ) )*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax90.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK5.$DOY3.$TYPE.img -B maxMODISmax.$BACK4.$DOY3.$TYPE.img -C maxMODISmax.$BACK3.$DOY3.$TYPE.img -D maxMODISmax.$BACK2.$DOY3.$TYPE.img -E maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite - ;; - - 3) - # generate maximum over prior 3 years - gdal_calc.py --debug --calc="\ -maximum( maximum( (A<251)*A,(B<251)*B ),(C<251)*C )\ -+((A==254)|(B==254)|(C==254))*254\ -+((A==255)&(B==255)&(C==255))*255\ -" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmaxmax.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK3.$DOY3.$TYPE.img -B maxMODISmax.$BACK2.$DOY3.$TYPE.img -C maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite - ;; - - 1) - # generate maximum over prior year - # for 1-yr product, 1-yr max is the same as the max last year - cp maxMODISmax.$BACK1.$DOY3.$TYPE.img maxMODISmaxmax.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img - ;; - - esac # over all 1-, 3-, 5-, and 10-year cases - - - # done with custom calculations to make maxMODISmaxmax or maxMODISmax90 for each case, - - - else - echo "maxMODISmax"$denomfile"."$PRODUCT"-yr-baseline."$YEAR"."$DOY"."$TYPE".img file already exists" - fi - 
-################################################################# -################################################################# - - - # now continue with generic PRODUCT calculations - - # only if 1-yr product, finish calculating ALC products - if [ $PRODUCT -eq 1 ]; then - -################################################################# -#ALC and ALC2, for 1-yr only -################################################################# -# calculate Adaptive Length Compositing (ALC) product -# only for 1-year PRODUCT -# using recent priority values current nrt view -# standard way, (observed-expected)/expected NDVI -# clamp departure values between 2 and 254 -# if either current or baseline is 254 water, then 0 water -# if BOTH current AND baseline are 255 no value, then 255 no value - -# 252 is nodata coming in -# 1 is nodata going out - -# 0 = water -# 255 = no NDVI value - - - if [ ! -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img ] - then - echo "File does not exist for $ALCpath/ALC/ALC2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img" - -echo "Calculating ALC product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ -+((A==254)|(B==254))*0\ -+((A==255)&(B==255))*255\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/maxMODISalc.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmaxmax.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - - -################################################################# -# calculate Adaptive Length Compositing (ALC2) sqrt product -# only for 1-year PRODUCT -# using recent priority values current view -# sqrt of expected, (observed-expected)/sqrt(expected+1) -# must use native NDVI scaling -# clamp departure values between 2 and 254 -# if either current or baseline is 254 water, then 0 water -# if BOTH current AND baseline are 255 no value, then 255 - -# 252 is nodata coming in -# 1 is nodata going out - - -# 0 = water -# 255 = no NDVI value - -echo "Calculating ALC2 sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ -+((A==254)|(B==254))*0\ -+((A==255)&(B==255))*255\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/maxMODISalc.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmaxmax.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - - -################################################################# -# ALC and ALC2 split, reproject, re-join - -# split out two mask categories, 0 water and 255 nodata from ALC product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ALC product ..." - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Splitting out data from two masks in ALC product ..." 
- -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -################################################################# -# reproject ALC product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ALC product data with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -echo "Warping ALC product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv 
$ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Rejoining ALC product masks with data" - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -# take out the trash here -rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img* -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* - - -################################################################# -################################################################# -# split out two mask categories, 0 water and 255 nodata from ALC2 sqrt product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ALC2 sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Splitting out data from two masks in ALC2 sqrt product ..." - -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -################################################################# -# reproject ALC2 sqrt product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ALC2 sqrt product with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff 
$ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - -echo "Warping ALC2 sqrt product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Rejoining ALC2 sqrt product masks with data" - -# probably gdal_calc assigns 255 as default nodata here - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite -rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -# take out the trash here -rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img* -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* - - - else - echo "$ALCpath/ALC/ALC2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img file already exists" 
- fi - -################################################################# -################################################################# - - - fi # PRODUCT was 1, just calculated ALC and ALC2 products - - - # generate FW and FW2 divison products for $PRODUCT-year - # split, reproject, re-paste -################################################################# -################################################################# -# calculate $PRODUCT-year ForWarn product -# use nrt version for maxMODISmax current view -# standard way, (observed-expected)/expected NDVI -# clamp departure values between 2 and 254 -# if either current or baseline is 254 water, then 0 water -# if BOTH current AND baseline are 255 no value, then 255 no value - - - - if [ ! -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img ] - then - echo "File does not exist for ./"$PRODUCT"-yr-"$subdir"/ForWarn2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img" - - -# 252 is nodata coming in -# 1 is nodata going out - -# 0 = water -# 255 = no NDVI value - -echo "Calculating ForWarn product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ -+((A==254)|(B==254))*0\ -+((A==255)&(B==255))*255\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -################################################################# -# calculate $PRODUCT-year ForWarn sqrt product -# sqrt of expected, (observed-expected)/sqrt(expected+1) -# must use native NDVI scaling -# clamp departure values between 2 and 254 -# if either current or baseline is 254 water, then 0 water -# if BOTH current AND baseline are 255 no value, then 255 - -# 252 is nodata coming in -# 1 is nodata going out - - -# 0 = water -# 255 = no NDVI value - -echo "Calculating ForWarn2 sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ -+((A==254)|(B==254))*0\ -+((A==255)&(B==255))*255\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -################################################################# -# FW and FW2 split, reproject, re-join - -# split out two mask categories, 0 water and 255 nodata from ForWarn product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ForWarn product ..." - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Splitting out data from two masks in ForWarn product ..." 
- -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -################################################################# -# reproject ForWarn product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ForWarn product data with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -echo "Warping ForWarn product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -rm -f 
./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Rejoining ForWarn product masks with data" - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -# take out the trash -rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img* - -################################################################# -################################################################# -# split out two mask categories, 0 water and 255 nodata from ForWarn2 sqrt product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ForWarn2 sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Splitting out data from two masks in ForWarn2 sqrt product ..." - -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -################################################################# -# reproject ForWarn2 sqrt product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ForWarn2 sqrt product with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -rm -f 
./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif - - -echo "Warping ForWarn2 sqrt product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif - - -echo "Rejoining ForWarn2 sqrt product masks with data" - -# probably gdal_calc assigns 255 as default nodata here - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite -rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif -#xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif - -# take out the trash -rm -f 
./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* -rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img* - - else - echo "./$PRODUCT-yr-"$subdir"/ForWarn2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img file already exists" - fi - - -################################################################# -################################################################# - - - else - echo "This "$PRODUCT"-year length baseline is not possible for this year "$YEAR" "$DOY - fi # year is earlier than 2003+$PRODUCT, this length baseline is not possible - - - done # over 1-, 3-, 5- and 10-yr PRODUCTs - echo "1-, 3-, 5- and 10-yr products have been generated!" - - # done with all $PRODUCTS - - -######## calculate three percentiles over all prior max baselines ############ - -# calculate 90th, 50th and 10th percentile baselines over all prior years -# for this DOY - -#******************************************************************* -# this is code that writes code for gdal_calc for 50th and 90th and 10th percentile of all prior years, over the entire MODIS period until 2028 -# much shorter than case statements! 
-#******************************************************************* - -#for prioryear in 2002/A 2003/B 2004/C 2005/D 2006/E 2007/F 2008/G 2009/H 2010/I 2011/J 2012/K 2013/L 2014/M 2015/N 2016/O 2017/P 2018/Q 2019/R 2020/S 2021/T 2022/U 2023/V 2024/W 2025/X 2026/Y 2027/Z 2028/AA -for prioryear in 2003/A 2004/B 2005/C 2006/D 2007/E 2008/F 2009/G 2010/H 2011/I 2012/J 2013/K 2014/L 2015/M 2016/N 2017/O 2018/P 2019/Q 2020/R 2021/S 2022/T 2023/U 2024/V 2025/W 2026/X 2027/Y 2028/Z -do - - yr=`echo $prioryear|awk -F/ '{print $1}'` - ltr=`echo $prioryear|awk -F/ '{print $2}'` - - yrstring=$yrstring" "$yr - ltrstring=$ltrstring" "$ltr - - if [ $yr -eq $LASTYEAR ]; then - break - fi +# +# SCRIPT STARTS NOW +# +################################################################# -done +## +# Init -#******************************************************************* +# Report the status of terminated background jobs immediately, +# rather than before the next primary prompt. This is effective +# only when job control is enabled. +set -b +# Exit immediately upon any error +set -e - if [ ! -f medianallpriormax.$YEAR.$DOY.$TYPE.img ] - then - echo "File does not exist for medianallpriormax."$YEAR"."$DOY"."$TYPE".img" +# Paths to precursors +ALCpath=. +medianallyrmaxpath=. -first=` -echo -n "percentile([" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"<251)*"$cac - else - echo -n ",("$cac"<251)*"$cac - fi -done +# Extract the year and julian day from the datestring argument. +YEAR=`echo $1 | cut -c1-4` +DOYWANTED=`echo $1 | cut -c5-7` -echo -en "],50,axis=0)+(" -for cac in $ltrstring +# Find the desired day of year (DOY) and three-day interval. 
+for INTERVALs in 361/353/345 353/345/337 345/337/329 337/329/321 329/321/313 321/313/305 313/305/297 305/297/289 297/289/281 289/281/273 281/273/265 273/265/257 265/257/249 257/249/241 249/241/233 241/233/225 233/225/217 225/217/209 217/209/201 209/201/193 201/193/185 193/185/177 185/177/169 177/169/161 169/161/153 161/153/145 153/145/137 145/137/129 137/129/121 129/121/113 121/113/105 113/105/097 105/097/089 097/089/081 089/081/073 081/073/065 073/065/057 065/057/049 057/049/041 049/041/033 041/033/025 033/025/017 025/017/009 017/009/001 009/001/361 001/361/353 do - if [ $cac == A ]; then - echo -n "("$cac"==254)" - else - echo -n "&("$cac"==254)" - fi -done -echo -e ")*0+( (" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"!=254)" - else - echo -n "|("$cac"!=254)" - fi -done + DOY=`echo $INTERVALs | awk -F/ '{print $1}'` + if [ $DOY -eq $DOYWANTED ]; then + break + fi -echo -n ")&(" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac">250)" - else - echo -n "&("$cac">250)" - fi -done +done -echo -n ") )*255" -` +echo "NRT DOY to download is " $DOY +# this is the day the 8-day period BEGINS +echo "The three INTERVALs needed are " $INTERVALs +echo "these are the days the 8-day periods BEGIN" -second=` -yr=2003 -for cac in $ltrstring +# build output directories if needed +for dirs in 1-yr-max 3-yr-max 5-yr-90 10-yr-90 median-all-yr-max ALC pctprogress do - echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " - let yr=yr+1 -done -` + if [ ! 
-d "${dirs}" ];then + echo "Need to make subdirectory " $dirs + mkdir $dirs + fi +done -echo $first -echo $second - -gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=medianallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite +# get rid of any existing or partial gz downloads +rm -f *.gz* || true + +let LASTYEAR=$YEAR-1 + +# The three julian days to make precursors for +DOY1=`echo $INTERVALs | awk -F"/" '{print $1}'` +DOY2=`echo $INTERVALs | awk -F"/" '{print $2}'` +DOY3=`echo $INTERVALs | awk -F"/" '{print $3}'` + +echo $DOY1 $DOY2 $DOY3 + +# Calculates the correct year for each julian day. +# The first two julian days of the year have at least one day in the +# corresponding interval that occurred in year previous to what was supplied +# in the datestring argument. +case $DOY1 in +001) + YRDOY3=$YEAR/$DOY1 + YRDOY2=$LASTYEAR/$DOY2 + YRDOY1=$LASTYEAR/$DOY3;; +009) + YRDOY3=$YEAR/$DOY1 + YRDOY2=$YEAR/$DOY2 + YRDOY1=$LASTYEAR/$DOY3;; +*) + YRDOY3=$YEAR/$DOY1 + YRDOY2=$YEAR/$DOY2 + YRDOY1=$YEAR/$DOY3;; +esac + +echo $YEAR $INTERVALs $YRDOY3 $YRDOY2 $YRDOY1 + +# parse YRDOY3 YRDOY2 YRDOY1 into paired years and dates +YR3=`echo $YRDOY3 | awk -F/ '{print $1}'` +DOY3=`echo $YRDOY3 | awk -F/ '{print $2}'` +YR2=`echo $YRDOY2 | awk -F/ '{print $1}'` +DOY2=`echo $YRDOY2 | awk -F/ '{print $2}'` +YR1=`echo $YRDOY1 | awk -F/ '{print $1}'` +DOY1=`echo $YRDOY1 | awk -F/ '{print $2}'` + + +YEAR=$YR3 +DOY=$DOY3 + +SECONDDATETYPE=std +FIRSTDATETYPE=std + +THIRDDATETYPE=std + +# Assume maxMODIS and maxMODISmax precursors already exist + +# Check if std is available first +if [ ! -f maxMODIS.$YR3.$DOY3.std.img ]; then + THIRDDATETYPE=nrt + if [ ! -f maxMODIS.$YR3.$DOY3.nrt.img ]; then + echo "Missing maxMODIS.$YR3.$DOY3.nrt.img... 
Exiting" + exit 1 +fi +fi -#gdal_translate medianallpriormax.$YEAR.$DOY.$TYPE.img -of GTiff medianallpriormax.$YEAR.$DOY.$TYPE.tif -#rm *.xml -#gdalinfo -stats -hist medianallpriormax.$YEAR.$DOY.$TYPE.img -#xv medianallpriormax.$YEAR.$DOY.$TYPE.tif +if [ ! -f maxMODIS.$YR2.$DOY2.std.img ]; then + echo "Missing maxMODIS.$YR2.$DOY2.std.img... Exiting" + exit 1 +fi - else - echo "medianallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" - fi +if [ ! -f maxMODIS.$YR1.$DOY1.std.img ]; then + echo "Missing maxMODIS.$YR1.$DOY1.std.img... Exiting" + exit 1 +fi -#******************************************************************* -#******************************************************************* - if [ ! -f 90thallpriormax.$YEAR.$DOY.$TYPE.img ] - then - echo "File does not exist for 90thallpriormax."$YEAR"."$DOY"."$TYPE".img" +################################################################# +# +# maxMODISalc Precursor Generation +# +################################################################# +## +# +# Create the maxMODISalc nrt numerator across the three dates for all products +# consisting of two std dates and one nrt date (ideally). +# +# Find 24-day Adaptive Length Compositing (ALC) current image for 1-year product only. +# +# Raster calculation summary: +# +# make triple DOY maximum value composite +# if ANY of three are 254, then 254 = water +# if ALL three are 255, then 255 = no data +# +# Raster calculation summary: +# +# Output 254 (water): +# - any of the three pixels are 254 +# +# Output 255: +# - any of the three pixels are 255 +# +# NoData value is 252 -first=` -echo -n "percentile([" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"<251)*"$cac - else - echo -n ",("$cac"<251)*"$cac - fi -done +echo +echo "***************************************************************" +echo +echo "Creating maxMODISalc for " $YEAR $DOY "..." 
-echo -en "],90,axis=0)+(" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"==254)" - else - echo -n "&("$cac"==254)" - fi -done +gdal_calc.py --debug --calc="\ +(C<251)*C\ ++((B<251)&(C>250))*B\ ++((A<251)&(B>250)&(C>250))*A\ ++((A==254)|(B==254)|(C==254))*254\ ++((A==255)&(B==255)&(C==255))*255\ +" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.img -A maxMODIS.$YR1.$DOY1.$FIRSTDATETYPE.img -B maxMODIS.$YR2.$DOY2.$SECONDDATETYPE.img -C maxMODIS.$YR3.$DOY3.$THIRDDATETYPE.img --type=Byte --overwrite -echo -e ")*0+( (" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"!=254)" - else - echo -n "|("$cac"!=254)" - fi -done +rm -f $ALCpath/maxMODISalc.$YR3.$DOY3.$THIRDDATETYPE.tif || true -echo -n ")&(" -for cac in $ltrstring +# loop to make 1-, 3-, 5-, and 10-year products +for PRODUCT in 1 3 5 10 do - if [ $cac == A ]; then - echo -n "("$cac">250)" - else - echo -n "&("$cac">250)" - fi -done - -echo -n ") )*255" -` + echo "Making " $PRODUCT"-yr product now ..." + TYPE=std + + let MINYEAR=$PRODUCT+2003 + echo "MINYEAR is " $MINYEAR + + if [ $YEAR -ge $MINYEAR ]; then + echo $PRODUCT "-year max baseline is possible for year " $YEAR + + # set denominator file and output subdirectory acc to PRODUCT + if [ $PRODUCT -lt 5 ]; then + subdir=max + denomfile=max + else + subdir=90 + denomfile=90 + fi + + # test whether maxMODISmaxmax std max or maxMODISmax90 over all prior years, over 3 dates, + # and over 2 sensors already exists for denominator baseline + if [ ! 
-f maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img ]; then + echo "File does not exist for maxMODISmax"$denomfile"."$PRODUCT"-yr-baseline."$YEAR"."$DOY"."$TYPE".img" + + ######## make $PRODUCT-year product ######################################### + + # generate $PRODUCT-year maxMODISmaxmax maximum baseline or maxMODISmax90 percentile baseline over the $PRODUCT prior yrsback years + let BACK10=$YEAR-10 + let BACK9=$YEAR-9 + let BACK8=$YEAR-8 + let BACK7=$YEAR-7 + let BACK6=$YEAR-6 + let BACK5=$YEAR-5 + let BACK4=$YEAR-4 + let BACK3=$YEAR-3 + let BACK2=$YEAR-2 + let BACK1=$YEAR-1 + + case $PRODUCT in + 10) + # generate 90th percentile over prior 10 years + gdal_calc.py --debug --calc="\ +percentile([\ +(A<251)*A,(B<251)*B,(C<251)*C,\ +(D<251)*D,(E<251)*E,(F<251)*F,\ +(G<251)*G,(H<251)*H,(I<251)*I,\ +(J<251)*J\ +],90,axis=0)\ ++((A==254)&(B==254)&(C==254)&(D==254)&(E==254)&(F==254)&(G==254)&(H==254)&(I==254)&(J==254))*254\ ++( (\ +(A!=254)|(B!=254)|(C!=254)|(D!=254)|(E!=254)|(F!=254)|(G!=254)|(H!=254)|(I!=254)|(J!=254)\ + )&\ +( \ +(A>250)&(B>250)&(C>250)&(D>250)&(E>250)&(F>250)&(G>250)&(H>250)&(I>250)&(J>250)\ + ) )*255\ +" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax90.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK10.$DOY3.$TYPE.img -B maxMODISmax.$BACK9.$DOY3.$TYPE.img -C maxMODISmax.$BACK8.$DOY3.$TYPE.img -D maxMODISmax.$BACK7.$DOY3.$TYPE.img -E maxMODISmax.$BACK6.$DOY3.$TYPE.img -F maxMODISmax.$BACK5.$DOY3.$TYPE.img -G maxMODISmax.$BACK4.$DOY3.$TYPE.img -H maxMODISmax.$BACK3.$DOY3.$TYPE.img -I maxMODISmax.$BACK2.$DOY3.$TYPE.img -J maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite -second=` -yr=2003 -for cac in $ltrstring -do - echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " - let yr=yr+1 -done -` + ;; + 5) + # generate 90th percentile over prior 5 years + gdal_calc.py --debug --calc="\ +percentile([\ +(A<251)*A,(B<251)*B,(C<251)*C,\ +(D<251)*D,(E<251)*E\ 
+],90,axis=0)\ ++((A==254)&(B==254)&(C==254)&(D==254)&(E==254))*254\ ++( (\ +(A!=254)|(B!=254)|(C!=254)|(D!=254)|(E!=254)\ + )&\ +( \ +(A>250)&(B>250)&(C>250)&(D>250)&(E>250)\ + ) )*255\ +" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmax90.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK5.$DOY3.$TYPE.img -B maxMODISmax.$BACK4.$DOY3.$TYPE.img -C maxMODISmax.$BACK3.$DOY3.$TYPE.img -D maxMODISmax.$BACK2.$DOY3.$TYPE.img -E maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite -#echo $first -#echo $second + ;; + 3) + # generate maximum over prior 3 years + gdal_calc.py --debug --calc="\ +maximum( maximum( (A<251)*A,(B<251)*B ),(C<251)*C )\ ++((A==254)|(B==254)|(C==254))*254\ ++((A==255)&(B==255)&(C==255))*255\ +" --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=maxMODISmaxmax.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img -A maxMODISmax.$BACK3.$DOY3.$TYPE.img -B maxMODISmax.$BACK2.$DOY3.$TYPE.img -C maxMODISmax.$BACK1.$DOY3.$TYPE.img --type=Byte --overwrite -gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=90thallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite + ;; + 1) + # generate maximum over prior year + # for 1-yr product, 1-yr max is the same as the max last year + cp maxMODISmax.$BACK1.$DOY3.$TYPE.img maxMODISmaxmax.$PRODUCT-yr-baseline.$YR3.$DOY3.$TYPE.img + ;; + esac # over all 1-, 3-, 5-, and 10-year cases + + # done with custom calculations to make maxMODISmaxmax or maxMODISmax90 for each case, + + else + echo "maxMODISmax"$denomfile"."$PRODUCT"-yr-baseline."$YEAR"."$DOY"."$TYPE".img file already exists" + fi + + # now continue with generic PRODUCT calculations + # only if 1-yr product, finish calculating ALC products + if [ $PRODUCT -eq 1 ]; then + + ################################################################# + #ALC and ALC2, for 1-yr only + 
################################################################# + # calculate Adaptive Length Compositing (ALC) product + # only for 1-year PRODUCT + # using recent priority values current nrt view + # standard way, (observed-expected)/expected NDVI + # clamp departure values between 2 and 254 + # if either current or baseline is 254 water, then 0 water + # if BOTH current AND baseline are 255 no value, then 255 no value + + # 252 is nodata coming in + # 1 is nodata going out + + # 0 = water + # 255 = no NDVI value + + if [ ! -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img ]; + then + echo "File does not exist for $ALCpath/ALC/ALC2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img" + echo "Calculating ALC product ..." + gdal_calc.py --debug --calc="\ +((A<251)&(B<251))*\ +round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ ++((A==254)|(B==254))*0\ ++((A==255)&(B==255))*255\ +" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/maxMODISalc.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmaxmax.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # calculate Adaptive Length Compositing (ALC2) sqrt product + # only for 1-year PRODUCT + # using recent priority values current view + # sqrt of expected, (observed-expected)/sqrt(expected+1) + # must use native NDVI scaling + # clamp departure values between 2 and 254 + # if either current or baseline is 254 water, then 0 water + # if BOTH current AND baseline are 255 no value, then 255 + # 252 is nodata coming in + # 1 is nodata going out + # 0 = water + # 255 = no NDVI value + + echo "Calculating ALC2 sqrt product ..." 
+ + gdal_calc.py --debug --calc="\ + ((A<251)&(B<251))*\ + round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ + +((A==254)|(B==254))*0\ + +((A==255)&(B==255))*255\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/maxMODISalc.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmaxmax.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # ALC and ALC2 split, reproject, re-join + # split out two mask categories, 0 water and 255 nodata from ALC product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + + echo "Splitting out two mask categories from ALC product ..." + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Splitting out data from two masks in ALC product ..." 
+ # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + # reproject ALC product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ALC product data with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img + rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + echo "Warping ALC product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img + rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Rejoining ALC product masks with data" + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # take out the trash here + rm -f $ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f 
$ALCpath/ALC/ALCLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.img* || true + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f $ALCpath/ALC/ALC.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + + # split out two mask categories, 0 water and 255 nodata from ALC2 sqrt product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ALC2 sqrt product ..." + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Splitting out data from two masks in ALC2 sqrt product ..." 
+ # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + # reproject ALC2 sqrt product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ALC2 sqrt product with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img + rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + echo "Warping ALC2 sqrt product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img + rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Rejoining ALC2 sqrt product masks with data" + # probably gdal_calc assigns 255 as default nodata here + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite + rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # take out the trash here + rm -f 
$ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f $ALCpath/ALC/ALC2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.img* || true + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f $ALCpath/ALC/ALC2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true -#gdal_translate 90thallpriormax.$YEAR.$DOY.$TYPE.img -of GTiff 90thallpriormax.$YEAR.$DOY.$TYPE.tif -#rm *.xml -#gdalinfo -stats -hist 90thallpriormax.$YEAR.$DOY.$TYPE.img -#xv 90thallpriormax.$YEAR.$DOY.$TYPE.tif + else + echo "$ALCpath/ALC/ALC2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img file already exists" + fi - else - echo "90thallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" - fi + fi # PRODUCT was 1, just calculated ALC and ALC2 products + + # generate FW and FW2 division products for $PRODUCT-year + # split, reproject, re-paste + # calculate $PRODUCT-year ForWarn product + # use nrt version for maxMODISmax current view + # standard way, (observed-expected)/expected NDVI + # clamp departure values between 2 and 254 + # if either current or baseline is 254 water, then 0 water + # if BOTH current AND baseline are 255 no value, then 255 no value + if [ ! -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img ] + then + + echo "File does not exist for ./"$PRODUCT"-yr-"$subdir"/ForWarn2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img" + + # 252 is nodata coming in + # 1 is nodata going out + # 0 = water + # 255 = no NDVI value + echo "Calculating ForWarn product ..." 
+ gdal_calc.py --debug --calc="\ + ((A<251)&(B<251))*\ + round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ + +((A==254)|(B==254))*0\ + +((A==255)&(B==255))*255\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # calculate $PRODUCT-year ForWarn sqrt product + # sqrt of expected, (observed-expected)/sqrt(expected+1) + # must use native NDVI scaling + # clamp departure values between 2 and 254 + # if either current or baseline is 254 water, then 0 water + # if BOTH current AND baseline are 255 no value, then 255 + # 252 is nodata coming in + # 1 is nodata going out + # 0 = water + # 255 = no NDVI value + echo "Calculating ForWarn2 sqrt product ..." + gdal_calc.py --debug --calc="\ + ((A<251)&(B<251))*\ + round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ + +((A==254)|(B==254))*0\ + +((A==255)&(B==255))*255\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B maxMODISmax$denomfile.$PRODUCT-yr-baseline.$YEAR.$DOY.$TYPE.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # FW and FW2 split, reproject, re-join + # split out two mask categories, 0 water and 255 nodata from ForWarn product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ForWarn product ..." 
+ gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Splitting out data from two masks in ForWarn product ..." + # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + # reproject ForWarn product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ForWarn product data with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img + rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + + echo "Warping ForWarn product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img + rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + echo "Rejoining ForWarn 
product masks with data" + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + + # take out the trash + rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarnLAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn.$YEAR.$DOY.$PRODUCT-yr-baseline.img* || true + + # split out two mask categories, 0 water and 255 nodata from ForWarn2 sqrt product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ForWarn2 sqrt product ..." + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -A ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + + + echo "Splitting out data from two masks in ForWarn2 sqrt product ..." 
+ + # use 1 as a fill value + + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -A ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + #gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif + #xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif + + + ################################################################# + # reproject ForWarn2 sqrt product to LAEA + # reproject masks and data separately, and re-join + + echo "Warping ForWarn2 sqrt product with gdalwarp" + + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img + rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif || true + #gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif + #xv ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.tif + + + echo "Warping ForWarn2 sqrt product masks with gdalwarp" + + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img 
./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img + rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif || true + #gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif + #xv ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.tif + + + echo "Rejoining ForWarn2 sqrt product masks with data" + + # probably gdal_calc assigns 255 as default nodata here + + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -A ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img -B ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img --type=Byte --overwrite + rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif || true + #gdal_translate ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.img -of GTiff ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.tif + #xv ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.tif + + # take out the trash + rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn2LAEA.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.data.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.masks.img* || true + rm -f ./$PRODUCT-yr-$subdir/ForWarn2.$YEAR.$DOY.$PRODUCT-yr-baseline.img* || true + + else + echo "./$PRODUCT-yr-"$subdir"/ForWarn2LAEA."$YEAR"."$DOY"."$PRODUCT"-yr-baseline.img file already exists" + fi + + else + echo "This "$PRODUCT"-year length baseline is not possible for this year "$YEAR" "$DOY + fi # year 
is earlier than 2003+$PRODUCT, this length baseline is not possible + +done # over 1-, 3-, 5- and 10-yr PRODUCTs +echo "1-, 3-, 5- and 10-yr products have been generated!" + +## +# calculate 90th, 50th and 10th percentile baselines over all prior years +# for this DOY #******************************************************************* -#******************************************************************* +# this is code that writes code for gdal_calc for 50th and 90th and 10th percentile of all prior years, over the entire MODIS period until 2028 +# much shorter than case statements! #******************************************************************* - if [ ! -f 10thallpriormax.$YEAR.$DOY.$TYPE.img ] - then - echo "File does not exist for 10thallpriormax."$YEAR"."$DOY"."$TYPE".img" +for prioryear in 2003/A 2004/B 2005/C 2006/D 2007/E 2008/F 2009/G 2010/H 2011/I 2012/J 2013/K 2014/L 2015/M 2016/N 2017/O 2018/P 2019/Q 2020/R 2021/S 2022/T 2023/U 2024/V 2025/W 2026/X 2027/Y 2028/Z +do + yr=`echo $prioryear|awk -F/ '{print $1}'` + ltr=`echo $prioryear|awk -F/ '{print $2}'` + yrstring=$yrstring" "$yr + ltrstring=$ltrstring" "$ltr -first=` -echo -n "percentile([" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"<251)*"$cac - else - echo -n ",("$cac"<251)*"$cac - fi + if [ $yr -eq $LASTYEAR ]; then + break + fi done -echo -en "],10,axis=0)+(" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"==254)" - else - echo -n "&("$cac"==254)" - fi -done +#******************************************************************* -echo -e ")*0+( (" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac"!=254)" - else - echo -n "|("$cac"!=254)" - fi -done -echo -n ")&(" -for cac in $ltrstring -do - if [ $cac == A ]; then - echo -n "("$cac">250)" - else - echo -n "&("$cac">250)" - fi -done +if [ ! 
-f medianallpriormax.$YEAR.$DOY.$TYPE.img ] +then + echo "File does not exist for medianallpriormax."$YEAR"."$DOY"."$TYPE".img" + + first=` + echo -n "percentile([" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"<251)*"$cac + else + echo -n ",("$cac"<251)*"$cac + fi + done + + echo -en "],50,axis=0)+(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"==254)" + else + echo -n "&("$cac"==254)" + fi + done + + echo -e ")*0+( (" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"!=254)" + else + echo -n "|("$cac"!=254)" + fi + done + + echo -n ")&(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac">250)" + else + echo -n "&("$cac">250)" + fi + done + + echo -n ") )*255" + ` + + second=` + yr=2003 + for cac in $ltrstring + do + echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " + let yr=yr+1 + done + ` + + echo $first + echo $second + + gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=medianallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite -echo -n ") )*255" -` +else + echo "medianallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" +fi -second=` -yr=2003 -for cac in $ltrstring -do - echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " - let yr=yr+1 -done -` +#******************************************************************* -#echo $first -#echo $second +if [ ! 
-f 90thallpriormax.$YEAR.$DOY.$TYPE.img ] +then + echo "File does not exist for 90thallpriormax."$YEAR"."$DOY"."$TYPE".img" + + first=` + echo -n "percentile([" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"<251)*"$cac + else + echo -n ",("$cac"<251)*"$cac + fi + done + + echo -en "],90,axis=0)+(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"==254)" + else + echo -n "&("$cac"==254)" + fi + done + + echo -e ")*0+( (" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"!=254)" + else + echo -n "|("$cac"!=254)" + fi + done + + echo -n ")&(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac">250)" + else + echo -n "&("$cac">250)" + fi + done + + echo -n ") )*255" + ` + + second=` + yr=2003 + for cac in $ltrstring + do + echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " + let yr=yr+1 + done + ` + + gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=90thallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite -gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=10thallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite +else + echo "90thallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" +fi -#gdal_translate 10thallpriormax.$YEAR.$DOY.$TYPE.img -of GTiff 10thallpriormax.$YEAR.$DOY.$TYPE.tif -#rm *.xml -#gdalinfo -stats -hist 10thallpriormax.$YEAR.$DOY.$TYPE.img -#xv 10thallpriormax.$YEAR.$DOY.$TYPE.tif +# START 10thallpriormax + +if [ ! 
-f 10thallpriormax.$YEAR.$DOY.$TYPE.img ] +then + echo "File does not exist for 10thallpriormax."$YEAR"."$DOY"."$TYPE".img" + + first=` + echo -n "percentile([" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"<251)*"$cac + else + echo -n ",("$cac"<251)*"$cac + fi + done + + echo -en "],10,axis=0)+(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"==254)" + else + echo -n "&("$cac"==254)" + fi + done + + echo -e ")*0+( (" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac"!=254)" + else + echo -n "|("$cac"!=254)" + fi + done + + echo -n ")&(" + for cac in $ltrstring + do + if [ $cac == A ]; then + echo -n "("$cac">250)" + else + echo -n "&("$cac">250)" + fi + done + + echo -n ") )*255" + ` + + second=` + yr=2003 + for cac in $ltrstring + do + echo -n " -"$cac" maxMODISmax."$yr".$DOY.$TYPE.img " + let yr=yr+1 + done + ` + + gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=10thallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite - else - echo "10thallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" - fi +else + echo "10thallpriormax."$YEAR"."$DOY"."$TYPE".img file already exists" +fi -#******************************************************************* #******************************************************************* ##### this is an example of the gdal_calc.py --debug code that this code writes ### #******************************************************************* # calculate mean, max, 90th and 50th median baselines over all prior years # for this DOY #case $numyrsprior in - #16) # 2019 thru P + #16) # 2019 thru P # #gdal_calc.py --debug --calc="\ #percentile([\ @@ -1817,341 +1127,230 @@ gdal_calc.py --debug --calc="`echo $first`" `echo $second` --outfile=10thallprio # #;; - - - ################################################################# ######## 90th percentile all prior max 
products ############################## +if [ ! -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img ] +then + + echo "File does not exist for ./pctprogress/ForWarnLAEA.pctprogressallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img" + + # calculate the pctprogressallpriormax product for this $DOY using this baseline + # standard way, (observed-expected)/expected NDVI + # clamp departure values between 2 and 254 + # if either current or baseline is 254 water, then 0 water + # if BOTH current AND baseline are 255 no value, then 255 no value + # 252 is nodata coming in + # 1 is nodata going out + # 0 = water + # 255 = no NDVI value + echo "Calculating pctprogress all prior max product ..." + gdal_calc.py --debug --calc="\ + ((A<251)&(B<251)&(C<251))*\ + round_(((\ + (A.astype(float)-C.astype(float)) / (C.astype(float)-B.astype(float)).clip(1)\ + )*128+127).clip(2,254)).astype(uint8)\ + +((A==254)|(B==254)|(C==254))*0\ + +(( (A!=254)|(B!=254)|(C!=254) )&( (A>250)&(B>250)&(C>250) ))*255\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B 10thallpriormax.$YEAR.$DOY.$TYPE.img -C medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif || true + + # split and reproject ForWarn pctprogress all prior max product + # split out two mask categories, 0 water and 255 nodata from all prior max product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ForWarn pctprogress product ..." 
+ gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Splitting out data from two masks in ForWarn pctprogressallpriormax product ..." + # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + # reproject ForWarn pctprogressallpriormax product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ForWarn pctprogressallpriormax product data with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img + rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + echo "Warping ForWarn pctprogressallpriormax product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img 
./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img + rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Rejoining ForWarn pctprogressallpriormax product masks with data" + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite + rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif || true + + # take out the trash + rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* || true + + echo "pctprogressallpriormax is done!" - if [ ! 
-f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img ] - then - echo "File does not exist for ./pctprogress/ForWarnLAEA.pctprogressallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img" - - - # calculate the pctprogressallpriormax product for this $DOY using this baseline -# standard way, (observed-expected)/expected NDVI -# clamp departure values between 2 and 254 -# if either current or baseline is 254 water, then 0 water -# if BOTH current AND baseline are 255 no value, then 255 no value - -# 252 is nodata coming in -# 1 is nodata going out - -# 0 = water -# 255 = no NDVI value - -echo "Calculating pctprogress all prior max product ..." -#(((A.astype(float)-B.astype(float)) / (C.astype(float)-B.astype(float).clip(1)))-1.0)\ - -gdal_calc.py --debug --calc="\ -((A<251)&(B<251)&(C<251))*\ -round_(((\ -(A.astype(float)-C.astype(float)) / (C.astype(float)-B.astype(float)).clip(1)\ -)*128+127).clip(2,254)).astype(uint8)\ -+((A==254)|(B==254)|(C==254))*0\ -+(( (A!=254)|(B!=254)|(C!=254) )&( (A>250)&(B>250)&(C>250) ))*255\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B 10thallpriormax.$YEAR.$DOY.$TYPE.img -C medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#gdal_translate ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -of GTiff ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif - - -# split and reproject ForWarn pctprogress all prior max product -# split out two mask categories, 0 water and 255 nodata from all prior max product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo 
"Splitting out two mask categories from ForWarn pctprogress product ..." - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Splitting out data from two masks in ForWarn pctprogressallpriormax product ..." - -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -################################################################# -# reproject ForWarn pctprogressallpriormax product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ForWarn pctprogressallpriormax product data with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata 
None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -echo "Warping ForWarn pctprogressallpriormax product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Rejoining ForWarn pctprogressallpriormax product masks with data" - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite -rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#gdal_translate 
./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -of GTiff ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif - - -# take out the trash -rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f ./pctprogress/ForWarnLAEA.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f ./pctprogress/ForWarn.pctprogressallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* - - echo "pctprogressallpriormax is done!" - - else - echo "./pctprogress/ForWarnLAEA.pctprogressallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" - fi - -################################################################# -################################################################# +else + echo "./pctprogress/ForWarnLAEA.pctprogressallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" +fi ######## median of all prior max products ################################ - if [ ! -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$TYPE.img ] - then - - - # calculate the medianallpriormax product for this $DOY using this baseline - -# if either is 254 water, then 0 water -# if either is NOT 254 water, and both are mask values, then 255 no value - -# 252 is nodata coming in -# 255 is nodata going out - -# 0 = water -# 255 = no NDVI value - -echo "Calculating median of allpriormax product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ -+((A==254)&(B==254))*0\ -+(( (A!=254)|(B!=254) )&( (A>250)&(B>250) ))*255\ -" --NoDataValue=255 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite - - - # split and reproject median of all prior max product -# split out two mask categories, 0 water and 255 nodata from medianall prior max product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ForWarn medianallpriormax product ..." +if [ ! -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$TYPE.img ] +then + + # calculate the medianallpriormax product for this $DOY using this baseline + + # if either is 254 water, then 0 water + # if either is NOT 254 water, and both are mask values, then 255 no value + + # 252 is nodata coming in + # 255 is nodata going out + + # 0 = water + # 255 = no NDVI value + + echo "Calculating median of allpriormax product ..." 
+ + gdal_calc.py --debug --calc="\ + ((A<251)&(B<251))*\ + round_(( (A.astype(float)-B.astype(float))/ B.clip(1).astype(float)*128+127).clip(2,254)).astype(uint8)\ + +((A==254)&(B==254))*0\ + +(( (A!=254)|(B!=254) )&( (A>250)&(B>250) ))*255\ + " --NoDataValue=255 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + + # split and reproject median of all prior max product + # split out two mask categories, 0 water and 255 nodata from medianall prior max product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ForWarn medianallpriormax product ..." + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Splitting out data from two masks in ForWarn medianallpriormax product ..." 
+ # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + # reproject ForWarn medianallpriormax product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ForWarn medianallpriormax product data with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + echo "Warping ForWarn medianallpriormax product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Rejoining ForWarn medianallpriormax product masks with data" + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" 
--outfile=$medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif || true + + # take out the trash + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* || true -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Splitting out data from two masks in ForWarn medianallpriormax product ..." 
- -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -################################################################# -# reproject ForWarn medianallpriormax product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ForWarn medianallpriormax product data with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -echo "Warping ForWarn medianallpriormax product masks with 
gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Rejoining ForWarn medianallpriormax product masks with data" - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif - -# take out the trash -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f 
$medianallyrmaxpath/median-all-yr-max/ForWarnLAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* - - - else - echo $medianallyrmaxpath"/median-all-yr-max/ForWarnLAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" - fi +else + echo $medianallyrmaxpath"/median-all-yr-max/ForWarnLAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" +fi ######## median of all prior max sqrt product ################################# - if [ ! -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img ] - then - echo "File does not exist for "$medianallyrmaxpath"/median-all-yr-max/ForWarn2LAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img" - - - # calculate the medianallpriormax sqrt product for this $DOY using this baseline - -# if either is 254 water, then 0 water -# if BOTH are NOT 254 water, and both are mask values, then 255 no value - -# 255 is nodata coming in -# 255 is nodata going out - -# 0 = water -# 255 = no NDVI value - -echo "Calculating median of allpriormax sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A<251)&(B<251))*\ -round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ -+((A==254)&(B==254))*0\ -+(( (A!=254)|(B!=254) )&( (A>250)&(B>250) ))*255\ -" --NoDataValue=255 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f ./median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#gdal_translate ./median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -of GTiff ./median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv ./median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif - - - # split and reproject median of all prior max sqrt product -# split out two mask categories, 0 water and 255 nodata from medianall prior max product -# use 128 as a fill value, but not as nodata - -# 1 is nodata coming in -# 1 is nodata going out - -# use 128 as a fill value - -echo "Splitting out two mask categories from ForWarn medianallpriormax sqrt product ..." 
- -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((A>1)&(A<255))*128\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Splitting out data from two masks in ForWarn medianallpriormax sqrt product ..." - -# use 1 as a fill value - -gdal_calc.py --debug --calc="\ -((A>1)&(A<255))*A\ -+((A==0)|(A==255))*1\ -" --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -################################################################# -# reproject ForWarn medianallpriormax sqrt product to LAEA -# reproject masks and data separately, and re-join - -echo "Warping ForWarn medianallpriormax 
sqrt product data with gdalwarp" +if [ ! -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img ] +then + echo "File does not exist for "$medianallyrmaxpath"/median-all-yr-max/ForWarn2LAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img" + + # calculate the medianallpriormax sqrt product for this $DOY using this baseline + # if either is 254 water, then 0 water + # if BOTH are NOT 254 water, and both are mask values, then 255 no value + # 255 is nodata coming in + # 255 is nodata going out + # 0 = water + # 255 = no NDVI value + echo "Calculating median of allpriormax sqrt product ..." + gdal_calc.py --debug --calc="\ + ((A<251)&(B<251))*\ + round_(( ((A.astype(float)*0.004)-(B.astype(float)*0.004))/ sqrt((B.astype(float)*0.004).clip(1).astype(float))*128+127).clip(2,254)).astype(uint8)\ + +((A==254)&(B==254))*0\ + +(( (A!=254)|(B!=254) )&( (A>250)&(B>250) ))*255\ + " --NoDataValue=255 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" -A maxMODISmax.$YEAR.$DOY.$THIRDDATETYPE.img -B medianallpriormax.$YEAR.$DOY.$TYPE.img --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f ./median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif || true + + # split and reproject median of all prior max sqrt product + # split out two mask categories, 0 water and 255 nodata from medianall prior max product + # use 128 as a fill value, but not as nodata + # 1 is nodata coming in + # 1 is nodata going out + # use 128 as a fill value + echo "Splitting out two mask categories from ForWarn medianallpriormax sqrt product ..." 
+ gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((A>1)&(A<255))*128\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Splitting out data from two masks in ForWarn medianallpriormax sqrt product ..." + # use 1 as a fill value + gdal_calc.py --debug --calc="\ + ((A>1)&(A<255))*A\ + +((A==0)|(A==255))*1\ + " --NoDataValue=1 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + # reproject ForWarn medianallpriormax sqrt product to LAEA + # reproject masks and data separately, and re-join + echo "Warping ForWarn medianallpriormax sqrt product data with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif || true + + echo "Warping ForWarn medianallpriormax sqrt product masks with gdalwarp" + gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 
231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif || true + + echo "Rejoining ForWarn medianallpriormax sqrt product masks with data" + gdal_calc.py --debug --calc="\ + ((A==0)|(A==255))*A\ + +((B>1)&(B<255))*B\ + " --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif || true + + # take out the trash + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* || true + rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* || true -gdalwarp -overwrite -multi -t_srs laea.prj -r bilinear -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" 
$medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.tif - - -echo "Warping ForWarn medianallpriormax sqrt product masks with gdalwarp" - -gdalwarp -overwrite -multi -t_srs laea.prj -wm 500 -tr 231.656358264000005 231.656358264000005 -srcnodata 1 -dstnodata None -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.tif - - -echo "Rejoining ForWarn medianallpriormax sqrt product masks with data" - -gdal_calc.py --debug --calc="\ -((A==0)|(A==255))*A\ -+((B>1)&(B<255))*B\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --outfile=$medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -A 
$medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img -B $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img --type=Byte --overwrite -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#gdal_translate $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img -of GTiff $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif -#xv $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.tif - -# take out the trash -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2LAEA.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.data.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.masks.img* -rm -f $medianallyrmaxpath/median-all-yr-max/ForWarn2.medianallpriormax.$YEAR.$DOY.$THIRDDATETYPE.img* - - - else - echo $medianallyrmaxpath"/median-all-yr-max/ForWarn2LAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" - fi - -################################################################# - - echo "ALL products have been generated! DONE!" - - ##else - ##echo "Products already appear to exist for " $YEAR $DOY -##fi +else + echo $medianallyrmaxpath"/median-all-yr-max/ForWarn2LAEA.medianallpriormax."$YEAR"."$DOY"."$THIRDDATETYPE".img file already exists" +fi +echo "ALL products have been generated! DONE!" 
diff --git a/fw2_archive.py b/fw2_archive.py new file mode 100755 index 0000000..f531e47 --- /dev/null +++ b/fw2_archive.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python3 + +from util import * +import logging as log + +load_env() + + +class ForWarn2Archive: + + def update(self, dryrun=False): + # ForWarn 2 products + todo_dates = self.get_todo_dates() + self.make_symlinks_for_dates(todo_dates) + success = True + for d in todo_dates: + # Add a boolean called 'success' to each dict + self.build_date(d, archive=True, dryrun=dryrun) + if not len(todo_dates): + log.info("Already up to date!") + return [] + else: + log.info('Finished production cycle.') + return todo_dates + + + def build_date(self, date, archive=False, dryrun=False): + '''Build a full set of ForWarn 2 products for some date.''' + year = date['year'] + jd = date['jd'] + log.info("Building ForWarn 2 products for {}/{}...\n".format(year, jd)) + c = f'{DODATE_PATH} {year}{jd}' + if not dryrun: + run_process(c) + success = False + if archive: + self.move_products(date, dryrun=dryrun) + if self.is_ok(date, dryrun=dryrun): + success = True + else: + success = False + log.error('Something went wrong while trying to move the product files to their destination.') + date['success'] = success + self.move_precursors(dryrun=dryrun) + return date + + + def is_ok(self, date, dryrun=False): + '''Return True if a full set of products exist for the given date. + + Arguments: + date: dict of the form { 'year': 'YYYY', 'jd': 'DOY' } + ''' + year = date['year'] + jd = date['jd'] + date = self.get_datetime_for_year_jd(year, jd) + file_date = date + datetime.timedelta(days=7) + datestring = file_date.strftime('%Y%m%d') + # Keys are directories used by the dodate bash script to place output files + # Values are the corresponding directory names in the product archive. 
+ tree = self.get_folder_tree() + all_products_exist = True + for source in tree.keys(): + source_dir = FW2_ARCHIVE_DIR_NORMAL if source == 'normal' else FW2_ARCHIVE_DIR_MUTED + for dodate_dir in tree[source].keys(): + dir_path = tree[source][dodate_dir] + if not os.path.exists(dir_path): + log.error("Missing product directory {}!".format(dir_path)) + files = os.listdir(dir_path) + files = list(filter(lambda f: datestring in f, files)) + if not len(files): + log.info("Unable to find file in archive for {} {} {}".format(source_dir, dodate_dir, datestring)) + all_products_exist = False + if not all_products_exist: + log.warn("Missing ForWarn 2 products in the archive for {}/{}...".format(year, jd)) + if dryrun: + return True + return all_products_exist + + + def move_precursors(self, base_dir='.', dryrun=False): + # Remove Aqua.img and Terra.img + try: + os.remove('Aqua.img') + os.remove('Terra.img') + except: + pass + staging_precursors = [ + 'maxMODIS', # this string covers several different precursors + '10thallpriormax', + '90thallpriormax', + 'medianallpriormax' + ] + all_jds = ALL_MODIS_JULIAN_DAYS + files = [ f for f in os.listdir('.') if f.endswith('.img') and not os.path.islink(f) ] + for f in files: + for s in staging_precursors: + if s in f: + for jd in ALL_MODIS_JULIAN_DAYS: + pattern = ".*\d{4}\."+jd+".*\.img" + m = re.search(pattern, f) + if m: + src = os.path.abspath(os.path.join(base_dir, f)) + dst = os.path.abspath(os.path.join(PRECURSORS_DIR, jd, f)) + log.info("Moving precursor:\n {src}\n to...\n {dst}".format(src=src, dst=dst)) + if not dryrun: + shutil.move(src, dst) + + + def move_products(self, date, dryrun=False): + '''Move new products from the staging directories to the archive.''' + normal_filename_checks = FW2_NORMAL_DODATE_FILENAME_CHECK.split(',') + muted_filename_checks = FW2_MUTED_DOATE_FILENAME_CHECK.split(',') + same_checks = list(filter(lambda d: d in muted_filename_checks, normal_filename_checks)) + if len(same_checks): + 
log.info("Duplicate strings detected for detecting if a file output from dodate is \ + either normal or muted. See FW2_(NORMAL|MUTED)_DODATE_FILENAME_CHECK in .env. \ + Each string should be a comma-separated list of values, with no duplicates across both lists.") + sys.exit(1) + year = date['year'] + jd = date['jd'] + tree = self.get_folder_tree() + for meta_type in tree.keys(): + dir_maps = tree[meta_type] + checks = normal_filename_checks if meta_type == 'normal' else muted_filename_checks + for tmp_dir in dir_maps.keys(): + files = [ f for f in os.listdir(tmp_dir) if year in f and jd in f ] + meta_type_files = [] + for f in files: + for s in checks: + if s in f: + meta_type_files.append(f) + for f in meta_type_files: + old_fullpath = os.path.join(tmp_dir, f) + new_filename = self.rename_dodate_filename(f) + new_fullpath = os.path.join(tree[meta_type][tmp_dir], new_filename) + log.info("Moving {0}\n to \n{1}\n".format(old_fullpath, new_fullpath, date)) + if not dryrun: + shutil.copyfile(old_fullpath, new_fullpath) + # TODO try block? 
+ os.remove(old_fullpath) + + + def get_folder_tree(self): + # Assume product directories are nested below the "meta" product type dir + # (normal or muted/sqrt) + tree = { + 'normal': { + FW2_TMP_DIR_1YR: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_1YR), + FW2_TMP_DIR_3YR: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_3YR), + FW2_TMP_DIR_5YR: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_5YR), + FW2_TMP_DIR_ALC: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_ALC), + FW2_TMP_DIR_MEDIAN: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_MEDIAN), + FW2_TMP_DIR_10YR: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_10YR), + FW2_TMP_DIR_PCTPROGRESS: os.path.join(FW2_ARCHIVE_DIR_NORMAL, FW2_PRODUCT_DIR_PCTPROGRESS) + }, + 'muted': { + FW2_TMP_DIR_1YR: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_1YR), + FW2_TMP_DIR_3YR: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_3YR), + FW2_TMP_DIR_5YR: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_5YR), + FW2_TMP_DIR_ALC: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_ALC), + FW2_TMP_DIR_MEDIAN: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_MEDIAN), + FW2_TMP_DIR_10YR: os.path.join(FW2_ARCHIVE_DIR_MUTED, FW2_PRODUCT_DIR_10YR), + # No PCTPROGRESS for muted/sqrt products + } + } + return tree + + + def rename_dodate_filename(self, filename): + '''Construct the final filename for a ForWarn 2 product file given + the filename created by the dodate script.''' + m = re.search("(.*)(\d{4})\.(\d{3})(.*)", filename) + if m: + pieces = list(m.groups()) + year = str(pieces[1]) + # Add 7 days to the julian day in the given filename + jd = str(int(pieces[2]) + 7) + date = datetime.datetime.strptime('{}{}'.format(year, jd), '%Y%j') + datestring = date.strftime('%Y%m%d') + new_pieces = [ pieces[0], datestring, pieces[3] ] + new_filename = ''.join(new_pieces) + return new_filename + else: + log.info("Failed to rename filename: {0}".format(filename)) + + + def get_todo_dates(self): + 
'''Get a list of potential dates for which ForWarn 2 products may be built. + + Return a list of MODIS product dates in the past two years for which: + + 1. Enough time has passed that NRT data for that date may be available. + 2. A complete set of ForWarn 2 products does not exist. + + In theory NRT data should be available for these dates, but it's possible the data is late. + ''' + all_days = ALL_FW2_JULIAN_DAYS + today = datetime.datetime.today() + today_year = today.strftime('%Y') + last_year = str(int(today_year) - 1) + this_year_todo_dates = map(lambda jd: self.get_datetime_for_year_jd(today_year, jd), all_days) + last_year_todo_dates = map(lambda jd: self.get_datetime_for_year_jd(last_year, jd), all_days) + potential_this_year_todo_dates = self.filter_unavailable_modis_dates(this_year_todo_dates) + potential_last_year_todo_dates = self.filter_unavailable_modis_dates(last_year_todo_dates) + potential_todo_dates = potential_this_year_todo_dates + potential_last_year_todo_dates + potential_todo_date_dicts = list(map(self.get_year_jd_config_for_datetime, potential_todo_dates)) + todo_dates = list(filter(lambda d: not self.is_ok(d), potential_todo_date_dicts)) + return todo_dates + + + def get_datetime_for_year_jd(self, year, jd): + '''Return a datetime object for a date given a year and day of the year.''' + return datetime.datetime.strptime('{}{}'.format(year, jd), '%Y%j') + + + def get_year_jd_config_for_datetime(self, date): + '''Given a datetime object, return a dictionary of the form + { 'year': 'YYYY', 'jd': 'JJJ' } where JJJ is a zero-padded day of the year. 
+ ''' + return { 'year': date.strftime('%Y'), 'jd': date.strftime('%j') } + + + def filter_unavailable_modis_dates(self, dates): + '''Given a list of MODIS product dates, remove any dates that are + either in the future, or are simply too near the present for products + to be available yet (at most 8 days in the past from the current day).''' + day_delta = 8 + today = datetime.datetime.today() + return list(filter(lambda d: d <= today - datetime.timedelta(days=day_delta), dates)) + + + def get_three_dates(self, year, jd): + '''Given the year and julian day for an 8-day MODIS product, return a list of + three MODIS product dates: the supplied date first, followed by the previous two + MODIS dates. These three dates represent the three 8-day MODIS cycles that + form the basis of a 24-day ForWarn 2 cycle. + + The dates returned are repreesented as dictionaries with two keys: 'year' and 'jd'. + ''' + all_jds = ALL_MODIS_JULIAN_DAYS + dates = list(map(lambda d: { 'jd': d, 'year': year }, all_jds)) + if jd == '009': + dates[2] = { 'jd' : '361', 'year' : str(int(year)-1) } + if jd == '001': + dates[1] = { 'jd' : '361', 'year' : str(int(year)-1) } + dates[2] = { 'jd' : '353', 'year' : str(int(year)-1) } + return dates + + + def make_symlinks_for_dates(self, dates): + '''Make symbolic links in the current directory to any precursors that may be useful for creating ForWarn 2 products for the given dates. 
+ + dates: a list of dicts, each with two keys, 'year' and 'jd' + ''' + jds = [] + log.debug("Symlinking precursor files to this directory...") + for d in dates: + year = d['year'] + jd = d['jd'] + jd_dir = os.path.join(PRECURSORS_DIR, jd) + three_jds = [ d['jd'] for d in self.get_three_dates(year, jd) ] + jds.extend(three_jds) + jds = sorted(set(jds)) + for jd in jds: + self.link_by_pattern(PRECURSORS_DIR, '.', ".*\d{4}\."+jd+".*\.img") + + + def link_by_pattern(self, source_dir, dest_dir, pattern): + '''Recursively walk source_dir and make a symlink in dest_dir for every file found if the file matches the supplied regex pattern.''' + found_at_least_one_match = False + for root, dirs, files in os.walk(source_dir): + for filename in files: + m = re.search(pattern, filename) + if m: + found_at_least_one_match = True + src = os.path.abspath(os.path.join(root, filename)) + dst = os.path.abspath(os.path.join(dest_dir, filename)) + try: + os.symlink(src, dst) + except: + pass + else: + pass + if not found_at_least_one_match: + log.warn("No files found to make symbolic links for!") + diff --git a/gdal_docker/Dockerfile b/gdal_docker/Dockerfile new file mode 100755 index 0000000..5489297 --- /dev/null +++ b/gdal_docker/Dockerfile @@ -0,0 +1,25 @@ +FROM ubuntu:18.04 + +ARG DKR_BUILD_DIR +ARG DKR_USER +ARG DKR_GROUP +ARG DKR_USER_ID +ARG DKR_GROUP_ID + +MAINTAINER NEMAC Dev Team + +# SMTP +EXPOSE 587 +EXPOSE 25 +# HTTP +EXPOSE 80 + +RUN groupadd -g ${DKR_GROUP_ID} ${DKR_GROUP} && \ + useradd -l -u ${DKR_USER_ID} -g ${DKR_GROUP_ID} ${DKR_USER} + +WORKDIR $DKR_BUILD_DIR + +ADD ./install $DKR_BUILD_DIR/install +ADD ./requirements.txt $DKR_BUILD_DIR/requirements.txt + +RUN ./install diff --git a/gdal_docker/install b/gdal_docker/install new file mode 100755 index 0000000..891666c --- /dev/null +++ b/gdal_docker/install @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +apt-get update && \ +apt-get install -y \ + software-properties-common + +add-apt-repository -y ppa:ubuntugis/ppa + 
+apt-get update && \
+apt-get install -y \
+  gdal-bin=2.4.2+dfsg-1~bionic0
+
+apt-get update && \
+apt-get install -y \
+  python-gdal \
+  python3-pip \
+  wget
+
+pip3 install -r requirements.txt
diff --git a/gdal_docker/requirements.txt b/gdal_docker/requirements.txt
new file mode 100755
index 0000000..41ae542
--- /dev/null
+++ b/gdal_docker/requirements.txt
@@ -0,0 +1,3 @@
+rasterio
+pytz
+requests
diff --git a/gimms.py b/gimms.py
new file mode 100644
index 0000000..e3c7464
--- /dev/null
+++ b/gimms.py
@@ -0,0 +1,170 @@
+
+import subprocess
+import os, gzip, glob, shutil
+import requests
+
+from util import *
+
+class Gimms:
+
+  _file_ext = 'img'
+
+  _file_prefix = 'maxMODIS'
+
+  _tilesets = [
+    { 'x': ['06'], 'y': ['04', '05', '06'] },
+    { 'x': ['07', '08', '09'], 'y': [ '04', '05', '06', '07' ] },
+    { 'x': ['10', '11'], 'y': ['04', '05', '06', '07' ] },
+    { 'x': ['12'], 'y': ['04', '05', '07'] }
+  ]
+
+  _satellites = { 'Terra': 'GMO', 'Aqua': 'GMY' }
+
+  _url_template = 'https://gimms.gsfc.nasa.gov/MODIS/{ptype}/{sat}D09Q1/tif/NDVI/{year}/{jd}/{sat}D09Q1.A{year}{jd}.08d.latlon.x{x}y{y}.6v1.NDVI.tif.gz'
+
+  def _filename_template(self, year, jd, sat_name=None, nrt=False, prefix=None, ext=None):
+    ext = ext or self._file_ext
+    prefix = prefix or self._file_prefix
+    ptype = 'nrt' if nrt else 'std'
+    if not sat_name:
+      filename = f'{prefix}.{year}.{jd}.{ptype}.{ext}'
+    else:
+      filename = f'{prefix}.{year}.{jd}.{ptype}.{sat_name}.{ext}'
+    return filename
+
+  def check(self, year, jd, nrt=False):
+    ptype = 'NRT' if nrt else 'STD'
+    print(f'Checking GIMMS server for {ptype} / {year} / {jd}...')
+    b = self._check_date(year, jd, nrt=nrt)
+    return b
+
+  def _check_date(self, year, jd, nrt=False):
+    ptype = 'nrt' if nrt else 'std'
+    for sat_name in self._satellites.keys():
+      for url in self._get_tile_urls(year=year, jd=jd, sat_name=sat_name, nrt=nrt):
+        r = requests.head(url)
+        if not r.ok:
+          raise DataNotFoundError(f'GIMMS {ptype} data not available for {year} / {jd}.')
+ + def get(self, year, jd, out_dir='.', tmp_dir='./tmp', nrt=False, check=False): + out_path = self._get(year, jd, out_dir=out_dir, tmp_dir=tmp_dir, nrt=nrt, check=check) + return out_path + + def _get(self, year, jd, out_dir='.', tmp_dir='./tmp', nrt=False, check=False): + out_path = os.path.join(out_dir, self._filename_template(year, jd, nrt=nrt)) + ptype = 'nrt' if nrt else 'std' + if os.path.exists(out_path): + print(f'Found {out_path}...') + return out_path + print(f'Creating 8-day {ptype} Aqua/Terra maximum for {year} / {jd}...') + if not os.path.exists(tmp_dir): + os.mkdir(tmp_dir) + if not os.path.exists(out_dir): + os.mkdir(out_dir) + if check: + self._check_date(year, jd, nrt=nrt) + # TODO output to a temporary file first instead of removing the existing one + paths = [] + for sat_name in self._satellites.keys(): + path = os.path.join(tmp_dir, self._filename_template(year, jd, sat_name=sat_name, nrt=nrt)) + tiles = self._get_tiles(year, jd, sat_name=sat_name, tmp_dir=tmp_dir, nrt=nrt) + error = self._merge_tiles(out_path=path, tiles=tiles) + paths.append(path) + p1, p2 = paths[0], paths[1] + self._calc_max(p1=p1, p2=p2, out_path=out_path) + return out_path + + def _get_tiles(self, year, jd, sat_name=None, tmp_dir=None, nrt=False): + tiles = [ self._get_tile(url, tmp_dir) for url in self._get_tile_urls(year, jd, sat_name=sat_name, nrt=nrt) ] + return tiles + + def _get_tile(self, url, tmp_dir): + filename = url.split('/')[-1] + gz_path = os.path.join(tmp_dir, filename) + tif_path = gz_path.rstrip('.gz') + if os.path.exists(tif_path): + return tif_path + try: + r = requests.get(url) + except: + raise DataNotFoundError + if not r.ok: + raise DataNotFoundError + with open(gz_path, 'wb') as fd: + for chunk in r.iter_content(chunk_size=128): + fd.write(chunk) + self._gunzip(gz_path) + return tif_path + + def _get_tile_urls(self, year, jd, sat_name=None, nrt=False): + ptype = 'nrt' if nrt else 'std' + sat = self._satellites[sat_name] + for tileset in 
self._tilesets: + for x in tileset['x']: + for y in tileset['y']: + url = self._get_tile_url(year, jd, x, y, sat, ptype) + yield url + + def _get_tile_url(self, year, jd, x, y, sat, ptype): + return self._url_template.format(year=year, jd=jd, x=x, y=y, sat=sat, ptype=ptype) + + def _merge_tiles(self, out_path, tiles): + paths_str = ' '.join(tiles) + if os.path.exists(out_path): + print(f'Found {out_path}...') + return out_path + if os.path.exists(out_path): + os.remove(out_path) + cmd = f''' + gdal_merge.py + -init 255 + -of HFA + -co "STATISTICS=YES" + -co "COMPRESSED=YES" + -o {out_path} {paths_str} + ''' + run_process(cmd) + for p in tiles: + if os.path.exists(p): + os.remove(p) + + def _calc_max(self, p1, p2, out_path, remove_inputs=True): + '''Create maximum NDVI product from the Terra and Aqua 8-day composites.''' + cmd = f'''gdal_calc.py + -A {p1} + -B {p2} + --outfile={out_path} + --calc=" + maximum((A<251)*A,(B<251)*B) + + ( + ((A==253)&(B==253))| + ((A==253)&(B==255))| + ((A==255)&(B==253))| + ((A==255)&(B==255)) + )*255 + + ( + (A==254)|(B==254) + )*254 + " + --format=HFA + --co "STATISTICS=YES" + --co "COMPRESSED=YES" + --NoDataValue=252 + --type=Byte + --overwrite + ''' + run_process(cmd) + if os.path.exists(p1) and remove_inputs: + os.remove(p1) + if os.path.exists(p2) and remove_inputs: + os.remove(p2) + + def _gunzip(self, gz_path, remove=True): + p = gz_path.rstrip('.gz') + with gzip.open(gz_path, 'rb') as f: + file_content = f.read() + with open(p, 'wb') as f: + f.write(file_content) + if remove: + os.remove(gz_path) + diff --git a/make_links.py b/make_links.py deleted file mode 100755 index 63b6eb0..0000000 --- a/make_links.py +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env python - -'''Make symbolic links by walking a source directory rescursively and making a symbolic link to each file in the destination directory.''' - -import os, os.path, re -import argparse - -import sys - -def make_links(source_dir, dest_dir, ext): - for root, dirs, files in 
os.walk(source_dir): - for filename in files: - m = re.search("img$", filename) - if m: - src = os.path.abspath(os.path.join(root, filename)) - dst = os.path.abspath(os.path.join(dest_dir, filename)) - try: - os.symlink(src, dst) - except: - pass - else: - print "Not a .{0} file, so not linking: {1}".format(ext, os.path.join(root, filename)) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('src', help='Source directory') - parser.add_argument('dst', help='Destination directory') - parser.add_argument('--ext', default='img', help='File extension to link') - args = parser.parse_args() - make_links(args.src, args.dst, args.ext) - - -if __name__ == '__main__': - main() diff --git a/make_products b/make_products deleted file mode 100755 index 340ada7..0000000 --- a/make_products +++ /dev/null @@ -1,380 +0,0 @@ -#!/usr/bin/python -''' --- Are there new products waiting to be created? - (Get julian days from the "products not yet created" file that are <= current julian day) - - For each julian day: - - -> Get year for current day - - -- Products for julian day of current year yet to be created? - - (Check if files are already in product directories) - - (After last product for the year is created there will be a window of time - before the new year when products to be created list will be reset but - no new products should be created until the next year hits -- - this check accounts for this case) - - -> Run dodate YYYYDOY - -> Move products over to product directories when finished - -> Remove julian day from list of products to be created - --- Is the products to be created file empty? 
- - -> Assume new year, reset the file -''' - -import os, os.path, sys, re, datetime, shutil -import logging -import argparse -from subprocess import Popen, PIPE, STDOUT, check_output -from tempfile import NamedTemporaryFile - -from Config import * - - -def pad_with_zero(num): - num = int(num) - return str(num) if num > 9 else '0{0}'.format(num) - - -def setup_logging(dryrun, is_cli_run): - now = datetime.datetime.now() - month = pad_with_zero(now.month) - day = pad_with_zero(now.day) - hour = pad_with_zero(now.hour) - minute = pad_with_zero(now.minute) - second = pad_with_zero(now.second) - - dryrun_text = 'dryrun_' if dryrun else '' - cli_run_text = 'cli_run_' if is_cli_run else '' - log_filename = '{0}{1}{2}{3}{4}_{5}h{6}m{7}s.txt'.format(dryrun_text, cli_run_text, now.year, month, day, hour, minute, second) - log_path = os.path.join(MAIN_PATH, 'logs', log_filename) - logging.basicConfig(filename=log_path, level=logging.DEBUG) - logger = logging.getLogger().addHandler(logging.StreamHandler(sys.stdout)) - return log_path - - -def log_subprocess_output(pipe): - for line in iter(pipe.readline, b''): - logging.info('dodate - ' + str(line)) - - -def products_exist(d): - date = datetime.datetime.strptime(d, '%Y%j') - date = date + datetime.timedelta(days=7) - datestring = date.strftime('%Y%m%d') - - for source in SOURCE_DIRS: - for key in PRODUCT_DIRS: - prod_dir = PRODUCT_DIRS[key] - path = os.path.join(PRODUCTS_BASE_PATH, source, prod_dir) - if not os.path.exists(path) or (source == 'ForWarn2_Sqrt' and key == 'pctprogress'): - continue - files = os.listdir(path) - files = list(filter(lambda f: datestring in f, files)) - if not len(files): - return False - return True - - -def renamed(filename): - m = re.search("(.*)(\d{4})\.(\d{3})(.*)", filename) - if m: - pieces = list(m.groups()) - year = '{0}'.format(pieces[1]) - doy = '{0}'.format(int(pieces[2]) + 7) - try: - date = datetime.datetime.strptime('{0}{1}'.format(year, doy), '%Y%j') - datestring = 
date.strftime('%Y%m%d') - except ValueError as e: - logging.error("ValueError trying to make date for file {0}".format(filename, datestring)) - else: - new_pieces = [ pieces[0], datestring, pieces[3] ] - new_filename = ''.join(new_pieces) - return new_filename - else: - logging.error("Failed to rename filename: {0}".format(filename)) - - -def make_test_file(start_text, product_dir, datestring): - year = datestring[:4] - jd = datestring[-3:] - test = open(os.path.join(MAIN_PATH, product_dir, "TEST_{0}.{1}.{2}".format(start_text, year, jd)), 'a') - - -def destroy_test_files(): - logging.info("Destroying test files...\n") - for src_dir in SOURCE_DIRS: - for key in PRODUCT_DIRS: - if src_dir == 'ForWarn2_Sqrt' and key == 'pctprogress': - continue - prod_dir = PRODUCT_DIRS[key] - files = [ p for p in os.listdir(os.path.join(MAIN_PATH, src_dir, prod_dir)) if 'TEST' in p ] - for f in files: - os.remove(os.path.join(MAIN_PATH, src_dir, prod_dir, f)) - - -def harvest_products(datestring, dryrun): - for key in PRODUCT_DIRS: - - fw2_check = 'ForWarnLAEA' if key != 'ALC' else 'ALCLAEA' - fw2_sqrt_check = 'ForWarn2LAEA' if key != 'ALC' else 'ALC2LAEA' - - path = os.path.join(MAIN_PATH, key) - - year = datestring[:4] - day = datestring[-3:] - - if dryrun: - make_test_file(fw2_check, key, datestring) - make_test_file(fw2_sqrt_check, key, datestring) - - files = os.listdir(path) - files = list(filter(lambda f: day in f, files)) - - for f in files: - if fw2_check in f and year in f: - source_dir = 'ForWarn2' - elif fw2_sqrt_check in f and year in f: - source_dir = 'ForWarn2_Sqrt' - else: - continue - old_fullpath = os.path.join(path, f) - new_fullpath = os.path.join(PRODUCTS_BASE_PATH, source_dir, PRODUCT_DIRS[key], renamed(f)) - if os.path.isdir(os.path.join(PRODUCTS_BASE_PATH, source_dir, PRODUCT_DIRS[key])): - logging.info("Moving {0}\n to \n{1}\n".format(old_fullpath, new_fullpath, datestring)) - shutil.copyfile(old_fullpath, new_fullpath) - os.remove(old_fullpath) - - -def 
mail_results(success, datestring, dryrun, log_path): - day = datestring[-3:] - mail_to_addrs = [] - with open(MAIL_TO_ADDRS_FILE) as f: - for addr in f: - mail_to_addrs.append(addr.strip()) - - logging.info("Emailing results to: {0}".format(' '.join(mail_to_addrs))) - - dryrun_body_text = "" if not dryrun else "NOTE: THIS IS A TEST OF THE FORWARN 2 SYSTEM!\n\n" - - if success: - subject_text = "FW2 Day {0} Product Generation".format(day).rstrip() - body_text = "{0}Success! The log is attached to this email.".format(dryrun_body_text) - else: - subject_text = "FAILED: FW2 Day Product Generation {0}".format(day).rstrip() - body_text = "{0}Looks like something went wrong. We'll try again and send another notification email.".format(dryrun_body_text) - - # Read the log file and encode as base64 - with open(log_path) as f: - log_contents = f.read() - - log_encoded = log_contents.encode('base64') - - # Load the email template file - with open(EMAIL_TEMPLATE_FILE) as f: - template_contents = f.read() - - # Replace the placeholder text with subject, content, log filename, and encoded attachment text - log_filename = os.path.split(log_path)[1] - rendered_email = template_contents.replace('EMAIL_SUBJECT_REPLACE', subject_text) - rendered_email = rendered_email.replace('EMAIL_BODY_REPLACE', body_text) - rendered_email = rendered_email.replace('LOG_FILENAME_REPLACE', log_filename) - rendered_email = rendered_email.replace('ATTACHMENT_CONTENT_REPLACE', log_encoded) - - # Make a temporary file with the contents of the email - f = NamedTemporaryFile(delete=False) - f.write(rendered_email) - filename = f.name - f.close() - # Must use full path to sendmail for cron jobs. 
(Cron is not aware of normal environment variables like $PATH) - mail_command = "/usr/sbin/sendmail -f nemacmailer@gmail.com {0} < {1}".format(' '.join(mail_to_addrs), filename) - - # Send the email to each recipient in mail_to_addrs - exit_status = os.system(mail_command) - os.remove(filename) - -def reset_todo_dates_file(): - # If dates is empty, reset the todo_product_days file - logging.info('Resetting todo_product_days file...\n') - os.remove(TODO_DAYS_PATH) - os.system('cp {0} {1}'.format(ALL_DAYS_PATH, TODO_DAYS_PATH)) - - -def get_all_todo_days(): - days = [] - with open(TODO_DAYS_PATH) as f: - for jd in f: - days.append(jd.strip()) - return days - - -def get_todo_dates(overwrite): - days = get_all_todo_days() - # TODO: test this! - if not len(days): - reset_todo_dates_file() - today = datetime.datetime.today() - today = today.strftime('%Y%j') - today = datetime.datetime.strptime(today, '%Y%j') - year = today.strftime('%Y') - dates = map(lambda day: datetime.datetime.strptime('{0}{1}'.format(year, day), '%Y%j'), days) - dates = filter(lambda d: d <= today - datetime.timedelta(days=8), dates) - dates = map(lambda d: d.strftime('%Y%j'), dates) - dates = list(dates) - if not overwrite: - dates = filter(lambda d: not products_exist(d), dates) - return dates - - -def build_products_for_date(datestring, dryrun, log_path): - os.chdir(os.path.join(MAIN_PATH)) - c = [ DODATE_PATH, datestring ] - if not dryrun: - try: - c = [DODATE_PATH, datestring] - process = Popen(c, stdout=PIPE, stderr=STDOUT) - with process.stdout: - log_subprocess_output(process.stdout) - exitcode = process.wait() - except Exception as e: - mail_results(False, datestring, dryrun, log_path) - logging.error("Exception while running dodate script.\n") - logging.error(e) - - -def write_new_todo_days_file(days_to_remove, dryrun): - logging.info('Writing new todo_product_days file...\n') - days = get_all_todo_days() - days = [ d for d in days if d not in days_to_remove ] - new_file_contents = '' - 
for day in days: - new_file_contents += '{0}\n'.format(day) - if not dryrun: - with open(TODO_DAYS_PATH, 'w') as f: - f.write(new_file_contents) - else: - logging.info("Since this is a dryrun we're not going to write a new todo_product_days file. But just for fun, this is what the contents of the new file would be:\n{0}".format(new_file_contents)) - - -def build_products(dates, dryrun, is_cli_run, log_path, no_email): - if dryrun: - logging.info("DRY RUN - NO PRODUCTS WILL BE CREATED\n") - days_to_remove = [] - for datestring in dates: - day = datestring[-3:] - logging.info("Building ForWarn 2 products for year {0}, julian day {1}...\n".format(datestring[:4], day)) - build_products_for_date(datestring, dryrun, log_path) - success = False - # Only harvest products for cron runs - if not is_cli_run: - harvest_products(datestring, dryrun) - if products_exist(datestring): - success = True - days_to_remove.append(day) - else: - success = False - logging.error('Products for day {0} did not make it to the destination folder. We\'ll keep {1} in the todo_product_days file, but something probably went gone wrong in the creation scripts. Check the rest of the log for more details.\n'.format(datestring, day)) - else: - # We're running a CLI run for a specific date. - # Defaulting to true for now - success = True - if not no_email: - mail_results(success, datestring, dryrun, log_path) - if not is_cli_run: - write_new_todo_days_file(days_to_remove, dryrun) - if dryrun: - destroy_test_files() - - -def is_date_argument_ok(arg): - # Must be 7 characters long - if len(arg) != 7: - logging.error("Date argument must be 7 characters long in the form YYYYDOY. Run this script with the -h flag for more details.\n") - return False - - # Must be of the form YYYYDOY - try: - date = datetime.datetime.strptime(arg, '%Y%j') - except ValueError: - logging.error("Date argument is malformed. Must be of the form YYYYDOY where DOY is a zero-padded julian day. 
For example, 003 represents the third day of the year. Run this script with the -h flag for more details.\n") - return False - - # Must be a valid product date - with open(ALL_DAYS_PATH) as f: - all_days = [] - for jd in f: - all_days.append(jd.strip()) - if date.strftime('%j') not in all_days: - logging.error("The julian day you entered is not valid. Please provide a day that aligns with the GIMMS 8-day dropoff schedule (see all_product_days text file for reference). Run this script with the -h flag for more details.\n") - return False - - # Must be a valid year (2001 onward) - today = datetime.datetime.today() - today_year = int(today.strftime('%Y')) - date_year = int(date.strftime('%Y')) - if date_year < 2001: - logging.error("Year entered is invalid. We can only make products from 2001 onward.\n") - return False - if date_year > today_year: - logging.error("Year entered is invalid. We can't make products for a year that hasn't happened yet!\n") - return False - - # All checks passed, assume the datestring is ok - return True - - -def setup_arg_parser(): - parser = argparse.ArgumentParser() - parser.add_argument('--overwrite', action='store_true', help='Overwrite existing products. Use at your own risk!') - parser.add_argument('--dryrun', action='store_true', help='Run this script without actually building any products. Useful for testing.') - parser.add_argument('-d', '--date', help='Build products for a certain date. This argument must be of the form YYYYDOY, where the first four numbers represent a year and the last three numbers are a julian day, padded with zeroes if necessary. For example, to make a 2017 product for the second julian day of the year, this argument would be 2017002.') - parser.add_argument('--no_email', action='store_true', help='Do not send email notifications. 
Useful for testing.') - return parser - - -def check_is_only_instance_or_quit(): - name_of_this_script = sys.argv[0].split('/').pop() - command = "ps -aux | grep %s" % name_of_this_script - stdout = check_output(command, shell=True) - lines = stdout.split('\n') - # Remove empty strings made from the split command - # Remove entries related to the grep command run as part of the process - lines = [ line for line in lines if line != '' and 'grep' not in line ] - if (len(lines) > 1): - # One entry refers to this instance of the script. - # More than one entry means there is another instance of the script running. - logging.info("Another instance of %s is already running. Exiting..." % name_of_this_script) - sys.exit() - - -def main(): - os.chdir(MAIN_PATH) - parser = setup_arg_parser() - args = parser.parse_args() - overwrite = args.overwrite - dryrun = args.dryrun - is_cli_run = bool(args.date) - no_email = args.no_email - log_path = setup_logging(dryrun, is_cli_run) - check_is_only_instance_or_quit() - - if is_cli_run: - if not is_date_argument_ok(args.date): - sys.exit(1) - dates = [ args.date ] - else: - dates = get_todo_dates(overwrite) - - if not len(dates): - logging.info('No dates to process. 
Exiting...\n') - os.remove(log_path) - sys.exit(0) - - build_products(dates, dryrun, is_cli_run, log_path, no_email) - -if __name__ == '__main__': - main() - diff --git a/mime_email_template.txt b/mime_email_template.txt deleted file mode 100644 index b6b6bf2..0000000 --- a/mime_email_template.txt +++ /dev/null @@ -1,20 +0,0 @@ -From:ForWarn 2 System -Subject:EMAIL_SUBJECT_REPLACE -MIME-Version: 1.0 -Content-Type: multipart/mixed; boundary="YWVhZDFlY2QzMGQ2N2U0YTZmODU" - ---YWVhZDFlY2QzMGQ2N2U0YTZmODU -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: quoted-printable - -EMAIL_BODY_REPLACE - ---YWVhZDFlY2QzMGQ2N2U0YTZmODU -Content-Type: application/octet-stream -MIME-Version: 1.0 -Content-Transfer-Encoding: base64 -Content-Disposition: attachment; filename="LOG_FILENAME_REPLACE" - -ATTACHMENT_CONTENT_REPLACE - ---YWVhZDFlY2QzMGQ2N2U0YTZmODU-- diff --git a/netcdf/Config.py b/netcdf/Config.py deleted file mode 100644 index 1f6605e..0000000 --- a/netcdf/Config.py +++ /dev/null @@ -1,21 +0,0 @@ - -# DEV -#MAIN_PATH = '/fsdata4/forwarn2_products/forwarn2_build_dev/netcdf/' - -# PROD -MAIN_PATH = '/fsdata4/forwarn2_products/forwarn2_build_prod/netcdf/' - -STD_DAY_FILES_PATH = './std' - -NRT_DAY_FILES_PATH = './nrt' - -ALL_DAYS_PATH = 'all_product_days' - -YEAR = '2020' - -# Dev -#NETCDF_YEAR_FILE_DIR = '.' 
#!/usr/bin/env python
"""Concatenate per-date 8-day max NDVI netCDFs for one year into a single
year file (maxMODIS.<year>.std.nc) using NCO's `ncecat`.

Fixes over the previous revision:
  * argparse has no ``add_option`` (that is optparse); the --nrt/--std
    flags are now real arguments.
  * ``open(...)`` in get_8day_nc_files_for was missing a closing paren.
  * write_new_todo_days_file called get_all_todo_days() without the
    required prod_type argument.
"""

import datetime
import argparse, os, os.path, sys
import logging
from subprocess import Popen, PIPE, STDOUT


def pad_with_zero(num):
    """Zero-pad a number to at least two digits ('7' -> '07')."""
    num = int(num)
    return str(num) if num > 9 else '0{0}'.format(num)


def log_subprocess_output(pipe):
    """Stream a subprocess output pipe line-by-line into the log."""
    for line in iter(pipe.readline, b''):
        logging.info(str(line))


def setup_logging():
    """Log to a timestamped file and mirror to stdout; return the log path."""
    now = datetime.datetime.now()
    month = pad_with_zero(now.month)
    day = pad_with_zero(now.day)
    hour = pad_with_zero(now.hour)
    minute = pad_with_zero(now.minute)
    second = pad_with_zero(now.second)
    log_filename = 'log_{0}{1}{2}_{3}h{4}m{5}s.log'.format(now.year, month, day, hour, minute, second)
    log_path = os.path.join(log_filename)
    logging.basicConfig(filename=log_path, level=logging.DEBUG)
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
    return log_path


def setup_arg_parser():
    """CLI: -y/--year plus --nrt / --std flags (exactly one required)."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-y', '--year', help='Year to glob files against')
    # Both flags are plain store_true; main() validates that exactly one
    # was supplied.  (The old store_true/store_false pair defaulted std to
    # True, so passing --nrt always tripped the "both flags" error.)
    parser.add_argument('--nrt', action='store_true', help='Append to NRT year file')
    parser.add_argument('--std', action='store_true', help='Append to STD year file')
    return parser


def get_todo_days_path(prod_type):
    """Path of the todo-days list for 'std' or 'nrt' products."""
    # STD_TODO_DAYS_PATH / NRT_TODO_DAYS_PATH are expected from the
    # deployment environment (e.g. a Config import) -- TODO confirm.
    if prod_type == 'std':
        return STD_TODO_DAYS_PATH
    if prod_type == 'nrt':
        return NRT_TODO_DAYS_PATH


def get_all_todo_days(prod_type):
    """Return the list of julian days still to be processed."""
    todo_days_path = get_todo_days_path(prod_type)
    days = []
    with open(todo_days_path) as f:
        for jd in f:
            days.append(jd.strip())
    return days


def write_new_todo_days_file(prod_type, days_to_remove, dryrun):
    """Rewrite the todo-days file without the days in days_to_remove."""
    logging.info('Writing new todo_product_days file...\n')
    days = get_all_todo_days(prod_type)  # was called without prod_type
    days = [d for d in days if d not in days_to_remove]
    new_file_contents = ''
    for day in days:
        new_file_contents += '{0}\n'.format(day)
    if not dryrun:
        with open(get_todo_days_path(prod_type), 'w') as f:
            f.write(new_file_contents)
    else:
        logging.info("Since this is a dryrun we're not going to write a new todo_product_days file. But just for fun, this is what the contents of the new file would be:\n{0}".format(new_file_contents))


def get_8day_nc_files_for(year, prod_type):
    """List this year's per-date netCDF files, ordered by julian day."""
    all_nc_files = [f for f in os.listdir('.') if year in f and prod_type in f and f.endswith('.nc')]
    with open(get_todo_days_path(prod_type)) as f:  # was missing ')'
        all_product_days = [line.rstrip() for line in f.readlines()]
    nc_files = []
    for day in all_product_days:
        for f in all_nc_files:
            if day in f and f not in nc_files:
                nc_files.append(f)
    nc_files = sorted(nc_files)
    logging.info("List of files to append:")
    logging.info(nc_files)
    return nc_files


def main():
    parser = setup_arg_parser()
    args = parser.parse_args()
    year = args.year
    is_nrt = args.nrt
    is_std = args.std
    setup_logging()
    if (is_nrt and is_std) or (not is_nrt and not is_std):
        logging.error("You must supply either the --nrt flag or the --std flag")
        sys.exit(1)
    if not year:
        logging.error('Must pass a year!')
        sys.exit(1)
    if len(year) != 4:
        logging.error('Year must be 4 digits long!')
        sys.exit(1)
    prod_type = 'nrt' if is_nrt else 'std'
    nc_files = get_8day_nc_files_for(year, prod_type)
    # NOTE(review): the output name hardcodes 'std' even for --nrt runs;
    # preserved as-is pending confirmation of the intended convention.
    filename = 'maxMODIS.{0}.std.nc'.format(year)
    c = ['ncecat', '-A']
    c.extend(nc_files)
    c.append(filename)
    logging.info(' '.join(c))
    process = Popen(c, stdout=PIPE, stderr=STDOUT)
    with process.stdout:
        log_subprocess_output(process.stdout)
    process.wait()


if __name__ == '__main__':
    main()
def setup_logging():
    """Log to a timestamped file and mirror to stdout; return the log path."""
    now = datetime.datetime.now()
    month = pad_with_zero(now.month)
    day = pad_with_zero(now.day)
    hour = pad_with_zero(now.hour)
    minute = pad_with_zero(now.minute)
    second = pad_with_zero(now.second)
    log_filename = 'log_{0}{1}{2}_{3}h{4}m{5}s.log'.format(now.year, month, day, hour, minute, second)
    log_path = os.path.join(log_filename)
    logging.basicConfig(filename=log_path, level=logging.DEBUG)
    logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
    return log_path


def setup_arg_parser():
    """CLI: a single optional --dryrun flag."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--dryrun', action='store_true')
    return parser


def check_is_only_instance_or_quit():
    """Exit if another copy of this script is already running."""
    name_of_this_script = sys.argv[0].split('/').pop()
    command = "ps -aux | grep %s" % name_of_this_script
    # check_output returns bytes on Python 3; decode before splitting
    # (the old code called split('\n') on bytes and raised TypeError).
    stdout = check_output(command, shell=True).decode()
    lines = stdout.split('\n')
    # Remove empty strings made from the split command and entries
    # belonging to the grep run as part of the pipeline.
    lines = [line for line in lines if line != '' and 'grep' not in line]
    if len(lines) > 1:
        # One entry refers to this instance of the script; more than one
        # means another instance is already running.
        logging.info("Another instance of %s is already running. Exiting..." % name_of_this_script)
        sys.exit()


def main():
    """Fetch any newly-available 8-day composites, then rebuild and
    install the year netCDF when new std data arrived."""
    os.chdir(MAIN_PATH)
    parser = setup_arg_parser()
    args = parser.parse_args()
    dryrun = args.dryrun
    setup_logging()
    check_is_only_instance_or_quit()
    # Get possible todo days
    todo_dates = get_todo_dates()
    new_stds = []
    for date in todo_dates:
        # Prefer the std product; fall back to nrt when std isn't out yet.
        get_date_nc('std', date, dryrun)
        if max_file_exists('std', date):
            new_stds.append(date)
        else:
            logging.info("Fetching NRT file for date {}...".format(date))
            get_date_nc('nrt', date, dryrun)
    if len(new_stds):
        logging.info('Building {} std netcdf'.format(YEAR))
        make_netcdf(YEAR, dryrun)
        # Replace the old std year file with the freshly built one.
        std_year_f = 'maxMODIS.{}.std.nc'.format(YEAR)
        os.system('mv {} {}'.format(std_year_f, os.path.join(NETCDF_YEAR_FILE_DIR, std_year_f)))


if __name__ == '__main__':
    main()
#!/bin/bash
# Download the Terra + Aqua 8-day NDVI tiles for one date from NASA GLAM,
# mosaic each sensor, take the per-pixel maximum, rescale, and convert the
# result to netCDF (maxMODIS.YEAR.DOY.DATETYPE.nc).
#
# Usage: get_date_netcdf.sh <nrt|std> <YYYY> <DDD>

DATETYPE=$1 # nrt or std
YEAR=$2
DOY=$3 # must be 3 digits, like 001

# remove any existing Terra and Aqua tile tifs
rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif
rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif

echo "Now downloading DOY " $DOY " of TYPE " $DATETYPE " for YEAR " $YEAR " via https from NASA GLAM"

# download 26 Terra tiles for conus
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMOD09Q1/tif/NDVI/$YEAR/$DOY/GMOD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz

# download 26 Aqua tiles for conus
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x06y0{4,5,6}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x0{7,8,9}y0{4,5,6,7}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x1{0,1}y0{4,5,6,7}.6v1.NDVI.tif.gz
wget -e robots=off -nd -nv -np https://gimms.gsfc.nasa.gov/MODIS/$DATETYPE/GMYD09Q1/tif/NDVI/$YEAR/$DOY/GMYD09Q1.A$YEAR$DOY.08d.latlon.x12y0{4,5,7}.6v1.NDVI.tif.gz

# did we get 26 each?
numtiles=$(ls -1 *.gz | wc -l)

if [ "$numtiles" -eq 52 ]; then
  echo "Got 52 tifs for " $YEAR $DOY
else
  echo "ERROR: Tiles MISSING for " $YEAR $DOY " only got " $numtiles
  rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif*
  rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif*
  # was a bare `exit` (status 0), which masked the failure from callers
  exit 1
fi

gunzip *.gz

# mosaic together Terra for this DOY
rm -f Terra.img
gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Terra.img GMOD09Q1.A$YEAR$DOY*.tif
echo "done mosaicking Terra images together"

# mosaic together Aqua for this DOY (glob now consistent with Terra's)
rm -f Aqua.img
gdal_merge.py -v -init 255 -of HFA -co "STATISTICS=YES" -co "COMPRESSED=YES" -o Aqua.img GMYD09Q1.A$YEAR$DOY*.tif
echo "done mosaicking Aqua images together"

# remove the tile tifs now that they are mosaicked
rm -f GMOD09Q1.*.08d.latlon.*.6v1.NDVI.tif*
rm -f GMYD09Q1.*.08d.latlon.*.6v1.NDVI.tif*

# Take the maximum NDVI from the Terra and Aqua 8-day composites.
# If both are 253 or both 255, or either is 253 with the other 255 -> 255
# (but this is NOT nodata).  If either is 254 -> 254 (water).
# 252 is the nodata value coming out.
# (See https://gimms.gsfc.nasa.gov/MODIS/README.txt)
echo "taking the maximum NDVI from the Terra and Aqua 8-day composites"

OUTFILE=maxMODIS.$YEAR.$DOY.$DATETYPE

# find maxval composite of Terra and Aqua and propagate the two mask values
gdal_calc.py --debug -A Terra.img -B Aqua.img --outfile=${OUTFILE}.img --calc="\
maximum((A<251)*A,(B<251)*B)\
+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\
+((A==254)|(B==254))*254\
" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite

rm -f maxMODIS.$YEAR.$DOY.$DATETYPE.tif
rm -f Aqua.img
rm -f Terra.img

# rescale 0-250 raw values to 0-100 and collapse mask values to 255
./rescale_max.sh ${OUTFILE}.img ${OUTFILE}.rescaled.img

gdal_translate -stats -co "COMPRESS=DEFLATE" -of netCDF ${OUTFILE}.rescaled.img ${OUTFILE}.nc

rm -f ${OUTFILE}.img
rm -f ${OUTFILE}.rescaled.img
def is_filelist_contiguous(filelist, all_product_days, year):
    """True if filelist[i] matches the i-th julian day of `year` for every
    i < len(filelist) -- i.e. the list is sorted and has no gaps."""
    for i, day in enumerate(all_product_days):
        if i == len(filelist):
            break
        if '{0}.{1}'.format(year, day) not in filelist[i]:
            return False
    return True


def get_year_netcdf_files(year):
    """Collect this year's per-date std netCDFs ordered by julian day.

    Exits the process if the sequence has gaps or is mis-sorted.
    """
    all_nc_files = [f for f in os.listdir(STD_DAY_FILES_PATH) if year in f and 'std' in f and f.endswith('.nc')]
    with open(ALL_DAYS_PATH) as f:
        all_product_days = [line.rstrip() for line in f.readlines()]
    nc_files = []
    for day in all_product_days:
        for f in all_nc_files:
            if day in f and f not in nc_files:
                nc_files.append(f)
    nc_files = sorted(nc_files)
    if not is_filelist_contiguous(nc_files, all_product_days, year):
        logging.error("We're either missing a day or the file list is not sorted correctly.")
        logging.error("List of files:")
        logging.error(nc_files)
        logging.error('Exiting...')
        sys.exit(1)
    return nc_files


def setup_arg_parser():
    """CLI: -y/--year and --dryrun."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-y', '--year', help='Year to glob files against')
    parser.add_argument('--dryrun', action='store_true')
    return parser


def make_netcdf(year, dryrun):
    """Concatenate the year's per-date netCDFs with `ncecat -A` into
    maxMODIS.<year>.std.nc; the subprocess is skipped when dryrun."""
    nc_files = get_year_netcdf_files(year)
    nc_file_paths = [os.path.join(STD_DAY_FILES_PATH, f) for f in nc_files]
    filename = 'maxMODIS.{0}.std.nc'.format(year)
    c = ['ncecat', '-A']
    c.extend(nc_file_paths)
    c.append(filename)
    logging.info(' '.join(c))
    if not dryrun:
        process = Popen(c, stdout=PIPE, stderr=STDOUT)
        with process.stdout:
            log_subprocess_output(process.stdout)
        process.wait()


def main():
    parser = setup_arg_parser()
    args = parser.parse_args()
    year = args.year
    dryrun = args.dryrun
    setup_logging()
    if not year:
        logging.error('Must pass a year!')
        sys.exit(1)
    if len(year) != 4:
        logging.error('Year must be 4 digits long!')
        sys.exit(1)
    make_netcdf(year, dryrun)


if __name__ == '__main__':
    main()
#!/bin/bash
# Precompute percentile baselines (10th / 50th-median / 90th) across all
# prior-year max NDVI composites for each DOY with gdal_calc.py.
#
# The previous revision generated the three near-identical gdal_calc
# invocations with triplicated inline codegen, and never reset the
# accumulated letter string between loop iterations (a latent bug once
# more than one DOY/YEAR is processed).  Both are fixed by factoring the
# codegen into functions.

pctilepath=/media/disk/fullwrapper2
TYPE=std

# Emit the gdal_calc --calc expression for percentile $2 over the letter
# variables in $1 (e.g. "A B C").  Mask handling: values <251 contribute
# to the percentile; all-254 pixels stay 0 (water); pixels where every
# input is a mask value (>250) but not all water become 255.
build_calc_expr() {
  local letters="$1" pct="$2"
  local expr="percentile([" sep=""
  for ltr in $letters; do
    expr+="${sep}(${ltr}<251)*${ltr}"; sep=","
  done
  expr+="],${pct},axis=0)+("
  sep=""
  for ltr in $letters; do
    expr+="${sep}(${ltr}==254)"; sep="&"
  done
  expr+=")*0+( ("
  sep=""
  for ltr in $letters; do
    expr+="${sep}(${ltr}!=254)"; sep="|"
  done
  expr+=")&("
  sep=""
  for ltr in $letters; do
    expr+="${sep}(${ltr}>250)"; sep="&"
  done
  expr+=") )*255"
  printf '%s' "$expr"
}

# Emit the "-A <file> -B <file> ..." input arguments: letter A maps to
# 2003, B to 2004, and so on.
build_input_args() {
  local letters="$1" doy="$2"
  local yr=2003 args=""
  for ltr in $letters; do
    args+=" -${ltr} ${pctilepath}/maxMODISmax.${yr}.${doy}.${TYPE}.img"
    yr=$((yr+1))
  done
  printf '%s' "$args"
}

# Compute one percentile product unless its output already exists.
# $1 percentile, $2 output name prefix, $3 YEAR, $4 DOY, $5 letter list.
calc_percentile() {
  local pct="$1" name="$2" year="$3" doy="$4" letters="$5"
  local outfile="${pctilepath}/${name}allpriormax.${year}.${doy}.${TYPE}.img"
  if [ -f "$outfile" ]; then
    echo "${name}allpriormax.${year}.${doy}.${TYPE}.img file already exists"
    return 0
  fi
  echo "File does not exist for $outfile"
  local calc args
  calc=$(build_calc_expr "$letters" "$pct")
  args=$(build_input_args "$letters" "$doy")
  gdal_calc.py --calc="$calc" $args --outfile="$outfile" \
    --type=Byte --NoDataValue=252 --format=HFA \
    --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite
}

for YEAR in 2018
do
  LASTYEAR=$((YEAR-1))

  #for DOY in 001 009 017 025 033 041 049 057 065 073 081 089 097 105 113 121 129 137 145 153 161 169 177 185 193 201 209 217 225 233 241 249 257 265 273 281 289 297 305 313 321 329 337 345 353 361
  for DOY in 145
  do
    # One letter per prior year (A=2003, B=2004, ...), stopping at the
    # year before $YEAR.  Reset per iteration so letters do not
    # accumulate across DOY/YEAR passes.
    ltrstring=""
    for prioryear in 2003/A 2004/B 2005/C 2006/D 2007/E 2008/F 2009/G 2010/H 2011/I 2012/J 2013/K 2014/L 2015/M 2016/N 2017/O 2018/P 2019/Q 2020/R 2021/S 2022/T 2023/U 2024/V 2025/W 2026/X 2027/Y 2028/Z
    do
      yr=${prioryear%/*}
      ltr=${prioryear#*/}
      ltrstring="$ltrstring $ltr"
      if [ "$yr" -eq "$LASTYEAR" ]; then
        break
      fi
    done

    calc_percentile 50 median "$YEAR" "$DOY" "$ltrstring"
    calc_percentile 90 90th   "$YEAR" "$DOY" "$ltrstring"
    calc_percentile 10 10th   "$YEAR" "$DOY" "$ltrstring"

  done # over all intervals
done # over all years
-#)*0\ -#+( (\ -#(A!=254)|(B!=254)|(C!=254)|\ -#(D!=254)|(E!=254)|(F!=254)|\ -#(G!=254)|(H!=254)|(I!=254)|\ -#(J!=254)|(K!=254)|(L!=254)|\ -#(M!=254)|(N!=254)|(O!=254)\ -#(P!=254)\ -# )&\ -#( \ -#(A>250)&(B>250)&(C>250)&\ -#(D>250)&(E>250)&(F>250)&\ -#(G>250)&(H>250)&(I>250)&\ -#(J>250)&(K>250)&(L>250)&\ -#(M>250)&(N>250)&(O>250)&\ -#(P>250)\ -# ) )*255\ -#" \ -#-A maxMODISmax.2003.$DOY.$TYPE.img -B maxMODISmax.2004.$DOY.$TYPE.img \ -#-C maxMODISmax.2005.$DOY.$TYPE.img -D maxMODISmax.2006.$DOY.$TYPE.img \ -#-E maxMODISmax.2007.$DOY.$TYPE.img -F maxMODISmax.2008.$DOY.$TYPE.img \ -#-G maxMODISmax.2009.$DOY.$TYPE.img -H maxMODISmax.2010.$DOY.$TYPE.img \ -#-I maxMODISmax.2011.$DOY.$TYPE.img -J maxMODISmax.2012.$DOY.$TYPE.img \ -#-K maxMODISmax.2013.$DOY.$TYPE.img -L maxMODISmax.2014.$DOY.$TYPE.img \ -#-M maxMODISmax.2015.$DOY.$TYPE.img -N maxMODISmax.2016.$DOY.$TYPE.img \ -#-O maxMODISmax.2017.$DOY.$TYPE.img -P maxMODISmax.2018.$DOY.$TYPE.img\ -#--outfile=meanallpriormax.$YEAR.$DOY.$TYPE.img --type=Byte --NoDataValue=252 --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --overwrite -# -#gdal_translate meanallpriormax.$YEAR.$DOY.$TYPE.img -of GTiff meanallpriormax.$YEAR.$DOY.$TYPE.tif -#rm *.xml -#gdalinfo -stats -hist meanallpriormax.$YEAR.$DOY.$TYPE.img -#xv meanallpriormax.$YEAR.$DOY.$TYPE.tif -# -#;; - - - - -################################################################# - -done # over all intervals - -done # over all years diff --git a/precursor_archive.py b/precursor_archive.py new file mode 100644 index 0000000..92ce2c3 --- /dev/null +++ b/precursor_archive.py @@ -0,0 +1,256 @@ +import os, sys, copy +import requests +import rasterio as rio +import xml.etree.ElementTree as ET + +import logging as log + +from util import * +from gimms import Gimms + +load_env() + +class PrecursorArchive: + + def __init__(self, root_dir='./precursors', year_maxes_root='./graph_data', default_file_ext='img', api=None): + self.api = api or Gimms() + self._jds = 
ALL_MODIS_JULIAN_DAYS + self._intervals = INTERVALS + self._default_file_ext = default_file_ext + self._default_max_prefix = 'maxMODIS' + self._default_maxmax_prefix = 'maxMODISmax' + self._root_dir = os.path.realpath(root_dir) + self._year_maxes_root = os.path.realpath(year_maxes_root) + self._init_state() + self._update_state() + + + def update(self): + log.info('Updating precursor archive...') + all_updated = list(self._update_all()) + return all_updated + + + def _update_all(self): + self._update_state() + for d, y, jd in self._walk_state(): + out_path, ptype, updated = self._update_date(y, jd) + if updated: + yield y, jd, out_path, ptype + self._clean() + self._update_state() + + + def _update_date(self, y, jd): + out_path, ptype, updated = self._update_24day_max(y, jd) + return out_path, ptype, updated + + + def _update_24day_max(self, y, jd): + std_path = self._get_file_path(y, jd, nrt=False, is_maxmax=True) + nrt_path = self._get_file_path(y, jd, nrt=True, is_maxmax=True) + y2, jd2 = self._get_previous_date(y, jd) + y3, jd3 = self._get_previous_date(y2, jd2) + dates = [ (y,jd), (y2,jd2), (y3,jd3) ] + inputs_are_std = False + try: + paths, nrt = self._get_24day_max_input_paths(y, jd) + if not nrt: + inputs_are_std = True + except FileNotFoundError: + pass + inputs_updated = False + for _y, _jd, in dates: + path, ptype, updated = self._update_8day_max(_y, _jd) + if ptype == None: + return None, None, False + inputs_updated = inputs_updated or updated + if not inputs_updated and os.path.exists(std_path): + return std_path, 'std', False + log.info(f'Updating 24-day max for {y} / {jd}...') + paths, nrt = self._get_24day_max_input_paths(y, jd) + ptype = 'nrt' if nrt else 'std' + if inputs_updated and os.path.exists(std_path) and ptype == 'std': + log.info(f'Removing outdated 24-day std max {std_path}...') + os.remove(std_path) + if inputs_updated and os.path.exists(nrt_path) and ptype == 'nrt': + log.info(f'Removing outdated 24-day nrt max {nrt_path}...') + 
os.remove(nrt_path) + ptype = 'nrt' if nrt else 'std' + out_path = nrt_path if nrt else std_path + cmd = f''' + gdal_calc.py + --calc=" + maximum(maximum((A<251)*A,(B<251)*B),(C<251)*C) + +((A==254)|(B==254)|(C==254))*254 + +((A==255)&(B==255)&(C==255))*255 + " + --NoDataValue=252 + --format=HFA + --co "STATISTICS=YES" + --co "COMPRESSED=YES" + --outfile={out_path} + -A {paths[0]} + -B {paths[1]} + -C {paths[2]} + --type=Byte + --debug + ''' + try: + run_process(cmd) + except: + return None, None, False + return out_path, ptype, True + + + def _update_8day_max(self, y, jd): + _dir = self._get_dir(jd) + try: + std_path = self._get_file_path(y, jd, nrt=False) + if os.path.exists(std_path): + log.debug(f'Found std file at {std_path}...') + return std_path, 'std', False + std_path = self.api.get(y, jd, out_dir=_dir, check=True) + return std_path, 'std', True + except DataNotFoundError as e: + log.info('No std data available, trying nrt instead...') + nrt_path = self._get_file_path(y, jd, nrt=True) + if os.path.exists(nrt_path): + return nrt_path, 'nrt', False + nrt_path = self.api.get(y, jd, out_dir=_dir, nrt=True, check=True) + return nrt_path, 'nrt', True + except DataNotFoundError as e: + return None, None, False + + + def _get_24day_max_input_paths(self, y, jd): + y1, jd1 = y, jd + p1, p1nrt = self._get_best_8day_max_path(y, jd) + y2, jd2 = self._get_previous_date(y1, jd1) + p2, p2nrt = self._get_best_8day_max_path(y2, jd2) + y3, jd3 = self._get_previous_date(y2, jd2) + p3, p3nrt = self._get_best_8day_max_path(y3, jd3) + nrt = True in [ p1nrt, p2nrt, p3nrt ] + return (p1, p2, p3), nrt + + + def _get_dir(self, jd): + p = os.path.join(self._root_dir, jd) + return p + + + def _get_best_8day_max_path(self, y, jd, std_only=False): + std_ok = self._check(y, jd) + if std_ok: + nrt = False + return self._get_file_path(y, jd), nrt + if not std_ok and std_only: + raise FileNotFoundError(f"No 8-day STD max found for {y} / {jd}.") + nrt_ok = self._check(y, jd, nrt=True) + if 
nrt_ok: + return self._get_file_path(y, jd, nrt=True), nrt_ok + raise FileNotFoundError(f"No 8-day max found for {y} / {jd}.") + + + def _get_previous_date(self, y, jd): + try: + interval = [ d for d in self._intervals if d[0] == jd ][0] + except: + raise Exception(f"Bad year ({year}) or julian day ({jd})") + ints = [ int(v) for v in interval ] + p_y = str(int(y)-1) if ints[0] < ints[1] else y + p_jd_i = self._jds.index(jd)-1 + p_jd = self._jds[p_jd_i] + return p_y, p_jd + + + def _check(self, y, jd, nrt=False, is_maxmax=False): + try: + self._get_file_path(y, jd, nrt=nrt, is_maxmax=is_maxmax, check=True) + except FileNotFoundError: + return False + return True + + + def _clean(self): + self._update_state() + for d, y, jd in self._walk_state(): + if d['max']['std'] and d['max']['nrt']: + path = self._get_file_path(y=y, jd=jd, nrt=True, is_maxmax=False) + os.remove(path) + if d['maxmax']['std'] and d['maxmax']['nrt']: + path = self._get_file_path(y=y, jd=jd, nrt=True, is_maxmax=True) + log.info(f'Removing {path}...') + os.remove(path) + self._update_state() + + + def _update_state(self): + for d, y, jd in self._walk_state(): + d['max']['std'] = self._check(y, jd, nrt=False, is_maxmax=False) + d['maxmax']['std'] = self._check(y, jd, nrt=False, is_maxmax=True) + d['max']['nrt'] = self._check(y, jd, nrt=True, is_maxmax=False) + d['maxmax']['nrt'] = self._check(y, jd, nrt=True, is_maxmax=True) + + + def _walk_state(self): + for jd in self._state.keys(): + for y in self._state[jd].keys(): + yield self._state[jd][y], y, jd + + + def _init_state(self): + '''Example state dict: + { + '001': { + '2003': { + 'max': { 'nrt': True, 'std': False }, + 'maxmax': { 'nrt': True, 'std': True } + }, + ... + '2021': { ... }, + }, + ... + '353': { ... 
}, + } + ''' + day_delta = 8 + state = {} + for jd in self._jds: + state[jd] = {} + for y in get_all_modis_data_years(): + dt = datetime.datetime.strptime('{y}{jd}'.format(y=y, jd=jd), '%Y%j') + today = datetime.datetime.today() + if dt > today - datetime.timedelta(days=day_delta): + # skip unavailable dates + continue + s = { 'std': False, 'nrt': False } + state[jd][y] = {} + state[jd][y]['max'] = s + state[jd][y]['maxmax'] = s.copy() + self._state = state + + + def _get_file_path(self, y, jd=None, is_maxmax=False, nrt=False, check=False, year_only=False, ext=None): + filename = self._filename_template(y, jd, year_only=year_only, ext=ext, nrt=nrt, is_maxmax=is_maxmax) + if not year_only: + full_path = os.path.join(self._root_dir, jd, filename) + else: + full_path = os.path.join(self._year_maxes_root, filename) + if check and not os.path.exists(full_path): + raise FileNotFoundError(f'{full_path} does not exist on the file system.') + real_path = os.path.realpath(full_path) + return real_path + + + def _filename_template(self, y, jd, is_maxmax=False, nrt=False, year_only=False, ext=None): + ext = ext or self._default_file_ext + max_prefix = self._default_max_prefix + maxmax_prefix=self._default_maxmax_prefix + prefix = maxmax_prefix if is_maxmax else max_prefix + ptype = 'nrt' if nrt else 'std' + if not year_only: + filename = f'{prefix}.{y}.{jd}.{ptype}.{ext}' + else: + filename = f'{prefix}.{y}.{ptype}.{ext}' + return filename diff --git a/rescale_max b/rescale_max deleted file mode 100644 index 3ded19d..0000000 --- a/rescale_max +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -DATASET=$1 - -gdal_calc.py -A Terra.img -B Aqua.img --outfile=maxMODIS.$YEAR.$DOY.std.img --calc="\ -maximum((A<251)*A,(B<251)*B)\ -+(((A==253)&(B==253))|((A==253)&(B==255))|((A==255)&(B==253))|((A==255)&(B==255)))*255\ -+((A==254)|(B==254))*254\ -" --format=HFA --co "STATISTICS=YES" --co "COMPRESSED=YES" --NoDataValue=252 --type=Byte --overwrite - - diff --git a/test.py b/test.py new file 
mode 100644 index 0000000..cadb792 --- /dev/null +++ b/test.py @@ -0,0 +1,140 @@ + +import unittest +import os, os.path, shutil + +from gimms import Gimms, DataNotFoundError +from precursor_archive import PrecursorArchive + +from test_util import * + +tmp_dir = './tmp' +out_dir='./tmp' + +year = '2021' +jd = '121' + + +class TestPrecursorArchive(unittest.TestCase): + + def setUp(self): + test_root = get_test_dir(PRECURSORS_DIR) + if not os.path.exists(test_root): + os.mkdir(test_root) + link_archive(PRECURSORS_DIR, test_root) + self.archive = PrecursorArchive(root_dir=get_test_dir(PRECURSORS_DIR)) + + def test__update_all(self): + pass + + def test__update_date(self): + pass + + def test__update_24day_max(self): + pass + + def test__update_8day_max(self): + pass + + def test__get_24day_max_input_paths(self): + pass + + def test__get_dir(self): + pass + + def test__get_best_8day_max_path(self): + pass + + def test__get_previous_date(self): + self.assertEqual(self.archive._get_previous_date('2021', '121'), ('2021', '113')) + self.assertEqual(self.archive._get_previous_date('2021', '001'), ('2020', '361')) + + def test__check(self): + self.assertTrue(self.archive._check(year, jd)) + self.assertTrue(self.archive._check(year, jd, is_maxmax=True)) + self.assertFalse(self.archive._check('1800', jd)) + + def test__clean(self): + pass + + def test__update_state(self): + pass + + def test__walk_state(self): + pass + + def test__init_state(self): + pass + + def test__get_file_path(self): + path = self.archive._get_file_path(year, jd, is_maxmax=False, nrt=False, check=False) + self.assertTrue(os.path.exists(path)) + with self.assertRaises(FileNotFoundError): + not_here = self.archive._get_file_path('1800', '002', check=True) + + def test__filename_template(self): + pass + + +@unittest.skip('skipping api test') +class TestGimms(unittest.TestCase): + + def setUp(self): + self.sats = [ 'Aqua', 'Terra' ] + self.api = Gimms() + + def test__get(self): + std_path = 
self.api._get(year, jd, out_dir=out_dir, tmp_dir=tmp_dir, nrt=False, check=True) + self.assertTrue(os.path.exists(std_path)) + os.remove(std_path) + + def test__get_tiles(self): + for path in self.api._get_tiles(year, jd, sat_name='Aqua', tmp_dir=tmp_dir, nrt=False): + self.assertTrue('GMY' in path) + if os.path.exists(path): + os.remove(path) + + def test__get_tile(self): + with self.assertRaises(DataNotFoundError): + nothing_here = self.api._get_tile(url='http://not.a.thing.abcdserfdsfs/NOTHING.IS.REAL.tif.gz', tmp_dir=tmp_dir) + x, y, sat, ptype = '06', '04', 'GMO', 'nrt' + url = self.api._get_tile_url(year, jd, x, y, sat, ptype) + path = self.api._get_tile(url=url, tmp_dir=tmp_dir) + self.assertTrue(sat in path) + if os.path.exists(path): + os.remove(path) + + def test__get_tile_urls(self): + for sat_name in self.sats: + nrt_urls = list(self.api._get_tile_urls(year, jd, sat_name=sat_name, nrt=True)) + std_urls = list(self.api._get_tile_urls(year, jd, sat_name=sat_name, nrt=False)) + self.assertEqual(len(list(nrt_urls)), 26) + self.assertEqual(len(list(std_urls)), 26) + for url in nrt_urls: + self.assertTrue('nrt' in url) + self.assertTrue(year in url) + self.assertTrue(jd in url) + for url in std_urls: + self.assertTrue('std' in url) + self.assertTrue(year in url) + self.assertTrue(jd in url) + + def test__filename_template(self): + sat = self.sats[0] + prefix = 'maxMODIS' + ext = 'img' + f_nrt = self.api._filename_template(year, jd, nrt=True, prefix=prefix, ext=ext) + f_std = self.api._filename_template(year, jd, nrt=False, prefix=prefix, ext=ext) + f_sat = self.api._filename_template(year, jd, sat_name='Aqua', nrt=False, prefix=prefix, ext=ext) + self.assertTrue('nrt' in f_nrt) + self.assertTrue('std' in f_std) + self.assertTrue(sat in f_sat) + self.assertTrue(f_nrt.startswith(prefix)) + self.assertTrue(f_std.startswith(prefix)) + self.assertTrue(f_sat.startswith(prefix)) + self.assertTrue(f_nrt.endswith(ext)) + self.assertTrue(f_std.endswith(ext)) + 
self.assertTrue(f_sat.endswith(ext)) + + +if __name__ == '__main__': + unittest.main() diff --git a/test_util.py b/test_util.py new file mode 100644 index 0000000..2bd32f0 --- /dev/null +++ b/test_util.py @@ -0,0 +1,15 @@ + +from util import load_env + +load_env(ns=globals()) + +def get_test_dir(folder): + return os.path.join('./test', folder) + +def link_archive(src=PRECURSORS_DIR, dst=get_test_dir(PRECURSORS_DIR)): + def copy3(src, dst): + src = os.path.realpath(src) + dst = os.path.realpath(dst) + os.symlink(src, dst) + shutil.copytree(src, dst, copy_function=copy3) + diff --git a/util.py b/util.py new file mode 100755 index 0000000..379982d --- /dev/null +++ b/util.py @@ -0,0 +1,230 @@ +import smtplib +from email.message import EmailMessage + +import os, os.path, sys, re, traceback, shutil, datetime +from shutil import chown +from pytz import timezone +from subprocess import Popen, PIPE, STDOUT + +from tempfile import NamedTemporaryFile +import base64 +import logging as log + +# Some helpful constants + +ALL_MODIS_JULIAN_DAYS=("001", "009", "017", "025", "033", "041", "049", "057", "065", "073", "081", "089", "097", "105", "113", "121", "129", "137", "145", "153", "161", "169", "177", "185", "193", "201", "209", "217", "225", "233", "241", "249", "257", "265", "273", "281", "289", "297", "305", "313", "321", "329", "337", "345", "353", "361") + +# ForWarn 2 julian days are the same as MODIS, except we exclude day 361 +# However, we still build precursors for day 361 since it's used when calculating +# products for days 001 and 009. +ALL_FW2_JULIAN_DAYS=ALL_MODIS_JULIAN_DAYS[:-1] + +# Each tuple is a triplet of MODIS product days corresponding to +# a ForWarn 2 product window for the first day in the tuple. 
+INTERVALS=[ ("361","353","345"), ("353","345","337"), ("345","337","329"), ("337","329","321"), ("329","321","313"), ("321","313","305"), ("313","305","297"), ("305","297","289"), ("297","289","281"), ("289","281","273"), ("281","273","265"), ("273","265","257"), ("265","257","249"), ("257","249","241"), ("249","241","233"), ("241","233","225"), ("233","225","217"), ("225","217","209"), ("217","209","201"), ("209","201","193"), ("201","193","185"), ("193","185","177"), ("185","177","169"), ("177","169","161"), ("169","161","153"), ("161","153","145"), ("153","145","137"), ("145","137","129"), ("137","129","121"), ("129","121","113"), ("121","113","105"), ("113","105","097"), ("105","097","089"), ("097","089","081"), ("089","081","073"), ("081","073","065"), ("073","065","057"), ("065","057","049"), ("057","049","041"), ("049","041","033"), ("041","033","025"), ("033","025","017"), ("025","017","009"), ("017","009","001"), ("009","001","361"), ("001","361","353") ] + + +def load_env(ns=globals()): + '''Load the variables defined in .env into the globals dictionary.''' + with open('.env') as f: + lines = [ line.strip() for line in f.readlines() ] + lines = filter(lambda line: '=' in line and not line.startswith('#'), lines) + env = [ line.split('=') for line in lines ] + for arr in env: + key = arr[0] + val = ''.join(arr[1:]) + ns[key] = val + + +# Load the environment variables into the globals dict +load_env() + + +## Exceptions +# + +class DataNotFoundError(Exception): + pass + +class InvalidDateError(Exception): + pass + +class OverwriteError(Exception): + pass + + +## Helpers +# + +def chown_all(): + # The call is coming from... inside the container! 
+ log.debug(f'Changing owner:group of all files to {DKR_USER}:{DKR_GROUP}') + folders = [ + os.path.realpath(DKR_BUILD_DIR), + os.path.join(DKR_BUILD_DIR, FW2_ARCHIVE_DIR_NORMAL), + os.path.join(DKR_BUILD_DIR, FW2_ARCHIVE_DIR_MUTED), + os.path.join(DKR_BUILD_DIR, PRECURSORS_DIR), + os.path.join(DKR_BUILD_DIR, ALL_YEAR_MAXES_DIR), + ] + for folder in folders: + shutil.chown(os.path.realpath(folder), user=DKR_USER, group=DKR_GROUP) + for root, dirs, files in os.walk(folder): + for f in files: + shutil.chown(os.path.join(root, f), user=DKR_USER, group=DKR_GROUP) + for _dir in dirs: + shutil.chown(os.path.join(root, _dir), user=DKR_USER, group=DKR_GROUP) + + +def clean_all(base_dir='.', dryrun=False): + exts = [ 'img', 'gz', 'tif', 'vrt' ] + all_jds = ALL_MODIS_JULIAN_DAYS + files = os.listdir(base_dir) + for f in files: + for ext in exts: + if f.endswith(ext): + log.info(f'Removing {f}') + os.remove(f) + + +def get_all_modis_data_years(): + '''Return a list of years (strings) for which MODIS data is available on the GIMMS server.''' + start = int(MODIS_DATA_YEAR_START) + today = datetime.datetime.today() + this_year = today.strftime('%Y') + all_years = list(range(start, int(this_year)+1 )) + return [ str(y) for y in all_years ] + + +def run_process(cmd, remove_newlines=True): + log.info('Running subprocess...') + log.info(f'{cmd}') + if remove_newlines: + cmd = cmd.replace('\n', '') + cmd = cmd.strip() + process = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=True) + with process.stdout: + for line in iter(process.stdout.readline, b''): + log.debug(line.rstrip().decode("utf-8")) + exitcode = process.wait() + if exitcode > 0: + raise OSError(f"Process returned with non-zero exit code: {exitcode}.") + + +def get_default_log_path(): + now = datetime.datetime.now().strftime(LOG_FILE_TIMESTAMP_FORMAT) + log_path = LOG_PATH_TEMPLATE.format(now) + return log_path + + +def get_log_path(logger=None): + if not logger: + logger = log.getLogger() + paths = [ handler.baseFilename 
for handler in logger.handlers if isinstance(handler, log.FileHandler) ] + if len(paths) > 1: + log.error("This logger has two file handlers associated with it. That's bad, right?") + if not len(paths): + return None + if len(paths) == 1: + return paths[0] + + +def init_log(level=log.DEBUG, log_path=None, dryrun=False, use_file=True): + '''Initialize logging.''' + logger = log.getLogger() + for handler in logger.handlers: + logger.removeHandler(handler) + if dryrun: + formatter_string = '[%(asctime)s EST] [DRYRUN] [%(levelname)s] %(message)s' + else: + formatter_string = '[%(asctime)s] [%(levelname)s] %(message)s' + formatter = log.Formatter(formatter_string) + if use_file: + log_path = log_path or get_default_log_path() + file_handler = log.FileHandler(filename=log_path, mode='a') + logger.addHandler(file_handler) + stream_handler = log.StreamHandler() + logger.addHandler(stream_handler) + for handler in logger.handlers[:]: + handler.setFormatter(formatter) + logger.setLevel(level) + + +# TODO +def mail_results(dates=[], dryrun=False): + if not os.path.exists(MAIL_TO_ADDRS_FILE): + log.error("Unable to email results since mail_to_addrs.txt is missing.") + mail_to_addrs = [] + try: + with open(MAIL_TO_ADDRS_FILE) as f: + for addr in f: + mail_to_addrs.append(addr.strip()) + if not len(mail_to_addrs): + raise OSError("File is empty: {}".format(MAIL_TO_ADDRS_FILE)) + for addr in mail_to_addrs: + if '@' not in addr: + raise ValueError("Malformed email address in {}: {}".format(MAIL_TO_ADDRS_FILE, addr)) + except FileNotFoundError as e: + log.error("File is missing: {}".format(MAIL_TO_ADDRS_FILE)) + log.error("See MAIL_TO_ADDRS in .env.".format(MAIL_TO_ADDRS_FILE)) + sys.exit(1) + except OSError as e: + log.error(e.__str__()) + sys.exit(1) + except ValueError as e: + log.error(e.__str__()) + dryrun_subject_text = '[DRYRUN] ' if dryrun else '' + total_success = len([ d for d in dates if d['success'] ]) == len(dates) + if len(dates): + date_list_str = ', '.join([ 
'{}/{}'.format(d['year'], d['jd']) for d in dates ]) + date_list_str = ' (' + date_list_str + ')' + else: + date_list_str = '' + if total_success: + subject_text = "{}FW2 Product Generation{}".format(dryrun_subject_text, date_list_str).rstrip() + body_text = "Success! See attached log for details." + else: + subject_text = "{}FAILED: FW2 Product Generation{}".format(dryrun_subject_text, date_list_str).rstrip() + body_text = "Something went wrong... Usually this just means the most recent NRT tiles are missing from the GIMMS server. We'll try again in a bit." + body_text += "\n\n" + if len(dates): + body_text += "Summary:\n\n" + for d in dates: + status_text = "OK" if d['success'] else "FAILED" + body_text += "Product: {} / {}\n".format(d['year'], d['jd']) + body_text += "Status: {}\n".format("OK" if d['success'] else "FAILED") + body_text += "\n" + log_path = get_log_path() + if log_path: + with open(log_path) as f: + log_contents = f.read() + else: + log_contents = "Log file is missing!" + log.info("Emailing results to {}".format(', '.join(mail_to_addrs))) + msg = EmailMessage() + me = 'nemacmailer@gmail.com' + msg['Subject'] = subject_text + msg['From'] = me + msg['To'] = ', '.join(mail_to_addrs) + msg.set_content(body_text) + msg.add_attachment(log_contents, filename=os.path.basename(log_path)) + s = smtplib.SMTP('localhost', 25) + s.send_message(msg) + s.quit() + + +def check_is_only_instance_or_quit(): + name_of_this_script = sys.argv[0].split('/').pop() + command = "ps -aux | grep %s" % name_of_this_script + stdout = str(check_output(command, shell=True)) + lines = stdout.split('\n') + # Remove empty strings made from the split command + # Remove entries related to the grep command run as part of the process + lines = [ line for line in lines if line != '' and 'grep' not in line ] + if (len(lines) > 1): + # One entry refers to this instance of the script. + # More than one entry means there is another instance of the script running. 
+ log.info("Another instance of {} is already running! Or it's open in a text editor (LOL). Exiting...".format(name_of_this_script)) + sys.exit() + + + diff --git a/volumes.py b/volumes.py new file mode 100644 index 0000000..73b92c2 --- /dev/null +++ b/volumes.py @@ -0,0 +1,29 @@ + +import os.path +from util import load_env + +load_env(ns=globals()) + +vols = { + os.path.realpath(FW2_ARCHIVE_DIR_NORMAL): { + 'bind': os.path.join(DKR_BUILD_DIR, FW2_ARCHIVE_DIR_NORMAL), + 'mode': 'rw' + }, + os.path.realpath(FW2_ARCHIVE_DIR_MUTED): { + 'bind': os.path.join(DKR_BUILD_DIR, FW2_ARCHIVE_DIR_MUTED), + 'mode': 'rw' + }, + os.path.realpath('.'): { + 'bind': os.path.realpath(DKR_BUILD_DIR), + 'mode': 'rw' + }, + os.path.realpath(PRECURSORS_DIR): { + 'bind': os.path.join(DKR_BUILD_DIR, PRECURSORS_DIR), + 'mode': 'rw' + }, + os.path.realpath(ALL_YEAR_MAXES_DIR): { + 'bind': os.path.join(DKR_BUILD_DIR, ALL_YEAR_MAXES_DIR), + 'mode': 'rw' + } +} +