diff --git a/.github/workflows/static-analysis.yml b/.github/workflows/static-analysis.yml index 8fa8b9ae..1d08910e 100644 --- a/.github/workflows/static-analysis.yml +++ b/.github/workflows/static-analysis.yml @@ -3,10 +3,10 @@ name: Static analysis on: push jobs: - call-flake8-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-flake8.yml@v0.7.0 - with: - local_package_names: None - call-secrets-analysis-workflow: - uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.7.0 + call-secrets-analysis-workflow: + # Docs: https://github.com/ASFHyP3/actions + uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.12.0 + + call-ruff-workflow: + # Docs: https://github.com/ASFHyP3/actions + uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.12.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 12e76b59..96327dcb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.7.1] +### Changed +- The [`static-analysis`](.github/workflows/static-analysis.yml) GitHub Actions workflow now uses `ruff` rather than `flake8` for linting. + ## [0.7.0] ### Removed - Deleted files related to PDC image services. diff --git a/image_services/egis/create_configs.py b/image_services/egis/create_configs.py index 945eadd4..c9c7e33c 100644 --- a/image_services/egis/create_configs.py +++ b/image_services/egis/create_configs.py @@ -2,6 +2,7 @@ from jinja2 import Environment, PackageLoader, StrictUndefined, select_autoescape + SEASONS = { 'JJA': { 'Season': 'summer', @@ -22,21 +23,18 @@ 'Season': 'spring', 'SeasonAbbrev': 'Mar/Apr/May', 'SeasonFull': 'March/April/May', - } + }, } def make_configuration(data_type, polarization, season): config = { - "project_name": "GSSICB", - "s3_prefix": "tiles/", - "s3_suffix": f"_{SEASONS[season]['Season']}_{polarization.lower()}_{data_type}.tif", - "dataset_name": f"{data_type}_{polarization.upper()}_{season}", - "raster_function_templates": [ - "ScaledCoherence.rft.xml", - "UnscaledCoherence.rft.xml" - ], - "default_raster_function_template": "UnscaledCoherence.rft.xml" + 'project_name': 'GSSICB', + 's3_prefix': 'tiles/', + 's3_suffix': f"_{SEASONS[season]['Season']}_{polarization.lower()}_{data_type}.tif", + 'dataset_name': f'{data_type}_{polarization.upper()}_{season}', + 'raster_function_templates': ['ScaledCoherence.rft.xml', 'UnscaledCoherence.rft.xml'], + 'default_raster_function_template': 'UnscaledCoherence.rft.xml', } return config @@ -48,7 +46,7 @@ def make_metadata_fields(data_type, polarization, season): 'polarization': polarization, 'months_abbreviated': SEASONS[season]['SeasonAbbrev'], 'season': SEASONS[season]['Season'], - 'months_full': SEASONS[season]['SeasonFull'] + 'months_full': SEASONS[season]['SeasonFull'], } return metadata diff --git a/image_services/egis/make_egis_services.py b/image_services/egis/make_egis_services.py index 2f43426b..391430e6 100644 --- a/image_services/egis/make_egis_services.py +++ b/image_services/egis/make_egis_services.py @@ -12,6 +12,7 @@ import boto3 from osgeo import gdal, osr + gdal.UseExceptions() gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR') @@ -39,7 +40,7 @@ 'Season': 'March/April/May', 'StartDate': '03/01/2020', 'EndDate': '05/31/2020', - } + }, } @@ -69,7 +70,7 @@ def get_projection(srs_wkt: str) -> str: def remove_prefix(raster_path, prefix): - 
return raster_path[len(prefix):] + return raster_path[len(prefix) :] def get_raster_metadata(raster_path: str) -> dict: @@ -135,9 +136,24 @@ def calculate_overview_fields(mosaic_dataset, local_path): # This function calculates custom attribute values for the overview record print('Calculating field values for overview record') ds = os.path.join(local_path, mosaic_dataset) - ds_cursor = arcpy.da.UpdateCursor(ds, ['Tag', 'MinPS', 'Category', 'StartDate', 'EndDate', 'GroupName', - 'Name', 'ProductType', 'Season', 'Polarization', 'Tile', 'DownloadURL', - 'URLDisplay']) + ds_cursor = arcpy.da.UpdateCursor( + ds, + [ + 'Tag', + 'MinPS', + 'Category', + 'StartDate', + 'EndDate', + 'GroupName', + 'Name', + 'ProductType', + 'Season', + 'Polarization', + 'Tile', + 'DownloadURL', + 'URLDisplay', + ], + ) if ds_cursor is not None: print('Updating Overview Field Values') for row in ds_cursor: @@ -187,8 +203,9 @@ def main(): csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') - raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) + raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] + ) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -214,11 +231,13 @@ def main(): ) logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding source rasters to {mosaic_dataset}') arcpy.management.AddRastersToMosaicDataset( @@ -274,8 +293,8 @@ def main(): cell_size=3, metadata_level='BASIC', transmission_fields='Name;StartDate;EndDate;MinPS;MaxPS;LowPS;HighPS;Date;ZOrder;Dataset_ID;CenterX;' - 'CenterY;Tag;GroupName;StartDate;EndDate;ProductType;Season;Polarization;Tile;' - 'DownloadURL;URLDisplay', + 'CenterY;Tag;GroupName;StartDate;EndDate;ProductType;Season;Polarization;Tile;' + 'DownloadURL;URLDisplay', use_time='DISABLED', start_time_field='StartDate', end_time_field='EndDate', @@ -314,7 +333,16 @@ def main(): os.environ['AWS_PROFILE'] = 'hyp3' logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( diff --git a/image_services/egis/metadata/print_service_metadata.py b/image_services/egis/metadata/print_service_metadata.py index 2eca85a0..9139bee5 100644 --- a/image_services/egis/metadata/print_service_metadata.py +++ b/image_services/egis/metadata/print_service_metadata.py @@ -3,6 +3,7 @@ from jinja2 import Environment, PackageLoader, StrictUndefined, select_autoescape + SEASONS = { 'JJA': { 'Season': 'summer', @@ -23,7 +24,7 @@ 'Season': 'spring', 'SeasonAbbrev': 'Mar/Apr/May', 'SeasonFull': 'March/April/May', - } + }, } @@ -62,7 +63,7 @@ def main(): 'polarization': polarization, 'months_abbreviated': SEASONS[season]['SeasonAbbrev'], 
'season': SEASONS[season]['Season'], - 'months_full': SEASONS[season]['SeasonFull'] + 'months_full': SEASONS[season]['SeasonFull'], } output_text = render_template(args.template, fields) diff --git a/image_services/glo_30_hand/make_hand_service.py b/image_services/glo_30_hand/make_hand_service.py index a80fed9f..3b179506 100644 --- a/image_services/glo_30_hand/make_hand_service.py +++ b/image_services/glo_30_hand/make_hand_service.py @@ -19,7 +19,7 @@ 'Rasters from the glo-30-hand collection will be selected using this ' 'filter before they are added to the mosaic dataset. The syntax is as ' 'expected by the `filter` option of arcpy.management.AddRastersToMosaicDataset.' - ) + ), ) args = parser.parse_args() @@ -79,9 +79,18 @@ ['Tile', '!Name!.split("_")[4] + !Name!.split("_")[6]'], ['Tag', '"GLO30_HAND"'], ['Dataset_ID', '"Global_30m_HAND"'], - ['ProductName', '"GLO30_HAND_"+ !Name!.split("_")[4] + !Name!.split("_")[6]'], - ['URLDisplay', '"GLO30_HAND_"+ !Name!.split("_")[4] + !Name!.split("_")[6]'], - ['DownloadURL', '"https://glo-30-hand.s3.amazonaws.com/v1/2021/" + !Name! + ".tif"'], + [ + 'ProductName', + '"GLO30_HAND_"+ !Name!.split("_")[4] + !Name!.split("_")[6]', + ], + [ + 'URLDisplay', + '"GLO30_HAND_"+ !Name!.split("_")[4] + !Name!.split("_")[6]', + ], + [ + 'DownloadURL', + '"https://glo-30-hand.s3.amazonaws.com/v1/2021/" + !Name! + ".tif"', + ], ['MaxPS', '610'], ], ) @@ -132,7 +141,7 @@ cell_size=3, metadata_level='BASIC', transmission_fields='Name;MinPS;MaxPS;LowPS;HighPS;ZOrder;Dataset_ID;CenterX;CenterY;Tag;Tile;ProductName;' - 'DownloadURL;URLDisplay', + 'DownloadURL;URLDisplay', use_time='DISABLED', max_num_of_download_items=50, max_num_of_records_returned=2000, @@ -160,7 +169,7 @@ ) logging.info('aws s3 cp') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace("/vsis3/", "s3://"), '--recursive']) + subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) logging.info('AddRastersToMosaicDataset') arcpy.management.AddRastersToMosaicDataset( @@ -195,18 +204,18 @@ raster_or_mosaic_layer=mosaic_dataset, out_sddraft=service_definition_draft.name, service_name=dataset_name, - summary="Height Above Nearest Drainage (HAND) is a terrain model that normalizes topography to the " - "relative heights along the drainage network and is used to describe the relative soil " - "gravitational potentials or the local drainage potentials. Each pixel value represents the " - "vertical distance to the nearest drainage. The HAND data provides near-worldwide land coverage " - "at 30 meters and was produced from the 2021 release of the Copernicus GLO-30 Public DEM as " - "distributed in the Registry of Open Data on AWS (https://registry.opendata.aws/copernicus-dem/) " - "using the the ASF Tools Python Package (" - "https://hyp3-docs.asf.alaska.edu/tools/asf_tools_api/#asf_tools.hand.calculate) and the PySheds " - "Python library (https://github.com/mdbartos/pysheds). The HAND data are provided as a tiled set " - "of Cloud Optimized GeoTIFFs (COGs) with 30-meter (1 arcsecond) pixel spacing. The COGs are " - "organized into the same 1 degree by 1 degree grid tiles as the GLO-30 DEM, and individual tiles " - "are pixel-aligned to the corresponding COG DEM tile.", + summary='Height Above Nearest Drainage (HAND) is a terrain model that normalizes topography to the ' + 'relative heights along the drainage network and is used to describe the relative soil ' + 'gravitational potentials or the local drainage potentials. 
Each pixel value represents the ' + 'vertical distance to the nearest drainage. The HAND data provides near-worldwide land coverage ' + 'at 30 meters and was produced from the 2021 release of the Copernicus GLO-30 Public DEM as ' + 'distributed in the Registry of Open Data on AWS (https://registry.opendata.aws/copernicus-dem/) ' + 'using the ASF Tools Python Package (' + 'https://hyp3-docs.asf.alaska.edu/tools/asf_tools_api/#asf_tools.hand.calculate) and the PySheds ' + 'Python library (https://github.com/mdbartos/pysheds). The HAND data are provided as a tiled set ' + 'of Cloud Optimized GeoTIFFs (COGs) with 30-meter (1 arcsecond) pixel spacing. The COGs are ' + 'organized into the same 1 degree by 1 degree grid tiles as the GLO-30 DEM, and individual tiles ' + 'are pixel-aligned to the corresponding COG DEM tile.', ) logging.info('StageService') diff --git a/image_services/opera/make_opera_md.py b/image_services/opera/make_opera_md.py index cd647961..b010b524 100644 --- a/image_services/opera/make_opera_md.py +++ b/image_services/opera/make_opera_md.py @@ -14,6 +14,7 @@ from osgeo import gdal, osr from tenacity import Retrying, before_sleep_log, stop_after_attempt, wait_fixed + gdal.UseExceptions() gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR') @@ -40,15 +41,16 @@ def get_projection(srs_wkt: str) -> str: def remove_prefix(raster_path, prefix): - return raster_path[len(prefix):] + return raster_path[len(prefix) :] def get_raster_metadata(raster_path: str, bucket: str) -> dict: assert raster_path.startswith(f'/vsis3/{bucket}/') name = Path(raster_path).stem download_url = f'https://datapool.asf.alaska.edu/RTC/OPERA-S1/{name}.tif' - acquisition_date = \ + acquisition_date = ( name[36:38] + '/' + name[38:40] + '/' + name[32:36] + ' ' + name[41:43] + ':' + name[43:45] + ':' + name[45:47] + ) info = gdal.Info(raster_path, format='json') return { 'Raster': info['description'], @@ -106,8 +108,21 @@ def calculate_overview_fields(mosaic_dataset, local_path): # This function calculates custom attribute values for the overview record print('Calculating field values for overview record') ds = os.path.join(local_path, mosaic_dataset) - ds_cursor = arcpy.da.UpdateCursor(ds, ['Tag', 'MinPS', 'Category', 'StartDate', 'EndDate', 'GroupName', - 'Name', 'Polarization', 'DownloadURL', 'URLDisplay']) + ds_cursor = arcpy.da.UpdateCursor( + ds, + [ + 'Tag', + 'MinPS', + 'Category', + 'StartDate', + 'EndDate', + 'GroupName', + 'Name', + 'Polarization', + 'DownloadURL', + 'URLDisplay', + ], + ) logging.info('Calculating Overview Start and End Dates') start_dates = [row[1] for row in arcpy.da.SearchCursor(mosaic_dataset, ['Tag', 'StartDate']) if row[0] != 'Dataset'] @@ -145,7 +160,7 @@ def add_property(property_set: etree.Element, property_key: str, property_value: def build_wms_extension() -> etree.Element: - xsi_type = etree.QName("http://www.w3.org/2001/XMLSchema-instance", "type") + xsi_type = etree.QName('http://www.w3.org/2001/XMLSchema-instance', 'type') svc_extension = etree.Element('SVCExtension', {xsi_type: 'typens:SVCExtension'}) @@ -163,19 +178,26 @@ def build_wms_extension() -> etree.Element: add_property(property_array, 'country', 'US') add_property(property_array, 'contactVoiceTelephone', '907-474-5041') add_property(property_array, 'contactElectronicMailAddress', 'uso@asf.alaska.edu') - add_property(property_array, 'accessConstraints', - 'There are no restrictions on the use of this data, but it must be acknowledged or ' - 'cited as follows: "This imagery was generated by ASF 
DAAC HyP3 using GAMMA software. ' - 'Contains modified Copernicus Sentinel data, processed by ESA."') + add_property( + property_array, + 'accessConstraints', + 'There are no restrictions on the use of this data, but it must be acknowledged or ' + 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. ' + 'Contains modified Copernicus Sentinel data, processed by ESA."', + ) add_property(property_array, 'title', '') add_property(property_array, 'abstract', '') info_property_set = etree.SubElement(svc_extension, 'Info', {xsi_type: 'typens:PropertySet'}) - info_property_array = etree.SubElement(info_property_set, 'PropertyArray', - {xsi_type: 'typens:ArrayOfPropertySetProperty'}) + info_property_array = etree.SubElement( + info_property_set, 'PropertyArray', {xsi_type: 'typens:ArrayOfPropertySetProperty'} + ) add_property(info_property_array, 'WebEnabled', 'true') - add_property(info_property_array, 'WebCapabilities', - 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension') + add_property( + info_property_array, + 'WebCapabilities', + 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension', + ) return svc_extension @@ -196,8 +218,9 @@ def main(): csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') - raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) + raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] + ) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -206,11 +229,15 @@ def main(): arcpy.env.parallelProcessingFactor = '75%' try: - rasters = get_rasters(csv_file.replace(".csv", "")) + rasters = get_rasters(csv_file.replace('.csv', '')) update_csv(csv_file, rasters, config['bucket']) - for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(60), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), + wait=wait_fixed(60), + reraise=True, + before_sleep=before_sleep_log(logging, logging.WARNING), + ): with attempt: today = datetime.datetime.now(datetime.timezone.utc).strftime('%y%m%d_%H%M') output_name = f'{config["dataset_name"]}_{today}' @@ -225,11 +252,13 @@ def main(): ) logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding source rasters to {mosaic_dataset}') arcpy.management.AddRastersToMosaicDataset( @@ -285,7 +314,7 @@ def main(): cell_size=3, metadata_level='BASIC', transmission_fields='Name;StartDate;EndDate;MinPS;MaxPS;LowPS;HighPS;Date;Dataset_ID;CenterX;' - 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', + 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', use_time='ENABLED', start_time_field='StartDate', end_time_field='EndDate', @@ -326,7 +355,16 @@ def main(): ) logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), 
'--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( diff --git a/image_services/opera/make_opera_services.py b/image_services/opera/make_opera_services.py index 94323936..36ec3ed2 100644 --- a/image_services/opera/make_opera_services.py +++ b/image_services/opera/make_opera_services.py @@ -15,6 +15,7 @@ from osgeo import gdal, osr from tenacity import Retrying, before_sleep_log, stop_after_attempt, wait_fixed + gdal.UseExceptions() gdal.SetConfigOption('GDAL_DISABLE_READDIR_ON_OPEN', 'EMPTY_DIR') @@ -41,15 +42,16 @@ def get_projection(srs_wkt: str) -> str: def remove_prefix(raster_path, prefix): - return raster_path[len(prefix):] + return raster_path[len(prefix) :] def get_raster_metadata(raster_path: str, bucket: str) -> dict: assert raster_path.startswith(f'/vsis3/{bucket}/') name = Path(raster_path).stem download_url = f'https://datapool.asf.alaska.edu/RTC/OPERA-S1/{name}.tif' - acquisition_date = \ + acquisition_date = ( name[36:38] + '/' + name[38:40] + '/' + name[32:36] + ' ' + name[41:43] + ':' + name[43:45] + ':' + name[45:47] + ) info = gdal.Info(raster_path, format='json') return { 'Raster': info['description'], @@ -107,8 +109,21 @@ def calculate_overview_fields(mosaic_dataset, local_path): # This function calculates custom attribute values for the overview record print('Calculating field values for overview record') ds = os.path.join(local_path, mosaic_dataset) - ds_cursor = arcpy.da.UpdateCursor(ds, ['Tag', 'MinPS', 'Category', 'StartDate', 'EndDate', 'GroupName', - 'Name', 'Polarization', 'DownloadURL', 'URLDisplay']) + ds_cursor = arcpy.da.UpdateCursor( + ds, + [ + 'Tag', + 'MinPS', + 'Category', + 'StartDate', + 'EndDate', + 'GroupName', + 'Name', + 'Polarization', + 'DownloadURL', + 'URLDisplay', + ], + ) logging.info('Calculating Overview Start and End Dates') start_dates = [row[1] for row in arcpy.da.SearchCursor(mosaic_dataset, ['Tag', 'StartDate']) if row[0] != 'Dataset'] @@ -146,7 +161,7 @@ def add_property(property_set: etree.Element, property_key: str, property_value: def build_wms_extension() -> etree.Element: - xsi_type = etree.QName("http://www.w3.org/2001/XMLSchema-instance", "type") + xsi_type = etree.QName('http://www.w3.org/2001/XMLSchema-instance', 'type') svc_extension = etree.Element('SVCExtension', {xsi_type: 'typens:SVCExtension'}) @@ -164,19 +179,26 @@ def build_wms_extension() -> etree.Element: add_property(property_array, 'country', 'US') add_property(property_array, 'contactVoiceTelephone', '907-474-5041') add_property(property_array, 'contactElectronicMailAddress', 'uso@asf.alaska.edu') - add_property(property_array, 'accessConstraints', - 'There are no restrictions on the use of this data, but it must be acknowledged or ' - 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. ' - 'Contains modified Copernicus Sentinel data, processed by ESA."') + add_property( + property_array, + 'accessConstraints', + 'There are no restrictions on the use of this data, but it must be acknowledged or ' + 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. 
' + 'Contains modified Copernicus Sentinel data, processed by ESA."', + ) add_property(property_array, 'title', '') add_property(property_array, 'abstract', '') info_property_set = etree.SubElement(svc_extension, 'Info', {xsi_type: 'typens:PropertySet'}) - info_property_array = etree.SubElement(info_property_set, 'PropertyArray', - {xsi_type: 'typens:ArrayOfPropertySetProperty'}) + info_property_array = etree.SubElement( + info_property_set, 'PropertyArray', {xsi_type: 'typens:ArrayOfPropertySetProperty'} + ) add_property(info_property_array, 'WebEnabled', 'true') - add_property(info_property_array, 'WebCapabilities', - 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension') + add_property( + info_property_array, + 'WebCapabilities', + 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension', + ) return svc_extension @@ -197,8 +219,9 @@ def main(): csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') - raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) + raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] + ) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -207,11 +230,15 @@ def main(): arcpy.env.parallelProcessingFactor = '75%' try: - rasters = get_rasters(csv_file.replace(".csv", "")) + rasters = get_rasters(csv_file.replace('.csv', '')) update_csv(csv_file, rasters, config['bucket']) - for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(60), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), + wait=wait_fixed(60), + reraise=True, + before_sleep=before_sleep_log(logging, logging.WARNING), + ): with attempt: today = datetime.datetime.now(datetime.timezone.utc).strftime('%y%m%d_%H%M') output_name = f'{config["dataset_name"]}_{today}' @@ -227,11 +254,13 @@ def main(): ) logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding source rasters to {mosaic_dataset}') arcpy.management.AddRastersToMosaicDataset( @@ -287,7 +316,7 @@ def main(): cell_size=3, metadata_level='BASIC', transmission_fields='Name;StartDate;EndDate;MinPS;MaxPS;LowPS;HighPS;Date;Dataset_ID;CenterX;' - 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', + 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', use_time='ENABLED', start_time_field='StartDate', end_time_field='EndDate', @@ -334,7 +363,16 @@ def main(): os.environ['AWS_PROFILE'] = 'hyp3' logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( @@ -375,8 +413,9 @@ def main(): with open(args.server_connection_file) 
as f: server_connection = json.load(f) - for attempt in Retrying(stop=stop_after_attempt(3), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), reraise=True, before_sleep=before_sleep_log(logging, logging.WARNING) + ): with attempt: logging.info(f'Publishing {service_definition}') server = Server(**server_connection) diff --git a/image_services/opera/make_opera_services_test.py b/image_services/opera/make_opera_services_test.py index d7b938a0..37a784e1 100644 --- a/image_services/opera/make_opera_services_test.py +++ b/image_services/opera/make_opera_services_test.py @@ -43,15 +43,16 @@ def get_projection(srs_wkt: str) -> str: def remove_prefix(raster_path, prefix): - return raster_path[len(prefix):] + return raster_path[len(prefix) :] def get_raster_metadata(raster_path: str, bucket: str) -> dict: assert raster_path.startswith(f'/vsis3/{bucket}/') name = Path(raster_path).stem download_url = f'https://datapool.asf.alaska.edu/RTC/OPERA-S1/{name}.tif' - acquisition_date = \ + acquisition_date = ( name[36:38] + '/' + name[38:40] + '/' + name[32:36] + ' ' + name[41:43] + ':' + name[43:45] + ':' + name[45:47] + ) info = gdal.Info(raster_path, format='json') return { 'Raster': info['description'], @@ -109,8 +110,21 @@ def calculate_overview_fields(mosaic_dataset, local_path): # This function calculates custom attribute values for the overview record print('Calculating field values for overview record') ds = os.path.join(local_path, mosaic_dataset) - ds_cursor = arcpy.da.UpdateCursor(ds, ['Tag', 'MinPS', 'Category', 'StartDate', 'EndDate', 'GroupName', - 'Name', 'Polarization', 'DownloadURL', 'URLDisplay']) + ds_cursor = arcpy.da.UpdateCursor( + ds, + [ + 'Tag', + 'MinPS', + 'Category', + 'StartDate', + 'EndDate', + 'GroupName', + 'Name', + 'Polarization', + 'DownloadURL', + 'URLDisplay', + ], + ) logging.info('Calculating Overview Start and End Dates') start_dates = [row[1] for row in arcpy.da.SearchCursor(mosaic_dataset, ['Tag', 'StartDate']) if row[0] != 'Dataset'] @@ -148,7 +162,7 @@ def add_property(property_set: etree.Element, property_key: str, property_value: def build_wms_extension() -> etree.Element: - xsi_type = etree.QName("http://www.w3.org/2001/XMLSchema-instance", "type") + xsi_type = etree.QName('http://www.w3.org/2001/XMLSchema-instance', 'type') svc_extension = etree.Element('SVCExtension', {xsi_type: 'typens:SVCExtension'}) @@ -166,19 +180,26 @@ def build_wms_extension() -> etree.Element: add_property(property_array, 'country', 'US') add_property(property_array, 'contactVoiceTelephone', '907-474-5041') add_property(property_array, 'contactElectronicMailAddress', 'uso@asf.alaska.edu') - add_property(property_array, 'accessConstraints', - 'There are no restrictions on the use of this data, but it must be acknowledged or ' - 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. ' - 'Contains modified Copernicus Sentinel data, processed by ESA."') + add_property( + property_array, + 'accessConstraints', + 'There are no restrictions on the use of this data, but it must be acknowledged or ' + 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. 
' + 'Contains modified Copernicus Sentinel data, processed by ESA."', + ) add_property(property_array, 'title', '') add_property(property_array, 'abstract', '') info_property_set = etree.SubElement(svc_extension, 'Info', {xsi_type: 'typens:PropertySet'}) - info_property_array = etree.SubElement(info_property_set, 'PropertyArray', - {xsi_type: 'typens:ArrayOfPropertySetProperty'}) + info_property_array = etree.SubElement( + info_property_set, 'PropertyArray', {xsi_type: 'typens:ArrayOfPropertySetProperty'} + ) add_property(info_property_array, 'WebEnabled', 'true') - add_property(info_property_array, 'WebCapabilities', - 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension') + add_property( + info_property_array, + 'WebCapabilities', + 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension', + ) return svc_extension @@ -199,8 +220,9 @@ def main(): csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') - raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) + raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] + ) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -212,8 +234,12 @@ def main(): rasters = get_rasters(config['bucket'], config['s3_prefix'], config['s3_suffix']) update_csv(csv_file, rasters, config['bucket']) - for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(60), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), + wait=wait_fixed(60), + reraise=True, + before_sleep=before_sleep_log(logging, logging.WARNING), + ): with attempt: today = datetime.datetime.now(datetime.timezone.utc).strftime('%y%m%d_%H%M') output_name = f'{config["dataset_name"]}_{today}' @@ -229,11 +255,13 @@ def main(): ) logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding source rasters to {mosaic_dataset}') arcpy.management.AddRastersToMosaicDataset( @@ -289,7 +317,7 @@ def main(): cell_size=3, metadata_level='BASIC', transmission_fields='Name;StartDate;EndDate;MinPS;MaxPS;LowPS;HighPS;Date;Dataset_ID;CenterX;' - 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', + 'CenterY;Tag;GroupName;Polarization;DownloadURL;URLDisplay', use_time='ENABLED', start_time_field='StartDate', end_time_field='EndDate', @@ -333,7 +361,16 @@ def main(): os.environ['AWS_PROFILE'] = 'hyp3' logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( @@ -374,8 +411,9 @@ def main(): with open(args.server_connection_file) as f: server_connection = json.load(f) - for attempt in 
Retrying(stop=stop_after_attempt(3), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), reraise=True, before_sleep=before_sleep_log(logging, logging.WARNING) + ): with attempt: logging.info(f'Publishing {service_definition}') server = Server(**server_connection) diff --git a/image_services/opera/metadata/create_metadata.py b/image_services/opera/metadata/create_metadata.py index e260a4af..40d2521a 100644 --- a/image_services/opera/metadata/create_metadata.py +++ b/image_services/opera/metadata/create_metadata.py @@ -26,35 +26,43 @@ def render_template(template: str, payload: dict) -> str: def main(): parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-o', '--output', help='File to save output to (optional)') - parser.add_argument('-t', '--template', help='Metadata template to fill', default='rtc_metadata.txt.j2') + parser.add_argument( + '-t', + '--template', + help='Metadata template to fill', + default='rtc_metadata.txt.j2', + ) parser.add_argument('config_file', help='Configuration file from which resources are imported') args = parser.parse_args() polarization = json.load(open(args.config_file))['s3_suffix'][1:3] if polarization[0] != polarization[1]: - polarization_description = f'Values for the {polarization} (cross-polarized) polarization are generally ' \ - f'driven by volume scattering, with more complex volume scatterers (such as dense ' \ - f'vegetation) returning higher backscatter values. Surface water generally ' \ - f'appears very dark, as it is predominantly a surface scatterer; most returns ' \ - f'remain in the primary polarization. ' + polarization_description = ( + f'Values for the {polarization} (cross-polarized) polarization are generally ' + f'driven by volume scattering, with more complex volume scatterers (such as dense ' + f'vegetation) returning higher backscatter values. Surface water generally ' + f'appears very dark, as it is predominantly a surface scatterer; most returns ' + f'remain in the primary polarization. ' + ) elif polarization[0] == 'V': - polarization_description = f'Values for the {polarization} polarization are commonly driven by surface ' \ - f'roughness and/or soil moisture, with rougher surfaces and higher soil moisture ' \ - f'returning higher backscatter values. Surface water appears very dark under calm ' \ - f'conditions, as the signal bounces off the surface away from the sensor. ' + polarization_description = ( + f'Values for the {polarization} polarization are commonly driven by surface ' + f'roughness and/or soil moisture, with rougher surfaces and higher soil moisture ' + f'returning higher backscatter values. Surface water appears very dark under calm ' + f'conditions, as the signal bounces off the surface away from the sensor. ' + ) elif polarization[0] == 'H': - polarization_description = f'Values for the {polarization} polarization are a predominance of double-bounce ' \ - f'scattering, with signal bouncing off the ground and then off of surficial ' \ - f'objects (e.g., stemmy vegetation, manmade structures) and back towards the ' \ - f'sensor. Surface water appears very dark under calm conditions, as the signal ' \ - f'bounces off the surface away from the sensor. 
' + polarization_description = ( + f'Values for the {polarization} polarization are a predominance of double-bounce ' + f'scattering, with signal bouncing off the ground and then off of surficial ' + f'objects (e.g., stemmy vegetation, manmade structures) and back towards the ' + f'sensor. Surface water appears very dark under calm conditions, as the signal ' + f'bounces off the surface away from the sensor. ' + ) else: polarization_description = '' - fields = { - 'polarization': polarization, - 'polarization_description': polarization_description - } + fields = {'polarization': polarization, 'polarization_description': polarization_description} output_text = render_template(args.template, fields) if args.output: diff --git a/image_services/opera/update_opera_urls.py b/image_services/opera/update_opera_urls.py index d952f57a..1bf3d765 100644 --- a/image_services/opera/update_opera_urls.py +++ b/image_services/opera/update_opera_urls.py @@ -7,6 +7,7 @@ import boto3 import requests + S3_CLIENT = boto3.client('s3') log = logging.getLogger(__name__) @@ -48,7 +49,10 @@ def main(): config = json.load(f) polarization = config['s3_suffix'][1:3] - url_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}_vsis3_urls.csv') + url_file = os.path.join( + args.working_directory, + f'{config["project_name"]}_{config["dataset_name"]}_vsis3_urls.csv', + ) log.info(f'Querying CMR for OPERA {polarization} products') vsis3_urls = query_cmr(polarization) diff --git a/image_services/rtc_services/make_rtc_service.py b/image_services/rtc_services/make_rtc_service.py index fd1e2669..8b33944f 100644 --- a/image_services/rtc_services/make_rtc_service.py +++ b/image_services/rtc_services/make_rtc_service.py @@ -91,7 +91,7 @@ def add_property(property_set: etree.Element, property_key: str, property_value: def build_wms_extension() -> etree.Element: - xsi_type = etree.QName("http://www.w3.org/2001/XMLSchema-instance", "type") + xsi_type = etree.QName('http://www.w3.org/2001/XMLSchema-instance', 'type') svc_extension = etree.Element('SVCExtension', {xsi_type: 'typens:SVCExtension'}) @@ -109,19 +109,26 @@ def build_wms_extension() -> etree.Element: add_property(property_array, 'country', 'US') add_property(property_array, 'contactVoiceTelephone', '907-474-5041') add_property(property_array, 'contactElectronicMailAddress', 'uso@asf.alaska.edu') - add_property(property_array, 'accessConstraints', - 'There are no restrictions on the use of this data, but it must be acknowledged or ' - 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. ' - 'Contains modified Copernicus Sentinel data, processed by ESA."') + add_property( + property_array, + 'accessConstraints', + 'There are no restrictions on the use of this data, but it must be acknowledged or ' + 'cited as follows: "This imagery was generated by ASF DAAC HyP3 using GAMMA software. 
' + 'Contains modified Copernicus Sentinel data, processed by ESA."', + ) add_property(property_array, 'title', '') add_property(property_array, 'abstract', '') info_property_set = etree.SubElement(svc_extension, 'Info', {xsi_type: 'typens:PropertySet'}) - info_property_array = etree.SubElement(info_property_set, 'PropertyArray', - {xsi_type: 'typens:ArrayOfPropertySetProperty'}) + info_property_array = etree.SubElement( + info_property_set, 'PropertyArray', {xsi_type: 'typens:ArrayOfPropertySetProperty'} + ) add_property(info_property_array, 'WebEnabled', 'true') - add_property(info_property_array, 'WebCapabilities', - 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension') + add_property( + info_property_array, + 'WebCapabilities', + 'GetCapabilities,GetMap,GetFeatureInfo,GetStyles,GetLegendGraphic,GetSchemaExtension', + ) return svc_extension @@ -147,8 +154,9 @@ def build_wms_extension() -> etree.Element: csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') -raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) +raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] +) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -160,8 +168,12 @@ def build_wms_extension() -> etree.Element: rasters = get_rasters(bucket, config['s3_prefix'], config['s3_suffix']) update_csv(csv_file, rasters) - for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(60), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), + wait=wait_fixed(60), + reraise=True, + before_sleep=before_sleep_log(logging, logging.WARNING), + ): with attempt: today = datetime.datetime.now(datetime.timezone.utc).strftime('%y%m%d_%H%M') output_name = f'{config["project_name"]}_{config["dataset_name"]}_{today}' @@ -177,11 +189,13 @@ def build_wms_extension() -> etree.Element: ) logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding fields to {mosaic_dataset}') arcpy.management.AddFields( @@ -207,14 +221,22 @@ def build_wms_extension() -> etree.Element: ['GroupName', '!Name![:46]'], ['Tag', '!Name!.split("_")[8]'], ['MaxPS', '1610'], - ['StartDate', '!Name!.split("_")[2][4:6] + "/" + !Name!.split("_")[2][6:8] + "/" ' - '+ !Name!.split("_")[2][:4] + " " + !Name!.split("_")[2][9:11] + ":" ' - '+ !Name!.split("_")[2][11:13] + ":" + !Name!.split("_")[2][13:15]'], - ['EndDate', '!Name!.split("_")[2][4:6] + "/" + !Name!.split("_")[2][6:8] + "/" ' - '+ !Name!.split("_")[2][:4] + " " + !Name!.split("_")[2][9:11] + ":" ' - '+ !Name!.split("_")[2][11:13] + ":" + !Name!.split("_")[2][13:15]'], - ['DownloadURL', f'"https://s3-us-west-2.amazonaws.com/hyp3-nasa-disasters/{config["s3_prefix"]}" ' - f'+ !Name! 
+ ".tif"'], + [ + 'StartDate', + '!Name!.split("_")[2][4:6] + "/" + !Name!.split("_")[2][6:8] + "/" ' + '+ !Name!.split("_")[2][:4] + " " + !Name!.split("_")[2][9:11] + ":" ' + '+ !Name!.split("_")[2][11:13] + ":" + !Name!.split("_")[2][13:15]', + ], + [ + 'EndDate', + '!Name!.split("_")[2][4:6] + "/" + !Name!.split("_")[2][6:8] + "/" ' + '+ !Name!.split("_")[2][:4] + " " + !Name!.split("_")[2][9:11] + ":" ' + '+ !Name!.split("_")[2][11:13] + ":" + !Name!.split("_")[2][13:15]', + ], + [ + 'DownloadURL', + f'"https://s3-us-west-2.amazonaws.com/hyp3-nasa-disasters/{config["s3_prefix"]}" ' f'+ !Name! + ".tif"', + ], ], ) @@ -265,7 +287,7 @@ def build_wms_extension() -> etree.Element: cell_size=3, metadata_level='BASIC', transmission_fields='Name;StartDate;EndDate;MinPS;MaxPS;LowPS;HighPS;Date;ZOrder;Dataset_ID;CenterX;CenterY;' - 'Tag;ProductName;GroupName;DownloadURL', + 'Tag;ProductName;GroupName;DownloadURL', use_time='ENABLED', start_time_field='StartDate', end_time_field='EndDate', @@ -296,7 +318,16 @@ def build_wms_extension() -> etree.Element: ) logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( @@ -357,8 +388,9 @@ def build_wms_extension() -> etree.Element: with open(args.server_connection_file) as f: server_connection = json.load(f) - for attempt in Retrying(stop=stop_after_attempt(3), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), reraise=True, before_sleep=before_sleep_log(logging, logging.WARNING) + ): with attempt: logging.info(f'Publishing {service_definition}') server = Server(**server_connection) diff --git a/image_services/sample_data/make_sample_service.py b/image_services/sample_data/make_sample_service.py index 55071120..6a677019 100644 --- a/image_services/sample_data/make_sample_service.py +++ b/image_services/sample_data/make_sample_service.py @@ -106,8 +106,9 @@ def update_csv(csv_file: str, rasters: List[str]): csv_file = os.path.join(args.working_directory, f'{config["project_name"]}_{config["dataset_name"]}.csv') -raster_function_template = ''.join([f'{template_directory / template};' - for template in config['raster_function_templates']]) +raster_function_template = ''.join( + [f'{template_directory / template};' for template in config['raster_function_templates']] +) if config['default_raster_function_template'] != 'None': default_raster_function_template = str(template_directory / config['default_raster_function_template']) else: @@ -120,8 +121,12 @@ def update_csv(csv_file: str, rasters: List[str]): rasters = get_rasters(bucket, config['s3_prefix'], config['s3_suffix']) update_csv(csv_file, rasters) - for attempt in Retrying(stop=stop_after_attempt(3), wait=wait_fixed(60), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), + wait=wait_fixed(60), + reraise=True, + before_sleep=before_sleep_log(logging, logging.WARNING), + ): with attempt: today = datetime.datetime.now(datetime.timezone.utc).strftime('%y%m%d_%H%M') output_name = f'{config["project_name"]}_{config["dataset_name"]}_{today}' @@ -137,11 +142,13 @@ def update_csv(csv_file: str, rasters: List[str]): ) 
logging.info('Creating mosaic dataset') - mosaic_dataset = str(arcpy.management.CreateMosaicDataset( - in_workspace=geodatabase, - in_mosaicdataset_name=config['dataset_name'], - coordinate_system=3857, - )) + mosaic_dataset = str( + arcpy.management.CreateMosaicDataset( + in_workspace=geodatabase, + in_mosaicdataset_name=config['dataset_name'], + coordinate_system=3857, + ) + ) logging.info(f'Adding fields to {mosaic_dataset}') arcpy.management.AddFields( @@ -241,7 +248,16 @@ def update_csv(csv_file: str, rasters: List[str]): ) logging.info(f'Moving CRF to {s3_overview}') - subprocess.run(['aws', 's3', 'cp', local_overview, s3_overview.replace('/vsis3/', 's3://'), '--recursive']) + subprocess.run( + [ + 'aws', + 's3', + 'cp', + local_overview, + s3_overview.replace('/vsis3/', 's3://'), + '--recursive', + ] + ) logging.info('Adding overview to mosaic dataset') arcpy.management.AddRastersToMosaicDataset( @@ -290,8 +306,9 @@ def update_csv(csv_file: str, rasters: List[str]): with open(args.server_connection_file) as f: server_connection = json.load(f) - for attempt in Retrying(stop=stop_after_attempt(3), reraise=True, - before_sleep=before_sleep_log(logging, logging.WARNING)): + for attempt in Retrying( + stop=stop_after_attempt(3), reraise=True, before_sleep=before_sleep_log(logging, logging.WARNING) + ): with attempt: logging.info(f'Publishing {service_definition}') server = Server(**server_connection) diff --git a/image_services/updating_scripts/time_slider_updates.py b/image_services/updating_scripts/time_slider_updates.py index f3fc7ca2..dd277eaf 100644 --- a/image_services/updating_scripts/time_slider_updates.py +++ b/image_services/updating_scripts/time_slider_updates.py @@ -5,6 +5,7 @@ import boto3 from arcgis import GIS + client = boto3.client(service_name='secretsmanager', region_name='us-west-2') response = client.get_secret_value(SecretId='tools_user_accounts') password_dict = json.loads(response['SecretString']) @@ -26,21 +27,19 @@ def update_time_slider(item): time_slider['endTime'] = datetime_to_esri(today_time) time_slider['timeStopInterval'] = {'interval': 1, 'units': 'esriTimeUnitsDays'} time_slider['startTime'] = datetime_to_esri(today_time - slider_length) - time_slider['currentTimeExtent'] = [datetime_to_esri(today_time - window_length), - datetime_to_esri(today_time)] + time_slider['currentTimeExtent'] = [datetime_to_esri(today_time - window_length), datetime_to_esri(today_time)] return item.update(data=json.dumps(data)) -webmap_ids = ['2205f66af0324a88a8d0b8a6c8fde5bf', # Alaska Rivers - '80442ecd1e0246adac5f5fb7e627e3e3', # HKH - '3dd8d25559db4ba6aa0e1b6e8cb5d39a', # RTC - 'faa83e4ccfe64bb8a99c13ef70b19b8f', # SWE - ] +webmap_ids = [ + '2205f66af0324a88a8d0b8a6c8fde5bf', # Alaska Rivers + '80442ecd1e0246adac5f5fb7e627e3e3', # HKH + '3dd8d25559db4ba6aa0e1b6e8cb5d39a', # RTC + 'faa83e4ccfe64bb8a99c13ef70b19b8f', # SWE +] -gis = GIS('https://asf-daac.maps.arcgis.com', - password_dict['asf-agol-username'], - password_dict['asf-agol-password']) +gis = GIS('https://asf-daac.maps.arcgis.com', password_dict['asf-agol-username'], password_dict['asf-agol-password']) for webmap_id in webmap_ids: if update_time_slider(gis.content.get(webmap_id)): diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..46a49209 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,31 @@ +[project] +name = "gis_services" +# TODO: Pinning to 3.8 for ruff, figure out the actual appropriate version: +requires-python = "==3.8" + +[tool.ruff] +line-length = 120 +# The directories to 
consider when resolving first- vs. third-party imports. +# See: https://docs.astral.sh/ruff/settings/#src +src = [] + +[tool.ruff.format] +indent-style = "space" +quote-style = "single" + +[tool.ruff.lint] +extend-select = [ + "I", # isort: https://docs.astral.sh/ruff/rules/#isort-i + # TODO: Uncomment the following extensions and address their warnings: + # "UP", # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up + # "D", # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d + # "ANN", # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann + # "PTH", # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth +] + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.ruff.lint.isort] +case-sensitive = true +lines-after-imports = 2
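
Most of the churn in the Python hunks above is mechanical output from `ruff` under this configuration: the isort (`I`) rules with `lines-after-imports = 2` account for the blank lines added after each import block, and the formatter settings account for the quote, trailing-comma, and slice-spacing changes. A minimal sketch of those formatter rules follows; the variable names and values are invented for this example and are not taken from the diff:

# Illustrative sketch -- names and values below are invented, not part of the changeset.
raster_path = '/vsis3/bucket/tiles/example.tif'
prefix = '/vsis3/bucket/'

# Before formatting, in the style of the removed lines:
suffix = raster_path[len(prefix):]                          # no space before ':'
config = {"project_name": "GSSICB", "s3_prefix": "tiles/"}  # double quotes

# After `ruff format` with quote-style = "single" and line-length = 120:
suffix = raster_path[len(prefix) :]  # space added because len(prefix) is a non-trivial slice bound
config = {'project_name': 'GSSICB', 's3_prefix': 'tiles/'}

# The "magic trailing comma" keeps a collection expanded one element per line;
# without it, ruff collapses the collection onto a single line when it fits:
seasons = [
    'JJA',
    'SON',
]

With this pyproject.toml in place, running `ruff check --fix .` (for the lint and import rules) followed by `ruff format .` from the repository root should reproduce the mechanical changes in this changeset.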