diff --git a/geodataflow/core/schemadef.py b/geodataflow/core/schemadef.py
index 02a5db3..48e0e1c 100644
--- a/geodataflow/core/schemadef.py
+++ b/geodataflow/core/schemadef.py
@@ -31,8 +31,9 @@
 ===============================================================================
 """
 
+from collections import OrderedDict
 from datetime import date, datetime
-from typing import Any, List
+from typing import Any, List, Iterable
 
 
 class DataType(object):
@@ -179,6 +180,17 @@ def clone(self) -> "FieldDef":
         new_obj.copy(self)
         return new_obj
 
+    @staticmethod
+    def concat(fields1: Iterable["FieldDef"], fields2: Iterable["FieldDef"]) -> List["FieldDef"]:
+        """
+        Concat the specified streams of Fields into one unique array.
+        """
+        result = OrderedDict({fd.name: fd for fd in fields1})
+        for fd in fields2:
+            result[fd.name] = fd
+
+        return [fd for _, fd in result.items()]
+
 
 class SchemaDef:
     """
diff --git a/geodataflow/geoext/dataset.py b/geodataflow/geoext/dataset.py
index f6d86ee..12e3a58 100644
--- a/geodataflow/geoext/dataset.py
+++ b/geodataflow/geoext/dataset.py
@@ -119,7 +119,9 @@ def properties(self) -> Dict[str, Any]:
         Returns the Properties of this Dataset.
         """
         info = self.get_metadata()
-        return {field_def.name: info.get(field_def.name) for field_def in DATASET_DEFAULT_SCHEMA_DEF}
+        data = {field_def.name: info.get(field_def.name) for field_def in DATASET_DEFAULT_SCHEMA_DEF}
+        data.update(self.user_data)
+        return data
 
     def env(self) -> GdalEnv:
         """
@@ -218,6 +220,10 @@ def get_spatial_srid(self) -> int:
         Returns the EPSG SRID code of the this Dataset.
         """
         spatial_wkt = self._dataset.GetProjection()
+
+        if not spatial_wkt:
+            return 0
+
         spatial_ref = osr.SpatialReference()
         spatial_ref.ImportFromWkt(spatial_wkt)
 
diff --git a/geodataflow/pipeline/filters/EOProductDataset.py b/geodataflow/pipeline/filters/EOProductDataset.py
index fc07300..0dcf877 100644
--- a/geodataflow/pipeline/filters/EOProductDataset.py
+++ b/geodataflow/pipeline/filters/EOProductDataset.py
@@ -98,13 +98,18 @@ def starting_run(self, schema_def, pipeline, processing_args):
         """
         Starting a new Workflow on Geospatial data.
         """
-        from geodataflow.core.schemadef import GeometryType
+        from geodataflow.core.schemadef import GeometryType, DataType, FieldDef
         from geodataflow.geoext.dataset import DATASET_DEFAULT_SCHEMA_DEF
 
+        new_fields = [
+            FieldDef("productType", DataType.String),
+            FieldDef("productDate", DataType.String)
+        ]
+
         schema_def = EOProductCatalog.starting_run(self, schema_def, pipeline, processing_args)
         schema_def = schema_def.clone()
         schema_def.geometryType = GeometryType.Polygon
-        schema_def.fields = DATASET_DEFAULT_SCHEMA_DEF.copy()
+        schema_def.fields = FieldDef.concat(DATASET_DEFAULT_SCHEMA_DEF, new_fields)
         return schema_def
 
     def run(self, feature_store, processing_args):
         """
@@ -199,9 +204,8 @@ def custom_dataset_op(dataset_ob, operation_args):
         logging.info('Done!')
 
         for dataset in datasets:
-            dataset.user_data['areaOfInterest'] = products[0].areaOfInterest
-            dataset.user_data['product_type'] = self.product
-            dataset.user_data['product_date'] = product_date
+            dataset.user_data['productType'] = self.product
+            dataset.user_data['productDate'] = product_date
             yield dataset
 
         # pass
diff --git a/geodataflow/pipeline/filters/RasterCalc.py b/geodataflow/pipeline/filters/RasterCalc.py
index c9537b3..f5dcc70 100644
--- a/geodataflow/pipeline/filters/RasterCalc.py
+++ b/geodataflow/pipeline/filters/RasterCalc.py
@@ -55,7 +55,7 @@ def description(self) -> str:
         """
         Returns the Description text of this Module.
         """
-        return 'Performs raster calc algebraical operations to input Rasters.'
+        return 'Performs raster calc algebraic operations to input Rasters.'
 
     def category(self) -> str:
         """
diff --git a/geodataflow/pipeline/writers/TimeseriesPlot.py b/geodataflow/pipeline/writers/TimeseriesPlot.py
new file mode 100644
index 0000000..11e3362
--- /dev/null
+++ b/geodataflow/pipeline/writers/TimeseriesPlot.py
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-
+"""
+===============================================================================
+
+   GeodataFlow:
+   Toolkit to run workflows on Geospatial & Earth Observation (EO) data.
+
+   Copyright (c) 2022, Alvaro Huarte. All rights reserved.
+
+   Redistribution and use of this code in source and binary forms, with
+   or without modification, are permitted provided that the following
+   conditions are met:
+   * Redistributions of source code must retain the above copyright notice,
+     this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above copyright notice,
+     this list of conditions and the following disclaimer in the documentation
+     and/or other materials provided with the distribution.
+
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+   TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+   PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+   CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+   EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+   PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+   OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+   WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+   OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SAMPLE CODE, EVEN IF
+   ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+===============================================================================
+"""
+
+import os
+import datetime
+import numpy as np
+from typing import Dict, Iterable
+from geodataflow.pipeline.basictypes import AbstractWriter
+from geodataflow.core.capabilities import StoreCapabilities
+from geodataflow.core.processingargs import ProcessingUtils
+
+
+class TimeseriesPlot(AbstractWriter):
+    """
+    This module plots Time Series of values to an image to visualize trends
+    in counts or numerical values over time.
+    """
+    def __init__(self):
+        AbstractWriter.__init__(self)
+        self.connectionString = ''
+        # Graph properties.
+        self.title = 'Time series Plot'
+        self.xLabel = 'Date'
+        self.yLabel = 'Value'
+        self.figureXSize = 800
+        self.figureYSize = 600
+        # Time Series properties.
+        self.label = ''
+        self.expressionValue = 'float(mean)'
+        self.attributeDate = 'productDate'
+        self.dateFormatter = '%Y-%m-%d'
+        self.dateRange = 10
+
+    def description(self) -> str:
+        """
+        Returns the Description text of this Module.
+        """
+        return 'It plots to image Time Series of values to visualize trends in counts or numerical values over time.'
+
+    def category(self) -> str:
+        """
+        Returns the category or group to which this Module belongs.
+        """
+        return 'Output'
+
+    def params(self) -> Dict:
+        """
+        Returns the declaration of parameters supported by this Module.
+        """
+        return {
+            'connectionString': {
+                'description': 'Connection string or Filename of the image to output.',
+                'dataType': 'string',
+                'default': 'plot.png',
+                'extensions': ['.png', '.jpg']
+            },
+            'title': {
+                'description': 'Title of the Graph.',
+                'dataType': 'string',
+                'default': 'Time series Plot'
+            },
+            'xLabel': {
+                'description': 'Label for the x-axis.',
+                'dataType': 'string',
+                'default': 'Date'
+            },
+            'yLabel': {
+                'description': 'Label for the y-axis.',
+                'dataType': 'string',
+                'default': 'Value'
+            },
+            'figureXSize': {
+                'description': 'The number of pixels of the image in the X direction.',
+                'dataType': 'int',
+                'default': 800
+            },
+            'figureYSize': {
+                'description': 'The number of pixels of the image in the Y direction.',
+                'dataType': 'int',
+                'default': 600
+            },
+            'expressionValue': {
+                'description':
+                    'Algebraic expression to calculate the values, or list of them separated by commas.',
+                'dataType': 'calc'
+            },
+            'attributeDate': {
+                'description': 'Attribute containing the Date in format "%Y-%m-%d".',
+                'dataType': 'string'
+            },
+            'label': {
+                'description':
+                    'Optional label, or list of them separated by commas, for the Legend. None by default.',
+                'dataType': 'string'
+            },
+            'dateFormatter': {
+                'description': 'Format pattern of Dates for the x-axis.',
+                'dataType': 'string',
+                'default': '%Y-%m-%d'
+            },
+            'dateRange': {
+                'description': 'The interval between each iteration for the x-axis ticker.',
+                'dataType': 'int',
+                'default': 10
+            }
+        }
+
+    def test_capability(self, connection_string: str, capability: StoreCapabilities) -> bool:
+        """
+        Returns if this Module supports the specified ConnectionString and named StoreCapability.
+        """
+        file_name, file_ext = os.path.splitext(connection_string)
+        return file_name and file_ext and file_ext in ['.png', '.jpg', '.jpeg']
+
+    def starting_run(self, schema_def, pipeline, processing_args):
+        """
+        Starting a new Workflow on Geospatial data.
+        """
+        from geodataflow.geoext.commonutils import DataUtils
+
+        for item_string in DataUtils.enumerate_single_connection_string(self.connectionString):
+            #
+            if not self.test_capability(item_string, StoreCapabilities.CREATE):
+                raise Exception('The image format "{}" is not supported!'.format(item_string))
+
+            from geodataflow.core.schemadef import SchemaDef, GeometryType
+            from geodataflow.geoext.dataset import DATASET_DEFAULT_SCHEMA_DEF
+
+            schema_def = SchemaDef(type='RasterLayer',
+                                   name=DataUtils.get_layer_name(item_string),
+                                   srid=0,
+                                   crs=None,
+                                   geometryType=GeometryType.Polygon,
+                                   envelope=[0, 0, self.figureXSize, self.figureYSize],
+                                   fields=DATASET_DEFAULT_SCHEMA_DEF.copy())
+
+            return schema_def
+
+        return None
+
+    def run(self, feature_store, processing_args):
+        """
+        Transform input Geospatial data. It should return a new iterable set of Geospatial features.
+        """
+        timeseries = []
+
+        expressions = \
+            self.expressionValue.split(',') if isinstance(self.expressionValue, str) else self.expressionValue
+        labels = \
+            self.label.split(',') if isinstance(self.label, str) and self.label else self.label
+
+        if isinstance(expressions, list) and isinstance(labels, list) and len(expressions) != len(labels):
+            raise Exception(
+                'TimeseriesPlot does not support "expressionValue" and "label" settings with different size.')
+
+        # Extract stream of Dates and array of Values.
+        for feature in feature_store:
+            date_ = feature.properties[self.attributeDate] if self.attributeDate else None
+            values = [
+                ProcessingUtils.eval_function(xpr, feature.properties, False)
+                for xpr in expressions
+            ]
+            timeseries.append({'date': date_, 'values': values})
+
+        # Refactoring stream of Date/Values to numpy arrays.
+        timeseries = TimeseriesPlot._timeseries_np(timeseries)
+
+        # Draw plot!
+        if len(timeseries) > 0:
+            import matplotlib
+            import matplotlib.dates as m_dates
+            matplotlib.use('agg')
+            import matplotlib.pyplot as plt
+
+            from geodataflow.geoext.gdalenv import GdalEnv
+            from geodataflow.geoext.dataset import GdalDataset
+
+            file_ext = os.path.splitext(self.connectionString)[1]
+            relative_date = '%Y' not in self.dateFormatter
+            has_labels = False
+
+            # Main attributes of Plot.
+            inv_dpi_ = 0.01
+            fig, ax_ = plt.subplots(figsize=(inv_dpi_*self.figureXSize, inv_dpi_*self.figureYSize))
+            if self.title:
+                ax_.set_title(self.title)
+            if self.xLabel:
+                ax_.set_xlabel(self.xLabel)
+            if self.yLabel:
+                ax_.set_ylabel(self.yLabel)
+
+            # Drawing Time Series collection.
+            # TODO: Allow customization of symbology of Lines.
+            settings = {}
+            date_min = timeseries[0].min().astype(datetime.datetime)
+            date_max = timeseries[0].max().astype(datetime.datetime)
+            #
+            for index, values in enumerate(timeseries[1:]):
+                x = timeseries[0]
+                y = values
+                l_label = labels[index] if isinstance(labels, list) else None
+                l_style = settings.get('linestyle', '-')
+                l_width = settings.get('linewidth', 1)
+                l_color = settings.get('color', None)
+                m_style = settings.get('marker', None)
+                m_wsize = settings.get('markersize', 3)
+
+                ax_.plot(x, y,
+                         color=l_color,
+                         linestyle=l_style,
+                         linewidth=l_width,
+                         marker=m_style,
+                         markersize=m_wsize,
+                         label=l_label)
+
+                has_labels = has_labels or l_label
+
+            if relative_date:
+                day_range = (date_max - date_min).days
+                while day_range > 365:
+                    day_range = day_range - 365
+            else:
+                day_range = (date_max - date_min).days
+
+            ax_font_size = max(1, int(8 * self.figureXSize / 800))
+            day_interval = max(1, 2 * int(day_range / self.dateRange))
+            ax_.xaxis.set_major_locator(m_dates.DayLocator(interval=day_interval))
+            ax_.xaxis.set_major_formatter(m_dates.DateFormatter(self.dateFormatter))
+
+            for tick in ax_.xaxis.get_major_ticks():
+                tick.label1.set_fontsize(max(1, ax_font_size - 1))
+
+            fig.autofmt_xdate()
+            if has_labels:
+                ax_.legend(loc='best', fontsize=ax_font_size)
+
+            ax_.grid()
+            fig.savefig(self.connectionString, format=file_ext[1:])
+            plt.close()
+
+            # Load GDAL dataset from the rasterized Graph.
+            gdal_env = GdalEnv.default()
+            gdal = gdal_env.gdal()
+            dataset = gdal.Open(self.connectionString, gdal.GA_ReadOnly)
+            yield GdalDataset(dataset, gdal_env)
+
+        pass
+
+    @staticmethod
+    def _timeseries_np(timeseries: Iterable[Dict]):
+        """
+        Convert stream of Date/Values to numpy arrays.
+        """
+        today_date = datetime.date.today()
+        result = []
+
+        for date_index, ts in enumerate(timeseries):
+            date_ = ts.get('date')
+            values = ts.get('values')
+
+            if not date_:
+                date_ = today_date + datetime.timedelta(days=date_index)
+                date_ = date_.strftime('%Y-%m-%d')
+
+            result.append([np.datetime64(date_)] + values)
+
+        result = sorted(result, key=lambda x: x[0], reverse=False)
+        result = np.transpose(np.array(result))
+        return result
diff --git a/requirements.txt b/requirements.txt
index b19a957..0e7b736 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,3 +6,4 @@ shapely
 GDAL
 pandas
 geopandas
+matplotlib
diff --git a/tests/data/test_raster_plot_timeseries.json b/tests/data/test_raster_plot_timeseries.json
new file mode 100644
index 0000000..3b29cb1
--- /dev/null
+++ b/tests/data/test_raster_plot_timeseries.json
@@ -0,0 +1,31 @@
+{
+  "pipeline": [
+    {
+      "type": "RasterReader",
+      "connectionString": "${TEST_DATA_PATH}/NDVI-tiff-sample-*.tif"
+    },
+    {
+      "type": "RasterStats",
+      "stats": ["mean", "median"],
+      "polygonize": true
+    },
+    {
+      "type": "TimeseriesPlot",
+      "connectionString": "${TEST_OUTPUT_PATH}/plot.png",
+
+      # Graph properties.
+      "title": "Time series Plot",
+      "xLabel": "Date",
+      "yLabel": "NDVI",
+      "figureXSize": 800,
+      "figureYSize": 600,
+
+      # Time Series properties.
+      "label": ["Mean", "Median"],
+      "expressionValue": ["int(mean * 100)", "int(median * 100)"],
+      "attributeDate": "",
+      "dateFormatter": "%Y-%m-%d",
+      "dateRange": 10
+    }
+  ]
+}
diff --git a/tests/test_geodataflow.py b/tests/test_geodataflow.py
index 5fdb58a..c030aaf 100644
--- a/tests/test_geodataflow.py
+++ b/tests/test_geodataflow.py
@@ -389,6 +389,26 @@ def test_func(features):
         self.process_pipeline(test_func, pipeline_file)
         pass
 
+    def test_raster_plot_timeseries(self):
+        """
+        Test TimeseriesPlot module.
+        """
+        pipeline_file = os.path.join(DATA_FOLDER, 'test_raster_plot_timeseries.json')
+
+        def test_func(features):
+            """ Test results """
+            self.assertEqual(len(features), 1)
+            feature = features[0]
+            self.assertEqual(feature.type, 'Raster')
+            self.assertEqual(feature.geometry.geom_type, 'Polygon')
+            dataset = feature.dataset()
+            self.assertEqual(dataset.RasterCount, 4)
+            self.assertEqual(dataset.RasterXSize, 800)
+            self.assertEqual(dataset.RasterYSize, 600)
+
+        self.process_pipeline(test_func, pipeline_file)
+        pass
+
     def test_schema_of_stage(self):
         """
         Test reading the Schema of a Stage.
diff --git a/ui/js/api/modules.js b/ui/js/api/modules.js
index 4555339..c8f824b 100644
--- a/ui/js/api/modules.js
+++ b/ui/js/api/modules.js
@@ -1 +1 @@
-var modules = {"InputParam": {"name": "InputParam", "type": "filter", "alias": "InputParam", "category": "Graph", "description": "Acts as Feature provider of a Module's parameter", "params": {}}, "RasterCalc": {"name": "RasterCalc", "type": "filter", "alias": "Calc", "category": "Raster", "description": "Performs raster calc algebraical operations to input Rasters.", "params": {"bands": {"description": "List of Band names defined in Expression, or a string separated by commas.", "dataType": "string", "default": "", "placeHolder": "B04,B03,B02,B08"}, "expression": {"description": "Raster calculator expression with numpy syntax, e.g. 
(B08\u2013B04)/(B08+B04).", "dataType": "calc", "default": "", "placeHolder": "(B08 - B04) / (B08 + B04)"}, "noData": {"description": "NoData value of output Dataset.", "dataType": "float", "default": -9999.0}}}, "GeometryTransform": {"name": "GeometryTransform", "type": "filter", "alias": "Transform", "category": "Geometry", "description": "Transforms input Geometries or Rasters between two Spatial Reference Systems (CRS).", "params": {"sourceCrs": {"description": "Source Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported. It uses input CRS when this param is not specified.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "targetCrs": {"description": "Output Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}}}, "TableQuery": {"name": "TableQuery", "type": "filter", "alias": "Query", "category": "Table", "description": "Queries the columns of a GeoPandas DataFrame with a boolean expression.", "params": {"expression": {"description": "The query string to evaluate. You can refer to variables in the environment by prefixing them with an \u2018@\u2019 character like @a + @b.", "dataType": "filter"}}}, "ConnectionJoin": {"name": "ConnectionJoin", "type": "filter", "alias": "ConnectionJoin", "category": "Graph", "description": "Joins the streams of data of several input Modules in one unique output.", "params": {"stages": {"description": "Collection of Modules (Using the \"StageId\" attribute) to merge.", "dataType": "array"}}}, "RasterPolygonize": {"name": "RasterPolygonize", "type": "filter", "alias": "Polygonize", "category": "Raster", "description": "Returns the Geometry containing all connected regions of nodata pixels in input Datasets.", "params": {"bandIndex": {"description": "Index of Band from which to create Geometries.", "dataType": "int", "default": 0}}}, "EOProductDataset": {"name": "EOProductDataset", "type": "filter", "alias": "Dataset", "category": "EO STAC Imagery", "description": "Extracts Datasets from EO/STAC Collections via spatial & alphanumeric filters.", "params": {"driver": {"description": "Driver class name that implements EO Providers.", "dataType": "string", "default": "STAC", "options": ["STAC", "EODAG"], "labels": ["STAC", "EODAG"]}, "provider": {"description": "Provider name or API Endpoint that provides info about EO Products.", "dataType": "string", "default": "https://earth-search.aws.element84.com/v0/search"}, "product": {"description": "EO Product type or Collection from which to fetch data.", "dataType": "string", "default": "sentinel-s2-l2a-cogs"}, "startDate": {"description": "Start date of EO Products to fetch (Optional). \"$TODAY()\" is supported.", "dataType": "date"}, "endDate": {"description": "End date of EO Products to fetch (Optional). \"$TODAY()\" is supported.", "dataType": "date"}, "closestToDate": {"description": "Select only those EO Products which Date is the closest to the specified (Optional).", "dataType": "date"}, "windowDate": {"description": "Days around \"closestToDate\" when \"startDate\" and \"endDate\" are not specified.", "dataType": "int", "default": 5}, "filter": {"description": "Attribute filter string of EO Products to fetch (Optional).", "dataType": "filter"}, "preserveInputCrs": {"description": "Preserve input CRS, otherwise Geometries are transformed to \"EPSG:4326\".", "dataType": "bool", "default": true}, "configVars": {"description": "Environment variables separated by commas. 
Commonly used to configure credentials.", "dataType": "string", "default": "AWS_NO_SIGN_REQUEST=YES"}, "bands": {"description": "List of Bands to fetch, or a string separated by commas. Empty means fetch all.", "dataType": "string", "default": "B04,B03,B02,B08", "placeHolder": "B04,B03,B02,B08"}, "groupByDate": {"description": "Group EO Products by Date.", "dataType": "bool", "default": true}, "clipByAreaOfInterest": {"description": "Clip EO Products by geometry of input AOI.", "dataType": "bool", "default": true}}}, "RasterTransform": {"name": "RasterTransform", "type": "filter", "alias": "Transform", "category": "Raster", "description": "Transforms input Rasters between two Spatial Reference Systems (CRS).", "params": {"sourceCrs": {"description": "Source Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported. It uses input CRS when this param is not specified.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "targetCrs": {"description": "Output Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "resampleAlg": {"description": "Resampling strategy.", "dataType": "int", "default": 1, "options": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12], "labels": ["NearestNeighbour", "Bilinear", "Cubic", "CubicSpline", "Lanczos", "Average", "Mode", "Max", "Min", "Med", "Q1", "Q3"]}}}, "FeatureWriter": {"name": "FeatureWriter", "type": "writer", "alias": "FeatureWriter", "category": "Output", "description": "Writes Features with Geometries to a Geospatial DataStore using OGR providers.", "params": {"connectionString": {"description": "Connection string of the FeatureStore ('.geojson', '.gpkg', '.shp.zip' are supported).", "dataType": "string", "default": "output.gpkg", "extensions": [".geojson", ".gpkg", ".shp.zip"]}}}, "TablePack": {"name": "TablePack", "type": "filter", "alias": "Pack", "category": "Table", "description": "Packs input Features into a GeoPandas DataFrame.", "params": {}}, "FeatureCache": {"name": "FeatureCache", "type": "filter", "alias": "Cache", "category": "Feature", "description": "Caches data of inputs to speedup the management of repetitive invocations of Modules.", "params": {}}, "FeatureLimit": {"name": "FeatureLimit", "type": "filter", "alias": "Limits", "category": "Feature", "description": "Validates that input Geometries do not be greater than a Limit.", "params": {"fullAreaLimit": {"description": "Maximum area covered by all input Geometries (Optional).", "dataType": "float"}, "areaLimit": {"description": "Maximum area covered by each input Geometry (Optional).", "dataType": "float"}, "countLimit": {"description": "Maximum number of input Geometries (Optional).", "dataType": "int"}}}, "GeometryCentroid": {"name": "GeometryCentroid", "type": "filter", "alias": "Centroid", "category": "Geometry", "description": "Returns the Centroid of input Geometries.", "params": {}}, "RasterStats": {"name": "RasterStats", "type": "filter", "alias": "Stats", "category": "Raster", "description": "Summarizes geospatial raster datasets and transform them to vector geometries.", "params": {"stats": {"description": "Method, or list of methods separated by commas, of summarizing and aggregating the raster values of input Datasets--.", "dataType": "string", "default": "median", "options": ["count", "majority", "max", "mean", "median", "min", "minority", "nodataCount", "percentile_10", "percentile_25", "percentile_75", "percentile_90", "range", "size", "std", "sum", "unique"], "labels": ["count", 
"majority", "max", "mean", "median", "min", "minority", "nodataCount", "percentile_10", "percentile_25", "percentile_75", "percentile_90", "range", "size", "std", "sum", "unique"]}, "bandIndex": {"description": "Index of Band from which to calculate Raster statistics.", "dataType": "int", "default": 0}, "polygonize": {"description": "Creates vector polygons for all connected regions of no-data pixels in the raster.", "dataType": "bool", "default": false}}}, "FeatureReader": {"name": "FeatureReader", "type": "reader", "alias": "FeatureReader", "category": "Input", "description": "Reads Features with Geometries from a Geospatial DataSource using OGR providers.", "params": {"connectionString": {"description": "Connection string of the Feature Store.", "dataType": ["file", "url", "geojson"], "extensions": [".geojson", ".gpkg", ".shp.zip"]}, "where": {"description": "Attribute query string when fetching features (Optional).", "dataType": "filter"}, "spatialFilter": {"description": "Geometry to be used as spatial filter when fetching features (Optional).", "dataType": "geometry"}, "countLimit": {"description": "Maximum number of Features to fetch (Optional).", "dataType": "int"}}}, "RasterClip": {"name": "RasterClip", "type": "filter", "alias": "Clip", "category": "Raster", "description": "Clips input Rasters by a Geometry.", "params": {"clipGeometries": {"description": "Collection of Geometries that will clip input Features.", "dataType": "input"}, "cutline": {"description": "Clipping using lines of Geometries, otherwise just envelopes are used. True by default.", "dataType": "bool", "default": true}, "allTouched": {"description": "Ensures that that all pixels overlapping the cutline polygon will be selected, not just those whose center point falls within the polygon. 
True by default.", "dataType": "bool", "default": true}}}, "SpatialRelation": {"name": "SpatialRelation", "type": "filter", "alias": "SpatialRelation", "category": "Geometry", "description": "Returns input Features that match a Spatial Relationship with one or more other Geometries.", "params": {"relationship": {"description": "Spatial Relationship to validate, 'Intersects' by default.", "dataType": "int", "default": 3, "options": [1, 2, 3, 4, 5, 6, 7, 8], "labels": ["Equals", "Disjoint", "Intersects", "Touches", "Crosses", "Within", "Contains", "Overlaps"]}, "otherGeometries": {"description": "Collection of Geometries with which input Features should validate a Spatial Relationship.", "dataType": "input"}}}, "RasterReader": {"name": "RasterReader", "type": "reader", "alias": "RasterReader", "category": "Input", "description": "Reads Datasets from a Geospatial RasterSource using GDAL providers.", "params": {"connectionString": {"description": "Connection string of the Raster Store.", "dataType": ["file", "url"], "extensions": [".tiff", ".tif", ".ecw", ".jp2"]}, "countLimit": {"description": "Maximum number of Datasets to fetch (Optional).", "dataType": "int"}}}, "EOProductCatalog": {"name": "EOProductCatalog", "type": "filter", "alias": "Catalog", "category": "EO STAC Imagery", "description": "Extracts Metadata from EO/STAC Collections via spatial & alphanumeric filters.", "params": {"driver": {"description": "Driver class name that implements EO Providers.", "dataType": "string", "default": "STAC", "options": ["STAC", "EODAG"], "labels": ["STAC", "EODAG"]}, "provider": {"description": "Provider name or API Endpoint that provides info about EO Products.", "dataType": "string", "default": "https://earth-search.aws.element84.com/v0/search"}, "product": {"description": "EO Product type or Collection from which to fetch data.", "dataType": "string", "default": "sentinel-s2-l2a-cogs"}, "startDate": {"description": "Start date of EO Products to fetch (Optional). \"$TODAY()\" is supported.", "dataType": "date"}, "endDate": {"description": "End date of EO Products to fetch (Optional). 
\"$TODAY()\" is supported.", "dataType": "date"}, "closestToDate": {"description": "Select only those EO Products which Date is the closest to the specified (Optional).", "dataType": "date"}, "windowDate": {"description": "Days around \"closestToDate\" when \"startDate\" and \"endDate\" are not specified.", "dataType": "int", "default": 5}, "filter": {"description": "Attribute filter string of EO Products to fetch (Optional).", "dataType": "filter"}, "preserveInputCrs": {"description": "Preserve input CRS, otherwise Geometries are transformed to \"EPSG:4326\".", "dataType": "bool", "default": true}}}, "TableUnpack": {"name": "TableUnpack", "type": "filter", "alias": "Unpack", "category": "Table", "description": "Unpacks input GeoPandas DataFrames to a stream of Features.", "params": {}}, "TimeseriesPlot": {"name": "TimeseriesPlot", "type": "writer", "alias": "TimeseriesPlot", "category": "Output", "description": "Generic module", "params": {"driver": {"description": "My Value", "dataType": "int", "default": 1, "options": [1, 2], "labels": ["Value-1", "Value-2"]}}}, "GeometryBuffer": {"name": "GeometryBuffer", "type": "filter", "alias": "Buffer", "category": "Geometry", "description": "Computes a buffer area around a geometry having the given width.", "params": {"distance": {"description": "Distance of buffer to apply to input Geometry.", "dataType": "float", "default": 1.0}, "capStyle": {"description": "Caps style.", "dataType": "int", "default": 1, "options": [1, 2, 3], "labels": ["round", "flat", "square"]}, "joinStyle": {"description": "Join style.", "dataType": "int", "default": 1, "options": [1, 2, 3], "labels": ["round", "mitre", "bevel"]}}}, "TableEval": {"name": "TableEval", "type": "filter", "alias": "Eval", "category": "Table", "description": "Evaluates a string describing operations on GeoPandas DataFrame columns.", "params": {"expression": {"description": "The query to evaluate. 
Operates on columns only, not specific rows.", "dataType": "calc"}}}, "RasterWriter": {"name": "RasterWriter", "type": "writer", "alias": "RasterWriter", "category": "Output", "description": "Writes Datasets to a Geospatial RasterStore using GDAL providers.", "params": {"connectionString": {"description": "Connection string of the Raster Store (Common GDAL extensions are supported).", "dataType": "string", "default": "output.tif", "extensions": [".tif", ".ecw", ".jp2", ".png", ".jpg"]}, "formatOptions": {"description": "GDAL format options of output Dataset (Optional).", "dataType": "string", "default": "-of COG"}}}, "RasterSplit": {"name": "RasterSplit", "type": "filter", "alias": "Split", "category": "Raster", "description": "Splits input Rasters to tiles.", "params": {"tileSizeX": {"description": "Size of output tiles in X-direction (Pixels).", "dataType": "int", "default": 512}, "tileSizeY": {"description": "Size of output tiles in Y-direction (Pixels).", "dataType": "int", "default": 512}, "paddingVal": {"description": "Extra padding to apply to output", "dataType": "int", "default": 0}}}, "RasterMosaic": {"name": "RasterMosaic", "type": "filter", "alias": "Mosaic", "category": "Raster", "description": "Merges all input Rasters to one unique Output.", "params": {}}}; \ No newline at end of file +var modules = {"RasterWriter": {"name": "RasterWriter", "type": "writer", "alias": "RasterWriter", "category": "Output", "description": "Writes Datasets to a Geospatial RasterStore using GDAL providers.", "params": {"connectionString": {"description": "Connection string of the Raster Store (Common GDAL extensions are supported).", "dataType": "string", "default": "output.tif", "extensions": [".tif", ".ecw", ".jp2", ".png", ".jpg"]}, "formatOptions": {"description": "GDAL format options of output Dataset (Optional).", "dataType": "string", "default": "-of COG"}}}, "TableUnpack": {"name": "TableUnpack", "type": "filter", "alias": "Unpack", "category": "Table", "description": "Unpacks input GeoPandas DataFrames to a stream of Features.", "params": {}}, "RasterPolygonize": {"name": "RasterPolygonize", "type": "filter", "alias": "Polygonize", "category": "Raster", "description": "Returns the Geometry containing all connected regions of nodata pixels in input Datasets.", "params": {"bandIndex": {"description": "Index of Band from which to create Geometries.", "dataType": "int", "default": 0}}}, "FeatureLimit": {"name": "FeatureLimit", "type": "filter", "alias": "Limits", "category": "Feature", "description": "Validates that input Geometries do not be greater than a Limit.", "params": {"fullAreaLimit": {"description": "Maximum area covered by all input Geometries (Optional).", "dataType": "float"}, "areaLimit": {"description": "Maximum area covered by each input Geometry (Optional).", "dataType": "float"}, "countLimit": {"description": "Maximum number of input Geometries (Optional).", "dataType": "int"}}}, "TableEval": {"name": "TableEval", "type": "filter", "alias": "Eval", "category": "Table", "description": "Evaluates a string describing operations on GeoPandas DataFrame columns.", "params": {"expression": {"description": "The query to evaluate. 
Operates on columns only, not specific rows.", "dataType": "calc"}}}, "GeometryTransform": {"name": "GeometryTransform", "type": "filter", "alias": "Transform", "category": "Geometry", "description": "Transforms input Geometries or Rasters between two Spatial Reference Systems (CRS).", "params": {"sourceCrs": {"description": "Source Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported. It uses input CRS when this param is not specified.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "targetCrs": {"description": "Output Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}}}, "InputParam": {"name": "InputParam", "type": "filter", "alias": "InputParam", "category": "Graph", "description": "Acts as Feature provider of a Module's parameter", "params": {}}, "RasterReader": {"name": "RasterReader", "type": "reader", "alias": "RasterReader", "category": "Input", "description": "Reads Datasets from a Geospatial RasterSource using GDAL providers.", "params": {"connectionString": {"description": "Connection string of the Raster Store.", "dataType": ["file", "url"], "extensions": [".tiff", ".tif", ".ecw", ".jp2"]}, "countLimit": {"description": "Maximum number of Datasets to fetch (Optional).", "dataType": "int"}}}, "FeatureReader": {"name": "FeatureReader", "type": "reader", "alias": "FeatureReader", "category": "Input", "description": "Reads Features with Geometries from a Geospatial DataSource using OGR providers.", "params": {"connectionString": {"description": "Connection string of the Feature Store.", "dataType": ["file", "url", "geojson"], "extensions": [".geojson", ".gpkg", ".shp.zip"]}, "where": {"description": "Attribute query string when fetching features (Optional).", "dataType": "filter"}, "spatialFilter": {"description": "Geometry to be used as spatial filter when fetching features (Optional).", "dataType": "geometry"}, "countLimit": {"description": "Maximum number of Features to fetch (Optional).", "dataType": "int"}}}, "RasterClip": {"name": "RasterClip", "type": "filter", "alias": "Clip", "category": "Raster", "description": "Clips input Rasters by a Geometry.", "params": {"clipGeometries": {"description": "Collection of Geometries that will clip input Features.", "dataType": "input"}, "cutline": {"description": "Clipping using lines of Geometries, otherwise just envelopes are used. True by default.", "dataType": "bool", "default": true}, "allTouched": {"description": "Ensures that that all pixels overlapping the cutline polygon will be selected, not just those whose center point falls within the polygon. True by default.", "dataType": "bool", "default": true}}}, "EOProductDataset": {"name": "EOProductDataset", "type": "filter", "alias": "Dataset", "category": "EO STAC Imagery", "description": "Extracts Datasets from EO/STAC Collections via spatial & alphanumeric filters.", "params": {"driver": {"description": "Driver class name that implements EO Providers.", "dataType": "string", "default": "STAC", "options": ["STAC", "EODAG"], "labels": ["STAC", "EODAG"]}, "provider": {"description": "Provider name or API Endpoint that provides info about EO Products.", "dataType": "string", "default": "https://earth-search.aws.element84.com/v0/search"}, "product": {"description": "EO Product type or Collection from which to fetch data.", "dataType": "string", "default": "sentinel-s2-l2a-cogs"}, "startDate": {"description": "Start date of EO Products to fetch (Optional). 
\"$TODAY()\" is supported.", "dataType": "date"}, "endDate": {"description": "End date of EO Products to fetch (Optional). \"$TODAY()\" is supported.", "dataType": "date"}, "closestToDate": {"description": "Select only those EO Products which Date is the closest to the specified (Optional).", "dataType": "date"}, "windowDate": {"description": "Days around \"closestToDate\" when \"startDate\" and \"endDate\" are not specified.", "dataType": "int", "default": 5}, "filter": {"description": "Attribute filter string of EO Products to fetch (Optional).", "dataType": "filter"}, "preserveInputCrs": {"description": "Preserve input CRS, otherwise Geometries are transformed to \"EPSG:4326\".", "dataType": "bool", "default": true}, "configVars": {"description": "Environment variables separated by commas. Commonly used to configure credentials.", "dataType": "string", "default": "AWS_NO_SIGN_REQUEST=YES"}, "bands": {"description": "List of Bands to fetch, or a string separated by commas. Empty means fetch all.", "dataType": "string", "default": "B04,B03,B02,B08", "placeHolder": "B04,B03,B02,B08"}, "groupByDate": {"description": "Group EO Products by Date.", "dataType": "bool", "default": true}, "clipByAreaOfInterest": {"description": "Clip EO Products by geometry of input AOI.", "dataType": "bool", "default": true}}}, "TableQuery": {"name": "TableQuery", "type": "filter", "alias": "Query", "category": "Table", "description": "Queries the columns of a GeoPandas DataFrame with a boolean expression.", "params": {"expression": {"description": "The query string to evaluate. You can refer to variables in the environment by prefixing them with an \u2018@\u2019 character like @a + @b.", "dataType": "filter"}}}, "GeometryBuffer": {"name": "GeometryBuffer", "type": "filter", "alias": "Buffer", "category": "Geometry", "description": "Computes a buffer area around a geometry having the given width.", "params": {"distance": {"description": "Distance of buffer to apply to input Geometry.", "dataType": "float", "default": 1.0}, "capStyle": {"description": "Caps style.", "dataType": "int", "default": 1, "options": [1, 2, 3], "labels": ["round", "flat", "square"]}, "joinStyle": {"description": "Join style.", "dataType": "int", "default": 1, "options": [1, 2, 3], "labels": ["round", "mitre", "bevel"]}}}, "FeatureCache": {"name": "FeatureCache", "type": "filter", "alias": "Cache", "category": "Feature", "description": "Caches data of inputs to speedup the management of repetitive invocations of Modules.", "params": {}}, "TimeseriesPlot": {"name": "TimeseriesPlot", "type": "writer", "alias": "TimeseriesPlot", "category": "Output", "description": "It plots to image Time Series of values to visualize trends in counts or numerical values over time.", "params": {"connectionString": {"description": "Connection string or Filename of the image to output.", "dataType": "string", "default": "plot.png", "extensions": [".png", ".jpg"]}, "title": {"description": "Title of the Graph.", "dataType": "string", "default": "Time series Plot"}, "xLabel": {"description": "Label for the x-axis.", "dataType": "string", "default": "Date"}, "yLabel": {"description": "Label for the y-axis.", "dataType": "string", "default": "Value"}, "figureXSize": {"description": "The number of pixels of the image in the X direction.", "dataType": "int", "default": 800}, "figureYSize": {"description": "The number of pixels of the image in the Y direction.", "dataType": "int", "default": 600}, "expressionValue": {"description": "Algebraic expression to calculate 
the values, or list of them separated by commas.", "dataType": "calc"}, "attributeDate": {"description": "Attribute containing the Date in format \"%Y-%m-%d\".", "dataType": "string"}, "label": {"description": "Optional label, or list of them separated by commas, for the Legend. None by default.", "dataType": "string"}, "dateFormatter": {"description": "Format pattern of Dates for the x-axis.", "dataType": "string", "default": "%Y-%m-%d"}, "dateRange": {"description": "The interval between each iteration for the x-axis ticker.", "dataType": "int", "default": 10}}}, "RasterSplit": {"name": "RasterSplit", "type": "filter", "alias": "Split", "category": "Raster", "description": "Splits input Rasters to tiles.", "params": {"tileSizeX": {"description": "Size of output tiles in X-direction (Pixels).", "dataType": "int", "default": 512}, "tileSizeY": {"description": "Size of output tiles in Y-direction (Pixels).", "dataType": "int", "default": 512}, "paddingVal": {"description": "Extra padding to apply to output", "dataType": "int", "default": 0}}}, "GeometryCentroid": {"name": "GeometryCentroid", "type": "filter", "alias": "Centroid", "category": "Geometry", "description": "Returns the Centroid of input Geometries.", "params": {}}, "TablePack": {"name": "TablePack", "type": "filter", "alias": "Pack", "category": "Table", "description": "Packs input Features into a GeoPandas DataFrame.", "params": {}}, "RasterMosaic": {"name": "RasterMosaic", "type": "filter", "alias": "Mosaic", "category": "Raster", "description": "Merges all input Rasters to one unique Output.", "params": {}}, "RasterStats": {"name": "RasterStats", "type": "filter", "alias": "Stats", "category": "Raster", "description": "Summarizes geospatial raster datasets and transform them to vector geometries.", "params": {"stats": {"description": "Method, or list of methods separated by commas, of summarizing and aggregating the raster values of input Datasets--.", "dataType": "string", "default": "median", "options": ["count", "majority", "max", "mean", "median", "min", "minority", "nodataCount", "percentile_10", "percentile_25", "percentile_75", "percentile_90", "range", "size", "std", "sum", "unique"], "labels": ["count", "majority", "max", "mean", "median", "min", "minority", "nodataCount", "percentile_10", "percentile_25", "percentile_75", "percentile_90", "range", "size", "std", "sum", "unique"]}, "bandIndex": {"description": "Index of Band from which to calculate Raster statistics.", "dataType": "int", "default": 0}, "polygonize": {"description": "Creates vector polygons for all connected regions of no-data pixels in the raster.", "dataType": "bool", "default": false}}}, "SpatialRelation": {"name": "SpatialRelation", "type": "filter", "alias": "SpatialRelation", "category": "Geometry", "description": "Returns input Features that match a Spatial Relationship with one or more other Geometries.", "params": {"relationship": {"description": "Spatial Relationship to validate, 'Intersects' by default.", "dataType": "int", "default": 3, "options": [1, 2, 3, 4, 5, 6, 7, 8], "labels": ["Equals", "Disjoint", "Intersects", "Touches", "Crosses", "Within", "Contains", "Overlaps"]}, "otherGeometries": {"description": "Collection of Geometries with which input Features should validate a Spatial Relationship.", "dataType": "input"}}}, "RasterTransform": {"name": "RasterTransform", "type": "filter", "alias": "Transform", "category": "Raster", "description": "Transforms input Rasters between two Spatial Reference Systems (CRS).", "params": 
{"sourceCrs": {"description": "Source Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported. It uses input CRS when this param is not specified.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "targetCrs": {"description": "Output Spatial Reference System (CRS), SRID, WKT, PROJ formats are supported.", "dataType": "crs", "placeHolder": "EPSG:XXXX or SRID..."}, "resampleAlg": {"description": "Resampling strategy.", "dataType": "int", "default": 1, "options": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12], "labels": ["NearestNeighbour", "Bilinear", "Cubic", "CubicSpline", "Lanczos", "Average", "Mode", "Max", "Min", "Med", "Q1", "Q3"]}}}, "RasterCalc": {"name": "RasterCalc", "type": "filter", "alias": "Calc", "category": "Raster", "description": "Performs raster calc algebraic operations to input Rasters.", "params": {"bands": {"description": "List of Band names defined in Expression, or a string separated by commas.", "dataType": "string", "default": "", "placeHolder": "B04,B03,B02,B08"}, "expression": {"description": "Raster calculator expression with numpy syntax, e.g. (B08\u2013B04)/(B08+B04).", "dataType": "calc", "default": "", "placeHolder": "(B08 - B04) / (B08 + B04)"}, "noData": {"description": "NoData value of output Dataset.", "dataType": "float", "default": -9999.0}}}, "EOProductCatalog": {"name": "EOProductCatalog", "type": "filter", "alias": "Catalog", "category": "EO STAC Imagery", "description": "Extracts Metadata from EO/STAC Collections via spatial & alphanumeric filters.", "params": {"driver": {"description": "Driver class name that implements EO Providers.", "dataType": "string", "default": "STAC", "options": ["STAC", "EODAG"], "labels": ["STAC", "EODAG"]}, "provider": {"description": "Provider name or API Endpoint that provides info about EO Products.", "dataType": "string", "default": "https://earth-search.aws.element84.com/v0/search"}, "product": {"description": "EO Product type or Collection from which to fetch data.", "dataType": "string", "default": "sentinel-s2-l2a-cogs"}, "startDate": {"description": "Start date of EO Products to fetch (Optional). \"$TODAY()\" is supported.", "dataType": "date"}, "endDate": {"description": "End date of EO Products to fetch (Optional). 
\"$TODAY()\" is supported.", "dataType": "date"}, "closestToDate": {"description": "Select only those EO Products which Date is the closest to the specified (Optional).", "dataType": "date"}, "windowDate": {"description": "Days around \"closestToDate\" when \"startDate\" and \"endDate\" are not specified.", "dataType": "int", "default": 5}, "filter": {"description": "Attribute filter string of EO Products to fetch (Optional).", "dataType": "filter"}, "preserveInputCrs": {"description": "Preserve input CRS, otherwise Geometries are transformed to \"EPSG:4326\".", "dataType": "bool", "default": true}}}, "FeatureWriter": {"name": "FeatureWriter", "type": "writer", "alias": "FeatureWriter", "category": "Output", "description": "Writes Features with Geometries to a Geospatial DataStore using OGR providers.", "params": {"connectionString": {"description": "Connection string of the FeatureStore ('.geojson', '.gpkg', '.shp.zip' and '.csv' are supported).", "dataType": "string", "default": "output.gpkg", "extensions": [".geojson", ".gpkg", ".shp.zip", ".csv"]}, "formatOptions": {"description": "OGR format options of output Feature Layer (Optional).", "dataType": "string"}}}, "ConnectionJoin": {"name": "ConnectionJoin", "type": "filter", "alias": "ConnectionJoin", "category": "Graph", "description": "Joins the streams of data of several input Modules in one unique output.", "params": {"stages": {"description": "Collection of Modules (Using the \"StageId\" attribute) to merge.", "dataType": "array"}}}}; \ No newline at end of file