diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy index aec6d9e74b..d8b48b2abc 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy @@ -25,10 +25,10 @@ import org.h2gis.utilities.JDBCUtilities import org.h2gis.utilities.URIUtilities import org.locationtech.jts.geom.Geometry import org.orbisgis.data.H2GIS -import org.orbisgis.data.api.dataset.ISpatialTable import org.orbisgis.data.api.dataset.ITable import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators + import org.orbisgis.geoclimate.worldpoptools.WorldPopTools import java.sql.Connection @@ -47,32 +47,28 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param input * @return */ - Map execute(def input) { + Map execute(def input) throws Exception { Map parameters = null if (input) { if (input instanceof String) { //Check if it's a path to a file def configFile = new File(input) if (!configFile.isFile()) { - error "The configuration file doesn't exist" - return + throw new Exception("The configuration file doesn't exist") } if (!FileUtilities.isExtensionWellFormated(configFile, "json")) { - error "The configuration file must be a json file" - return + throw new Exception("The configuration file must be a json file") } parameters = Geoindicators.WorkflowUtilities.readJSON(configFile) } else if (input instanceof Map) { parameters = input } } else { - error "The input parameters cannot be null or empty.\n Please set a path to a configuration file or " + - "a map with all required parameters" - return + throw new Exception("The input parameters cannot be null or empty.\n Please set a path to a configuration file or " + + "a map with all required parameters") } if (!parameters) { - error "Wrong input parameters" - return + throw new Exception("Wrong input parameters") } //Store the zone identifier and the names of the tables def outputTableNamesResult = [:] @@ -95,13 +91,11 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { if (!tmp_folder_db.exists()) { if (!tmp_folder_db.mkdir()) { h2gis_folder = null - error "You don't have permission to write in the folder $h2gis_folder \n" + - "Please check the folder." - return + throw new Exception("You don't have permission to write in the folder $h2gis_folder \n" + + "Please check the folder.") } } else if (!tmp_folder_db.isDirectory()) { - error "Invalid output folder $h2gis_folder." - return + throw new Exception("Invalid output folder $h2gis_folder.") } databaseFolder = h2gis_folder } @@ -129,37 +123,35 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { } if (!inputParameters) { - error "Cannot find any input parameters." - return + throw new Exception("Cannot find any input parameters.") } def inputDataBase = inputParameters.database def inputFolder = inputParameters.folder def locations = inputParameters.locations as Set if (!locations) { - error "Cannot find any locations parameter." - return + throw new Exception("Cannot find any locations parameter.") } def inputSRID = inputParameters.srid if (inputSRID && inputSRID <= 0) { - error "The input srid must be greater than 0." 
- return + throw new Exception("The input srid must be greater than 0.") } if (inputFolder && inputDataBase) { - error "Please set only one input data provider" - return + throw new Exception("Please set only one input data provider") } def inputWorkflowTableNames = getInputTables() if (!inputWorkflowTableNames) { - error "The input table names cannot be null or empty." - return + throw new Exception("The input table names cannot be null or empty.") } def outputWorkflowTableNames = getOutputTables() //Get processing parameters def processing_parameters = extractProcessingParameters(parameters.parameters) + if (!processing_parameters) { + throw new Exception("Invalid processing parameters") + } //Get the out put parameters def outputDatasource @@ -175,13 +167,11 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { if (!deleteOutputData) { deleteOutputData = true } else if (!deleteOutputData in Boolean) { - error "The delete parameter must be a boolean value" - return + throw new Exception("The delete parameter must be a boolean value") } outputSRID = outputParameters.get("srid") if (outputSRID && outputSRID <= 0) { - error "The output srid must be greater than 0" - return + throw new Exception("The output srid must be greater than 0") } if (outputFolder) { def outputFiles = Geoindicators.WorkflowUtilities.buildOutputFolderParameters(outputFolder, outputWorkflowTableNames) @@ -191,13 +181,11 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { if (!file_outputFolder.exists()) { if (file_outputFolder.mkdir()) { file_outputFolder = null - error "You don't have permission to write in the folder $outputFolder \n" + - "Please check the folder." - return + throw new Exception("You don't have permission to write in the folder $outputFolder \n" + + "Please check the folder.") } } else if (!file_outputFolder.isDirectory()) { - error "Invalid output folder $file_outputFolder." 
-                        return
+                        throw new Exception("Invalid output folder $file_outputFolder.")
                    }
                }
                if (outputDataBase) {
@@ -213,38 +201,35 @@
         */
        if (inputFolder) {
            def h2gis_datasource = H2GIS.open(h2gis_properties)
-            if (!h2gis_datasource) {
-                error "Cannot load the local H2GIS database to run Geoclimate"
-                return
-            }
            def datafromFolder = linkDataFromFolder(inputFolder, inputWorkflowTableNames, h2gis_datasource, inputSRID)
-            if (!datafromFolder) {
-                return
-            }
            inputSRID = datafromFolder.inputSrid
            def sourceSrid = datafromFolder.sourceSrid
            LinkedHashMap tablesLinked = datafromFolder.tableNames
            if (tablesLinked) {
                locations.each { location ->
                    //We must extract the data from the shapefiles for each locations
-                    if (filterLinkedShapeFiles(location, processing_parameters.distance, tablesLinked, sourceSrid, inputSRID, h2gis_datasource)) {
-                        def formatedZone = checkAndFormatLocations(location)
-                        if (formatedZone) {
-                            def bdtopo_results = bdtopo_processing(formatedZone, h2gis_datasource, processing_parameters,
-                                    createMainFolder(file_outputFolder, formatedZone), outputFileTables, outputDatasource,
-                                    outputTables, outputSRID, inputSRID)
-                            if (bdtopo_results) {
-                                outputTableNamesResult.putAll(bdtopo_results)
+                    try {
+                        if (filterLinkedShapeFiles(location, processing_parameters.distance, tablesLinked, sourceSrid, inputSRID, h2gis_datasource)) {
+                            def formatedZone = checkAndFormatLocations(location)
+                            if (formatedZone) {
+                                def bdtopo_results = bdtopo_processing(formatedZone, h2gis_datasource, processing_parameters,
+                                        createMainFolder(file_outputFolder, formatedZone), outputFileTables, outputDatasource,
+                                        outputTables, outputSRID, inputSRID)
+                                if (bdtopo_results) {
+                                    outputTableNamesResult.putAll(bdtopo_results)
+                                }
                            }
                        }
+                    } catch (Exception e) {
+                        saveLogZoneTable(h2gis_datasource, databaseFolder, location, e.getLocalizedMessage())
+                        //swallow the exception and process the other zones
+                        warn("The zone $location has not been processed. Please check the log table to get more information.")
                    }
                }
                deleteH2GISDb(delete_h2gis, h2gis_datasource.getConnection(), databaseFolder, databaseName)
-                return outputTableNamesResult
            } else {
-                error "Cannot find any data to process from the folder $inputFolder"
-                return
+                throw new Exception("Cannot find any data to process from the folder $inputFolder".toString())
            }
        }
@@ -264,34 +249,29 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                }
            }
            if (inputTables_tmp.size() == 0) {
-                error "Please set a valid list of input tables as : \n" +
-                        "${inputWorkflowTableNames.collect { name -> [name: name] }}"
-                return
+                throw new Exception("Please set a valid list of input tables as : \n" +
+                        "${inputWorkflowTableNames.collect { name -> [name: name] }}".toString())
            }
            inputTables = inputTables_tmp
        }
-        def h2gis_datasource = H2GIS.open(h2gis_properties)
-        if (!h2gis_datasource) {
-            error "Cannot load the local H2GIS database to run Geoclimate"
-            return
-        }
        def nbzones = 0
        for (location in locations) {
            nbzones++
-            inputSRID = loadDataFromPostGIS(inputDataBase.subMap(["user", "password", "url", "databaseName"]), location, processing_parameters.distance, inputTables, inputSRID, h2gis_datasource)
-            if (inputSRID) {
+            try {
+                inputSRID = loadDataFromPostGIS(inputDataBase.subMap(["user", "password", "url", "databaseName"]), location, processing_parameters.distance, inputTables, inputSRID, h2gis_datasource)
+
                def formatedZone = checkAndFormatLocations(location)
                if (formatedZone) {
                    def bdtopo_results = bdtopo_processing(formatedZone, h2gis_datasource, processing_parameters,
                            createMainFolder(file_outputFolder, formatedZone), outputFileTables, outputDatasource,
                            outputTables, outputSRID, inputSRID)
                    if (bdtopo_results) {
                        outputTableNamesResult.putAll(bdtopo_results)
-                    } else {
-                        error "Cannot execute the geoclimate processing chain on $location\n"
                    }
                }
-            } else {
-                error "Cannot load the data for the location $location"
+            } catch (Exception e) {
+                saveLogZoneTable(h2gis_datasource, databaseFolder, location, e.getLocalizedMessage())
+                //swallow the exception and process the other zones
+                warn("The zone $location has not been processed. Please check the log table to get more information.")
+            }
            info "${nbzones} location(s) on ${locations.size()}"
        }
@@ -300,7 +280,37 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                return outputTableNamesResult
            }
        }
-        return null
+    }
+
+
+    /**
+     * Method to log a message for a zone in a dedicated table
+     * @param dataSource
+     * @param databaseFolder
+     * @param location
+     * @param message
+     * @throws Exception
+     */
+    void saveLogZoneTable(JdbcDataSource dataSource, String databaseFolder, String location, String message) throws Exception {
+        def logTableZones = postfix("log_zones")
+        //Create the table used to log the processed zones
+        dataSource.execute("""DROP TABLE IF EXISTS $logTableZones;
+            CREATE TABLE $logTableZones (the_geom GEOMETRY(GEOMETRY, 4326),
+            location VARCHAR, info VARCHAR, version VARCHAR, build_number VARCHAR);""")
+        //Find the geometry of the location
+        Geometry geom = dataSource.firstRow("SELECT st_union(st_accum(THE_GEOM)) as the_geom FROM commune").the_geom
+        if (geom == null) {
+            dataSource.execute("""INSERT INTO $logTableZones
+                VALUES(null,'$location', '$message',
+                '${Geoindicators.version()}',
+                '${Geoindicators.buildNumber()}')""")
+        } else {
+            dataSource.execute("""INSERT INTO $logTableZones
+                VALUES(st_geomfromtext('${geom}',${geom.getSRID()}), '$location', '$message',
+                '${Geoindicators.version()}',
+                '${Geoindicators.buildNumber()}')""")
+        }
+        dataSource.save(logTableZones, databaseFolder + File.separator + "log_zones_" + location + ".fgb", true)
    }

    /**
@@ -315,7 +325,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
     * @return
     */
    abstract def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTables,
-                                        int sourceSRID, int inputSRID, H2GIS h2gis_datasource)
+                                        int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception

    /**
     * Load the required tables stored in a database
     *
     * @param inputDatasource database where the tables are
     * @return true is succeed, false otherwise
     */
-    abstract Integer loadDataFromPostGIS(def input_database_properties, def code, def distance, def inputTables, def inputSRID, H2GIS h2gis_datasource);
+    abstract Integer loadDataFromPostGIS(def input_database_properties, def code, def distance,
+                                         def inputTables, def inputSRID, H2GIS h2gis_datasource) throws Exception;


    /**
@@ -336,8 +347,11 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
     * @return a list of id_zones
     */
    def linkDataFromFolder(def inputFolder, def inputWorkflowTableNames,
-                           H2GIS h2gis_datasource, def inputSRID) {
+                           H2GIS h2gis_datasource, def inputSRID) throws Exception {
        def folder = new File(inputFolder)
+        if(!folder.exists()){
+            throw new Exception("The input folder doesn't exist")
+        }
        if (folder.isDirectory()) {
            def geoFiles = []
            folder.eachFileRecurse groovy.io.FileType.FILES, { file ->
@@ -349,10 +363,13 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
            def commune_file = geoFiles.find { it.toLowerCase().endsWith("commune.shp") ? it : null }
            if (commune_file) {
                //Load commune and check if there is some id_zones inside
-                h2gis_datasource.link(commune_file, "COMMUNE_TMP", true)
-                ISpatialTable sp_commune = h2gis_datasource.getSpatialTable("COMMUNE_TMP")
+                try {
+                    h2gis_datasource.link(commune_file, "COMMUNE_TMP", true)
+                } catch (Exception e) {
+                    throw new Exception("Cannot read the commune.shp file", e)
+                }
                geoFiles.remove(commune_file)
-                int srid = sp_commune.srid
+                int srid = h2gis_datasource.getSrid("COMMUNE_TMP")
                def sourceSrid = srid
                if (srid == 0 && inputSRID) {
                    srid = inputSRID
@@ -366,28 +383,29 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                def tableNames = [:]
                tableNames.put("commune", "COMMUNE_TMP")
-
                geoFiles.eachWithIndex { geoFile, index ->
                    debug "linking file $geoFile $index on $numberFiles"
                    //We must link only the allowed tables
                    def fileName = URIUtilities.fileFromString(geoFile).getName()
                    def name = fileName.substring(0, fileName.lastIndexOf(".")).toLowerCase()
                    if (inputWorkflowTableNames.contains(name)) {
-                        h2gis_datasource.link(geoFile, "${name}_tmp", true)
-                        //h2gis_datasource.execute("CREATE SPATIAL INDEX ON ${name}_tmp(THE_GEOM)".toString())
-                        tableNames.put(name, "${name}_tmp")
+                        try {
+                            h2gis_datasource.link(geoFile, "${name}_tmp", true)
+                            //h2gis_datasource.execute("CREATE SPATIAL INDEX ON ${name}_tmp(THE_GEOM)".toString())
+                            tableNames.put(name, "${name}_tmp")
+                        } catch (Exception e) {
+                            throw new Exception("Cannot read the shp file " + geoFile, e)
+                        }
                    }
                }
                h2gis_datasource.execute("DROP TABLE IF EXISTS COMMUNE_TMP_LINK;".toString())
                return ["sourceSrid": sourceSrid, "inputSrid": srid, "tableNames": tableNames]
            } else {
-                error "The input folder must contains a file named commune"
-                return
+                throw new Exception("The input folder must contain a file named commune")
            }
        } else {
-            error "The input folder must be a directory"
-            return
+            throw new Exception("The input folder must be a directory")
        }
    }
@@ -398,14 +416,14 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
     * @param h2GIS
     * @return
     */
-    def deleteH2GISDb(def delete, Connection connection, def dbFolder, def dbName) {
+    def deleteH2GISDb(def delete, Connection connection, def dbFolder, def dbName) throws Exception {
        if (delete) {
            if (connection) {
                connection.close()
                DeleteDbFiles.execute(dbFolder, dbName, true)
                debug "The local H2GIS database : ${dbName} has been deleted"
            } else {
-                error "Cannot delete the local H2GIS database : ${dbName} "
+                throw new Exception("Cannot delete the local H2GIS database : ${dbName} ".toString())
            }
        }
    }
@@ -440,9 +458,9 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
     * @param processing_parameters the file parameters
     * @return a filled map of parameters
     */
-    def extractProcessingParameters(def processing_parameters) {
-        def defaultParameters = [distance : 500f, prefixName: "",
-                                 hLevMin : 3]
+    def extractProcessingParameters(def processing_parameters) throws Exception {
+        def defaultParameters = [distance: 500f, prefixName: "",
+                                 hLevMin : 3]
        def rsu_indicators_default = [indicatorUse       : [],
                                      svfSimplified      : true,
                                      surface_vegetation : 10000f,
@@ -482,12 +500,10 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                if (allowedOutputRSUIndicators) {
                    rsu_indicators_default.indicatorUse = indicatorUseP
                } else {
-                    error "Please set a valid list of RSU indicator names in ${allowedOutputRSUIndicators}"
-                    return
+                    throw new Exception("Please set a valid list of RSU indicator names in ${allowedOutputRSUIndicators}".toString())
                }
            } else {
-                error "The list of RSU indicator names cannot be null or empty"
-                return
+                throw new Exception("The list of RSU indicator names cannot be null or empty")
            }
            def snappingToleranceP = rsu_indicators.snappingTolerance
            if (snappingToleranceP && snappingToleranceP in Number) {
@@ -515,8 +531,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
            if (mapOfWeightsP && mapOfWeightsP in Map) {
                def defaultmapOfWeights = rsu_indicators_default.mapOfWeights
                if ((defaultmapOfWeights + mapOfWeightsP).size() != defaultmapOfWeights.size()) {
-                    error "The number of mapOfWeights parameters must contain exactly the parameters ${defaultmapOfWeights.keySet().join(",")}"
-                    return
+                    throw new Exception("The mapOfWeights parameter must contain exactly the parameters ${defaultmapOfWeights.keySet().join(",")}".toString())
                } else {
                    rsu_indicators_default.mapOfWeights = mapOfWeightsP
                }
@@ -537,15 +552,14 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                    return
                }
                if (!list_indicators) {
-                    error "The list of indicator names cannot be null or empty"
-                    return
+                    throw new Exception("The list of indicator names cannot be null or empty")
                }
                def allowed_grid_indicators = ["BUILDING_FRACTION", "BUILDING_HEIGHT", "BUILDING_POP",
                                               "BUILDING_TYPE_FRACTION", "WATER_FRACTION", "VEGETATION_FRACTION",
                                               "ROAD_FRACTION", "IMPERVIOUS_FRACTION", "UTRF_AREA_FRACTION",
                                               "UTRF_FLOOR_AREA_FRACTION", "LCZ_FRACTION", "LCZ_PRIMARY",
                                               "FREE_EXTERNAL_FACADE_DENSITY", "BUILDING_HEIGHT_WEIGHTED",
                                               "BUILDING_SURFACE_DENSITY", "BUILDING_HEIGHT_DIST", "FRONTAL_AREA_INDEX",
                                               "SEA_LAND_FRACTION", "ASPECT_RATIO",
-                                               "SVF", "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS","SPRAWL_AREAS",
-                                               "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"]
+                                               "SVF", "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "SPRAWL_AREAS",
+                                               "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"]
                def allowedOutputIndicators = allowed_grid_indicators.intersect(list_indicators*.toUpperCase())
                if (allowedOutputIndicators) {
                    //Update the RSU indicators list according the grid indicators
@@ -575,7 +589,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                    }
                    def lcz_lod = grid_indicators.lcz_lod
                    if (lcz_lod && lcz_lod in Integer) {
-                        if (lcz_lod < 0 && lcz_lod >10) {
+                        if (lcz_lod < 0 || lcz_lod > 10) {
                            error "The number of level of details to aggregate the LCZ must be between 0 and 10"
                            return
                        }
@@ -583,8 +597,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
                    }
                    defaultParameters.put("grid_indicators", grid_indicators_tmp)
                } else {
-                    error "Please set a valid list of indicator names in ${allowed_grid_indicators}"
-                    return
+                    throw new Exception("Please set a valid list of indicator names in ${allowed_grid_indicators}".toString())
                }
            }
        }
@@ -646,15 +659,14 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils {
     * @param id_zones
     * @return
     */
-    def checkAndFormatLocations(def locations) {
+    def checkAndFormatLocations(def locations) throws Exception {
        if (locations in Collection) {
            return locations.join("_")
        } else if (locations instanceof String) {
            return locations.trim()
        } else {
-            error "Invalid location input. \n" +
-                    "The location input must be a string value or an array of 4 coordinates to define a bbox "
-            return null
+            throw new Exception("Invalid location input. 
\n" + + "The location input must be a string value or an array of 4 coordinates to define a bbox ") } } @@ -674,7 +686,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @return */ def bdtopo_processing(def location, H2GIS h2gis_datasource, def processing_parameters, def outputFolder, def outputFiles, - def output_datasource, def outputTableNames, def outputSRID, def inputSRID, def deleteOutputData = true) { + def output_datasource, def outputTableNames, def outputSRID, def inputSRID, def deleteOutputData = true) + throws Exception { //Add the GIS layers to the list of results def outputTableNamesResult = [:] def grid_indicators_params = processing_parameters.grid_indicators @@ -684,7 +697,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { } def tmp_results = [:] - def rows = h2gis_datasource.rows("SELECT ST_CollectionExtract(THE_GEOM, 3) as the_geom,code_insee FROM COMMUNE") + def rows = h2gis_datasource.rows("SELECT ST_CollectionExtract(THE_GEOM, 3) as the_geom,code_insee FROM COMMUNE") //We process each zones because the input zone can overlap several communes rows.each { row -> Geometry geom = row.the_geom @@ -699,7 +712,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { DROP TABLE IF EXISTS $subCommuneTableName; CREATE TABLE $subCommuneTableName(the_geom GEOMETRY(POLYGON,$inputSRID), CODE_INSEE VARCHAR) AS SELECT ST_GEOMFROMTEXT('${geom.getGeometryN(0)}', $inputSRID) as the_geom , '${code_insee}' AS CODE_INSEE - """.toString()) + """) def results = bdTopoProcessingSingleArea(h2gis_datasource, code_insee, subCommuneTableName, inputSRID, processing_parameters) if (results) { tmp_results.put(code_insee, results) @@ -721,7 +734,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { DROP TABLE IF EXISTS $subCommuneTableName; CREATE TABLE $subCommuneTableName(the_geom GEOMETRY(POLYGON,$inputSRID), CODE_INSEE VARCHAR) AS SELECT ST_GEOMFROMTEXT('${subGeom}', $inputSRID) as the_geom , '${code_insee_plus_indice}' AS CODE_INSEE - """.toString()) + """) def results = bdTopoProcessingSingleArea(h2gis_datasource, code_insee, subCommuneTableName, inputSRID, processing_parameters) if (results) { tmp_results.put(code_insee_plus_indice, results) @@ -842,12 +855,12 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { info("Start computing grid_indicators") def x_size = grid_indicators_params.x_size def y_size = grid_indicators_params.y_size - Geometry geomEnv = h2gis_datasource.getSpatialTable(results.zone).getExtent() + Geometry geomEnv = h2gis_datasource.getExtent(results.zone) String gridTableName = Geoindicators.WorkflowGeoIndicators.createGrid(h2gis_datasource, geomEnv, x_size, y_size, srid, grid_indicators_params.rowCol) if (gridTableName) { String rasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(h2gis_datasource, gridTableName, - grid_indicators_params.indicators,grid_indicators_params.lcz_lod, + grid_indicators_params.indicators, grid_indicators_params.lcz_lod, results.building, results.road, results.vegetation, results.water, results.impervious, results.rsu_lcz, results.rsu_utrf_area, "", "", @@ -855,9 +868,9 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { if (rasterizedIndicators) { h2gis_datasource.dropTable(gridTableName) results.put("grid_indicators", rasterizedIndicators) - Map sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource,rasterizedIndicators, "id_grid", grid_indicators_params.indicators, - Math.max(x_size,y_size).floatValue()) - 
if(sprawl_indic){ + Map sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource, rasterizedIndicators, "id_grid", grid_indicators_params.indicators, + Math.max(x_size, y_size).floatValue()) + if (sprawl_indic) { results.put("sprawl_areas", sprawl_indic.sprawl_areas) results.put("grid_indicators", sprawl_indic.grid_indicators) } @@ -883,7 +896,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param deleteOutputData * @return */ - def saveResults(def h2gis_datasource, def id_zone, def results, def srid, def outputFolder, def outputFiles, def output_datasource, def outputTableNames, def outputSRID, def deleteOutputData, def outputGrid) { + def saveResults(def h2gis_datasource, def id_zone, def results, def srid, def outputFolder, def outputFiles, + def output_datasource, def outputTableNames, def outputSRID, def deleteOutputData, def outputGrid) throws Exception { //Check if the user decides to reproject the output data def reproject = false if (outputSRID) { @@ -1007,7 +1021,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { def geomEnv if (noise_indicators) { if (noise_indicators.ground_acoustic) { - geomEnv = h2gis_datasource.getSpatialTable(zone).getExtent() + geomEnv = h2gis_datasource.getExtent(zone) def gridP = Geoindicators.SpatialUnits.createGrid(h2gis_datasource, geomEnv, 200, 200) if (gridP) { String ground_acoustic = Geoindicators.NoiseIndicators.groundAcousticAbsorption(h2gis_datasource, gridP, "id_grid", @@ -1039,7 +1053,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param deleteOutputData true to delete the file if exists * @return */ - def saveOutputFiles(def h2gis_datasource, def results, def outputFiles, def outputFolder, def outputSRID, def reproject, def deleteOutputData, def outputGrid) { + def saveOutputFiles(def h2gis_datasource, def results, def outputFiles, def outputFolder, def outputSRID, + def reproject, def deleteOutputData, def outputGrid) throws Exception { outputFiles.each { if (it == "grid_indicators") { if (outputGrid == "fgb") { @@ -1063,7 +1078,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param id_zone id of the zone * @return */ - def saveTablesInDatabase(def output_datasource, def h2gis_datasource, def outputTableNames, def h2gis_tables, def id_zone, def inputSRID, def outputSRID, def reproject) { + def saveTablesInDatabase(def output_datasource, def h2gis_datasource, def outputTableNames, def h2gis_tables, + def id_zone, def inputSRID, def outputSRID, def reproject) throws Exception { Connection con = output_datasource.getConnection() con.setAutoCommit(true) @@ -1149,7 +1165,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param outputSRID srid code used to reproject the output table * @return */ - def abstractModelTableBatchExportTable(def output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, def reproject) { + def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, def h2gis_datasource, + h2gis_table_to_save, def filter, def inputSRID, def outputSRID, def reproject) throws Exception { if (output_table) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { if (output_datasource.hasTable(output_table)) { @@ -1158,7 +1175,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { int BATCH_MAX_SIZE = 100 ITable inputRes = prepareTableOutput(h2gis_table_to_save, filter, inputSRID, h2gis_datasource, 
output_table, outputSRID, output_datasource) if (inputRes) { - def outputColumns = output_datasource.getTable(output_table).getColumnsTypes() + def outputColumns = output_datasource.getColumnNamesTypes(output_table) def outputconnection = output_datasource.getConnection() try { def inputColumns = inputRes.getColumnsTypes() @@ -1271,7 +1288,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param outputSRID srid code used to reproject the output table * @return */ - def indicatorTableBatchExportTable(def output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, def reproject) { + def indicatorTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, + def filter, def inputSRID, def outputSRID, def reproject) throws Exception { if (output_table) { if (h2gis_table_to_save) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { @@ -1281,7 +1299,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { int BATCH_MAX_SIZE = 100 ITable inputRes = prepareTableOutput(h2gis_table_to_save, filter, inputSRID, h2gis_datasource, output_table, outputSRID, output_datasource) if (inputRes) { - def outputColumns = output_datasource.getTable(output_table).getColumnsTypes() + def outputColumns = output_datasource.getColumnNamesTypes(output_table) outputColumns.remove("gid") def outputconnection = output_datasource.getConnection() try { @@ -1368,10 +1386,10 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { } } if (tmpTable) { - output_datasource.execute("ALTER TABLE $output_table ADD COLUMN IF NOT EXISTS id_zone VARCHAR".toString()) + output_datasource.execute("ALTER TABLE $output_table ADD COLUMN IF NOT EXISTS id_zone VARCHAR") output_datasource.execute("UPDATE $output_table SET id_zone= '$id_zone'".toString()) //Add GID column - output_datasource.execute """ALTER TABLE $output_table ADD COLUMN IF NOT EXISTS gid serial;""".toString() + output_datasource.execute("""ALTER TABLE $output_table ADD COLUMN IF NOT EXISTS gid serial;""") info "The table $h2gis_table_to_save has been exported into the table $output_table" } else { @@ -1393,12 +1411,12 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param output_datasource * @return */ - def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, def h2gis_datasource, def output_table, def outputSRID, def output_datasource) { + def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, def h2gis_datasource, def output_table, def outputSRID, def output_datasource) throws Exception { def targetTableSrid = output_datasource.getSpatialTable(output_table).srid if (filter) { if (outputSRID == 0) { if (inputSRID == targetTableSrid) { - inputRes = h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() + return h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() } else { if (targetTableSrid == 0 && inputSRID == 0) { return h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() @@ -1468,5 +1486,5 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param hLevMin * @return */ - abstract Map formatLayers(JdbcDataSource datasource, Map layers, float distance = 1000, float hLevMin = 3) + abstract Map formatLayers(JdbcDataSource datasource, Map layers, float distance = 1000, float hLevMin = 3) throws Exception } diff --git 
a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy
index 24f978b19a..b2d110ebd9 100644
--- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy
+++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy
@@ -143,7 +143,7 @@ abstract class BDTopo extends BDTopoUtils {
     * Meteorological Society 93, no. 12 (2012): 1879-1900.
     *
     */
-    static Map workflow(def input, int version) {
+    static Map workflow(def input, int version) throws Exception {
        if (version == 2) {
            BDTopoV2Workflow bdtopo_v2_workflow = new BDTopoV2Workflow()
            return bdtopo_v2_workflow.execute(input)
@@ -151,10 +151,8 @@ abstract class BDTopo extends BDTopoUtils {
            BDTopoV3Workflow bdTopoV3Workflow = new BDTopoV3Workflow()
            return bdTopoV3Workflow.execute(input)
        } else {
-            LoggerUtils.error "Unsupported version. Set number 2 or 3"
-            return null
+            throw new Exception("Unsupported version. Set version 2 or 3")
        }
-        return null
    }

    static Map v2(def input) {
diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy
index 5a5efa9cc6..71eab77603 100644
--- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy
+++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy
@@ -38,11 +38,11 @@ import java.sql.SQLException

 @Override
-Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables, Object inputSRID, H2GIS h2gis_datasource) {
+Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables,
+                            Object inputSRID, H2GIS h2gis_datasource) throws Exception {
    def commune_location = inputTables.commune
    if (!commune_location) {
-        error "The commune table must be specified to run Geoclimate"
-        return
+        throw new Exception("The commune table must be specified to run Geoclimate")
    }
    PostGISDBFactory dataSourceFactory = new PostGISDBFactory()
    Connection sourceConnection = null
@@ -52,32 +52,27 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec
        DataSource ds = dataSourceFactory.createDataSource(props)
        sourceConnection = ds.getConnection()
    } catch (SQLException e) {
-        error("Cannot connect to the database to import the data ")
+        throw new SQLException("Cannot connect to the database to import the data ", e)
    }
-
    if (sourceConnection == null) {
-        error("Cannot connect to the database to import the data ")
-        return
+        throw new Exception("Cannot connect to the database to import the data ")
    }

    //Check if the commune table exists
    if (!JDBCUtilities.tableExists(sourceConnection, commune_location)) {
-        error("The commune table doesn't exist")
-        return
+        throw new Exception("The commune table doesn't exist")
    }

    //Find the SRID of the commune table
    def commune_srid = GeometryTableUtilities.getSRID(sourceConnection, commune_location)
    if (commune_srid <= 0) {
-        error("The commune table doesn't have any SRID")
-        return
+        throw new Exception("The commune table doesn't have any SRID")
    }
    if (commune_srid == 0 && inputSRID) {
        commune_srid = inputSRID
    } else if (commune_srid <= 0) {
-        warn "Cannot find a SRID value for the layer commune.\n" +
-                "Please set a valid OGC prj or use the parameter srid to force it."
- return null + throw new Exception( "Cannot find a SRID value for the layer commune.\n" + + "Please set a valid OGC prj or use the parameter srid to force it.") } String outputTableName = "COMMUNE" @@ -87,8 +82,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec if (code in Collection) { if(code.size()==3){ if(code[2]<100){ - error("The distance to create a bbox from a point must be greater than 100 meters") - return + throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } code = BDTopoUtils.bbox(code[0], code[1],code[2]) } @@ -104,7 +98,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 1000) } - def count = h2gis_datasource."$outputTableName".rowCount + def count = h2gis_datasource.getRowCount(outputTableName) if (count > 0) { //Compute the envelope of the extracted area to extract the thematic tables def geomToExtract = h2gis_datasource.firstRow("SELECT ST_EXPAND(ST_UNION(ST_ACCUM(the_geom)), ${distance}) AS THE_GEOM FROM $outputTableName".toString()).THE_GEOM @@ -140,8 +134,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec debug "Loading in the H2GIS database $outputTableNameRoad" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableNameRoad, -1, 1000) } else { - error "The route table must be provided" - return + throw new Exception("The route table must be provided") } if (inputTables.troncon_voie_ferree) { @@ -215,14 +208,11 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 1000) } - sourceConnection.close() - return commune_srid } else { - error "Cannot find any commune with the insee code : $code" - return + throw new Exception("Cannot find any commune with the insee code : $code".toString()) } } @@ -243,21 +233,19 @@ int getVersion() { } @Override -Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) { +Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception{ if (!hLevMin) { hLevMin = 3 } if (!datasource) { - error "The database to store the BDTopo data doesn't exist" - return + throw new Exception("The database to store the BDTopo data doesn't exist") } info "Formating BDTopo GIS layers" //Prepare the existing bdtopo data in the local database def importPreprocess = BDTopo.InputDataLoading.loadV2(datasource, layers, distance) if (!importPreprocess) { - error "Cannot prepare the BDTopo data." 
- return + throw new Exception("Cannot prepare the BDTopo data.") } def zoneTable = importPreprocess.zone def urbanAreas = importPreprocess.urban_areas @@ -302,7 +290,7 @@ Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hL @Override def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTables, - int sourceSRID, int inputSRID, H2GIS h2gis_datasource) { + int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception{ def formatting_geom = "the_geom" if (sourceSRID == 0 && sourceSRID != inputSRID) { formatting_geom = "st_setsrid(the_geom, $inputSRID) as the_geom" @@ -317,8 +305,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl if (location in Collection) { if(location.size()==3){ if(location[2]<100){ - error("The distance to create a bbox from a point must be greater than 100 meters") - return + throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } location = BDTopoUtils.bbox(location[0], location[1],location[2]) } @@ -331,9 +318,9 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT $formatting_geom, CODE_INSEE FROM ${inputTables.commune} WHERE CODE_INSEE='$location' or lower(nom)='${location.toLowerCase()}'""".toString()) }else{ - return + throw new Exception("Invalid location data type. Please set a text value or a collection of coordinates to specify a bbox") } - def count = h2gis_datasource."$outputTableName".rowCount + def count = h2gis_datasource.getRowCount(outputTableName) if (count > 0) { //Compute the envelope of the extracted area to extract the thematic tables def geomToExtract = h2gis_datasource.firstRow("SELECT ST_EXPAND(ST_UNION(ST_ACCUM(the_geom)), ${distance}) AS THE_GEOM FROM $outputTableName".toString()).THE_GEOM @@ -370,8 +357,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl AND ST_INTERSECTS(the_geom, 'SRID=$sourceSRID;$geomToExtract'::GEOMETRY) AND NATURE NOT IN ('Bac auto', 'Bac piéton', 'Escalier')""".toString()) } else { - error "The route table must be provided" - return + throw new Exception("The route table must be provided") } if (inputTables.troncon_voie_ferree) { @@ -442,8 +428,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl return true } else { - error "Cannot find any commune with the insee code : $location" - return + throw new Exception("Cannot find any commune with the insee code : $location".toString()) } } diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy index 5a108c6da8..ca7e664327 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy @@ -39,7 +39,7 @@ import java.sql.SQLException @Override def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTables, - int sourceSRID, int inputSRID, H2GIS h2gis_datasource) { + int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception{ def formatting_geom = "the_geom" if (sourceSRID == 0 && sourceSRID != inputSRID) { formatting_geom = "st_setsrid(the_geom, $inputSRID) as the_geom" @@ -57,8 +57,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl 
if(communeColumns.contains("INSEE_COM")) { if(location.size()==3){ if(location[2]<100){ - error("The distance to create a bbox from a point must be greater than 100 meters") - return + throw new IllegalArgumentException("The distance to create a bbox from a point must be greater than 100 meters") } location = BDTopoUtils.bbox(location[0], location[1],location[2]) } @@ -68,8 +67,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl && ST_MakeEnvelope(${location[1]},${location[0]},${location[3]},${location[2]}, $sourceSRID) """.toString()) } else { - error "Cannot find a column insee_com or code_insee to filter the commune" - return + throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } } else if (location instanceof String) { debug "Loading in the H2GIS database $outputTableName" @@ -79,16 +77,14 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl INSEE_COM AS CODE_INSEE FROM ${inputTables.commune} WHERE INSEE_COM='$location' or lower(nom)='${location.toLowerCase()}'""".toString()) } else { - error "Cannot find a column insee_com or code_insee to filter the commune" - return + throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } } else { - debug "Invalid location" - return false + throw new IllegalArgumentException("Invalid location") } - def count = h2gis_datasource."$outputTableName".rowCount + def count = h2gis_datasource.getRowCount(outputTableName) if (count > 0) { //Compute the envelope of the extracted area to extract the thematic tables def geomToExtract = h2gis_datasource.firstRow("SELECT ST_EXPAND(ST_UNION(ST_ACCUM(the_geom)), ${distance}) AS THE_GEOM FROM $outputTableName".toString()).THE_GEOM @@ -116,8 +112,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl AND NATURE NOT IN ('Bac ou liaison maritime', 'Escalier') """.toString()) } else { - error "The troncon_de_route table must be provided" - return + throw new Exception("The troncon_de_route table must be provided") } if (inputTables.troncon_de_voie_ferree) { @@ -214,17 +209,16 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl return true } else { - error "Cannot find any commune with the insee code : $location" - return + throw new Exception("Cannot find any commune with the insee code : $location") } } @Override -Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables, Object inputSRID, H2GIS h2gis_datasource) { +Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables, Object inputSRID, + H2GIS h2gis_datasource) throws Exception{ def commune_location = inputTables.commune if (!commune_location) { - error "The commune table must be specified to run Geoclimate" - return + throw new Exception("The commune table must be specified to run Geoclimate") } PostGISDBFactory dataSourceFactory = new PostGISDBFactory() Connection sourceConnection = null @@ -234,25 +228,22 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec DataSource ds = dataSourceFactory.createDataSource(props) sourceConnection = ds.getConnection() } catch (SQLException e) { - error("Cannot connect to the database to import the data ") + throw new Exception("Cannot connect to the database to import the data ", e) } if (sourceConnection == null) { - error("Cannot connect to the database to import the data ") - return + 
throw new Exception("Cannot connect to the database to import the data ") } //Check if the commune table exists if (!JDBCUtilities.tableExists(sourceConnection, commune_location)) { - error("The commune table doesn't exist") - return + throw new Exception("The commune table doesn't exist") } //Find the SRID of the commune table def commune_srid = GeometryTableUtilities.getSRID(sourceConnection, commune_location) if (commune_srid <= 0) { - error("The commune table doesn't have any SRID") - return + throw new Exception("The commune table doesn't have any SRID") } if (commune_srid == 0 && inputSRID) { commune_srid = inputSRID @@ -271,8 +262,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec if(communeColumns.contains("INSEE_COM")) { if(code.size()==3){ if(code[2]<100){ - error("The distance to create a bbox from a point must be greater than 100 meters") - return + throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } code = BDTopoUtils.bbox(code[0], code[1],code[2]) } @@ -284,8 +274,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 100) }else { - error "Cannot find a column insee_com or code_insee to filter the commune" - return + throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } } else if (code instanceof String) { def communeColumns = h2gis_datasource.getColumnNames(commune_location) @@ -295,11 +284,12 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 1000) } else { - error "Cannot find a column insee_com to filter the commune" - return + throw new Exception("Cannot find a column insee_com to filter the commune") } + }else{ + throw new Exception("Invalid location data type. 
Please set a text value or a collection of coordinates to specify a bbox") } - def count = h2gis_datasource."$outputTableName".rowCount + def count = h2gis_datasource.getRowCount(outputTableName) if (count > 0) { //Compute the envelope of the extracted area to extract the thematic tables def geomToExtract = h2gis_datasource.firstRow("SELECT ST_EXPAND(ST_UNION(ST_ACCUM(the_geom)), ${distance}) AS THE_GEOM FROM $outputTableName".toString()).THE_GEOM @@ -321,8 +311,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec debug "Loading in the H2GIS database $outputTableNameRoad" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableNameRoad, -1, 1000) } else { - error "The route table must be provided" - return + throw new Exception("The route table must be provided") } if (inputTables.troncon_de_voie_ferree) { @@ -418,8 +407,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec return commune_srid } else { - error "Cannot find any commune with the insee code : $code" - return + throw new Exception("Cannot find any commune with the insee code : $code".toString()) } return false @@ -439,13 +427,12 @@ int getVersion() { } @Override -Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) { +Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception{ if (!hLevMin) { hLevMin = 3 } if (!datasource) { - error "The database to store the BD Topo data doesn't exist" - return + throw new Exception("The database to store the BD Topo data doesn't exist") } info "Formating BDTopo GIS layers" @@ -453,8 +440,7 @@ Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hL def importPreprocess = BDTopo.InputDataLoading.loadV3(datasource, layers, distance) if (!importPreprocess) { - error "Cannot prepare the BDTopo data." 
- return + throw new Exception("Cannot prepare the BDTopo data.") } def zoneTable = importPreprocess.zone def urbanAreas = importPreprocess.urban_areas diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy index 2cc0d63e72..bb58795a78 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy @@ -38,7 +38,7 @@ import org.orbisgis.geoclimate.Geoindicators * @return The name of the final buildings table */ String formatBuildingLayer(JdbcDataSource datasource, String building, String zone = "", - String urban_areas = "", float h_lev_min = 3) { + String urban_areas = "", float h_lev_min = 3) throws Exception{ if (!h_lev_min) { h_lev_min = 3 } @@ -316,7 +316,7 @@ String formatBuildingLayer(JdbcDataSource datasource, String building, String zo def newBuildingWithType = postfix("NEW_BUILDING_TYPE") - datasource.execute """DROP TABLE IF EXISTS $newBuildingWithType; + datasource.execute("""DROP TABLE IF EXISTS $newBuildingWithType; CREATE TABLE $newBuildingWithType as SELECT a.THE_GEOM, a.ID_BUILD,a.ID_SOURCE, a.HEIGHT_WALL, @@ -325,11 +325,11 @@ String formatBuildingLayer(JdbcDataSource datasource, String building, String zo COALESCE(b.TYPE, a.TYPE) AS TYPE , COALESCE(b.MAIN_USE, a.MAIN_USE) AS MAIN_USE , a.ZINDEX, a.ROOF_SHAPE from $outputTableName - a LEFT JOIN $buildinType b on a.id_build=b.id_build""".toString() + a LEFT JOIN $buildinType b on a.id_build=b.id_build""") - datasource.execute """DROP TABLE IF EXISTS $buildinType, $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $buildinType, $outputTableName; ALTER TABLE $newBuildingWithType RENAME TO $outputTableName; - DROP TABLE IF EXISTS $newBuildingWithType;""".toString() + DROP TABLE IF EXISTS $newBuildingWithType;""") } } } @@ -373,14 +373,14 @@ static String[] getTypeAndUse(def main_type, def main_use, def types_and_uses) { * @param road The name of the raw roads table in the DB * @return The name of the final roads table */ -String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") { +String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") throws Exception{ debug('Formating road layer') def outputTableName = postfix "ROAD" - datasource """ + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id_road serial, ID_SOURCE VARCHAR, WIDTH FLOAT, TYPE VARCHAR, CROSSING VARCHAR(30), SURFACE VARCHAR, SIDEWALK VARCHAR, MAXSPEED INTEGER, DIRECTION INTEGER, ZINDEX INTEGER); - """.toString() + """) if (road) { def road_types_width = ["highway" : 8, @@ -578,7 +578,7 @@ String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") * @param water The name of the raw hydro table in the DB * @return The name of the final hydro table */ -String formatHydroLayer(JdbcDataSource datasource, String water, String zone = "") { +String formatHydroLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception{ debug('Hydro transformation starts') def outputTableName = postfix("HYDRO") datasource.execute """Drop table if exists $outputTableName; @@ -659,7 +659,7 @@ String formatHydroLayer(JdbcDataSource datasource, String water, String zone = " * @param rail The name of the raw rails table in the DB * @return The name of the final rails table */ -String 
formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "") {
+String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "") throws Exception {
    debug('Rails transformation starts')
    def outputTableName = postfix("RAILS")
    datasource.execute """ drop table if exists $outputTableName;
@@ -756,12 +756,12 @@ String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = ""
 * @param vegetation The name of the raw vegetation table in the DB
 * @return The name of the final vegetation table
 */
-String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "") {
+String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "") throws Exception {
    debug('Vegetation transformation starts')
    def outputTableName = postfix "VEGET"
-    datasource """
+    datasource.execute("""
        DROP TABLE IF EXISTS $outputTableName;
-        CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id_veget serial, ID_SOURCE VARCHAR, TYPE VARCHAR, HEIGHT_CLASS VARCHAR(4), ZINDEX INTEGER);""".toString()
+        CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id_veget serial, ID_SOURCE VARCHAR, TYPE VARCHAR, HEIGHT_CLASS VARCHAR(4), ZINDEX INTEGER);""")
    if (vegetation) {
        if (datasource.hasTable(vegetation)) {
            def queryMapper = "SELECT a.ID_SOURCE, a.TYPE, a.ZINDEX"
@@ -862,7 +862,7 @@ String formatVegetationLayer(JdbcDataSource datasource, String vegetation, Strin
 * @param impervious The name of the impervious table in the DB
 * @return outputTableName The name of the final impervious table
 */
-String formatImperviousLayer(H2GIS datasource, String impervious) {
+String formatImperviousLayer(H2GIS datasource, String impervious) throws Exception {
    debug('Impervious layer')
    def outputTableName = postfix("IMPERVIOUS")
    datasource.execute """ drop table if exists $outputTableName;
@@ -907,7 +907,7 @@ String formatImperviousLayer(H2GIS datasource, String impervious) {
            }
        }
    }
-    datasource.execute("DROP TABLE IF EXISTS $polygonizedTable".toString())
+    datasource.execute("DROP TABLE IF EXISTS $polygonizedTable")
    info "Impervious areas formatted"
    return outputTableName
}
diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy
index 0dfb087be4..21a54262e3 100644
--- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy
+++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy
@@ -63,15 +63,14 @@ def loadV2(
                "terrain_sport"          : "", "construction_surfacique": "",
                "surface_route"          : "", "surface_activite": "",
                "piste_aerodrome"        : "", "reservoir": "", "zone_vegetation": ""],
-        float distance = 1000) {
+        float distance = 1000) throws Exception {
    debug('Import the BDTopo data')

    def commune = layers.commune
    // If the Commune table is empty, then the process is stopped
    if (!commune) {
-        error 'The process has been stopped since the table Commnune is empty'
-        return
+        throw new IllegalArgumentException('The process has been stopped since the table Commune is empty')
    }

    debug('Import the BDTopo data')
@@ -94,11 +93,9 @@ def loadV2(
                srid = currentSrid
            } else {
                if (currentSrid == 0) {
-                    error "The process has been stopped since the table $name has a no SRID"
-                    return
+                    throw new IllegalArgumentException("The process has been stopped since the table $name has no SRID")
                } else if (currentSrid > 0 && srid != currentSrid) {
-                    error "The process has been stopped since the table $name has a different SRID from the others"
-                    return
+                    throw new IllegalArgumentException("The process has been stopped since the table $name has a different SRID from the others")
                }
            }
        }
@@ -111,8 +108,7 @@ def loadV2(

    // If the COMMUNE table does not exist or is empty, then the process is stopped
    if (!tablesExist.get("commune")) {
-        error 'The process has been stopped since the table zone does not exist or is empty'
-        return
+        throw new IllegalArgumentException('The process has been stopped since the table commune does not exist or is empty')
    }

    // If the following tables does not exists, we create corresponding empty tables
@@ -207,7 +203,6 @@ def loadV2(
            """.toString())

    //2- Preparation of the study area (zone_xx)
-
    def zoneTable = postfix("ZONE")
    datasource.execute("""
            DROP TABLE IF EXISTS $zoneTable;
@@ -365,18 +360,16 @@ Map loadV3(JdbcDataSource datasource,
                     "piste_d_aerodrome"      : "", "reservoir": "", "construction_surfacique": "",
                     "equipement_de_transport": "", "troncon_de_route": "", "troncon_de_voie_ferree": "",
                     "surface_hydrographique" : "", "zone_de_vegetation": "", "aerodrome": "", "limite_terre_mer": ""],
-          float distance = 1000) {
+          float distance = 1000) throws Exception {
    if (!layers) {
-        error "Please set a valid list of layers"
-        return
+        throw new IllegalArgumentException("Please set a valid list of layers")
    }
    debug('Import the BDTopo data')
    def commune = layers.commune
    // If the Commune table is empty, then the process is stopped
    if (!commune) {
-        error 'The process has been stopped since the table Commnune is empty'
-        return
+        throw new IllegalArgumentException('The process has been stopped since the table Commune is empty')
    }

    // -------------------------------------------------------------------------------
@@ -398,11 +391,9 @@ Map loadV3(JdbcDataSource datasource,
                srid = currentSrid
            } else {
                if (currentSrid == 0) {
-                    error "The process has been stopped since the table $name has a no SRID"
-                    return
+                    throw new IllegalArgumentException("The process has been stopped since the table $name has no SRID")
                } else if (currentSrid > 0 && srid != currentSrid) {
-                    error "The process has been stopped since the table $name has a different SRID from the others"
-                    return
+                    throw new IllegalArgumentException("The process has been stopped since the table $name has a different SRID from the others")
                }
            }
        }
@@ -411,8 +402,7 @@ Map loadV3(JdbcDataSource datasource,
    }

    if (!tablesExist.get("commune")) {
-        error 'The process has been stopped since the table zone does not exist or is empty'
-        return
+        throw new IllegalArgumentException('The process has been stopped since the table commune does not exist or is empty')
    }

    // -------------------------------------------------------------------------------
diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy
index 1121ce4c6b..e65b528b43 100644
--- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy
+++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy
@@ -328,10 +328,9 @@ abstract class WorkflowAbstractTest {
        assertNotNull(process)
        //Check if the tables exist and contains at least one row
        outputTables.values().each { it ->
-            def spatialTable = externalDB.getSpatialTable(it)
-            assertNotNull(spatialTable)
-            assertEquals(2154, spatialTable.srid)
-            assertTrue(spatialTable.getRowCount() > 0)
+            assertTrue(externalDB.hasTable(it))
+            assertEquals(2154, externalDB.getSrid(it))
+ 
assertTrue(externalDB.getRowCount(it) > 0) } externalDB.close() } @@ -348,8 +347,8 @@ abstract class WorkflowAbstractTest { "delete": false ], "input" : [ - "folder": ["path" : dataFolder, - "locations": [2000, 2001, 2002]]], + "folder": dataFolder, + "locations": [2000, 2001, 2002]], "output" : [ "folder": ["path" : folder.absolutePath, "tables": ["grid_indicators"]]], @@ -362,7 +361,7 @@ abstract class WorkflowAbstractTest { ] ] ] - assertNull(BDTopo.workflow(bdTopoParameters, getVersion())) + assertThrows(Exception.class, ()->BDTopo.workflow(bdTopoParameters, getVersion())) } @Test @@ -435,5 +434,4 @@ abstract class WorkflowAbstractTest { assertTrue(h2gis.firstRow("""SELECT count(*) as count from ${tableNames.road} where TYPE is not null;""".toString()).count > 0) assertTrue(h2gis.firstRow("""SELECT count(*) as count from ${tableNames.road} where WIDTH is not null or WIDTH>0 ;""".toString()).count > 0) } - } \ No newline at end of file diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy index 3189494ad4..64a2d27920 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy @@ -157,12 +157,16 @@ class WorkflowDebugTest { "folder": ["path": directory]], "parameters" : ["distance" : 0, - "grid_indicators": [ + rsu_indicators : [ + "indicatorUse": ["LCZ", "UTRF", "TEB"] + + ], + /*"grid_indicators": [ "x_size" : 100, "y_size" : 100, "indicators" :["FREE_EXTERNAL_FACADE_DENSITY", "BUILDING_FRACTION"] //, "lcz_lod":2 - ] + ]*/ ] ] //BDTopo.v2(bdTopoParameters) diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy index a6eb0f2d08..49ed68ec67 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy @@ -24,6 +24,7 @@ import org.junit.jupiter.api.io.CleanupMode import org.junit.jupiter.api.io.TempDir import static org.junit.jupiter.api.Assertions.assertNull +import static org.junit.jupiter.api.Assertions.assertThrows class WorkflowParametersTest { @@ -68,7 +69,7 @@ class WorkflowParametersTest { "terrain_roughness_class" : 1]] ] ] - assertNull BDTopo.v2(bdTopoParameters) - assertNull BDTopo.v3(bdTopoParameters) + assertThrows(Exception.class, ()->BDTopo.v2(bdTopoParameters) ) + assertThrows(Exception.class, ()-> BDTopo.v3(bdTopoParameters)) } } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy index bc0be435d0..adf96302d4 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy @@ -23,6 +23,8 @@ import groovy.transform.BaseScript import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators @@ -36,9 +38,10 @@ import org.orbisgis.geoclimate.Geoindicators * * @return Table name in which the block id and their corresponding indicator value are stored * - * @author Jérémy Bernard + * @author Jérémy Bernard + * @author Erwan 
Bocher */ -String holeAreaDensity(JdbcDataSource datasource, String blockTable, String prefixName) { +String holeAreaDensity(JdbcDataSource datasource, String blockTable, String prefixName) throws Exception { def GEOMETRIC_FIELD = "the_geom" def ID_COLUMN_BL = "id_block" def BASE_NAME = "hole_area_density" @@ -54,9 +57,12 @@ String holeAreaDensity(JdbcDataSource datasource, String blockTable, String pref SELECT $ID_COLUMN_BL, ST_AREA(ST_HOLES($GEOMETRIC_FIELD))/ST_AREA($GEOMETRIC_FIELD) AS $BASE_NAME FROM $blockTable """ - - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the hole area density for the building blocks", e) + } } /** @@ -76,8 +82,9 @@ String holeAreaDensity(JdbcDataSource datasource, String blockTable, String pref * @return Table name in which the block id and their corresponding indicator value are stored * * @author Jérémy Bernard + * @author Erwan Bocher */ -String netCompactness(JdbcDataSource datasource, String building, String buildingVolumeField, String buildingContiguityField, String prefixName) { +String netCompactness(JdbcDataSource datasource, String building, String buildingVolumeField, String buildingContiguityField, String prefixName) throws Exception { def GEOMETRY_FIELD_BU = "the_geom" def ID_COLUMN_BL = "id_block" def HEIGHT_WALL = "height_wall" @@ -88,7 +95,7 @@ String netCompactness(JdbcDataSource datasource, String building, String buildin // The name of the outputTableName is constructed def outputTableName = prefix(prefixName, "block_" + BASE_NAME) - datasource.createIndex(building,"id_block") + datasource.createIndex(building, "id_block") def query = """ DROP TABLE IF EXISTS $outputTableName; @@ -106,9 +113,12 @@ String netCompactness(JdbcDataSource datasource, String building, String buildin FROM $building GROUP BY $ID_COLUMN_BL """ - - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the net compactness for the building blocks", e) + } } @@ -135,8 +145,9 @@ String netCompactness(JdbcDataSource datasource, String building, String buildin * * @return Table name in which the block id and their corresponding indicator value are stored * @author Jérémy Bernard + * @author Erwan Bocher */ -String closingness(JdbcDataSource datasource, String correlationTableName, String blockTable, String prefixName) { +String closingness(JdbcDataSource datasource, String correlationTableName, String blockTable, String prefixName) throws Exception { def GEOMETRY_FIELD_BU = "the_geom" def GEOMETRY_FIELD_BL = "the_geom" @@ -148,8 +159,8 @@ String closingness(JdbcDataSource datasource, String correlationTableName, Strin // The name of the outputTableName is constructed def outputTableName = prefix(prefixName, "block_" + BASE_NAME) - datasource.createIndex(blockTable,"id_block") - datasource.createIndex(correlationTableName,"id_block") + datasource.createIndex(blockTable, "id_block") + datasource.createIndex(correlationTableName, "id_block") def query = """ DROP TABLE IF EXISTS $outputTableName; @@ -162,7 +173,10 @@ String closingness(JdbcDataSource datasource, String correlationTableName, Strin $blockTable b WHERE a.$ID_COLUMN_BL = b.$ID_COLUMN_BL GROUP BY b.$ID_COLUMN_BL""" - - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the closingness for the building blocks", e) + } } \ No newline at end of file
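The same pattern is applied to every indicator in BlockIndicators: the SQL is assembled, executed inside a try block, and any SQLException is rethrown wrapped with a message naming the failing indicator, so the original H2 error stays available through the cause chain. A caller-side sketch (the table names are illustrative, not part of the patch):

    try {
        def holeTable = Geoindicators.BlockIndicators.holeAreaDensity(h2gis, "block_table", "test")
    } catch (SQLException e) {
        error "Block indicator computation failed: ${e.message}"
    }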
diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BuildingIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BuildingIndicators.groovy index 240a4b3225..b5e1a7d95a 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BuildingIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BuildingIndicators.groovy @@ -23,6 +23,8 @@ import groovy.transform.BaseScript import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators /** @@ -47,8 +49,9 @@ import org.orbisgis.geoclimate.Geoindicators * @return A database table name. * * @author Jérémy Bernard + * @author Erwan Bocher, CNRS */ -String sizeProperties(JdbcDataSource datasource, String building, List operations, String prefixName) { +String sizeProperties(JdbcDataSource datasource, String building, List operations, String prefixName) throws Exception { def OP_VOLUME = "volume" def OP_FLOOR_AREA = "floor_area" def OP_FACADE_LENGTH = "total_facade_length" @@ -89,8 +92,12 @@ String sizeProperties(JdbcDataSource datasource, String building, List operation } query += "$COLUMN_ID_BU FROM $building" - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the size properties for the buildings", e) + } } @@ -117,8 +124,9 @@ String sizeProperties(JdbcDataSource datasource, String building, List operation * @return A database table name. * * @author Jérémy Bernard + * @author Erwan Bocher, CNRS */ -String neighborsProperties(JdbcDataSource datasource, String building, List operations, String prefixName) { +String neighborsProperties(JdbcDataSource datasource, String building, List operations, String prefixName) throws Exception { def GEOMETRIC_FIELD = "the_geom" def ID_FIELD = "id_build" def HEIGHT_WALL = "height_wall" @@ -136,8 +144,8 @@ String neighborsProperties(JdbcDataSource datasource, String building, List oper // The name of the outputTableName is constructed def outputTableName = prefix prefixName, BASE_NAME - datasource.createSpatialIndex(building,"the_geom") - datasource.createIndex(building,"id_build") + datasource.createSpatialIndex(building, "the_geom") + datasource.createIndex(building, "id_build") def query = " CREATE TABLE $build_intersec AS SELECT " @@ -184,8 +192,12 @@ String neighborsProperties(JdbcDataSource datasource, String building, List oper ON a.$ID_FIELD = b.$ID_FIELD; DROP TABLE IF EXISTS $build_intersec""" - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the neighbors properties for the buildings", e) + } } /** @@ -213,8 +225,9 @@ String neighborsProperties(JdbcDataSource datasource, String building, List oper * @return A database table name. 
* * @author Jérémy Bernard + * @author Erwan Bocher, CNRS */ -String formProperties(JdbcDataSource datasource, String building, List operations, String prefixName) { +String formProperties(JdbcDataSource datasource, String building, List operations, String prefixName) throws Exception { def GEOMETRIC_FIELD = "the_geom" def ID_FIELD = "id_build" def HEIGHT_WALL = "height_wall" @@ -258,8 +271,12 @@ String formProperties(JdbcDataSource datasource, String building, List operation } query += "$ID_FIELD FROM $building" - datasource query.toString() - return outputTableName + try { + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the form properties for the buildings", e) + } } /** @@ -277,26 +294,26 @@ String formProperties(JdbcDataSource datasource, String building, List operation * @return A database table name. * * @author Jérémy Bernard - * @author Erwan Bocher + * @author Erwan Bocher, CNRS */ -String minimumBuildingSpacing(JdbcDataSource datasource, String building, float bufferDist = 100f, String prefixName) { - def GEOMETRIC_FIELD = "the_geom" - def ID_FIELD = "id_build" - def BASE_NAME = "minimum_building_spacing" - - debug "Executing Building minimum building spacing" - +String minimumBuildingSpacing(JdbcDataSource datasource, String building, float bufferDist = 100f, String prefixName) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def build_min_distance = postfix "build_min_distance" + try { + def GEOMETRIC_FIELD = "the_geom" + def ID_FIELD = "id_build" + def BASE_NAME = "minimum_building_spacing" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "building_" + BASE_NAME + debug "Executing Building minimum building spacing" - datasource.createSpatialIndex(building,"the_geom") - datasource.createIndex(building,"id_build") + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "building_" + BASE_NAME - datasource """ + datasource.createSpatialIndex(building, "the_geom") + datasource.createIndex(building, "id_build") + + datasource.execute(""" DROP TABLE IF EXISTS $build_min_distance; CREATE TABLE $build_min_distance AS SELECT b.$ID_FIELD, @@ -305,22 +322,27 @@ String minimumBuildingSpacing(JdbcDataSource datasource, String building, float WHERE st_expand(a.$GEOMETRIC_FIELD, $bufferDist) && b.$GEOMETRIC_FIELD AND a.$ID_FIELD <> b.$ID_FIELD GROUP BY b.$ID_FIELD; - CREATE INDEX IF NOT EXISTS with_buff_id ON $build_min_distance ($ID_FIELD); """.toString() + CREATE INDEX IF NOT EXISTS with_buff_id ON $build_min_distance ($ID_FIELD); """) - // The minimum distance is calculated (The minimum distance is set to the $inputE value for buildings - // having no building neighbors in a envelope meters distance - datasource """DROP TABLE IF EXISTS $outputTableName; + // The minimum distance is calculated (The minimum distance is set to the $inputE value for buildings + // having no building neighbors in a envelope meters distance + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName($ID_FIELD INTEGER, $BASE_NAME FLOAT) AS SELECT a.$ID_FIELD, CASE WHEN b.min_distance IS NOT NULL THEN b.min_distance ELSE 100 END FROM $building a LEFT JOIN $build_min_distance b - ON a.$ID_FIELD = b.$ID_FIELD """.toString() - // The temporary tables are deleted - datasource "DROP TABLE IF EXISTS $build_min_distance".toString() - - return 
outputTableName + ON a.$ID_FIELD = b.$ID_FIELD """) + // The temporary tables are deleted + datasource.execute("DROP TABLE IF EXISTS $build_min_distance") + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the minimum building spacing for the buildings", e) + } finally { + // The temporary tables are deleted + datasource.execute("DROP TABLE IF EXISTS $build_min_distance") + } } /** @@ -339,64 +361,69 @@ String minimumBuildingSpacing(JdbcDataSource datasource, String building, float * * @author Jérémy Bernard */ -String roadDistance(JdbcDataSource datasource, String building, String inputRoadTableName, float bufferDist = 100f, String prefixName) { - def GEOMETRIC_FIELD = "the_geom" - def ID_FIELD_BU = "id_build" - def ROAD_WIDTH = "width" - def BASE_NAME = "road_distance" - - debug "Executing Building road distance" - +String roadDistance(JdbcDataSource datasource, String building, String inputRoadTableName, float bufferDist = 100f, String prefixName) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def build_buffer = postfix "build_buffer" def road_surf = postfix "road_surf" def road_within_buffer = postfix "road_within_buffer" + try { + def GEOMETRIC_FIELD = "the_geom" + def ID_FIELD_BU = "id_build" + def ROAD_WIDTH = "width" + def BASE_NAME = "road_distance" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "building_" + BASE_NAME + debug "Executing Building road distance" + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "building_" + BASE_NAME - datasource.createIndex(building,"id_build") + datasource.createIndex(building, "id_build") - // The buffer is created - datasource """DROP TABLE IF EXISTS $build_buffer; + // The buffer is created + datasource.execute("""DROP TABLE IF EXISTS $build_buffer; CREATE TABLE $build_buffer AS SELECT $ID_FIELD_BU, ST_BUFFER($GEOMETRIC_FIELD, $bufferDist, 2) AS $GEOMETRIC_FIELD FROM $building; - CREATE SPATIAL INDEX IF NOT EXISTS buff_ids ON $build_buffer ($GEOMETRIC_FIELD)""".toString() - // The road surfaces are created - datasource """ + CREATE SPATIAL INDEX IF NOT EXISTS buff_ids ON $build_buffer ($GEOMETRIC_FIELD)""") + // The road surfaces are created + datasource.execute(""" DROP TABLE IF EXISTS $road_surf; CREATE TABLE $road_surf AS SELECT ST_BUFFER($GEOMETRIC_FIELD, $ROAD_WIDTH::DOUBLE PRECISION/2,'quad_segs=2 endcap=flat') AS $GEOMETRIC_FIELD FROM $inputRoadTableName; - CREATE SPATIAL INDEX IF NOT EXISTS buff_ids ON $road_surf ($GEOMETRIC_FIELD)""".toString() - // The roads located within the buffer are identified - datasource """ + CREATE SPATIAL INDEX IF NOT EXISTS buff_ids ON $road_surf ($GEOMETRIC_FIELD)""") + // The roads located within the buffer are identified + datasource.execute(""" DROP TABLE IF EXISTS $road_within_buffer; CREATE TABLE $road_within_buffer AS SELECT a.$ID_FIELD_BU, b.$GEOMETRIC_FIELD FROM $build_buffer a, $road_surf b WHERE a.$GEOMETRIC_FIELD && b.$GEOMETRIC_FIELD AND ST_INTERSECTS(a.$GEOMETRIC_FIELD, b.$GEOMETRIC_FIELD); - CREATE INDEX IF NOT EXISTS with_buff_id ON $road_within_buffer ($ID_FIELD_BU); """.toString() + CREATE INDEX IF NOT EXISTS with_buff_id ON $road_within_buffer ($ID_FIELD_BU); """) - // The minimum distance is calculated between each building and the surrounding roads (the minimum - // distance is set to the bufferDist value for buildings having no road within a bufferDist meters - // 
distance) - datasource """ + // The minimum distance is calculated between each building and the surrounding roads (the minimum + // distance is set to the bufferDist value for buildings having no road within a bufferDist meters + // distance) + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName($BASE_NAME DOUBLE PRECISION, $ID_FIELD_BU INTEGER) AS ( SELECT COALESCE(MIN(st_distance(a.$GEOMETRIC_FIELD, b.$GEOMETRIC_FIELD)), $bufferDist), a.$ID_FIELD_BU FROM $road_within_buffer b RIGHT JOIN $building a ON a.$ID_FIELD_BU = b.$ID_FIELD_BU - GROUP BY a.$ID_FIELD_BU)""".toString() + GROUP BY a.$ID_FIELD_BU)""") - // The temporary tables are deleted - datasource "DROP TABLE IF EXISTS $build_buffer, $road_within_buffer, $road_surf".toString() + // The temporary tables are deleted + datasource.execute("DROP TABLE IF EXISTS $build_buffer, $road_within_buffer, $road_surf") - return outputTableName + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the road distance for the buildings", e) + } finally { + // The temporary tables are deleted + datasource.execute("DROP TABLE IF EXISTS $build_buffer, $road_within_buffer, $road_surf") + } }
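As refactored above, roadDistance buffers each building by bufferDist, widens each road line by width/2, keeps the minimum building-to-road distance per building, and falls back to bufferDist when no road intersects the buffer (the COALESCE in the final query); the temporary tables are now also dropped in the finally block. A usage sketch (datasource and table names are illustrative):

    // creates e.g. "test_building_road_distance" with columns ROAD_DISTANCE and ID_BUILD
    def distTable = roadDistance(h2gis, "building", "road", 100f, "test")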
SQLException("Cannot compute the likelihood large building for the buildings", e) + } } /** @@ -469,38 +499,42 @@ String likelihoodLargeBuilding(JdbcDataSource datasource, String building, Strin * * @author Erwan Bocher, CNRS */ -String buildingPopulation(JdbcDataSource datasource, String inputBuilding, String inputPopulation, List inputPopulationColumns = []) { - def BASE_NAME = "building_with_population" - def ID_BUILDING = "id_build" - def ID_POP = "id_pop" - - debug "Computing building population" - - // The name of the outputTableName is constructed - def outputTableName = postfix BASE_NAME - - //Indexing table - datasource.createSpatialIndex(inputBuilding,"the_geom") - datasource.createSpatialIndex(inputPopulation,"the_geom") - def popColumns = [] - def sum_popColumns = [] - if (inputPopulationColumns) { - def lowerCasePopCols = inputPopulationColumns.collect { it.toLowerCase() } - datasource."$inputPopulation".getColumns().each { col -> - if (!["the_geom", "id_pop"].contains(col.toLowerCase() - ) && lowerCasePopCols.contains(col.toLowerCase())) { - popColumns << "b.$col" - sum_popColumns << "sum((a.area_building * $col)/b.sum_area_building) as $col" +String buildingPopulation(JdbcDataSource datasource, String inputBuilding, String inputPopulation, List inputPopulationColumns = []) throws Exception { + //Temporary tables + def inputBuildingTableName_pop = postfix inputBuilding + def inputBuildingTableName_pop_sum = postfix "building_pop_sum" + def inputBuildingTableName_area_sum = postfix "building_area_sum" + try { + def BASE_NAME = "building_with_population" + def ID_BUILDING = "id_build" + def ID_POP = "id_pop" + + debug "Computing building population" + + // The name of the outputTableName is constructed + def outputTableName = postfix BASE_NAME + + //Indexing table + datasource.createSpatialIndex(inputBuilding, "the_geom") + datasource.createSpatialIndex(inputPopulation, "the_geom") + def popColumns = [] + def sum_popColumns = [] + if (inputPopulationColumns) { + def lowerCasePopCols = inputPopulationColumns.collect { it.toLowerCase() } + datasource.getColumnNames(inputPopulation).each { col -> + if (!["the_geom", "id_pop"].contains(col.toLowerCase() + ) && lowerCasePopCols.contains(col.toLowerCase())) { + popColumns << "b.$col" + sum_popColumns << "sum((a.area_building * $col)/b.sum_area_building) as $col" + } } + } else { + warn "Please set a list one column that contain population data to be disaggregated" + return } - } else { - warn "Please set a list one column that contain population data to be disaggregated" - return - } - //Filtering the building to get only residential and intersect it with the population table - def inputBuildingTableName_pop = postfix inputBuilding - datasource.execute(""" + //Filtering the building to get only residential and intersect it with the population table + datasource.execute(""" drop table if exists $inputBuildingTableName_pop; CREATE TABLE $inputBuildingTableName_pop AS SELECT (ST_AREA(ST_INTERSECTION(a.the_geom, st_force2D(b.the_geom)))*a.NB_LEV) as area_building, a.$ID_BUILDING, b.id_pop, ${popColumns.join(",")} from @@ -509,12 +543,9 @@ String buildingPopulation(JdbcDataSource datasource, String inputBuilding, Strin or a.type in ('apartments', 'building', 'detached', 'farm', 'house','residential')); create index on $inputBuildingTableName_pop ($ID_BUILDING); create index on $inputBuildingTableName_pop ($ID_POP); - """.toString()) - - def inputBuildingTableName_pop_sum = postfix "building_pop_sum" - def inputBuildingTableName_area_sum = 
postfix "building_area_sum" - //Aggregate population values - datasource.execute("""drop table if exists $inputBuildingTableName_pop_sum, $inputBuildingTableName_area_sum; + """) + //Aggregate population values + datasource.execute("""drop table if exists $inputBuildingTableName_pop_sum, $inputBuildingTableName_area_sum; create table $inputBuildingTableName_area_sum as select id_pop, sum(area_building) as sum_area_building from $inputBuildingTableName_pop group by $ID_POP; create index on $inputBuildingTableName_area_sum($ID_POP); @@ -527,7 +558,13 @@ String buildingPopulation(JdbcDataSource datasource, String inputBuilding, Strin DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.*, ${popColumns.join(",")} from $inputBuilding a LEFT JOIN $inputBuildingTableName_pop_sum b on a.$ID_BUILDING=b.$ID_BUILDING; - drop table if exists $inputBuildingTableName_pop,$inputBuildingTableName_pop_sum, $inputBuildingTableName_area_sum ;""".toString()) - - return outputTableName + drop table if exists $inputBuildingTableName_pop,$inputBuildingTableName_pop_sum, $inputBuildingTableName_area_sum ;""") + + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the population distribution for the buildings", e) + } finally { + // The temporary tables are delete + datasource.execute("drop table if exists $inputBuildingTableName_pop,$inputBuildingTableName_pop_sum, $inputBuildingTableName_area_sum") + } } \ No newline at end of file diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/DataUtils.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/DataUtils.groovy index 3ada827c0e..1985e73aa8 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/DataUtils.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/DataUtils.groovy @@ -24,6 +24,8 @@ import groovy.transform.BaseScript import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators /** @@ -35,55 +37,53 @@ import org.orbisgis.geoclimate.Geoindicators * * @return */ -String joinTables(JdbcDataSource datasource, Map inputTableNamesWithId, String outputTableName, boolean prefixWithTabName = false) { - debug "Executing Utility process to join tables in one" - - def columnKey - def alias = "a" - def leftQuery = "" - def indexes = "" +String joinTables(JdbcDataSource datasource, Map inputTableNamesWithId, String outputTableName, boolean prefixWithTabName = false) throws Exception { + try { + debug "Executing Utility process to join tables in one" + def columnKey + def alias = "a" + def leftQuery = "" + def indexes = "" - def columns = [] + def columns = [] - inputTableNamesWithId.each { key, value -> - //Reload cache to be sure that the table is up to date - datasource."$key".reload() - if (alias == "a") { - columnKey = "$alias.$value" - // Whether or not the table name is add as prefix of the indicator in the new table - if (prefixWithTabName) { - columns = datasource."$key".columns.collect { - alias + ".$it AS ${key}_$it" + inputTableNamesWithId.each { key, value -> + if (alias == "a") { + columnKey = "$alias.$value" + // Whether or not the table name is add as prefix of the indicator in the new table + if (prefixWithTabName) { + columns = datasource.getColumnNames(key).collect { + alias + ".$it AS ${key}_$it" + } + } else { + columns = datasource.getColumnNames(key).collect { + alias + 
".$it" + } } + leftQuery += " FROM $key as $alias " } else { - columns = datasource."$key".columns.collect { - alias + ".$it" - } - } - leftQuery += " FROM $key as $alias " - } else { - datasource."$key".columns.forEach() { item -> - if (!item.equalsIgnoreCase(value)) { - if (prefixWithTabName) { - columns.add(alias + ".$item AS ${key}_$item") - } else { - columns.add(alias + ".$item") + datasource.getColumnNames(key).forEach() { item -> + if (!item.equalsIgnoreCase(value)) { + if (prefixWithTabName) { + columns.add(alias + ".$item AS ${key}_$item") + } else { + columns.add(alias + ".$item") + } } } + leftQuery += " LEFT JOIN $key as $alias ON $alias.$value = $columnKey " } - leftQuery += " LEFT JOIN $key as $alias ON $alias.$value = $columnKey " + indexes += "CREATE INDEX IF NOT EXISTS ${key}_ids ON $key ($value);" + alias++ } - indexes += "CREATE INDEX IF NOT EXISTS ${key}_ids ON $key ($value);" - alias++ + def columnsAsString = columns.join(",") + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; + ${indexes.toString()} + CREATE TABLE $outputTableName AS SELECT $columnsAsString $leftQuery""") + return outputTableName + } catch (java.sql.SQLException e) { + throw new SQLException("Cannot join the tables", e) } - - def columnsAsString = columns.join(",") - - datasource "DROP TABLE IF EXISTS $outputTableName".toString() - datasource indexes.toString() - datasource "CREATE TABLE $outputTableName AS SELECT $columnsAsString $leftQuery".toString() - - return outputTableName } /** @@ -93,37 +93,41 @@ String joinTables(JdbcDataSource datasource, Map inputTableNamesWithId, String o * @param datasource connection to the database * @param inputTableNames to be stored in the directory. * Note : A spatial table is saved in a flatgeobuffer file and the other in csv - * @param delete true to delete the file is exist + * @param delete true to delete the file is exist * @param directory folder to save the tables * * @return the directory where the tables are saved */ -String saveTablesAsFiles(JdbcDataSource datasource, List inputTableNames, boolean delete = true, String directory) { - if (directory == null) { - error "The directory to save the data cannot be null" - return - } - def dirFile = new File(directory) +String saveTablesAsFiles(JdbcDataSource datasource, List inputTableNames, boolean delete = true, String directory) throws Exception { + try { + if (directory == null) { + error "The directory to save the data cannot be null" + return + } + def dirFile = new File(directory) - if (!dirFile.exists()) { - dirFile.mkdir() - debug "The folder $directory has been created" - } else if (!dirFile.isDirectory()) { - error "Invalid directory path" - return - } - inputTableNames.each { tableName -> - if (tableName) { - def fileToSave = dirFile.absolutePath + File.separator + tableName + - (datasource."$tableName".spatial ? ".fgb" : ".csv") - def table = datasource.getTable(tableName) - if (table) { - table.save(fileToSave, delete) - debug "The table $tableName has been saved in file $fileToSave" + if (!dirFile.exists()) { + dirFile.mkdir() + debug "The folder $directory has been created" + } else if (!dirFile.isDirectory()) { + error "Invalid directory path" + return + } + inputTableNames.each { tableName -> + if (tableName) { + def fileToSave = dirFile.absolutePath + File.separator + tableName + + (datasource.hasGeometryColumn(tableName) ? 
".fgb" : ".csv") + def table = datasource.getTable(tableName) + if (table) { + table.save(fileToSave, delete) + debug "The table $tableName has been saved in file $fileToSave" + } } } + return directory + } catch (java.sql.SQLException e) { + throw new SQLException("Cannot save the tables", e) } - return directory } @@ -157,8 +161,8 @@ static Map parametersMapping(def file, def altResourceStream) { * @param alias * @return */ -static String aliasColumns(JdbcDataSource datasource, String tableName, String alias){ - Collection columnNames = datasource.getColumnNames(tableName) +static String aliasColumns(JdbcDataSource datasource, String tableName, String alias) { + Collection columnNames = datasource.getColumnNames(tableName) return columnNames.inject([]) { result, iter -> result += "$alias.$iter" }.join(",") @@ -172,8 +176,8 @@ static String aliasColumns(JdbcDataSource datasource, String tableName, String a * @param exceptColumns columns to remove * @return */ -static String aliasColumns(JdbcDataSource datasource, def tableName, def alias, def exceptColumns){ - Collection columnNames = datasource.getColumnNames(tableName) +static String aliasColumns(JdbcDataSource datasource, def tableName, def alias, def exceptColumns) { + Collection columnNames = datasource.getColumnNames(tableName) columnNames.removeAll(exceptColumns) return columnNames.inject([]) { result, iter -> result += "$alias.$iter" diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy index c689bed99b..ea07b9d45b 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy @@ -20,10 +20,11 @@ package org.orbisgis.geoclimate.geoindicators import groovy.transform.BaseScript -import org.orbisgis.data.api.dataset.ISpatialTable import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators /** @@ -54,69 +55,73 @@ import org.orbisgis.geoclimate.Geoindicators * @author Jérémy Bernard */ String unweightedOperationFromLowerScale(JdbcDataSource datasource, String inputLowerScaleTableName, String inputUpperScaleTableName, String inputIdUp, - String inputIdLow, Map inputVarAndOperations, String prefixName) { - def GEOMETRIC_FIELD_UP = "the_geom" - def BASE_NAME = "unweighted_operation_from_lower_scale" - def SUM = "SUM" - def AVG = "AVG" - def GEOM_AVG = "GEOM_AVG" - def DENS = "DENS" - def NB_DENS = "NB_DENS" - def STD = "STD" - def COLUMN_TYPE_TO_AVOID = ["GEOMETRY", "VARCHAR"] - def SPECIFIC_OPERATIONS = [NB_DENS] - - debug "Executing Unweighted statistical operations from lower scale" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - - datasource.createIndex(inputLowerScaleTableName,inputIdUp) - datasource.createIndex(inputUpperScaleTableName,inputIdUp) - - def query = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT " - - def columnNamesTypes = datasource."$inputLowerScaleTableName".getColumnsTypes() - def filteredColumns = columnNamesTypes.findAll { !COLUMN_TYPE_TO_AVOID.contains(it.value) } - inputVarAndOperations.each { var, operations -> - // Some operations may not need to use an existing variable thus not concerned by the column filtering - def 
diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy index c689bed99b..ea07b9d45b 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy @@ -20,10 +20,11 @@ package org.orbisgis.geoclimate.geoindicators import groovy.transform.BaseScript -import org.orbisgis.data.api.dataset.ISpatialTable import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators /** @@ -54,69 +55,73 @@ import org.orbisgis.geoclimate.Geoindicators * @author Jérémy Bernard */ String unweightedOperationFromLowerScale(JdbcDataSource datasource, String inputLowerScaleTableName, String inputUpperScaleTableName, String inputIdUp, - String inputIdLow, Map inputVarAndOperations, String prefixName) { - def GEOMETRIC_FIELD_UP = "the_geom" - def BASE_NAME = "unweighted_operation_from_lower_scale" - def SUM = "SUM" - def AVG = "AVG" - def GEOM_AVG = "GEOM_AVG" - def DENS = "DENS" - def NB_DENS = "NB_DENS" - def STD = "STD" - def COLUMN_TYPE_TO_AVOID = ["GEOMETRY", "VARCHAR"] - def SPECIFIC_OPERATIONS = [NB_DENS] - - debug "Executing Unweighted statistical operations from lower scale" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - - datasource.createIndex(inputLowerScaleTableName,inputIdUp) - datasource.createIndex(inputUpperScaleTableName,inputIdUp) - - def query = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT " - - def columnNamesTypes = datasource."$inputLowerScaleTableName".getColumnsTypes() - def filteredColumns = columnNamesTypes.findAll { !COLUMN_TYPE_TO_AVOID.contains(it.value) } - inputVarAndOperations.each { var, operations -> - // Some operations may not need to use an existing variable thus not concerned by the column filtering - def filteredOperations = operations - SPECIFIC_OPERATIONS - if (filteredColumns.containsKey(var.toUpperCase()) | (filteredOperations.isEmpty())) { - operations.each { - def op = it.toUpperCase() - switch (op) { - case GEOM_AVG: - query += "COALESCE(EXP(1.0/COUNT(a.*)*SUM(LOG(a.$var))),0) AS ${op + "_" + var}," - break - case DENS: - query += "COALESCE(SUM(a.$var::float)/ST_AREA(b.$GEOMETRIC_FIELD_UP),0) AS ${var + "_DENSITY"}," - break - case NB_DENS: - query += "COALESCE(COUNT(a.$inputIdLow)/ST_AREA(b.$GEOMETRIC_FIELD_UP),0) AS ${var + "_NUMBER_DENSITY"}," - break - case SUM: - query += "COALESCE(SUM(a.$var::float),0) AS ${op + "_" + var}," - break - case AVG: - query += "COALESCE($op(a.$var::float),0) AS ${op + "_" + var}," - break - case STD: - query += "COALESCE(STDDEV_POP(a.$var::float),0) AS ${op + "_" + var}," - break - default: - break + String inputIdLow, Map inputVarAndOperations, String prefixName) throws Exception { + try { + def GEOMETRIC_FIELD_UP = "the_geom" + def BASE_NAME = "unweighted_operation_from_lower_scale" + def SUM = "SUM" + def AVG = "AVG" + def GEOM_AVG = "GEOM_AVG" + def DENS = "DENS" + def NB_DENS = "NB_DENS" + def STD = "STD" + def COLUMN_TYPE_TO_AVOID = ["GEOMETRY", "VARCHAR"] + def SPECIFIC_OPERATIONS = [NB_DENS] + + debug "Executing Unweighted statistical operations from lower scale" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME + + datasource.createIndex(inputLowerScaleTableName, inputIdUp) + datasource.createIndex(inputUpperScaleTableName, inputIdUp) + + def query = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT " + + def columnNamesTypes = datasource.getColumnNamesTypes(inputLowerScaleTableName) + def filteredColumns = columnNamesTypes.findAll { !COLUMN_TYPE_TO_AVOID.contains(it.value.toUpperCase()) } + inputVarAndOperations.each { var, operations -> + // Some operations may not need to use an existing variable thus not concerned by the column filtering + def filteredOperations = operations - SPECIFIC_OPERATIONS + if (filteredColumns.containsKey(var.toUpperCase()) || (filteredOperations.isEmpty())) { + operations.each { + def op = it.toUpperCase() + switch (op) { + case GEOM_AVG: + query += "COALESCE(EXP(1.0/COUNT(a.*)*SUM(LOG(a.$var))),0) AS ${op + "_" + var}," + break + case DENS: + query += "COALESCE(SUM(a.$var::float)/ST_AREA(b.$GEOMETRIC_FIELD_UP),0) AS ${var + "_DENSITY"}," + break + case NB_DENS: + query += "COALESCE(COUNT(a.$inputIdLow)/ST_AREA(b.$GEOMETRIC_FIELD_UP),0) AS ${var + "_NUMBER_DENSITY"}," + break + case SUM: + query += "COALESCE(SUM(a.$var::float),0) AS ${op + "_" + var}," + break + case AVG: + query += "COALESCE($op(a.$var::float),0) AS ${op + "_" + var}," + break + case STD: + query += "COALESCE(STDDEV_POP(a.$var::float),0) AS ${op + "_" + var}," + break + default: + break + } } + } else { + debug("""The column $var doesn't exist or should be numeric""") } - } else { - debug("""The column $var doesn't exist or should be numeric""") } - } - query += "b.$inputIdUp FROM $inputLowerScaleTableName a RIGHT JOIN $inputUpperScaleTableName b " + - "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp" + query += "b.$inputIdUp FROM $inputLowerScaleTableName a RIGHT JOIN $inputUpperScaleTableName b " + + "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp" - datasource query.toString() + datasource.execute(query) - return outputTableName + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot execute the unweighted 
operation from lower scale", e) + } } /** @@ -142,65 +147,68 @@ String unweightedOperationFromLowerScale(JdbcDataSource datasource, String input */ String weightedAggregatedStatistics(JdbcDataSource datasource, String inputLowerScaleTableName, String inputUpperScaleTableName, String inputIdUp, - Map inputVarWeightsOperations, String prefixName) { - - def AVG = "AVG" - def STD = "STD" - def BASE_NAME = "weighted_aggregated_statistics" - - debug "Executing Weighted statistical operations from lower scale" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - + Map inputVarWeightsOperations, String prefixName) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def weighted_mean = postfix "weighted_mean" + try { + def AVG = "AVG" + def STD = "STD" + def BASE_NAME = "weighted_aggregated_statistics" - datasource.createIndex(inputLowerScaleTableName,inputIdUp) - datasource.createIndex(inputUpperScaleTableName,inputIdUp) - - // The weighted mean is calculated in all cases since it is useful for the STD calculation - def weightedMeanQuery = "DROP TABLE IF EXISTS $weighted_mean; " + - "CREATE TABLE $weighted_mean($inputIdUp INTEGER," - def nameAndType = "" - def weightedMean = "" - inputVarWeightsOperations.each { var, weights -> - weights.each { weight, operations -> - nameAndType += "weighted_avg_${var}_$weight DOUBLE PRECISION DEFAULT 0," - weightedMean += "CASE WHEN SUM(a.$weight)=0 THEN 0 ELSE COALESCE(SUM(a.$var*a.$weight) / SUM(a.$weight),0) END AS weighted_avg_${var}_$weight," - } - } - weightedMeanQuery += nameAndType[0..-2] + ") AS (SELECT b.$inputIdUp, ${weightedMean[0..-2]}" + - " FROM $inputLowerScaleTableName a RIGHT JOIN $inputUpperScaleTableName b " + - "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp)" - datasource weightedMeanQuery.toString() - - // The weighted std is calculated if needed and only the needed fields are returned - def weightedStdQuery = "CREATE INDEX IF NOT EXISTS id_lcorr ON $weighted_mean ($inputIdUp); " + - "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT b.$inputIdUp," - inputVarWeightsOperations.each { var, weights -> - weights.each { weight, operations -> - // The operation names are transformed into upper case - operations.replaceAll { it.toUpperCase() } - if (operations.contains(AVG)) { - weightedStdQuery += "COALESCE(b.weighted_avg_${var}_$weight,0) AS avg_${var}_${weight}_weighted," + debug "Executing Weighted statistical operations from lower scale" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME + + datasource.createIndex(inputLowerScaleTableName, inputIdUp) + datasource.createIndex(inputUpperScaleTableName, inputIdUp) + + // The weighted mean is calculated in all cases since it is useful for the STD calculation + def weightedMeanQuery = "DROP TABLE IF EXISTS $weighted_mean; " + + "CREATE TABLE $weighted_mean($inputIdUp INTEGER," + def nameAndType = "" + def weightedMean = "" + inputVarWeightsOperations.each { var, weights -> + weights.each { weight, operations -> + nameAndType += "weighted_avg_${var}_$weight DOUBLE PRECISION DEFAULT 0," + weightedMean += "CASE WHEN SUM(a.$weight)=0 THEN 0 ELSE COALESCE(SUM(a.$var*a.$weight) / SUM(a.$weight),0) END AS weighted_avg_${var}_$weight," } - if (operations.contains(STD)) { - weightedStdQuery += "CASE WHEN SUM(a.$weight)=0 THEN 0 ELSE 
COALESCE(POWER(SUM(a.$weight*POWER(a.$var-b.weighted_avg_${var}_$weight,2))/" + - "SUM(a.$weight),0.5),0) END AS std_${var}_${weight}_weighted," + } + weightedMeanQuery += nameAndType[0..-2] + ") AS (SELECT b.$inputIdUp, ${weightedMean[0..-2]}" + + " FROM $inputLowerScaleTableName a RIGHT JOIN $inputUpperScaleTableName b " + + "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp)" + datasource weightedMeanQuery.toString() + + // The weighted std is calculated if needed and only the needed fields are returned + def weightedStdQuery = "CREATE INDEX IF NOT EXISTS id_lcorr ON $weighted_mean ($inputIdUp); " + + "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT b.$inputIdUp," + inputVarWeightsOperations.each { var, weights -> + weights.each { weight, operations -> + // The operation names are transformed into upper case + operations.replaceAll { it.toUpperCase() } + if (operations.contains(AVG)) { + weightedStdQuery += "COALESCE(b.weighted_avg_${var}_$weight,0) AS avg_${var}_${weight}_weighted," + } + if (operations.contains(STD)) { + weightedStdQuery += "CASE WHEN SUM(a.$weight)=0 THEN 0 ELSE COALESCE(POWER(SUM(a.$weight*POWER(a.$var-b.weighted_avg_${var}_$weight,2))/" + + "SUM(a.$weight),0.5),0) END AS std_${var}_${weight}_weighted," + } } } - } - weightedStdQuery = weightedStdQuery[0..-2] + " FROM $inputLowerScaleTableName a RIGHT JOIN $weighted_mean b " + - "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp" - - datasource weightedStdQuery.toString() - - // The temporary tables are deleted - datasource "DROP TABLE IF EXISTS $weighted_mean".toString() + weightedStdQuery = weightedStdQuery[0..-2] + " FROM $inputLowerScaleTableName a RIGHT JOIN $weighted_mean b " + + "ON a.$inputIdUp = b.$inputIdUp GROUP BY b.$inputIdUp" - return outputTableName + datasource.execute(weightedStdQuery) + // The temporary tables are deleted + datasource.dropTable(weighted_mean) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot execute the weighted aggregated statistics operation", e) + } finally { + // The temporary tables are deleted + datasource.dropTable(weighted_mean) + } } /** @@ -216,35 +224,37 @@ String weightedAggregatedStatistics(JdbcDataSource datasource, String inputLower * @param prefixName String use as prefix to name the output table * * @return A database table name. 
- * @author Erwan Bocher + * @author Erwan Bocher, CNRS */ String geometryProperties(JdbcDataSource datasource, String inputTableName, List inputFields, - List operations, String prefixName) { - - def GEOMETRIC_FIELD = "the_geom" - def OPS = ["st_geomtype", "st_srid", "st_length", "st_perimeter", "st_area", "st_dimension", - "st_coorddim", "st_num_geoms", "st_num_pts", "st_issimple", "st_isvalid", "st_isempty"] - def BASE_NAME = "geometry_properties" + List operations, String prefixName) throws Exception { + try { + def GEOMETRIC_FIELD = "the_geom" + def OPS = ["st_geomtype", "st_srid", "st_length", "st_perimeter", "st_area", "st_dimension", + "st_coorddim", "st_num_geoms", "st_num_pts", "st_issimple", "st_isvalid", "st_isempty"] + def BASE_NAME = "geometry_properties" - debug "Executing Geometry properties" + debug "Executing Geometry properties" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME - def query = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT " + def query = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT " - // The operation names are transformed into lower case - operations.replaceAll { it.toLowerCase() } - operations.each { - if (OPS.contains(it)) { - query += "$it($GEOMETRIC_FIELD) as ${it.substring(3)}," + // The operation names are transformed into lower case + operations.replaceAll { it.toLowerCase() } + operations.each { + if (OPS.contains(it)) { + query += "$it($GEOMETRIC_FIELD) as ${it.substring(3)}," + } } - } - query += "${inputFields.join(",")} from $inputTableName" - - datasource query.toString() + query += "${inputFields.join(",")} from $inputTableName" - return outputTableName + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the geometry properties", e) + } } /** @@ -280,117 +290,124 @@ String geometryProperties(JdbcDataSource datasource, String inputTableName, List */ String buildingDirectionDistribution(JdbcDataSource datasource, String buildingTableName, String tableUp, String inputIdUp, float angleRangeSize = 15f - , List distribIndicator = ["equality", "uniqueness"], String prefixName) { - - def GEOMETRIC_FIELD = "the_geom" - def ID_FIELD_BU = "id_build" - def INEQUALITY = "BUILDING_DIRECTION_EQUALITY" - def UNIQUENESS = "BUILDING_DIRECTION_UNIQUENESS" - def BASENAME = "MAIN_BUILDING_DIRECTION" - - debug "Executing Perkins skill score building direction" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASENAME - - // Test whether the angleRangeSize is a divisor of 180° - if ((180 % angleRangeSize) == 0 && (180 / angleRangeSize) > 1) { - def med_angle = angleRangeSize / 2 - // To avoid overwriting the output files of this step, a unique identifier is created - // Temporary table names - def build_min_rec = postfix "build_min_rec" - def build_dir360 = postfix "build_dir360" - def build_dir180 = postfix "build_dir180" - def build_dir_dist = postfix "build_dir_dist" - - // The minimum diameter of the minimum rectangle is created for each building - datasource """DROP TABLE IF EXISTS $build_min_rec; CREATE TABLE $build_min_rec AS + , List distribIndicator = ["equality", "uniqueness"], String prefixName) throws Exception { + // To avoid overwriting the output files of this step, a unique identifier is created + // 
Temporary table names + def build_min_rec = postfix "build_min_rec" + def build_dir360 = postfix "build_dir360" + def build_dir180 = postfix "build_dir180" + def build_dir_dist = postfix "build_dir_dist" + + try { + def GEOMETRIC_FIELD = "the_geom" + def ID_FIELD_BU = "id_build" + def INEQUALITY = "BUILDING_DIRECTION_EQUALITY" + def UNIQUENESS = "BUILDING_DIRECTION_UNIQUENESS" + def BASENAME = "MAIN_BUILDING_DIRECTION" + + debug "Executing Perkins skill score building direction" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASENAME + + // Test whether the angleRangeSize is a divisor of 180° + if ((180 % angleRangeSize) == 0 && (180 / angleRangeSize) > 1) { + def med_angle = angleRangeSize / 2 + + // The minimum diameter of the minimum rectangle is created for each building + datasource.execute("""DROP TABLE IF EXISTS $build_min_rec; CREATE TABLE $build_min_rec AS SELECT $ID_FIELD_BU, $inputIdUp, ST_MINIMUMDIAMETER(ST_MINIMUMRECTANGLE($GEOMETRIC_FIELD)) - AS the_geom FROM $buildingTableName;""".toString() + AS the_geom FROM $buildingTableName;""") - datasource.createIndex(buildingTableName,"id_build") + datasource.createIndex(buildingTableName, "id_build") - // The length and direction of the smallest and the longest sides of the Minimum rectangle are calculated - datasource """CREATE INDEX IF NOT EXISTS id_bua ON $build_min_rec ($ID_FIELD_BU); + // The length and direction of the smallest and the longest sides of the Minimum rectangle are calculated + datasource.execute("""CREATE INDEX IF NOT EXISTS id_bua ON $build_min_rec ($ID_FIELD_BU); DROP TABLE IF EXISTS $build_dir360; CREATE TABLE $build_dir360 AS SELECT a.$inputIdUp, ST_LENGTH(a.the_geom) AS LEN_L, ST_AREA(b.the_geom)/ST_LENGTH(a.the_geom) AS LEN_H, ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(a.the_geom), ST_ENDPOINT(a.the_geom)))) AS ANG_L, ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(ST_ROTATE(a.the_geom, pi()/2)), ST_ENDPOINT(ST_ROTATE(a.the_geom, pi()/2))))) AS ANG_H FROM $build_min_rec a - LEFT JOIN $buildingTableName b ON a.$ID_FIELD_BU=b.$ID_FIELD_BU""".toString() + LEFT JOIN $buildingTableName b ON a.$ID_FIELD_BU=b.$ID_FIELD_BU""") - // The angles are transformed in the [0, 180]° interval - datasource """DROP TABLE IF EXISTS $build_dir180; CREATE TABLE $build_dir180 AS + // The angles are transformed in the [0, 180]° interval + datasource.execute("""DROP TABLE IF EXISTS $build_dir180; CREATE TABLE $build_dir180 AS SELECT $inputIdUp, LEN_L, LEN_H, CASEWHEN(ANG_L>=180, ANG_L-180, ANG_L) AS ANG_L, - CASEWHEN(ANG_H>180, ANG_H-180, ANG_H) AS ANG_H FROM $build_dir360""".toString() + CASEWHEN(ANG_H>180, ANG_H-180, ANG_H) AS ANG_H FROM $build_dir360""") - datasource "CREATE INDEX ON $build_dir180 ($inputIdUp)".toString() + datasource.execute( "CREATE INDEX ON $build_dir180 ($inputIdUp)") - // The query aiming to create the building direction distribution is created - def sqlQueryDist = "DROP TABLE IF EXISTS $build_dir_dist; CREATE TABLE $build_dir_dist AS SELECT " - for (int i = angleRangeSize; i <= 180; i += angleRangeSize) { - def nameAngle = (i - med_angle).toString().replace(".", "_") - sqlQueryDist += "SUM(CASEWHEN(ANG_L>=${i - angleRangeSize} AND ANG_L<$i, LEN_L, " + - "CASEWHEN(ANG_H>=${i - angleRangeSize} AND ANG_H<$i, LEN_H, 0))) AS ANG$nameAngle, " - } - sqlQueryDist += "$inputIdUp FROM $build_dir180 GROUP BY $inputIdUp;" + // The query aiming to create the building direction distribution is created + def sqlQueryDist = "DROP TABLE IF EXISTS $build_dir_dist; CREATE TABLE 
$build_dir_dist AS SELECT " + for (int i = angleRangeSize; i <= 180; i += angleRangeSize) { + def nameAngle = (i - med_angle).toString().replace(".", "_") + sqlQueryDist += "SUM(CASEWHEN(ANG_L>=${i - angleRangeSize} AND ANG_L<$i, LEN_L, " + + "CASEWHEN(ANG_H>=${i - angleRangeSize} AND ANG_H<$i, LEN_H, 0))) AS ANG$nameAngle, " + } + sqlQueryDist += "$inputIdUp FROM $build_dir180 GROUP BY $inputIdUp;" - // The query is executed - datasource sqlQueryDist.toString() + // The query is executed + datasource sqlQueryDist.toString() - // The main building direction and indicators characterizing the distribution are calculated - def resultsDistrib = distributionCharacterization(datasource, build_dir_dist, - tableUp, inputIdUp, distribIndicator, "GREATEST", prefixName) + // The main building direction and indicators characterizing the distribution are calculated + def resultsDistrib = distributionCharacterization(datasource, build_dir_dist, + tableUp, inputIdUp, distribIndicator, "GREATEST", prefixName) - // Rename the standard indicators into names consistent with the current method (building direction...) - datasource """DROP TABLE IF EXISTS $outputTableName; + // Rename the standard indicators into names consistent with the current method (building direction...) + datasource """DROP TABLE IF EXISTS $outputTableName; ALTER TABLE $resultsDistrib RENAME TO $outputTableName; ALTER TABLE $outputTableName RENAME COLUMN EXTREMUM_COL TO $BASENAME; ALTER TABLE $outputTableName RENAME COLUMN UNIQUENESS_VALUE TO $UNIQUENESS; ALTER TABLE $outputTableName RENAME COLUMN EQUALITY_VALUE TO $INEQUALITY;""".toString() + // The temporary tables are deleted + datasource """DROP TABLE IF EXISTS $build_min_rec, $build_dir360, $build_dir180, + $build_dir_dist;""".toString() + /* + if (distribIndicator.contains("uniqueness")){ + // Reorganise the distribution Table (having the same number of column than the number + // of direction of analysis) into a simple two column table (ID and SURF) + def sqlQueryUnique = "DROP TABLE IF EXISTS $build_dir_bdd; CREATE TABLE $build_dir_bdd AS SELECT " + def columnNames = datasource.getTable(build_dir_dist).columns + columnNames.remove(inputIdUp) + for (col in columnNames.take(columnNames.size() - 1)){ + sqlQueryUnique += "$inputIdUp, $col AS SURF FROM $build_dir_dist UNION ALL SELECT " + } + sqlQueryUnique += """$inputIdUp, ${columnNames[-1]} AS SURF FROM $build_dir_dist; + CREATE INDEX ON $build_dir_bdd USING BTREE($inputIdUp); + CREATE INDEX ON $build_dir_bdd USING BTREE(SURF);""" + datasource sqlQueryUnique - /* - if (distribIndicator.contains("uniqueness")){ - // Reorganise the distribution Table (having the same number of column than the number - // of direction of analysis) into a simple two column table (ID and SURF) - def sqlQueryUnique = "DROP TABLE IF EXISTS $build_dir_bdd; CREATE TABLE $build_dir_bdd AS SELECT " - def columnNames = datasource.getTable(build_dir_dist).columns - columnNames.remove(inputIdUp) - for (col in columnNames.take(columnNames.size() - 1)){ - sqlQueryUnique += "$inputIdUp, $col AS SURF FROM $build_dir_dist UNION ALL SELECT " - } - sqlQueryUnique += """$inputIdUp, ${columnNames[-1]} AS SURF FROM $build_dir_dist; - CREATE INDEX ON $build_dir_bdd USING BTREE($inputIdUp); - CREATE INDEX ON $build_dir_bdd USING BTREE(SURF);""" - datasource sqlQueryUnique - - def sqlQueryLast = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS " + - "SELECT b.$inputIdUp, a.main_building_direction," + def sqlQueryLast = "DROP TABLE IF EXISTS 
$outputTableName; CREATE TABLE $outputTableName AS " + + "SELECT b.$inputIdUp, a.main_building_direction," - if (distribIndicator.contains("inequality")) { - sqlQueryLast += " a.$INEQUALITY, " + if (distribIndicator.contains("inequality")) { + sqlQueryLast += " a.$INEQUALITY, " + } + sqlQueryLast += """a.max_surf/(MAX(b.SURF)+a.max_surf) AS $UNIQUENESS + FROM $build_perk_fin a + RIGHT JOIN $build_dir_bdd b + ON a.$inputIdUp = b.$inputIdUp + WHERE b.SURF < a.max_surf + GROUP BY b.$inputIdUp;""" + + datasource "CREATE INDEX ON $build_perk_fin ($inputIdUp);" + datasource sqlQueryLast } - sqlQueryLast += """a.max_surf/(MAX(b.SURF)+a.max_surf) AS $UNIQUENESS - FROM $build_perk_fin a - RIGHT JOIN $build_dir_bdd b - ON a.$inputIdUp = b.$inputIdUp - WHERE b.SURF < a.max_surf - GROUP BY b.$inputIdUp;""" - - datasource "CREATE INDEX ON $build_perk_fin ($inputIdUp);" - datasource sqlQueryLast - } - else{ - datasource "ALTER TABLE $build_perk_fin RENAME TO $outputTableName;" - } - */ + else{ + datasource "ALTER TABLE $build_perk_fin RENAME TO $outputTableName;" + } + */ + return outputTableName + } + } catch (SQLException e) { + throw new SQLException("Cannot compute the building direction distribution", e) + } finally { // The temporary tables are deleted datasource """DROP TABLE IF EXISTS $build_min_rec, $build_dir360, $build_dir180, $build_dir_dist;""".toString() - - return outputTableName } + } /** @@ -431,197 +448,200 @@ String buildingDirectionDistribution(JdbcDataSource datasource, String buildingT String distributionCharacterization(JdbcDataSource datasource, String distribTableName, String initialTable, String inputId, List distribIndicator = ["equality", "uniqueness"], String extremum = "GREATEST", - boolean keep2ndCol = false, boolean keepColVal = false, String prefixName) { - def EQUALITY = "EQUALITY_VALUE" - def UNIQUENESS = "UNIQUENESS_VALUE" - def EXTREMUM_COL = "EXTREMUM_COL" - def EXTREMUM_COL2 = "EXTREMUM_COL2" - def EXTREMUM_VAL = "EXTREMUM_VAL" - def BASENAME = "DISTRIBUTION_REPARTITION" - def GEOMETRY_FIELD = "THE_GEOM" - - debug "Executing equality and uniqueness indicators" - - if (extremum.toUpperCase() == "GREATEST" || extremum.toUpperCase() == "LEAST") { - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASENAME - - // Get all columns from the distribution table and remove the geometry column if exists - def allColumns = datasource.getColumnNames(distribTableName) - if (allColumns.contains(GEOMETRY_FIELD)) { - allColumns -= GEOMETRY_FIELD - } - // Get the distribution columns and the number of columns - def distribColumns = allColumns.minus(inputId.toUpperCase()) - def nbDistCol = distribColumns.size() - - if(distribColumns.size()==0){ - error("Any columns to compute the distribution from the table $distribTableName".toString()) - return - } + boolean keep2ndCol = false, boolean keepColVal = false, String prefixName) throws Exception { + // Create temporary tables + def outputTableMissingSomeObjects = postfix "output_table_missing_some_objects" + def distribTableNameNoNull = postfix "distrib_table_name_no_null" + try { + def EQUALITY = "EQUALITY_VALUE" + def UNIQUENESS = "UNIQUENESS_VALUE" + def EXTREMUM_COL = "EXTREMUM_COL" + def EXTREMUM_COL2 = "EXTREMUM_COL2" + def EXTREMUM_VAL = "EXTREMUM_VAL" + def BASENAME = "DISTRIBUTION_REPARTITION" + def GEOMETRY_FIELD = "THE_GEOM" + + debug "Executing equality and uniqueness indicators" + + if (extremum.toUpperCase() == "GREATEST" || extremum.toUpperCase() == "LEAST") { + // The name of 

/**
@@ -431,197 +448,200 @@ String buildingDirectionDistribution(JdbcDataSource datasource, String buildingT
 String distributionCharacterization(JdbcDataSource datasource, String distribTableName, String initialTable,
                                     String inputId, List distribIndicator = ["equality", "uniqueness"],
                                     String extremum = "GREATEST",
-                                    boolean keep2ndCol = false, boolean keepColVal = false, String prefixName) {
-    def EQUALITY = "EQUALITY_VALUE"
-    def UNIQUENESS = "UNIQUENESS_VALUE"
-    def EXTREMUM_COL = "EXTREMUM_COL"
-    def EXTREMUM_COL2 = "EXTREMUM_COL2"
-    def EXTREMUM_VAL = "EXTREMUM_VAL"
-    def BASENAME = "DISTRIBUTION_REPARTITION"
-    def GEOMETRY_FIELD = "THE_GEOM"
-
-    debug "Executing equality and uniqueness indicators"
-
-    if (extremum.toUpperCase() == "GREATEST" || extremum.toUpperCase() == "LEAST") {
-        // The name of the outputTableName is constructed
-        def outputTableName = prefix prefixName, BASENAME
-
-        // Get all columns from the distribution table and remove the geometry column if exists
-        def allColumns = datasource.getColumnNames(distribTableName)
-        if (allColumns.contains(GEOMETRY_FIELD)) {
-            allColumns -= GEOMETRY_FIELD
-        }
-        // Get the distribution columns and the number of columns
-        def distribColumns = allColumns.minus(inputId.toUpperCase())
-        def nbDistCol = distribColumns.size()
-
-        if(distribColumns.size()==0){
-            error("Any columns to compute the distribution from the table $distribTableName".toString())
-            return
-        }
+                                    boolean keep2ndCol = false, boolean keepColVal = false, String prefixName) throws Exception {
+    // Create temporary tables
+    def outputTableMissingSomeObjects = postfix "output_table_missing_some_objects"
+    def distribTableNameNoNull = postfix "distrib_table_name_no_null"
+    try {
+        def EQUALITY = "EQUALITY_VALUE"
+        def UNIQUENESS = "UNIQUENESS_VALUE"
+        def EXTREMUM_COL = "EXTREMUM_COL"
+        def EXTREMUM_COL2 = "EXTREMUM_COL2"
+        def EXTREMUM_VAL = "EXTREMUM_VAL"
+        def BASENAME = "DISTRIBUTION_REPARTITION"
+        def GEOMETRY_FIELD = "THE_GEOM"
+
+        debug "Executing equality and uniqueness indicators"
+
+        if (extremum.toUpperCase() == "GREATEST" || extremum.toUpperCase() == "LEAST") {
+            // The name of the outputTableName is constructed
+            def outputTableName = prefix prefixName, BASENAME
+
+            // Get all columns from the distribution table and remove the geometry column if it exists
+            def allColumns = datasource.getColumnNames(distribTableName)
+            if (allColumns.contains(GEOMETRY_FIELD)) {
+                allColumns -= GEOMETRY_FIELD
+            }
+            // Get the distribution columns and the number of columns
+            def distribColumns = allColumns.minus(inputId.toUpperCase())
+            def nbDistCol = distribColumns.size()
-        def idxExtrem = nbDistCol - 1
-        def idxExtrem_1 = nbDistCol - 2
-        if (extremum.toUpperCase() == "LEAST") {
-            idxExtrem = 0
-            idxExtrem_1 = 1
-        }
+            if (distribColumns.size() == 0) {
+                throw new IllegalArgumentException("No columns to compute the distribution from in table $distribTableName".toString())
+            }
-        def queryCoalesce = ""
+            def idxExtrem = nbDistCol - 1
+            def idxExtrem_1 = nbDistCol - 2
+            if (extremum.toUpperCase() == "LEAST") {
+                idxExtrem = 0
+                idxExtrem_1 = 1
+            }
-        // Create temporary tables
-        def outputTableMissingSomeObjects = postfix "output_table_missing_some_objects"
-        def distribTableNameNoNull = postfix "distrib_table_name_no_null"
+            def queryCoalesce = ""
-        // Delete rows having null values (and remove the geometry field if exists)
-        datasource """ DROP TABLE IF EXISTS $distribTableNameNoNull;
+            // Delete rows having null values (and remove the geometry field if it exists)
+            datasource """ DROP TABLE IF EXISTS $distribTableNameNoNull;
                     CREATE TABLE $distribTableNameNoNull AS SELECT ${allColumns.join(",")}
                     FROM $distribTableName WHERE ${distribColumns.join(" IS NOT NULL AND ")} IS NOT NULL""".toString()
-        if (distribIndicator.contains("equality") && !distribIndicator.contains("uniqueness")) {
-            def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
+            if (distribIndicator.contains("equality") && !distribIndicator.contains("uniqueness")) {
+                def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
                         $EQUALITY DOUBLE PRECISION, $EXTREMUM_COL VARCHAR)"""
-            // If the second extremum col should be conserved
-            if (keep2ndCol) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
-            }
-            // If the value of the extremum column should be conserved
-            if (keepColVal) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
-            }
-            datasource queryCreateTable.toString()
-            // Will insert values by batch of 100 in the table
-            datasource.withBatch(100) { stmt ->
-                datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
-                    def rowMap = row.toRowResult()
-                    def id_rsu = rowMap."$inputId"
-                    rowMap.remove(inputId.toUpperCase())
-                    def sortedMap = rowMap.sort { it.value }
-                    // We want to get rid of some of the values identified as -9999.99
-                    while (sortedMap.values().remove(-9999.99 as double));
-                    def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
+                // If the second extremum col should be conserved
+                if (keep2ndCol) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
+                }
+                // If the value of the extremum column should be conserved
+                if (keepColVal) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
+                }
+                datasource queryCreateTable.toString()
+                // Will insert values in batches of 100 into the table
+                datasource.withBatch(100) { stmt ->
+                    datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
+                        def rowMap = row.toRowResult()
+                        def id_rsu = rowMap."$inputId"
+                        rowMap.remove(inputId.toUpperCase())
+                        def sortedMap = rowMap.sort { it.value }
+                        // We want to get rid of some of the values identified as -9999.99
+                        while (sortedMap.values().remove(-9999.99 as double));
+                        def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
                                 VALUES ($id_rsu, ${getEquality(sortedMap, nbDistCol)}, '${sortedMap.keySet()[idxExtrem]}')"""
-                    // If the second extremum col should be conserved
-                    if (keep2ndCol) {
-                        queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
-                    }
-                    // If the value of the extremum column should be conserved
-                    if (keepColVal) {
-                        queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        // If the second extremum col should be conserved
+                        if (keep2ndCol) {
+                            queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
+                        }
+                        // If the value of the extremum column should be conserved
+                        if (keepColVal) {
+                            queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        }
+                        stmt.addBatch queryInsert.toString()
                     }
-                    stmt.addBatch queryInsert.toString()
                 }
-            }
-            queryCoalesce += """ COALESCE(a.$EQUALITY, -1) AS $EQUALITY,
+                queryCoalesce += """ COALESCE(a.$EQUALITY, -1) AS $EQUALITY,
                         COALESCE(a.$EXTREMUM_COL, 'unknown') AS $EXTREMUM_COL,"""
-        } else if (!distribIndicator.contains("equality") && distribIndicator.contains("uniqueness")) {
-            def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
+            } else if (!distribIndicator.contains("equality") && distribIndicator.contains("uniqueness")) {
+                def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
                         $UNIQUENESS DOUBLE PRECISION, $EXTREMUM_COL VARCHAR)"""
-            // If the second extremum col should be conserved
-            if (keep2ndCol) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
-            }
-            // If the value of the extremum column should be conserved
-            if (keepColVal) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
-            }
+                // If the second extremum col should be conserved
+                if (keep2ndCol) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
+                }
+                // If the value of the extremum column should be conserved
+                if (keepColVal) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
+                }
-            datasource queryCreateTable.toString()
-            // Will insert values by batch of 100 in the table
-            datasource.withBatch(100) { stmt ->
-                datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
-                    def rowMap = row.toRowResult()
-                    def id_rsu = rowMap."$inputId"
-                    rowMap.remove(inputId.toUpperCase())
-                    def sortedMap = rowMap.sort { it.value }
-                    // We want to get rid of some of the values identified as -9999.99
-                    while (sortedMap.values().remove(-9999.99 as double));
-                    def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
+                datasource queryCreateTable.toString()
+                // Will insert values in batches of 100 into the table
+                datasource.withBatch(100) { stmt ->
+                    datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
+                        def rowMap = row.toRowResult()
+                        def id_rsu = rowMap."$inputId"
+                        rowMap.remove(inputId.toUpperCase())
+                        def sortedMap = rowMap.sort { it.value }
+                        // We want to get rid of some of the values identified as -9999.99
+                        while (sortedMap.values().remove(-9999.99 as double));
+                        def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
                                 VALUES ($id_rsu, ${getUniqueness(sortedMap, idxExtrem, idxExtrem_1)}, '${sortedMap.keySet()[idxExtrem]}')"""
-                    // If the second extremum col should be conserved
-                    if (keep2ndCol) {
-                        queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
-                    }
-                    // If the value of the extremum column should be conserved
-                    if (keepColVal) {
-                        queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        // If the second extremum col should be conserved
+                        if (keep2ndCol) {
+                            queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
+                        }
+                        // If the value of the extremum column should be conserved
+                        if (keepColVal) {
+                            queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        }
+                        stmt.addBatch queryInsert.toString()
                     }
-                    stmt.addBatch queryInsert.toString()
                 }
-            }
-            queryCoalesce += """ COALESCE(a.$UNIQUENESS, -1) AS $UNIQUENESS,
+                queryCoalesce += """ COALESCE(a.$UNIQUENESS, -1) AS $UNIQUENESS,
                         COALESCE(a.$EXTREMUM_COL, 'unknown') AS $EXTREMUM_COL,"""
-        } else if (distribIndicator.contains("equality") && distribIndicator.contains("uniqueness")) {
-            def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
+            } else if (distribIndicator.contains("equality") && distribIndicator.contains("uniqueness")) {
+                def queryCreateTable = """CREATE TABLE $outputTableMissingSomeObjects($inputId integer,
                         $EQUALITY DOUBLE PRECISION, $UNIQUENESS DOUBLE PRECISION, $EXTREMUM_COL VARCHAR)"""
-            // If the second extremum col should be conserved
-            if (keep2ndCol) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
-            }
-            // If the value of the extremum column should be conserved
-            if (keepColVal) {
-                queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
-            }
+                // If the second extremum col should be conserved
+                if (keep2ndCol) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_COL2 VARCHAR)"
+                }
+                // If the value of the extremum column should be conserved
+                if (keepColVal) {
+                    queryCreateTable = "${queryCreateTable[0..-2]}, $EXTREMUM_VAL DOUBLE PRECISION)"
+                }
-            datasource queryCreateTable.toString()
+                datasource queryCreateTable.toString()
-            // Will insert values by batch of 100 in the table
-            datasource.withBatch(100) { stmt ->
-                datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
-                    def rowMap = row.toRowResult()
-                    def id_rsu = rowMap."$inputId"
-                    def sortedMap = rowMap.findAll { it.key.toLowerCase() != inputId && (it.value != -9999.99) }.sort { it.value }
-                    def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
+                // Will insert values in batches of 100 into the table
+                datasource.withBatch(100) { stmt ->
+                    datasource.eachRow("SELECT * FROM $distribTableNameNoNull".toString()) { row ->
+                        def rowMap = row.toRowResult()
+                        def id_rsu = rowMap."$inputId"
+                        def sortedMap = rowMap.findAll { it.key.toLowerCase() != inputId && (it.value != -9999.99) }.sort { it.value }
+                        def queryInsert = """INSERT INTO $outputTableMissingSomeObjects
                                 VALUES ($id_rsu, ${getEquality(sortedMap, nbDistCol)}, ${getUniqueness(sortedMap, idxExtrem, idxExtrem_1)}, '${sortedMap.keySet()[idxExtrem]}')"""
-                    // If the second extremum col should be conserved
-                    if (keep2ndCol) {
-                        queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
-                    }
-                    // If the value of the extremum column should be conserved
-                    if (keepColVal) {
-                        queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        // If the second extremum col should be conserved
+                        if (keep2ndCol) {
+                            queryInsert = "${queryInsert[0..-2]}, '${sortedMap.keySet()[idxExtrem_1]}')"
+                        }
+                        // If the value of the extremum column should be conserved
+                        if (keepColVal) {
+                            queryInsert = "${queryInsert[0..-2]}, ${sortedMap.values()[idxExtrem]})"
+                        }
+                        stmt.addBatch queryInsert.toString()
                     }
-                    stmt.addBatch queryInsert.toString()
                 }
-            }
-            queryCoalesce += """ COALESCE(a.$EQUALITY, -1) AS $EQUALITY,
+                queryCoalesce += """ COALESCE(a.$EQUALITY, -1) AS $EQUALITY,
                         COALESCE(a.$UNIQUENESS, -1) AS $UNIQUENESS,
                         COALESCE(a.$EXTREMUM_COL, 'unknown') AS $EXTREMUM_COL,"""
-        }
-        // If the second extremum col should be conserved
-        if (keep2ndCol) {
-            queryCoalesce += "COALESCE(a.$EXTREMUM_COL2, 'unknown') AS $EXTREMUM_COL2, "
-        }
-        if (keepColVal) {
-            queryCoalesce += "COALESCE(a.$EXTREMUM_VAL, -1) AS $EXTREMUM_VAL, "
-        }
-        // Set to default value (for example if we characterize the building direction in a RSU having no building...)
-        datasource.createIndex(outputTableMissingSomeObjects,inputId)
-        datasource.createIndex(initialTable,inputId)
-        datasource """DROP TABLE IF EXISTS $outputTableName;
+            }
+            // If the second extremum col should be conserved
+            if (keep2ndCol) {
+                queryCoalesce += "COALESCE(a.$EXTREMUM_COL2, 'unknown') AS $EXTREMUM_COL2, "
+            }
+            if (keepColVal) {
+                queryCoalesce += "COALESCE(a.$EXTREMUM_VAL, -1) AS $EXTREMUM_VAL, "
+            }
+            // Set to default value (for example if we characterize the building direction in a RSU having no building...)
+            datasource.createIndex(outputTableMissingSomeObjects, inputId)
+            datasource.createIndex(initialTable, inputId)
+            datasource.execute("""DROP TABLE IF EXISTS $outputTableName;
                     CREATE TABLE $outputTableName AS SELECT $queryCoalesce b.$inputId
                     FROM $outputTableMissingSomeObjects a RIGHT JOIN $initialTable b ON a.$inputId = b.$inputId;
-                """.toString()
-
-        datasource.execute """DROP TABLE IF EXISTS $outputTableMissingSomeObjects, $distribTableNameNoNull""".toString()
+                """)
+            datasource.execute("""DROP TABLE IF EXISTS $outputTableMissingSomeObjects, $distribTableNameNoNull""")
-        return outputTableName
-    } else {
-        error """The 'extremum' input parameter should be equal to "GREATEST" or "LEAST"""
+            return outputTableName
+        } else {
+            throw new SQLException("The 'extremum' input parameter should be equal to 'GREATEST' or 'LEAST'")
+        }
+    } catch (SQLException e) {
+        throw new SQLException("Cannot compute the distribution characterization", e)
+    } finally {
+        datasource.execute("""DROP TABLE IF EXISTS $outputTableMissingSomeObjects, $distribTableNameNoNull""")
     }
 }
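// Illustration, not part of this patch: the UNIQUENESS_VALUE written above compares the two
// extremum entries of each sorted row, consistent with the commented-out SQL earlier
// (max_surf / (second_max + max_surf)). A plain-Groovy sketch of that idea (hypothetical
// helper, shown for the GREATEST case only):
//
//     static double uniquenessOf(List<Double> sortedValues) {
//         double largest = sortedValues[-1]
//         double second = sortedValues[-2]
//         // tends to 1 when one class dominates, to 0.5 when the two largest tie
//         return largest / (largest + second)
//     }
//
//     assert uniquenessOf([10d, 30d, 60d]) == 60d / 90d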

@@ -676,81 +696,84 @@ static Double getEquality(def myMap, def nbDistCol) {
  */
 String typeProportion(JdbcDataSource datasource, String inputTableName, String idField, String typeFieldName,
                       String inputUpperTableName, Map areaTypeAndComposition, Map floorAreaTypeAndComposition,
-                      String prefixName) {
-    def GEOMETRIC_FIELD_LOW = "the_geom"
-    def BASE_NAME = "type_proportion"
-    def NB_LEV = "nb_lev"
-
-    debug "Executing typeProportion"
-
-    if (areaTypeAndComposition || floorAreaTypeAndComposition) {
-        // The name of the outputTableName is constructed
-        def outputTableName = prefix prefixName, BASE_NAME
-
-        // To avoid overwriting the output files of this step, a unique identifier is created
-        // Temporary table names
-        def caseWhenTab = postfix "case_when_tab"
-        def outputTableWithNull = postfix "output_table_with_null"
-
-        // Define the pieces of query according to each type of the input table
-        def queryCalc = ""
-        def queryCaseWh = ""
-        // For the area fractions
-        if (areaTypeAndComposition) {
-            queryCaseWh += " ST_AREA($GEOMETRIC_FIELD_LOW) AS AREA, "
-            areaTypeAndComposition.forEach { type, compo ->
-                queryCaseWh += "CASE WHEN $typeFieldName='${compo.join("' OR $typeFieldName='")}' THEN ST_AREA($GEOMETRIC_FIELD_LOW) END AS AREA_${type},"
-                queryCalc += "CASE WHEN SUM(AREA)=0 THEN 0 ELSE SUM(AREA_${type})/SUM(AREA) END AS AREA_FRACTION_${type}, "
+                      String prefixName) throws Exception {
+    // To avoid overwriting the output files of this step, a unique identifier is created
+    // Temporary table names
+    def caseWhenTab = postfix "case_when_tab"
+    def outputTableWithNull = postfix "output_table_with_null"
+    try {
+        def GEOMETRIC_FIELD_LOW = "the_geom"
+        def BASE_NAME = "type_proportion"
+        def NB_LEV = "nb_lev"
+
+        debug "Executing typeProportion"
+
+        if (areaTypeAndComposition || floorAreaTypeAndComposition) {
+            // The name of the outputTableName is constructed
+            def outputTableName = prefix prefixName, BASE_NAME
+
+            // Define the pieces of query according to each type of the input table
+            def queryCalc = ""
+            def queryCaseWh = ""
+            // For the area fractions
+            if (areaTypeAndComposition) {
+                queryCaseWh += " ST_AREA($GEOMETRIC_FIELD_LOW) AS AREA, "
+                areaTypeAndComposition.forEach { type, compo ->
+                    queryCaseWh += "CASE WHEN $typeFieldName='${compo.join("' OR $typeFieldName='")}' THEN ST_AREA($GEOMETRIC_FIELD_LOW) END AS AREA_${type},"
+                    queryCalc += "CASE WHEN SUM(AREA)=0 THEN 0 ELSE SUM(AREA_${type})/SUM(AREA) END AS AREA_FRACTION_${type}, "
+                }
             }
-        }
-        // For the floor area fractions in case the objects are buildings
-        if (floorAreaTypeAndComposition) {
-            queryCaseWh += " ST_AREA($GEOMETRIC_FIELD_LOW)*$NB_LEV AS FLOOR_AREA, "
-            floorAreaTypeAndComposition.forEach { type, compo ->
-                queryCaseWh += "CASE WHEN $typeFieldName='${compo.join("' OR $typeFieldName='")}' THEN ST_AREA($GEOMETRIC_FIELD_LOW)*$NB_LEV END AS FLOOR_AREA_${type},"
-                queryCalc += "CASE WHEN SUM(FLOOR_AREA) =0 THEN 0 ELSE SUM(FLOOR_AREA_${type})/SUM(FLOOR_AREA) END AS FLOOR_AREA_FRACTION_${type}, "
+            // For the floor area fractions in case the objects are buildings
+            if (floorAreaTypeAndComposition) {
+                queryCaseWh += " ST_AREA($GEOMETRIC_FIELD_LOW)*$NB_LEV AS FLOOR_AREA, "
+                floorAreaTypeAndComposition.forEach { type, compo ->
+                    queryCaseWh += "CASE WHEN $typeFieldName='${compo.join("' OR $typeFieldName='")}' THEN ST_AREA($GEOMETRIC_FIELD_LOW)*$NB_LEV END AS FLOOR_AREA_${type},"
+                    queryCalc += "CASE WHEN SUM(FLOOR_AREA) =0 THEN 0 ELSE SUM(FLOOR_AREA_${type})/SUM(FLOOR_AREA) END AS FLOOR_AREA_FRACTION_${type}, "
+                }
             }
-        }
-        // Calculates the surface of each object depending on its type
-        datasource.execute """DROP TABLE IF EXISTS $caseWhenTab;
+            // Calculates the surface of each object depending on its type
+            datasource.execute """DROP TABLE IF EXISTS $caseWhenTab;
                     CREATE TABLE $caseWhenTab AS SELECT $idField, ${queryCaseWh[0..-2]} FROM $inputTableName""".toString()
-        datasource.createIndex(caseWhenTab,idField)
+            datasource.createIndex(caseWhenTab, idField)
-        // Calculate the proportion of each type
-        datasource.execute """DROP TABLE IF EXISTS $outputTableWithNull;
+            // Calculate the proportion of each type
+            datasource.execute """DROP TABLE IF EXISTS $outputTableWithNull;
                     CREATE TABLE $outputTableWithNull AS SELECT $idField, ${queryCalc[0..-2]} FROM $caseWhenTab GROUP BY $idField""".toString()
-        // Set 0 as default value (for example if we characterize the building type in a RSU having no building...)
-        def allFinalCol = datasource."$outputTableWithNull".getColumns()
-        allFinalCol = allFinalCol.minus([idField.toUpperCase()])
-        datasource.createIndex(inputUpperTableName,idField)
-        datasource.createIndex(outputTableWithNull,idField)
-        def pieceOfQuery = ""
-        allFinalCol.each { col ->
-            pieceOfQuery += "COALESCE(a.$col, 0) AS $col, "
-        }
-        datasource """DROP TABLE IF EXISTS $outputTableName;
+            // Set 0 as default value (for example if we characterize the building type in a RSU having no building...)
+            def allFinalCol = datasource.getColumnNames(outputTableWithNull)
+            allFinalCol = allFinalCol.minus([idField.toUpperCase()])
+            datasource.createIndex(inputUpperTableName, idField)
+            datasource.createIndex(outputTableWithNull, idField)
+            def pieceOfQuery = ""
+            allFinalCol.each { col ->
+                pieceOfQuery += "COALESCE(a.$col, 0) AS $col, "
+            }
+            datasource """DROP TABLE IF EXISTS $outputTableName;
                     CREATE TABLE $outputTableName AS SELECT ${pieceOfQuery[0..-2]} b.$idField
                     FROM $outputTableWithNull a RIGHT JOIN $inputUpperTableName b ON a.$idField = b.$idField;
                 """.toString()
-
+            datasource.execute """DROP TABLE IF EXISTS $outputTableWithNull, $caseWhenTab""".toString()
+            return outputTableName
+        } else {
+            throw new SQLException("'floorAreaTypeAndComposition' or 'areaTypeAndComposition' arguments should be a Map " +
+                    "with at least one key-value pair")
+        }
+    } catch (SQLException e) {
+        throw new SQLException("Cannot compute the type proportion", e)
+    } finally {
         datasource.execute """DROP TABLE IF EXISTS $outputTableWithNull, $caseWhenTab""".toString()
-
-        return outputTableName
-    } else {
-        error "'floorAreaTypeAndComposition' or 'areaTypeAndComposition' arguments should be a Map " +
-                "with at least one combination key-value"
     }
 }
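// Illustration, not part of this patch: for a map entry such as
// areaTypeAndComposition = [residential: ["house", "detached"]], the two fragments built
// above expand (hypothetical column names) to:
//
//     CASE WHEN type='house' OR type='detached'
//          THEN ST_AREA(the_geom) END AS AREA_residential,
//     ...
//     CASE WHEN SUM(AREA)=0 THEN 0
//          ELSE SUM(AREA_residential)/SUM(AREA) END AS AREA_FRACTION_residential
//
// i.e. one conditional area column per type, then a per-idField fraction of the total area.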
"RSU") || (targetedScale.toUpperCase() == "BUILDING")) { - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - // Some tables will be needed to call only some specific columns - def listblockFinalRename = [] + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME - def blockIndicFinalScale = blockTable - def idbuildForMerge - def idBlockForMerge + // Some tables will be needed to call only some specific columns + def listblockFinalRename = [] - // Add operations to compute at RSU scale to each indicator of the building scale - def inputVarAndOperationsBuild = [:] - def buildIndicatorsColumns = datasource.getTable(buildingTable).getColumns() - for (col in buildIndicatorsColumns) { - if (!BUILD_COL_TO_REMOVE.contains(col)) { - inputVarAndOperationsBuild[col] = operationsToApply - } - } - // Calculate building indicators averaged at RSU scale - def buildIndicRsuScale = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, buildingTable, - rsuTable, "id_rsu", "id_build", inputVarAndOperationsBuild, - "bu") - - // To avoid crashes of the join due to column duplicate, need to prefix some names - def buildRsuCol2Rename = datasource.getColumnNames(buildIndicRsuScale) - def listBuildRsuRename = [] - for (col in buildRsuCol2Rename) { - if (col != "ID_BUILD" && col != "ID_BLOCK" && col != "ID_RSU" && col != "THE_GEOM") { - listBuildRsuRename.add("a.$col AS build_$col") - } - } + def blockIndicFinalScale = blockTable + def idbuildForMerge + def idBlockForMerge - // Special processes if the scale of analysis is RSU - if (targetedScale.toUpperCase() == "RSU") { - // Calculate building average and variance at RSU scale from each indicator of the block scale - def inputVarAndOperationsBlock = [:] - def blockIndicators = datasource.getTable(blockTable).getColumns() - for (col in blockIndicators) { - if (!BLOCK_COL_TO_REMOVE.contains(col)) { - inputVarAndOperationsBlock[col] = operationsToApply + // Add operations to compute at RSU scale to each indicator of the building scale + def inputVarAndOperationsBuild = [:] + def buildIndicatorsColumns = datasource.getColumnNames(buildingTable) + for (col in buildIndicatorsColumns) { + if (!BUILD_COL_TO_REMOVE.contains(col)) { + inputVarAndOperationsBuild[col] = operationsToApply } } - // Calculate block indicators averaged at RSU scale - blockIndicFinalScale = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, - blockTable, rsuTable, "id_rsu", "id_block", - inputVarAndOperationsBlock, "bl") + // Calculate building indicators averaged at RSU scale + buildIndicRsuScale = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, buildingTable, + rsuTable, "id_rsu", "id_build", inputVarAndOperationsBuild, + "bu") // To avoid crashes of the join due to column duplicate, need to prefix some names - def blockRsuCol2Rename = datasource.getColumnNames(blockIndicFinalScale) - for (col in blockRsuCol2Rename) { - if (col != "ID_BLOCK" && col != "ID_RSU" && col != "THE_GEOM") { - listblockFinalRename.add("b.$col AS block_$col") + def buildRsuCol2Rename = datasource.getColumnNames(buildIndicRsuScale) + def listBuildRsuRename = [] + for (col in buildRsuCol2Rename) { + if (col != "ID_BUILD" && col != "ID_BLOCK" && col != "ID_RSU" && col != "THE_GEOM") { + listBuildRsuRename.add("a.$col AS build_$col") } } - // Define generic name whatever be the 'targetedScale' - finalScaleTableName = rsuTable - // Useful for 
merge between buildings and rsu tables - idbuildForMerge = "id_rsu" - idBlockForMerge = "id_rsu" - // Useful if the classif is a regression - idName = "id_rsu" - } - - // Special processes if the scale of analysis is building - else if (targetedScale.toUpperCase() == "BUILDING") { - // Need to join RSU and building tables - def listRsuCol = datasource.getTable(rsuTable).getColumns() - def listRsuRename = [] - for (col in listRsuCol) { - if (col != "ID_RSU" && col != "THE_GEOM") { - listRsuRename.add("a.$col AS rsu_$col") + // Special processes if the scale of analysis is RSU + if (targetedScale.toUpperCase() == "RSU") { + // Calculate building average and variance at RSU scale from each indicator of the block scale + def inputVarAndOperationsBlock = [:] + def blockIndicators = datasource.getColumnNames(blockTable) + for (col in blockIndicators) { + if (!BLOCK_COL_TO_REMOVE.contains(col)) { + inputVarAndOperationsBlock[col] = operationsToApply + } } - } - //def listBuildCol = datasource.getTable(buildingTable).getColumns() - def listBuildRename = [] - for (col in buildIndicatorsColumns) { - if (col != "ID_RSU" && col != "ID_BLOCK" && col != "ID_BUILD" && col != "THE_GEOM") { - listBuildRename.add("b.$col AS build_$col") - } else { - listBuildRename.add("b.$col") + // Calculate block indicators averaged at RSU scale + blockIndicFinalScale = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, + blockTable, rsuTable, "id_rsu", "id_block", + inputVarAndOperationsBlock, "bl") + + // To avoid crashes of the join due to column duplicate, need to prefix some names + def blockRsuCol2Rename = datasource.getColumnNames(blockIndicFinalScale) + for (col in blockRsuCol2Rename) { + if (col != "ID_BLOCK" && col != "ID_RSU" && col != "THE_GEOM") { + listblockFinalRename.add("b.$col AS block_$col") + } } + + // Define generic name whatever be the 'targetedScale' + finalScaleTableName = rsuTable + // Useful for merge between buildings and rsu tables + idbuildForMerge = "id_rsu" + idBlockForMerge = "id_rsu" + // Useful if the classif is a regression + idName = "id_rsu" } - // Merge scales (building and Rsu indicators) - datasource.createIndex(rsuTable,"id_rsu") - datasource.createIndex(buildingTable,"id_rsu") - datasource.execute """ DROP TABLE IF EXISTS $finalScaleTableName; + // Special processes if the scale of analysis is building + else if (targetedScale.toUpperCase() == "BUILDING") { + // Need to join RSU and building tables + def listRsuCol = datasource.getColumnNames(rsuTable) + def listRsuRename = [] + for (col in listRsuCol) { + if (col != "ID_RSU" && col != "THE_GEOM") { + listRsuRename.add("a.$col AS rsu_$col") + } + } + def listBuildRename = [] + for (col in buildIndicatorsColumns) { + if (col != "ID_RSU" && col != "ID_BLOCK" && col != "ID_BUILD" && col != "THE_GEOM") { + listBuildRename.add("b.$col AS build_$col") + } else { + listBuildRename.add("b.$col") + } + } + + // Merge scales (building and Rsu indicators) + datasource.createIndex(rsuTable, "id_rsu") + datasource.createIndex(buildingTable, "id_rsu") + datasource.execute """ DROP TABLE IF EXISTS $finalScaleTableName; CREATE TABLE $finalScaleTableName AS SELECT ${listRsuRename.join(', ')}, ${listBuildRename.join(', ')} FROM $rsuTable a LEFT JOIN $buildingTable b ON a.id_rsu = b.id_rsu;""".toString() - // To avoid crashes of the join due to column duplicate, need to prefix some names - def blockCol2Rename = datasource.getTable(blockTable).getColumns() - for (col in blockCol2Rename) { - if (col != "ID_BLOCK" && col != 
"ID_RSU" && col != "THE_GEOM") { - listblockFinalRename.add("b.$col AS block_$col") + // To avoid crashes of the join due to column duplicate, need to prefix some names + def blockCol2Rename = datasource.getColumnNames(blockTable) + for (col in blockCol2Rename) { + if (col != "ID_BLOCK" && col != "ID_RSU" && col != "THE_GEOM") { + listblockFinalRename.add("b.$col AS block_$col") + } } + // Useful for merge between gathered building and rsu indicators and building indicators averaged at RSU scale + idbuildForMerge = "id_rsu" + idBlockForMerge = "id_block" + // Useful if the classif is a regression + idName = "id_build" } - // Useful for merge between gathered building and rsu indicators and building indicators averaged at RSU scale - idbuildForMerge = "id_rsu" - idBlockForMerge = "id_block" - // Useful if the classif is a regression - idName = "id_build" - } - // Gather all indicators (coming from three different scales) in a single table (the 'targetTableScale' scale) - // Note that in order to avoid crashes of the join due to column duplicate, indicators have been prefixed - datasource.createIndex(buildIndicRsuScale, "id_rsu") - datasource.createIndex(finalScaleTableName,"id_rsu") - def queryRemoveNull = "" - if (removeNull) { - queryRemoveNull += " WHERE b.$idbuildForMerge IS NOT NULL" - } - datasource.execute """ DROP TABLE IF EXISTS $scale1ScaleFin; + // Gather all indicators (coming from three different scales) in a single table (the 'targetTableScale' scale) + // Note that in order to avoid crashes of the join due to column duplicate, indicators have been prefixed + datasource.createIndex(buildIndicRsuScale, "id_rsu") + datasource.createIndex(finalScaleTableName, "id_rsu") + def queryRemoveNull = "" + if (removeNull) { + queryRemoveNull += " WHERE b.$idbuildForMerge IS NOT NULL" + } + datasource.execute """ DROP TABLE IF EXISTS $scale1ScaleFin; CREATE TABLE $scale1ScaleFin AS SELECT ${listBuildRsuRename.join(', ')}, b.* FROM $buildIndicRsuScale a RIGHT JOIN $finalScaleTableName b ON a.$idbuildForMerge = b.$idbuildForMerge $queryRemoveNull;""".toString() - datasource.createIndex(blockIndicFinalScale, idBlockForMerge) - datasource.createIndex(scale1ScaleFin,idBlockForMerge) - datasource.execute """ DROP TABLE IF EXISTS $outputTableName; + datasource.createIndex(blockIndicFinalScale, idBlockForMerge) + datasource.createIndex(scale1ScaleFin, idBlockForMerge) + datasource.execute """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.*, ${listblockFinalRename.join(', ')} FROM $scale1ScaleFin a LEFT JOIN $blockIndicFinalScale b ON a.$idBlockForMerge = b.$idBlockForMerge;""".toString() - + datasource.dropTable(finalScaleTableName, scale1ScaleFin, buildIndicRsuScale) + return outputTableName + } else { + throw new SQLException(""" The 'targetedScale' parameter should either be 'RSU' or 'BUILDING' """) + } + } catch (SQLException e) { + throw new SQLException("Cannot compute the join the tables at gather scale", e) + } finally { datasource.dropTable(finalScaleTableName, scale1ScaleFin, buildIndicRsuScale) - return outputTableName - } else { - error """ The 'targetedScale' parameter should either be 'RSU' or 'BUILDING' """ } } @@ -947,23 +976,24 @@ String gatherScales(JdbcDataSource datasource, String buildingTable, String bloc */ String upperScaleAreaStatistics(JdbcDataSource datasource, String upperTableName, String upperColumnId, String lowerTableName, - String lowerColumnName, String lowerColumnAlias, boolean keepGeometry = true, String prefixName) { - def 

@@ -947,23 +976,24 @@ String gatherScales(JdbcDataSource datasource, String buildingTable, String bloc
  */
 String upperScaleAreaStatistics(JdbcDataSource datasource, String upperTableName, String upperColumnId, String lowerTableName,
-                                String lowerColumnName, String lowerColumnAlias, boolean keepGeometry = true, String prefixName) {
-    def upperGeometryColumn = datasource.getGeometryColumn(upperTableName)
-    if (!upperGeometryColumn) {
-        error "The upper scale table must contain a geometry column"
-        return
-    }
-    def lowerGeometryColumn = datasource.getGeometryColumn(lowerTableName)
-    if (!lowerGeometryColumn) {
-        error "The lower scale table must contain a geometry column"
-        return
-    }
-    datasource.createSpatialIndex(upperTableName,upperGeometryColumn )
-    datasource.createIndex(upperTableName, upperColumnId)
-    datasource.createSpatialIndex(lowerTableName, lowerGeometryColumn)
+                                String lowerColumnName, String lowerColumnAlias, boolean keepGeometry = true, String prefixName) throws Exception {
+    def spatialJoinTable = postfix("upper_table_join")
+    def pivotTable = postfix("pivotAreaTable")
+    try {
+        def upperGeometryColumn = datasource.getGeometryColumn(upperTableName)
+        if (!upperGeometryColumn) {
+            throw new IllegalArgumentException("The upper scale table must contain a geometry column")
+        }
+        def lowerGeometryColumn = datasource.getGeometryColumn(lowerTableName)
+        if (!lowerGeometryColumn) {
+            throw new IllegalArgumentException("The lower scale table must contain a geometry column")
+        }
+        datasource.createSpatialIndex(upperTableName, upperGeometryColumn)
+        datasource.createIndex(upperTableName, upperColumnId)
+        datasource.createSpatialIndex(lowerTableName, lowerGeometryColumn)
-    def spatialJoinTable = "upper_table_join"
-    def spatialJoin = """
+
+        def spatialJoin = """
             DROP TABLE IF EXISTS $spatialJoinTable;
             CREATE TABLE $spatialJoinTable AS
             SELECT b.$upperColumnId, a.$lowerColumnName,
@@ -973,94 +1003,99 @@ String upperScaleAreaStatistics(JdbcDataSource datasource, String upperTableName
             WHERE a.$lowerGeometryColumn && b.$upperGeometryColumn
            AND ST_INTERSECTS(a.$lowerGeometryColumn, b.$upperGeometryColumn);
         """
-    datasource.execute(spatialJoin.toString())
-    datasource """CREATE INDEX ON $spatialJoinTable ($lowerColumnName);
+        datasource.execute(spatialJoin.toString())
+        datasource """CREATE INDEX ON $spatialJoinTable ($lowerColumnName);
                   CREATE INDEX ON $spatialJoinTable ($upperColumnId)""".toString()
-    // Creation of a list which contains all indicators of distinct values
-    def qIndicator = """
+        // Creation of a list of all distinct values of the indicator
+        def qIndicator = """
            SELECT DISTINCT $lowerColumnName AS val
            FROM $spatialJoinTable
        """
-    def listValues = datasource.rows(qIndicator.toString())
+        def listValues = datasource.rows(qIndicator.toString())
-    def isString = datasource.getTable(spatialJoinTable).getColumnType(lowerColumnName) == "VARCHAR"
+        def isString = datasource.getTable(spatialJoinTable).getColumnType(lowerColumnName) == "VARCHAR"
-    // Creation of the pivot table which contains for each upper geometry
-    def pivotTable = "pivotAreaTable"
-    def query = """
+        // Creation of the pivot table which contains for each upper geometry
+        def query = """
            DROP TABLE IF EXISTS $pivotTable;
            CREATE TABLE $pivotTable AS
            SELECT $upperColumnId
        """
-    listValues.each {
-        def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
-        query += """
+        listValues.each {
+            def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
+            query += """
                , SUM($aliasColumn) AS $aliasColumn
            """
-    }
-    query += " FROM (SELECT $upperColumnId"
-    listValues.each {
-        def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
-        if (it.val) {
-            if (isString) {
-                query += """
+        }
+        query += " FROM (SELECT $upperColumnId"
+        listValues.each {
+            def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
+            if (it.val) {
+                if (isString) {
+                    query += """
                    , CASE WHEN $lowerColumnName='${it.val}' THEN SUM(area) ELSE 0 END AS $aliasColumn
                """
-            } else {
-                query += """
+                } else {
+                    query += """
                    , CASE WHEN $lowerColumnName='${it.val}' THEN SUM(area) ELSE 0 END AS $aliasColumn
                """
-            }
-        } else {
-            query += """
+                }
+            } else {
+                query += """
                    , CASE WHEN $lowerColumnName is null THEN SUM(area) ELSE 0 END AS $aliasColumn
                """
+            }
         }
-    }
-    query += """
+        query += """
            FROM $spatialJoinTable
            GROUP BY $upperColumnId, $lowerColumnName)
            GROUP BY $upperColumnId;
        """
-    datasource.execute(query.toString())
-    //Build indexes
-    datasource "CREATE INDEX ON $pivotTable ($upperColumnId)".toString()
-
-    // Creation of a table which is built from
-    // the union of the upperTable and pivot tables based on the same cell 'id'
-    def outputTableName = prefix prefixName, "upper_scale_statistics_area"
-    def qjoin = """
+        datasource.execute(query.toString())
+        //Build indexes
+        datasource "CREATE INDEX ON $pivotTable ($upperColumnId)".toString()
+
+        // Creation of a table which is built from
+        // the union of the upperTable and pivot tables based on the same cell 'id'
+        def outputTableName = prefix prefixName, "upper_scale_statistics_area"
+        def qjoin = """
            DROP TABLE IF EXISTS $outputTableName;
            CREATE TABLE $outputTableName AS
            SELECT b.$upperColumnId
        """
-    if (keepGeometry) {
-        qjoin += ", b.$upperGeometryColumn"
-    }
-    listValues.each {
-        def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
-        qjoin += """
+        if (keepGeometry) {
+            qjoin += ", b.$upperGeometryColumn"
+        }
+        listValues.each {
+            def aliasColumn = "${lowerColumnAlias}_${it.val.toString().replace('.', '_')}"
+            qjoin += """
                , CASE WHEN $aliasColumn IS NULL THEN NULL ELSE $aliasColumn / ST_AREA(b.$upperGeometryColumn) END AS $aliasColumn
            """
-    }
-    qjoin += """
+        }
+        qjoin += """
            FROM $upperTableName b LEFT JOIN $pivotTable a ON (a.$upperColumnId = b.$upperColumnId);
        """
-    datasource.execute(qjoin.toString())
-    // Drop intermediate tables created during process
-    datasource.execute("DROP TABLE IF EXISTS $spatialJoinTable, $pivotTable;".toString())
-    debug "The zonal area table have been created"
-    return outputTableName
+        datasource.execute(qjoin.toString())
+        // Drop intermediate tables created during process
+        datasource.execute("DROP TABLE IF EXISTS $spatialJoinTable, $pivotTable;".toString())
+        debug "The zonal area table has been created"
+        return outputTableName
+    } catch (SQLException e) {
+        throw new SQLException("Cannot compute the aggregation to an upper scale from a lower scale", e)
+    } finally {
+        // Drop intermediate tables created during process
+        datasource.execute("DROP TABLE IF EXISTS $spatialJoinTable, $pivotTable;".toString())
+    }
 }
\ No newline at end of file
diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy
index 8d2757f3fd..4745767587 100644
--- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy
+++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy
@@ -33,6 +33,6 @@ import org.orbisgis.data.H2GIS
  * @param filePath path for the file
  * @return
  */
-static String save(Geometry geometry, H2GIS h2GIS, String filePath) {
+static String save(Geometry geometry, H2GIS h2GIS, String filePath) throws Exception{
     return h2GIS.save("(SELECT ST_GEOMFROMTEXT('${geometry}',${geometry.getSRID()}) as the_geom, CAST(1 as integer) as id)", filePath, true)
 }
diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy
index 1d25f68610..39c3d28a23 100644
--- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy
+++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy
@@ -25,6 +25,8 @@ import org.locationtech.jts.operation.distance.IndexedFacetDistance
 import org.orbisgis.data.jdbc.JdbcDataSource
 import org.orbisgis.geoclimate.Geoindicators
 
+import java.sql.SQLException
+
 @BaseScript Geoindicators geoindicators
 
@@ -38,37 +40,41 @@ import org.orbisgis.geoclimate.Geoindicators
  *
  * @author Erwan Bocher, CNRS
  */
-String gridPopulation(JdbcDataSource datasource, String gridTable, String populationTable, List populationColumns = []) {
-    def BASE_NAME = "grid_with_population"
-    def ID_RSU = "id_grid"
-    def ID_POP = "id_pop"
+String gridPopulation(JdbcDataSource datasource, String gridTable, String populationTable, List populationColumns = []) throws Exception {
+    //temporary tables
+    def gridTable_pop_sum = postfix "grid_pop_sum"
+    def gridTable_area_sum = postfix "grid_area_sum"
+    def gridTable_pop = postfix gridTable
+    try {
+        def BASE_NAME = "grid_with_population"
+        def ID_RSU = "id_grid"
+        def ID_POP = "id_pop"
-    debug "Computing grid population"
+        debug "Computing grid population"
-    // The name of the outputTableName is constructed
-    def outputTableName = postfix BASE_NAME
+        // The name of the outputTableName is constructed
+        def outputTableName = postfix BASE_NAME
-    //Indexing table
-    datasource.createSpatialIndex(gridTable, "the_geom")
-    datasource.createSpatialIndex(populationTable, "the_geom")
-    def popColumns = []
-    def sum_popColumns = []
-    if (populationColumns) {
-        datasource."$populationTable".getColumns().each { col ->
-            if (!["the_geom", "id_pop"].contains(col.toLowerCase()
-            ) && populationColumns.contains(col.toLowerCase())) {
-                popColumns << "b.$col"
-                sum_popColumns << "sum((a.area_rsu * $col)/b.sum_area_rsu) as $col"
+        //Indexing table
+        datasource.createSpatialIndex(gridTable, "the_geom")
+        datasource.createSpatialIndex(populationTable, "the_geom")
+        def popColumns = []
+        def sum_popColumns = []
+        if (populationColumns) {
+            datasource.getColumnNames(populationTable).each { col ->
+                if (!["the_geom", "id_pop"].contains(col.toLowerCase()
+                ) && populationColumns.contains(col.toLowerCase())) {
+                    popColumns << "b.$col"
+                    sum_popColumns << "sum((a.area_rsu * $col)/b.sum_area_rsu) as $col"
+                }
             }
+        } else {
+            throw new IllegalArgumentException("Please set a list of at least one column containing population data to be disaggregated")
         }
-    } else {
-        warn "Please set a list one column that contain population data to be disaggregated"
-        return
-    }
-    //Filtering the grid to get only the geometries that intersect the population table
-    def gridTable_pop = postfix gridTable
-    datasource.execute("""
+        //Filtering the grid to get only the geometries that intersect the population table
+
+        datasource.execute("""
                 drop table if exists $gridTable_pop;
                 CREATE TABLE $gridTable_pop AS SELECT (ST_AREA(ST_INTERSECTION(a.the_geom, st_force2D(b.the_geom))))  as area_rsu, a.$ID_RSU,
                 b.id_pop, ${popColumns.join(",")} from
@@ -78,10 +84,8 @@ String gridPopulation(JdbcDataSource datasource, String gridTable, String popula
                 create index on $gridTable_pop ($ID_POP);
         """.toString())
-    def gridTable_pop_sum = postfix "grid_pop_sum"
-    def gridTable_area_sum = postfix "grid_area_sum"
-    //Aggregate population values
-    datasource.execute("""drop table if exists $gridTable_pop_sum, $gridTable_area_sum;
+        //Aggregate population values
+        datasource.execute("""drop table if exists $gridTable_pop_sum, $gridTable_area_sum;
         create table $gridTable_area_sum as select id_pop, sum(area_rsu) as sum_area_rsu
         from $gridTable_pop group by $ID_POP;
         create index on $gridTable_area_sum($ID_POP);
@@ -91,10 +95,14 @@ String gridPopulation(JdbcDataSource datasource, String gridTable, String popula
        CREATE INDEX ON $gridTable_pop_sum ($ID_RSU);
        DROP TABLE IF EXISTS $outputTableName;
        CREATE TABLE $outputTableName AS SELECT a.*, ${popColumns.join(",")} from $gridTable a
-        LEFT JOIN $gridTable_pop_sum  b on a.$ID_RSU=b.$ID_RSU;
-        drop table if exists $gridTable_pop,$gridTable_pop_sum, $gridTable_area_sum ;""".toString())
+        LEFT JOIN $gridTable_pop_sum  b on a.$ID_RSU=b.$ID_RSU;""")
-    return outputTableName
+        return outputTableName
+    } catch (SQLException e) {
+        throw new SQLException("Cannot compute the population on the grid", e)
+    } finally {
+        datasource.execute(" drop table if exists $gridTable_pop,$gridTable_pop_sum, $gridTable_area_sum ;")
+    }
 }
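// Illustration, not part of this patch: the SQL above spreads each population unit over the
// grid cells it intersects, in proportion to the intersected area. For a unit with
// pop = 120 split over two cells with intersection areas 30 and 10 (sum_area_rsu = 40):
//
//     cell A receives 30 / 40 * 120 = 90
//     cell B receives 10 / 40 * 120 = 30
//
// which is exactly sum((a.area_rsu * col) / b.sum_area_rsu) once grouped by cell id.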

@@ -112,38 +120,36 @@ String gridPopulation(JdbcDataSource datasource, String gridTable, String popula
  * @return a the initial grid with all aggregated values by levels and the indexes (row, col) for each levels
  * @author Erwan Bocher (CNRS)
  */
-String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, String id_grid, int nb_levels = 1) {
-    if (!grid_indicators) {
-        error("No grid_indicators table to aggregate the LCZ values")
-        return
-    }
-    if (nb_levels <= 0 || nb_levels >= 10) {
-        error("The number of levels to aggregate the LCZ values must be between 1 and 10")
-        return
-    }
-
-    def gridColumns = datasource.getColumnNames(grid_indicators)
+String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, String id_grid, int nb_levels = 1) throws Exception {
+    def tablesToDrop = []
+    try {
+        if (!grid_indicators) {
+            throw new IllegalArgumentException("No grid_indicators table to aggregate the LCZ values")
+        }
+        if (nb_levels <= 0 || nb_levels >= 10) {
+            throw new IllegalArgumentException("The number of levels to aggregate the LCZ values must be between 1 and 10")
+        }
+        def gridColumns = datasource.getColumnNames(grid_indicators)
-    if(gridColumns.intersect(["LCZ_PRIMARY", "ID_ROW", "ID_COLUMN", id_grid]).size()==0){
-        error("The grid indicators table must contain the columns LCZ_PRIMARY, ID_ROW, $id_grid")
-        return
-    }
+        if (gridColumns.intersect(["LCZ_PRIMARY", "ID_ROW", "ID_COLUMN", id_grid]).size() == 0) {
+            throw new IllegalArgumentException("The grid indicators table must contain the columns LCZ_PRIMARY, ID_ROW, $id_grid")
+        }
-    datasource.execute("""create index on $grid_indicators(id_row,id_col)""".toString())
-    ///First build the index levels for each cell of the input grid
-    def grid_scaling_indices = postfix("grid_scaling_indices")
-    def grid_levels_query = []
-    int grid_offset = 3
-    int offsetCol = 1
-    for (int i in 1..nb_levels) {
-        int level = Math.pow(grid_offset, i)
-        grid_levels_query << " (CAST (ABS(ID_ROW-1)/${level} AS INT)+1) AS ID_ROW_LOD_$i," +
-                "(CAST (ABS(ID_COL-1)/${level} AS INT)+$offsetCol-1) AS ID_COL_LOD_$i"
-        offsetCol++
-    }
+        datasource.execute("""create index on $grid_indicators(id_row,id_col)""")
+        ///First build the index levels for each cell of the input grid
+        def grid_scaling_indices = postfix("grid_scaling_indices")
+        def grid_levels_query = []
+        int grid_offset = 3
+        int offsetCol = 1
+        for (int i in 1..nb_levels) {
+            int level = Math.pow(grid_offset, i)
+            grid_levels_query << " (CAST (ABS(ID_ROW-1)/${level} AS INT)+1) AS ID_ROW_LOD_$i," +
+                    "(CAST (ABS(ID_COL-1)/${level} AS INT)+$offsetCol-1) AS ID_COL_LOD_$i"
+            offsetCol++
+        }
-    //Compute the indices for each levels and find the 8 adjacent cells
-    datasource.execute("""DROP TABLE IF EXISTS $grid_scaling_indices;
+        //Compute the indices for each levels and find the 8 adjacent cells
+        datasource.execute("""DROP TABLE IF EXISTS $grid_scaling_indices;
         CREATE TABLE $grid_scaling_indices as SELECT *, ${grid_levels_query.join(",")},
         (SELECT LCZ_PRIMARY FROM $grid_indicators WHERE ID_ROW = a.ID_ROW+1 AND ID_COL=a.ID_COL) AS LCZ_PRIMARY_N,
         (SELECT LCZ_PRIMARY FROM $grid_indicators WHERE ID_ROW = a.ID_ROW+1 AND ID_COL=a.ID_COL+1) AS LCZ_PRIMARY_NE,
@@ -155,9 +161,9 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
         (SELECT LCZ_PRIMARY FROM $grid_indicators WHERE ID_ROW = a.ID_ROW+1 AND ID_COL=a.ID_COL-1) AS LCZ_PRIMARY_NW
         FROM  $grid_indicators as a;  """.toString())
-    //Add LCZ_WARM count at this first level
-    def lcz_warm_first_level = postfix("lcz_warm_first_level")
-    datasource.execute("""DROP TABLE IF EXISTS $lcz_warm_first_level;
+        //Add LCZ_WARM count at this first level
+        def lcz_warm_first_level = postfix("lcz_warm_first_level")
+        datasource.execute("""DROP TABLE IF EXISTS $lcz_warm_first_level;
         CREATE TABLE $lcz_warm_first_level as SELECT *,
         (CASE WHEN LCZ_PRIMARY_N in (1,2,3,4,5,6,7,8,9,10,105) THEN 1 ELSE 0 END +
         CASE WHEN LCZ_PRIMARY_NE in (1,2,3,4,5,6,7,8,9,10,105) THEN 1 ELSE 0 END +
@@ -170,22 +176,22 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
         CASE WHEN LCZ_PRIMARY in (1,2,3,4,5,6,7,8,9,10,105) THEN 1 ELSE 0 END) AS LCZ_WARM
         FROM $grid_scaling_indices """.toString())
-    def tablesToDrop = []
-    def tableLevelToJoin = lcz_warm_first_level
-    tablesToDrop << grid_scaling_indices
-    tablesToDrop << lcz_warm_first_level
-    //Process all level of details
-    for (int i in 1..nb_levels) {
-        //Index the level row and col
-        datasource.execute("""
+        def tableLevelToJoin = lcz_warm_first_level
+        tablesToDrop << grid_scaling_indices
+        tablesToDrop << lcz_warm_first_level
+
+        //Process all level of details
+        for (int i in 1..nb_levels) {
+            //Index the level row and col
+            datasource.execute("""
             CREATE INDEX IF NOT EXISTS ${grid_scaling_indices}_idx ON $grid_scaling_indices(id_row_lod_${i},id_col_lod_${i})""".toString())
-        //First compute the number of cells by level of detail
-        //Use the original grid to aggregate the data
-        //A weight is used to select the LCZ value when the mode returns more than one possibility
-        def lcz_count_lod = postfix("lcz_count_lod")
-        tablesToDrop << lcz_count_lod
-        datasource.execute("""
+            //First compute the number of cells by level of detail
+            //Use the original grid to aggregate the data
+            //A weight is used to select the LCZ value when the mode returns more than one possibility
+            def lcz_count_lod = postfix("lcz_count_lod")
+            tablesToDrop << lcz_count_lod
+            datasource.execute("""
         DROP TABLE IF EXISTS $lcz_count_lod;
         CREATE TABLE $lcz_count_lod as
            SELECT  COUNT(*) FILTER (WHERE LCZ_PRIMARY IS NOT NULL) AS COUNT, LCZ_PRIMARY,
@@ -201,11 +207,11 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
            WHERE LCZ_PRIMARY IS NOT NULL
           GROUP BY ID_ROW_LOD_${i}, ID_COL_LOD_${i}, LCZ_PRIMARY;""".toString())
-        //Select the LCZ values according the maximum number of cells and the weight
-        //Note that we compute the number of cells for urban and cool LCZ
-        def lcz_count_lod_mode = postfix("lcz_count_lod_mode")
-        tablesToDrop << lcz_count_lod_mode
-        datasource.execute("""
+            //Select the LCZ values according the maximum number of cells and the weight
+            //Note that we compute the number of cells for urban and cool LCZ
+            def lcz_count_lod_mode = postfix("lcz_count_lod_mode")
+            tablesToDrop << lcz_count_lod_mode
+            datasource.execute("""
         CREATE INDEX ON $lcz_count_lod(ID_ROW_LOD_${i}, ID_COL_LOD_${i});
         DROP TABLE IF EXISTS $lcz_count_lod_mode;
         CREATE TABLE $lcz_count_lod_mode as
@@ -219,10 +225,10 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
         from $lcz_count_lod as a
         order by count desc, ID_ROW_LOD_${i}, ID_COL_LOD_${i}, weight_lcz;""".toString())
-        //Find the 8 adjacent cells for the current level
-        def grid_lod_level_final = postfix("grid_lod_level_final")
-        tablesToDrop << grid_lod_level_final
-        datasource.execute("""
+            //Find the 8 adjacent cells for the current level
+            def grid_lod_level_final = postfix("grid_lod_level_final")
+            tablesToDrop << grid_lod_level_final
+            datasource.execute("""
         CREATE INDEX on $lcz_count_lod_mode(ID_ROW_LOD_${i}, ID_COL_LOD_${i});
         DROP TABLE IF EXISTS $grid_lod_level_final;
         CREATE TABLE $grid_lod_level_final as select * EXCEPT(LCZ_PRIMARY, COUNT, weight_lcz), LCZ_PRIMARY AS LCZ_PRIMARY_LOD_${i},
@@ -246,11 +252,11 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
         FROM $lcz_count_lod_mode as a;  """.toString())
-        tableLevelToJoin << grid_lod_level_final
+            tableLevelToJoin << grid_lod_level_final
-        //Join the final grid level with the original grid
-        def grid_level_join = postfix("grid_level_join")
-        datasource.execute("""
+            //Join the final grid level with the original grid
+            def grid_level_join = postfix("grid_level_join")
+            datasource.execute("""
         CREATE INDEX IF NOT EXISTS ${tableLevelToJoin}_idx ON $tableLevelToJoin (ID_ROW_LOD_${i}, ID_COL_LOD_${i});
         create index on $grid_lod_level_final(ID_ROW_LOD_${i}, ID_COL_LOD_${i});
         DROP TABLE IF EXISTS $grid_level_join;
@@ -260,11 +266,14 @@ String multiscaleLCZGrid(JdbcDataSource datasource, String grid_indicators, Stri
         where a.ID_ROW_LOD_${i} = b.ID_ROW_LOD_${i} and a.ID_COL_LOD_${i}= b.ID_COL_LOD_${i}
         group by a.ID_ROW_LOD_${i}, a.ID_COL_LOD_${i} , a.id_grid;  """.toString())
-        tableLevelToJoin = grid_level_join
+            tableLevelToJoin = grid_level_join
+        }
+        return tableLevelToJoin
+    } catch (SQLException e) {
+        throw new SQLException("Cannot compute the multiscale LCZ grid", e)
+    } finally {
+        datasource.dropTable(tablesToDrop)
     }
-    datasource.dropTable(tablesToDrop)
-    return tableLevelToJoin
-
 }
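// Illustration, not part of this patch: each level of detail divides the row/column indices
// by 3^i, so one LOD-1 cell aggregates a 3x3 block of base cells. For a base cell at
// ID_ROW = 7, ID_COL = 5 and i = 1 (level = 3, offsetCol = 2):
//
//     ID_ROW_LOD_1 = (int) (ABS(7 - 1) / 3) + 1         = 3
//     ID_COL_LOD_1 = (int) (ABS(5 - 1) / 3) + 2 - 1     = 2
//
// so rows 7..9 and columns 4..6 of the base grid all fall into LOD-1 cell (3, 2).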
IllegalArgumentException("The grid cannot be null or empty") } if (!id_grid) { - error("Please set the column name identifier for the grid cells") - return + throw new IllegalArgumentException("Please set the column name identifier for the grid cells") } - int epsg = datasource.getSrid(grid) - def outputTableName = postfix("grid_distances") + try { + int epsg = datasource.getSrid(grid) + def outputTableName = postfix("grid_distances") - if(keep_geometry) { - datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; + if (keep_geometry) { + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName (THE_GEOM GEOMETRY,$id_grid INT, DISTANCE FLOAT); """.toString()) - datasource.createSpatialIndex(input_polygons) - datasource.createSpatialIndex(grid) + datasource.createSpatialIndex(input_polygons) + datasource.createSpatialIndex(grid) - datasource.withBatch(100) { stmt -> - datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> - Geometry geom = row.the_geom - if (geom) { - IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) - datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid + datasource.withBatch(100) { stmt -> + datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> + Geometry geom = row.the_geom + if (geom) { + IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) + datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom and st_intersects(ST_GEOMFROMTEXT('${geom}',$epsg) , ST_POINTONSURFACE(the_geom))""".toString()) { cell -> - Geometry cell_geom = cell.the_geom - double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) - stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${cell_geom}',$epsg), ${cell.id},${distance})".toString() + Geometry cell_geom = cell.the_geom + double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) + stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${cell_geom}',$epsg), ${cell.id},${distance})".toString() + } } } } - } - }else{ - datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; + } else { + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName ($id_grid INT, DISTANCE FLOAT); """.toString()) - datasource.createSpatialIndex(input_polygons) - datasource.createSpatialIndex(grid) + datasource.createSpatialIndex(input_polygons) + datasource.createSpatialIndex(grid) - datasource.withBatch(100) { stmt -> - datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> - Geometry geom = row.the_geom - if (geom) { - IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) - datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid + datasource.withBatch(100) { stmt -> + datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> + Geometry geom = row.the_geom + if (geom) { + IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) + datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom and st_intersects(ST_GEOMFROMTEXT('${geom}',$epsg) , ST_POINTONSURFACE(the_geom))""".toString()) { cell -> - Geometry cell_geom = cell.the_geom - double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) - stmt.addBatch "insert into $outputTableName values(${cell.id},${distance})".toString() + Geometry cell_geom = 
cell.the_geom + double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) + stmt.addBatch "insert into $outputTableName values(${cell.id},${distance})".toString() + } } } } } + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the grid distances", e) } - return outputTableName } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicators.groovy index 7370ae1e52..447ae7ceac 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicators.groovy @@ -25,6 +25,8 @@ import org.locationtech.jts.geom.Geometry import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators @@ -58,41 +60,49 @@ import org.orbisgis.geoclimate.Geoindicators */ String groundAcousticAbsorption(JdbcDataSource datasource, String zone, String id_zone, String building, String road, String water, String vegetation, - String impervious, String jsonFilename = "", boolean unknownArea = false) { - def outputTableName = postfix("GROUND_ACOUSTIC") - datasource.execute """ drop table if exists $outputTableName; + String impervious, String jsonFilename = "", boolean unknownArea = false) throws Exception { + //Ground layer name + String ground + try { + def outputTableName = postfix("GROUND_ACOUSTIC") + datasource.execute """ drop table if exists $outputTableName; CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id_ground serial,G float, type VARCHAR, layer VARCHAR);""".toString() - def paramsDefaultFile = this.class.getResourceAsStream("ground_acoustic_absorption.json") - def absorption_params = Geoindicators.DataUtils.parametersMapping(jsonFilename, paramsDefaultFile) - def default_absorption = absorption_params.default_g - def g_absorption = absorption_params.g - def layer_priorities = absorption_params.layer_priorities - String filter = " where layer not in('building','road') " - if (unknownArea) { - filter += " or layer is null" - } - String ground = Geoindicators.RsuIndicators.groundLayer(datasource, zone, id_zone, - building, road, water, vegetation, - impervious, layer_priorities) - if (ground) { - int rowcount = 1 - datasource.withBatch(100) { stmt -> - datasource.eachRow("SELECT the_geom, TYPE, layer FROM $ground $filter".toString()) { row -> - String type = row.type - def layer = row.layer - float g_coeff = default_absorption as float - if (type) { - g_coeff = g_absorption.get(type) + def paramsDefaultFile = this.class.getResourceAsStream("ground_acoustic_absorption.json") + def absorption_params = Geoindicators.DataUtils.parametersMapping(jsonFilename, paramsDefaultFile) + def default_absorption = absorption_params.default_g + def g_absorption = absorption_params.g + def layer_priorities = absorption_params.layer_priorities + String filter = " where layer not in('building','road') " + if (unknownArea) { + filter += " or layer is null" + } + ground = Geoindicators.RsuIndicators.groundLayer(datasource, zone, id_zone, + building, road, water, vegetation, + impervious, layer_priorities) + if (ground) { + int rowcount = 1 + datasource.withBatch(100) { stmt -> + datasource.eachRow("SELECT the_geom, TYPE, layer FROM $ground $filter".toString()) { row -> + String type = row.type + def layer = row.layer + float g_coeff = 
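// Illustration, not part of this patch: IndexedFacetDistance indexes the facets of one
// geometry so repeated distance queries against it stay cheap, which is why gridDistances
// builds it once per polygon and reuses it for every intersecting cell. A self-contained
// sketch with hypothetical geometries:
//
//     import org.locationtech.jts.io.WKTReader
//     import org.locationtech.jts.operation.distance.IndexedFacetDistance
//
//     def reader = new WKTReader()
//     def polygon = reader.read("POLYGON((0 0, 10 0, 10 10, 0 10, 0 0))")
//     def facetDistance = new IndexedFacetDistance(polygon)
//     double d = facetDistance.distance(reader.read("POINT(15 5)"))
//     assert Math.abs(d - 5.0d) < 1e-9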
default_absorption as float + if (type) { + g_coeff = g_absorption.get(type) + } + Geometry geom = row.the_geom + def epsg = geom.getSRID() + stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${geom}',$epsg), ${rowcount++},${g_coeff}, ${StringUtils.quoteStringSQL(type)}, ${StringUtils.quoteStringSQL(layer)})".toString() } - Geometry geom = row.the_geom - def epsg = geom.getSRID() - stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${geom}',$epsg), ${rowcount++},${g_coeff}, ${StringUtils.quoteStringSQL(type)}, ${StringUtils.quoteStringSQL(layer)})".toString() } + datasource.dropTable(ground) } + debug('Ground acoustic transformation finished') + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the ground acoustic absorption", e) + } finally { datasource.dropTable(ground) } - debug('Ground acoustic transformation finishes') - return outputTableName } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy index 8a7d266d8c..6e63d51abc 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy @@ -23,6 +23,8 @@ import groovy.transform.BaseScript import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + @BaseScript Geoindicators geoindicators @@ -36,10 +38,11 @@ import org.orbisgis.geoclimate.Geoindicators * @return the name of the population table */ String formatPopulationTable(JdbcDataSource datasource, String populationTable, List populationColumns = [], - String zoneTable = "") { - def tablePopulation_tmp = postfix(populationTable) - if (zoneTable) { - datasource.execute(""" + String zoneTable = "") throws Exception { + try { + def tablePopulation_tmp = postfix(populationTable) + if (zoneTable) { + datasource.execute(""" CREATE SPATIAL INDEX ON $populationTable(the_geom); CREATE SPATIAL INDEX ON $zoneTable(the_geom); DROP TABLE IF EXISTS $tablePopulation_tmp ; @@ -49,16 +52,19 @@ String formatPopulationTable(JdbcDataSource datasource, String populationTable, DROP TABLE IF EXISTS $populationTable; ALTER TABLE $tablePopulation_tmp rename to $populationTable; """.toString()) - } else { - datasource.execute(""" + } else { + datasource.execute(""" DROP TABLE IF EXISTS $tablePopulation_tmp ; CREATE TABLE $tablePopulation_tmp AS SELECT rownum() as id_pop, ST_MAKEVALID(the_geom) as the_geom , ${populationColumns.join(",")} from $populationTable ; DROP TABLE IF EXISTS $populationTable; ALTER TABLE $tablePopulation_tmp rename to $populationTable; """.toString()) + } + return populationTable + } catch (SQLException e) { + throw new SQLException("Cannot format the population table", e) } - return populationTable } /** @@ -74,15 +80,17 @@ String formatPopulationTable(JdbcDataSource datasource, String populationTable, * @return a map with the population distributed on building, rsu or grid */ Map multiScalePopulation(JdbcDataSource datasource, String populationTable, List populationColumns = [], - String buildingTable, String rsuTable, String gridTable) { + String buildingTable, String rsuTable, String gridTable) throws Exception { if (populationTable && populationColumns) { - def prefixName = "pop" - if (buildingTable) { - String buildingPop = Geoindicators.BuildingIndicators.buildingPopulation(datasource,
buildingTable, - populationTable, populationColumns) - if (buildingPop) { + def tablesToDrop=[] + try { + def prefixName = "pop" + if (buildingTable) { + String buildingPop = Geoindicators.BuildingIndicators.buildingPopulation(datasource, buildingTable, + populationTable, populationColumns) datasource.execute("""DROP TABLE IF EXISTS $buildingTable; ALTER TABLE ${buildingPop} RENAME TO $buildingTable""".toString()) + tablesToDrop< @@ -91,21 +99,18 @@ Map multiScalePopulation(JdbcDataSource datasource, String populationTable, List def rsu_pop_tmp = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, buildingTable, rsuTable, "id_rsu", "id_rsu", - unweightedBuildingIndicators, prefixName,) - if (rsu_pop_tmp) { - def rsu_pop_geom = postfix(rsuTable) - def tablesToJoin = [:] - tablesToJoin.put(rsuTable, "id_rsu") - tablesToJoin.put(rsu_pop_tmp, "id_rsu") - def p = Geoindicators.DataUtils.joinTables(datasource, - tablesToJoin, rsu_pop_geom) - if (p) { - datasource.execute("""DROP TABLE IF EXISTS $rsuTable, $rsu_pop_tmp; + unweightedBuildingIndicators, prefixName) + tablesToDrop< 0) { - def columnNames = inputSpatialTable.columns - columnNames.remove("THE_GEOM") - - def flatListColumns = columnNames.inject([]) { result, iter -> - result += "a.\"$iter\"" - }.join(",") + try { + //Define the mapping between the values in OSM and those used in the abstract model + def queryMapper = "SELECT " + if (datasource.getRowCount(road)> 0) { + def columnNames = datasource.getColumnNames(road) + columnNames.remove("THE_GEOM") + def flatListColumns = columnNames.inject([]) { result, iter -> + result += "a.\"$iter\"" + }.join(",") - if (zone) { - datasource.createSpatialIndex(road,"the_geom") - queryMapper += "${flatListColumns}, CASE WHEN st_overlaps(st_force2D(a.the_geom), b.the_geom) " + - "THEN st_force2D(st_makevalid(st_intersection(st_force2D(a.the_geom), b.the_geom))) " + - "ELSE st_force2D(a.the_geom) " + - "END AS the_geom " + - "FROM " + - "$road AS a, $zone AS b " + - "WHERE " + - "a.the_geom && b.the_geom and a.type not in ('track', 'path', 'cycleway', 'steps') " - } else { - queryMapper += "${flatListColumns}, st_force2D(a.the_geom) as the_geom FROM $road as a where type not in ('track', 'path', 'cycleway', 'steps')" - } - datasource.withBatch(100) { stmt -> - datasource.eachRow(queryMapper) { row -> - //Find road type - def source_road_type = row."type" - def road_type = getTrafficRoadType(road_types, source_road_type) - //Set a default road - if (road_type) { - def maxspeed_value = row."maxspeed" - //Find best speed from road type - if (maxspeed_value == -1) { - maxspeed_value = maxspeed[road_type] - } - def direction = row."direction" - def surface = row."surface" - def pavement_value = getPavement(pavements, surface) - def traffic_data = getNumberVehiclesPerHour(road_type, direction, flow_data, flow_period) - Geometry geom = row.the_geom - int epsg = geom.getSRID() - if (geom) { - //Explode geometries - for (int i = 0; i < geom.getNumGeometries(); i++) { - stmt.addBatch """insert into $outputTableName (THE_GEOM, ID_SOURCE, ROAD_TYPE, SOURCE_ROAD_TYPE, + if (zone) { + datasource.createSpatialIndex(road, "the_geom") + queryMapper += "${flatListColumns}, CASE WHEN st_overlaps(st_force2D(a.the_geom), b.the_geom) " + + "THEN st_force2D(st_makevalid(st_intersection(st_force2D(a.the_geom), b.the_geom))) " + + "ELSE st_force2D(a.the_geom) " + + "END AS the_geom " + + "FROM " + + "$road AS a, $zone AS b " + + "WHERE " + + "a.the_geom && b.the_geom and a.type not in ('track', 
'path', 'cycleway', 'steps') " + } else { + queryMapper += "${flatListColumns}, st_force2D(a.the_geom) as the_geom FROM $road as a where type not in ('track', 'path', 'cycleway', 'steps')" + } + datasource.withBatch(100) { stmt -> + datasource.eachRow(queryMapper) { row -> + //Find road type + def source_road_type = row."type" + def road_type = getTrafficRoadType(road_types, source_road_type) + //Set a default road + if (road_type) { + def maxspeed_value = row."maxspeed" + //Find best speed from road type + if (maxspeed_value == -1) { + maxspeed_value = maxspeed[road_type] + } + def direction = row."direction" + def surface = row."surface" + def pavement_value = getPavement(pavements, surface) + def traffic_data = getNumberVehiclesPerHour(road_type, direction, flow_data, flow_period) + Geometry geom = row.the_geom + int epsg = geom.getSRID() + if (geom) { + //Explode geometries + for (int i = 0; i < geom.getNumGeometries(); i++) { + stmt.addBatch """insert into $outputTableName (THE_GEOM, ID_SOURCE, ROAD_TYPE, SOURCE_ROAD_TYPE, SURFACE, DIRECTION, SLOPE ,PAVEMENT, DAY_LV_HOUR, DAY_HV_HOUR , DAY_LV_SPEED ,DAY_HV_SPEED , NIGHT_LV_HOUR , NIGHT_HV_HOUR , NIGHT_LV_SPEED , NIGHT_HV_SPEED , @@ -156,12 +152,12 @@ String build_road_traffic(JdbcDataSource datasource, String road, String zone = ${traffic_data.day_lv_hour},${traffic_data.day_hv_hour},${maxspeed_value},${maxspeed_value}, ${traffic_data.night_lv_hour},${traffic_data.night_hv_hour},${maxspeed_value},${maxspeed_value}, ${traffic_data.ev_lv_hour},${traffic_data.ev_hv_hour},${maxspeed_value},${maxspeed_value})""".toString() + } } } } } - } - datasource.execute """COMMENT ON COLUMN ${outputTableName}."ROAD_TYPE" IS 'Default value road type'; + datasource.execute("""COMMENT ON COLUMN ${outputTableName}."ROAD_TYPE" IS 'Default value road type'; COMMENT ON COLUMN ${outputTableName}."DAY_LV_HOUR" IS 'Number of light vehicles per hour for day'; COMMENT ON COLUMN ${outputTableName}."DAY_HV_HOUR" IS 'Number of heavy vehicles per hour for day'; COMMENT ON COLUMN ${outputTableName}."DAY_LV_SPEED" IS 'Light vehicles speed for day'; @@ -175,11 +171,12 @@ String build_road_traffic(JdbcDataSource datasource, String road, String zone = COMMENT ON COLUMN ${outputTableName}."EV_LV_SPEED" IS 'Light vehicles speed for evening'; COMMENT ON COLUMN ${outputTableName}."EV_HV_SPEED" IS 'Number of heavy vehicles per hour for evening'; COMMENT ON COLUMN ${outputTableName}."SLOPE" IS 'Slope (in %) of the road section.'; - COMMENT ON COLUMN ${outputTableName}."DIRECTION" IS 'Define the direction of the road section. 1 = one way road section and the traffic goes in the same way that the slope definition you have used, 2 = one way road section and the traffic goes in the inverse way that the slope definition you have used, 3 = bi-directional traffic flow, the flow is split into two components and correct half for uphill and half for downhill'""".toString() - + COMMENT ON COLUMN ${outputTableName}."DIRECTION" IS 'Define the direction of the road section. 
1 = one-way road section where the traffic goes in the same direction as the slope definition used, 2 = one-way road section where the traffic goes in the opposite direction to the slope definition used, 3 = bi-directional traffic flow; the flow is split into two components, half uphill and half downhill'""") + } + } catch (SQLException e) { + throw new SQLException("Cannot compute the road traffic", e) } } - debug('Roads traffic computed') return outputTableName } /** diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy index 81079dda9d..8779cd3800 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy @@ -23,6 +23,8 @@ import groovy.transform.BaseScript import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators +import java.sql.SQLException + import static java.lang.Math.round import static java.lang.Math.toRadians @@ -49,22 +51,22 @@ import static java.lang.Math.toRadians * @author Jérémy Bernard */ String freeExternalFacadeDensity(JdbcDataSource datasource, String building, String rsu, String buContiguityColumn, - String buTotalFacadeLengthColumn, String prefixName) { + String buTotalFacadeLengthColumn, String prefixName) throws Exception { + try { + def GEOMETRIC_FIELD_RSU = "the_geom" + def ID_FIELD_RSU = "id_rsu" + def HEIGHT_WALL = "height_wall" + def BASE_NAME = "free_external_facade_density" - def GEOMETRIC_FIELD_RSU = "the_geom" - def ID_FIELD_RSU = "id_rsu" - def HEIGHT_WALL = "height_wall" - def BASE_NAME = "free_external_facade_density" + debug "Executing RSU free external facade density" - debug "Executing RSU free external facade density" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - datasource.createIndex(building,"id_rsu") - datasource.createIndex(rsu,"id_rsu") + datasource.createIndex(building, "id_rsu") + datasource.createIndex(rsu, "id_rsu") - def query = """ + def query = """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT COALESCE( @@ -73,12 +75,14 @@ String freeExternalFacadeDensity(JdbcDataSource datasource, String building, Str AS $BASE_NAME, b.$ID_FIELD_RSU """ - query += " FROM $building a RIGHT JOIN $rsu b " + - "ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU GROUP BY b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD_RSU;" + query += " FROM $building a RIGHT JOIN $rsu b " + + "ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU GROUP BY b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD_RSU;" - datasource query.toString() - - return outputTableName + datasource.execute(query) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the free external facade density at RSU scale", e) + } } /** @@ -102,46 +106,48 @@ String freeExternalFacadeDensity(JdbcDataSource datasource, String building, Str * @author Jérémy Bernard */ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building, - String rsu, String idRsu, String prefixName) { - def GEOMETRIC_FIELD_RSU = "the_geom" - def GEOMETRIC_FIELD_BU = "the_geom" - def ID_FIELD_BU = "id_build" - def HEIGHT_WALL = "height_wall" - def FACADE_AREA = "facade_area" - def RSU_AREA
= "rsu_area" - def BASE_NAME = "free_external_facade_density" - - debug "Executing RSU free external facade density (exact version)" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME + String rsu, String idRsu, String prefixName) throws Exception { // Temporary table names def buildLine = postfix "buildLine" def buildLineRsu = postfix "buildLineRsu" def sharedLineRsu = postfix "shareLineRsu" def onlyBuildRsu = postfix "onlyBuildRsu" + try { + def GEOMETRIC_FIELD_RSU = "the_geom" + def GEOMETRIC_FIELD_BU = "the_geom" + def ID_FIELD_BU = "id_build" + def HEIGHT_WALL = "height_wall" + def FACADE_AREA = "facade_area" + def RSU_AREA = "rsu_area" + def BASE_NAME = "free_external_facade_density" + + debug "Executing RSU free external facade density (exact version)" - // Consider facades as touching each other within a snap tolerance - def snap_tolerance = 0.01 + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME - // 1. Convert the building polygons into lines and create the intersection with RSU polygons - datasource.createIndex(building,idRsu) - datasource.createIndex(rsu,idRsu) - datasource """ + + // Consider facades as touching each other within a snap tolerance + def snap_tolerance = 0.01 + + // 1. Convert the building polygons into lines and create the intersection with RSU polygons + datasource.createIndex(building, idRsu) + datasource.createIndex(rsu, idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $buildLine; CREATE TABLE $buildLine AS SELECT a.$ID_FIELD_BU, a.$idRsu, ST_AREA(b.$GEOMETRIC_FIELD_RSU) AS $RSU_AREA, ST_CollectionExtract(ST_INTERSECTION(ST_TOMULTILINE(a.$GEOMETRIC_FIELD_BU), b.$GEOMETRIC_FIELD_RSU), 2) AS $GEOMETRIC_FIELD_BU, a.$HEIGHT_WALL FROM $building AS a LEFT JOIN $rsu AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") - // 2. Keep only intersected facades within a given distance and calculate their area per RSU - datasource.createSpatialIndex(buildLine,GEOMETRIC_FIELD_BU) - datasource.createIndex(buildLine,idRsu) - datasource.createIndex(buildLine,ID_FIELD_BU) - datasource """ + // 2. Keep only intersected facades within a given distance and calculate their area per RSU + datasource.createSpatialIndex(buildLine, GEOMETRIC_FIELD_BU) + datasource.createIndex(buildLine, idRsu) + datasource.createIndex(buildLine, ID_FIELD_BU) + datasource.execute( """ DROP TABLE IF EXISTS $sharedLineRsu; CREATE TABLE $sharedLineRsu AS SELECT SUM(ST_LENGTH( ST_INTERSECTION(a.$GEOMETRIC_FIELD_BU, @@ -155,22 +161,22 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building WHERE a.$GEOMETRIC_FIELD_BU && b.$GEOMETRIC_FIELD_BU AND ST_INTERSECTS(a.$GEOMETRIC_FIELD_BU, ST_SNAP(b.$GEOMETRIC_FIELD_BU, a.$GEOMETRIC_FIELD_BU, $snap_tolerance)) AND a.$ID_FIELD_BU <> b.$ID_FIELD_BU - GROUP BY a.$idRsu;""".toString() + GROUP BY a.$idRsu;""") - // 3. Calculates the building facade area within each RSU - datasource.createIndex(buildLine,idRsu) - datasource """ + // 3. Calculates the building facade area within each RSU + datasource.createIndex(buildLine, idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $buildLineRsu; CREATE TABLE $buildLineRsu AS SELECT $idRsu, MIN($RSU_AREA) AS $RSU_AREA, SUM(ST_LENGTH($GEOMETRIC_FIELD_BU) * $HEIGHT_WALL) AS $FACADE_AREA FROM $buildLine - GROUP BY $idRsu;""".toString() + GROUP BY $idRsu;""") - // 4. 
Calculates the free facade density by RSU (subtract 3 and 2 and divide by RSU area) - datasource.createIndex(buildLineRsu,idRsu) - datasource.createIndex(sharedLineRsu,idRsu) - datasource """ + // 4. Calculates the free facade density by RSU (subtract 3 and 2 and divide by RSU area) + datasource.createIndex(buildLineRsu, idRsu) + datasource.createIndex(sharedLineRsu, idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $onlyBuildRsu; CREATE TABLE $onlyBuildRsu AS SELECT a.$idRsu, @@ -182,23 +188,27 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building (a.$FACADE_AREA-b.$FACADE_AREA)/a.$RSU_AREA AS $BASE_NAME FROM $buildLineRsu AS a right JOIN $sharedLineRsu AS b ON a.$idRsu = b.$idRsu - """.toString() + """) - // 5. Join RSU having no buildings and set their value to 0 - datasource.createIndex(onlyBuildRsu,idRsu) - datasource """ + // 5. Join RSU having no buildings and set their value to 0 + datasource.createIndex(onlyBuildRsu, idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.$idRsu, COALESCE(b.$BASE_NAME, 0) AS $BASE_NAME FROM $rsu AS a LEFT JOIN $onlyBuildRsu AS b - ON a.$idRsu = b.$idRsu""".toString() - - // The temporary tables are deleted - datasource "DROP TABLE IF EXISTS $buildLine, $buildLineRsu, $sharedLineRsu, $onlyBuildRsu".toString() + ON a.$idRsu = b.$idRsu""") - return outputTableName + // The temporary tables are deleted + datasource.dropTable(buildLine, buildLineRsu, sharedLineRsu, onlyBuildRsu) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the exact free external facade density at RSU scale", e) + } finally { + datasource.dropTable(buildLine, buildLineRsu, sharedLineRsu, onlyBuildRsu) + } } /** @@ -234,15 +244,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building * @author Erwan Bocher */ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, String correlationBuildingTable, float pointDensity, - float rayLength, int numberOfDirection, String prefixName) { - def GEOMETRIC_COLUMN_RSU = "the_geom" - def GEOMETRIC_COLUMN_BU = "the_geom" - def ID_COLUMN_RSU = id_rsu - def HEIGHT_WALL = "height_wall" - def BASE_NAME = "ground_sky_view_factor" - - debug "Executing RSU ground sky view factor" - + float rayLength, int numberOfDirection, String prefixName) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def rsuDiff = postfix "rsuDiff" @@ -253,21 +255,29 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, def pts_order = postfix "pts_order" def svfPts = postfix "svfPts" def pts_RANG = postfix "pts_RANG" + try { + def GEOMETRIC_COLUMN_RSU = "the_geom" + def GEOMETRIC_COLUMN_BU = "the_geom" + def ID_COLUMN_RSU = id_rsu + def HEIGHT_WALL = "height_wall" + def BASE_NAME = "ground_sky_view_factor" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + debug "Executing RSU ground sky view factor" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - // Create the needed index on input tables and the table that will contain the SVF calculation points - datasource.createSpatialIndex(rsu,GEOMETRIC_COLUMN_RSU) - datasource.createIndex(rsu,ID_COLUMN_RSU) + // Create the needed index on input tables and the table that will contain the SVF calculation points + 
datasource.createSpatialIndex(rsu, GEOMETRIC_COLUMN_RSU) + datasource.createIndex(rsu, ID_COLUMN_RSU) - datasource.createSpatialIndex(correlationBuildingTable,GEOMETRIC_COLUMN_BU) - datasource.createIndex(correlationBuildingTable,ID_COLUMN_RSU) + datasource.createSpatialIndex(correlationBuildingTable, GEOMETRIC_COLUMN_BU) + datasource.createIndex(correlationBuildingTable, ID_COLUMN_RSU) - def to_start = System.currentTimeMillis() + def to_start = System.currentTimeMillis() - // Create the geometries of buildings and RSU holes included within each RSU - datasource """ + // Create the geometries of buildings and RSU holes included within each RSU + datasource.execute( """ DROP TABLE IF EXISTS $rsuDiff, $multiptsRSU, $multiptsRSUtot, $rsuDiffTot,$pts_RANG,$pts_order,$ptsRSUtot, $svfPts, $outputTableName; CREATE TABLE $rsuDiff AS (SELECT CASE WHEN ST_ISEMPTY(st_difference(a.$GEOMETRIC_COLUMN_RSU, st_makevalid(ST_ACCUM(b.$GEOMETRIC_COLUMN_BU)))) @@ -278,19 +288,19 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, WHERE a.$GEOMETRIC_COLUMN_RSU && b.$GEOMETRIC_COLUMN_BU AND ST_INTERSECTS(a.$GEOMETRIC_COLUMN_RSU, b.$GEOMETRIC_COLUMN_BU) GROUP BY a.$ID_COLUMN_RSU); - """.toString() - datasource """ + """) + datasource.execute( """ CREATE INDEX ON $rsuDiff ($ID_COLUMN_RSU); CREATE TABLE $rsuDiffTot AS SELECT b.$ID_COLUMN_RSU, case when a.$ID_COLUMN_RSU is null then b.the_geom else a.the_geom end as the_geom FROM $rsu as b left join $rsuDiff as a on a.$ID_COLUMN_RSU=b.$ID_COLUMN_RSU; - """.toString() + """) - // The points used for the SVF calculation are regularly selected within each RSU. The points are - // located outside buildings (and RSU holes) and the size of the grid mesh used to sample each RSU - // (based on the building density + 10%) - if the building density exceeds 90%, - // the LCZ 7 building density is then set to 90%) - datasource """CREATE TABLE $multiptsRSU AS SELECT $ID_COLUMN_RSU, THE_GEOM + // The points used for the SVF calculation are regularly selected within each RSU. 
The points are + // located outside buildings (and RSU holes) and the size of the grid mesh used to sample each RSU + // (based on the building density + 10%) - if the building density exceeds 90%, + // the LCZ 7 building density is then set to 90%) + datasource.execute( """CREATE TABLE $multiptsRSU AS SELECT $ID_COLUMN_RSU, THE_GEOM FROM ST_EXPLODE('(SELECT $ID_COLUMN_RSU, case when LEAST(TRUNC($pointDensity*c.rsu_area_free),100)=0 @@ -299,21 +309,21 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, AS the_geom FROM (SELECT the_geom, st_area($GEOMETRIC_COLUMN_RSU) AS rsu_area_free, $ID_COLUMN_RSU - FROM st_explode(''(select * from $rsuDiffTot)'') where st_area(the_geom)>0) as c)');""".toString() + FROM st_explode(''(select * from $rsuDiffTot)'') where st_area(the_geom)>0) as c)');""") - // Need to identify specific points for buildings being RSU (slightly away from the wall on each facade) - datasource """ CREATE TABLE $multiptsRSUtot + // Need to identify specific points for buildings being RSU (slightly away from the wall on each facade) + datasource.execute( """ CREATE TABLE $multiptsRSUtot AS SELECT $ID_COLUMN_RSU, THE_GEOM FROM ST_EXPLODE('(SELECT $ID_COLUMN_RSU, ST_LocateAlong(THE_GEOM, 0.5, 0.01) AS THE_GEOM FROM $rsuDiffTot WHERE ST_DIMENSION(THE_GEOM)=1)') UNION SELECT $ID_COLUMN_RSU, THE_GEOM - FROM $multiptsRSU""".toString() + FROM $multiptsRSU""") - datasource.createSpatialIndex(multiptsRSUtot,"the_geom") - // The SVF calculation is performed at point scale - datasource """ + datasource.createSpatialIndex(multiptsRSUtot, "the_geom") + // The SVF calculation is performed at point scale + datasource.execute( """ CREATE TABLE $svfPts AS SELECT a.$ID_COLUMN_RSU, ST_SVF(ST_GEOMETRYN(a.the_geom,1), ST_ACCUM(ST_UPDATEZ(b.$GEOMETRIC_COLUMN_BU, b.$HEIGHT_WALL)), @@ -321,24 +331,32 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, FROM $multiptsRSUtot AS a, $correlationBuildingTable AS b WHERE ST_EXPAND(a.the_geom, $rayLength) && b.$GEOMETRIC_COLUMN_BU AND ST_DWITHIN(b.$GEOMETRIC_COLUMN_BU, a.the_geom, $rayLength) - GROUP BY a.the_geom""".toString() - datasource.createIndex(svfPts,ID_COLUMN_RSU) + GROUP BY a.the_geom""") + datasource.createIndex(svfPts, ID_COLUMN_RSU) - // The result of the SVF calculation is averaged at RSU scale - datasource """ + // The result of the SVF calculation is averaged at RSU scale + datasource.execute( """ CREATE TABLE $outputTableName($ID_COLUMN_RSU integer, $BASE_NAME double) AS (SELECT a.$ID_COLUMN_RSU, CASE WHEN AVG(b.SVF) is not null THEN AVG(b.SVF) ELSE 1 END FROM $rsu a LEFT JOIN $svfPts b ON a.$ID_COLUMN_RSU = b.$ID_COLUMN_RSU - GROUP BY a.$ID_COLUMN_RSU)""".toString() + GROUP BY a.$ID_COLUMN_RSU)""") - debug "SVF calculation time: ${(System.currentTimeMillis() - to_start) / 1000} s" + debug "SVF calculation time: ${(System.currentTimeMillis() - to_start) / 1000} s" - // The temporary tables are deleted - datasource "DROP TABLE IF EXISTS $rsuDiff, $ptsRSUtot, $multiptsRSU, $rsuDiffTot,$pts_order,$multiptsRSUtot, $svfPts".toString() - - return outputTableName + // The temporary tables are deleted + datasource.dropTable(rsuDiff, rsuDiffTot, + multiptsRSU, multiptsRSUtot, ptsRSUtot, + pts_order, svfPts, pts_RANG) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the ground sky view factor at RSU scale", e) + } finally { + datasource.dropTable(rsuDiff, rsuDiffTot, + multiptsRSU, multiptsRSUtot, ptsRSUtot, + pts_order, svfPts, pts_RANG) + } } 
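Note on the error-handling pattern above: with this change the indicator functions throw an exception instead of logging an error and returning null, and their temporary tables are dropped in a finally block. Callers therefore switch from null checks to try/catch. A minimal, hypothetical caller sketch (the H2GIS path and the "rsu"/"building" table names are illustrative, not part of this patch):

import org.orbisgis.data.H2GIS
import org.orbisgis.geoclimate.Geoindicators

// Open a throwaway H2GIS database; the path is illustrative
def datasource = H2GIS.open("/tmp/geoclimate_demo")
try {
    // "rsu" and "building" are assumed to exist with the expected columns (the_geom, id_rsu, height_wall)
    String svf = Geoindicators.RsuIndicators.groundSkyViewFactor(datasource,
            "rsu", "id_rsu", "building",
            0.008f, // pointDensity: number of sampling points per square meter
            100f,   // rayLength: maximum ray length in meters
            60,     // numberOfDirection: number of ray directions per point
            "test") // prefixName
    println "Sky view factor stored in $svf"
} catch (Exception e) {
    // The temporary tables were already dropped by the finally block inside the method
    println "SVF computation failed: ${e.message}"
}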
/** @@ -361,25 +379,26 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, * @author Jérémy Bernard */ String aspectRatio(JdbcDataSource datasource, String rsuTable, String rsuFreeExternalFacadeDensityColumn, String rsuBuildingDensityColumn, - prefixName) { - - def COLUMN_ID_RSU = "id_rsu" - def BASE_NAME = "aspect_ratio" - - debug "Executing RSU aspect ratio" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - - datasource """ + prefixName) throws Exception { + try { + def COLUMN_ID_RSU = "id_rsu" + def BASE_NAME = "aspect_ratio" + debug "Executing RSU aspect ratio" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT CASE WHEN $rsuBuildingDensityColumn = 1 THEN null ELSE 0.5 * ($rsuFreeExternalFacadeDensityColumn/(1-$rsuBuildingDensityColumn)) END - AS $BASE_NAME, $COLUMN_ID_RSU FROM $rsuTable""".toString() + AS $BASE_NAME, $COLUMN_ID_RSU FROM $rsuTable""") - return outputTableName + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the aspect ratio at RSU scale", e) + } } /** @@ -400,49 +419,51 @@ String aspectRatio(JdbcDataSource datasource, String rsuTable, String rsuFreeExt * @return A database table name. * @author Jérémy Bernard */ -String projectedFacadeAreaDistribution(JdbcDataSource datasource, String building, String rsu,String id_rsu, +String projectedFacadeAreaDistribution(JdbcDataSource datasource, String building, String rsu, String id_rsu, List listLayersBottom = [0, 10, 20, 30, 40, 50], int numberOfDirection = 12, - String prefixName) { - def BASE_NAME = "projected_facade_area_distribution" - def GEOMETRIC_COLUMN_RSU = "the_geom" - def GEOMETRIC_COLUMN_BU = "the_geom" - def ID_COLUMN_RSU = id_rsu - def ID_COLUMN_BU = "id_build" - def HEIGHT_WALL = "height_wall" + String prefixName) throws Exception { - debug "Executing RSU projected facade area distribution" + // To avoid overwriting the output files of this step, a unique identifier is created + // Temporary table names + def buildingIntersection = postfix "building_intersection" + def buildingIntersectionExpl = postfix "building_intersection_expl" + def buildingFree = postfix "buildingFree" + def buildingLayer = postfix "buildingLayer" + def buildingFreeExpl = postfix "buildingFreeExpl" + def rsuInter = postfix "rsuInter" + def finalIndicator = postfix "finalIndicator" + try { + def BASE_NAME = "projected_facade_area_distribution" + def GEOMETRIC_COLUMN_RSU = "the_geom" + def GEOMETRIC_COLUMN_BU = "the_geom" + def ID_COLUMN_RSU = id_rsu + def ID_COLUMN_BU = "id_build" + def HEIGHT_WALL = "height_wall" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + debug "Executing RSU projected facade area distribution" - datasource.createSpatialIndex(building,"the_geom") - datasource.createSpatialIndex(rsu,"the_geom") - datasource.createIndex(building,"id_build") - datasource.createIndex(rsu,ID_COLUMN_RSU) + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - if (360 % numberOfDirection == 0 && numberOfDirection % 2 == 0) { + datasource.createSpatialIndex(building, "the_geom") + datasource.createSpatialIndex(rsu, "the_geom") + datasource.createIndex(building, "id_build") + datasource.createIndex(rsu, 
ID_COLUMN_RSU) - // To avoid overwriting the output files of this step, a unique identifier is created - // Temporary table names - def buildingIntersection = postfix "building_intersection" - def buildingIntersectionExpl = postfix "building_intersection_expl" - def buildingFree = postfix "buildingFree" - def buildingLayer = postfix "buildingLayer" - def buildingFreeExpl = postfix "buildingFreeExpl" - def rsuInter = postfix "rsuInter" - def finalIndicator = postfix "finalIndicator" + if (360 % numberOfDirection == 0 && numberOfDirection % 2 == 0) { - // The projection should be performed at the median of the angle interval - def dirMedDeg = 180 / numberOfDirection - def dirMedRad = toRadians(dirMedDeg) - dirMedDeg = round(dirMedDeg) + // The projection should be performed at the median of the angle interval + def dirMedDeg = 180 / numberOfDirection + def dirMedRad = toRadians(dirMedDeg) - // The list that will store the fields name is initialized - def names = [] + dirMedDeg = round(dirMedDeg) + + // The list that will store the fields name is initialized + def names = [] - // Common party walls between buildings are calculated - datasource """ + // Common party walls between buildings are calculated + datasource.execute( """ DROP TABLE IF EXISTS $buildingIntersection; CREATE TABLE $buildingIntersection( the_geom GEOMETRY, id_build_a INTEGER, id_build_b INTEGER, z_max DOUBLE, z_min DOUBLE) AS SELECT ST_CollectionExtract(t.the_geom,2), t.id_build_a , t.id_build_b , t.z_max , t.z_min @@ -457,16 +478,16 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin WHERE a.$GEOMETRIC_COLUMN_BU && b.$GEOMETRIC_COLUMN_BU AND ST_INTERSECTS(a.$GEOMETRIC_COLUMN_BU, b.$GEOMETRIC_COLUMN_BU) AND a.$ID_COLUMN_BU <> b.$ID_COLUMN_BU) AS t - """.toString() + """) - datasource.createIndex(buildingIntersection,"id_build_a") - datasource.createIndex(buildingIntersection,"id_build_b") + datasource.createIndex(buildingIntersection, "id_build_a") + datasource.createIndex(buildingIntersection, "id_build_b") - // Each free facade is stored TWICE (an intersection could be seen from the point of view of two - // buildings). - // Facades of isolated buildings are unioned to free facades of non-isolated buildings which are - // unioned to free intersection facades. To each facade is affected its corresponding free height - datasource """ + // Each free facade is stored TWICE (an intersection could be seen from the point of view of two + // buildings). + // Facades of isolated buildings are unioned to free facades of non-isolated buildings which are + // unioned to free intersection facades. 
To each facade is affected its corresponding free height + datasource.execute( """ DROP TABLE IF EXISTS $buildingFree; CREATE TABLE $buildingFree (the_geom GEOMETRY, z_max double precision, z_min double precision) AS (SELECT ST_TOMULTISEGMENTS(a.the_geom) as the_geom, a.$HEIGHT_WALL as z_max, 0 as z_min @@ -476,110 +497,116 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin ST_UNION(ST_ACCUM(b.the_geom)))) as the_geom, a.$HEIGHT_WALL as z_max, 0 as z_min FROM $building a, $buildingIntersection b WHERE a.$ID_COLUMN_BU=b.ID_build_a and st_isempty(b.the_geom)=false GROUP BY b.ID_build_a) UNION ALL (SELECT ST_TOMULTISEGMENTS(the_geom) - AS the_geom, z_max, z_min FROM $buildingIntersection WHERE ID_build_a= ${listLayersBottom[i]}, " + - "0, ${listLayersBottom[i] - listLayersBottom[i - 1]}-" + - "GREATEST(${listLayersBottom[i]}-z_max,0)" + - "-GREATEST(z_min-${listLayersBottom[i - 1]},0))) AS ${names[i - 1]} ," - } + AS the_geom, z_max, z_min FROM $buildingIntersection WHERE ID_build_a= ${listLayersBottom[i]}, " + + "0, ${listLayersBottom[i] - listLayersBottom[i - 1]}-" + + "GREATEST(${listLayersBottom[i]}-z_max,0)" + + "-GREATEST(z_min-${listLayersBottom[i - 1]},0))) AS ${names[i - 1]} ," + } - // ...and for the final level - names.add(getDistribIndicName(BASE_NAME, 'H', listLayersBottom[listLayersBottom.size() - 1], null)) - layerQuery += "CASEWHEN(z_max >= ${listLayersBottom[listLayersBottom.size() - 1]}, " + - "z_max-GREATEST(z_min,${listLayersBottom[listLayersBottom.size() - 1]}), 0) " + - "AS ${names[listLayersBottom.size() - 1]} FROM $buildingFree" - datasource layerQuery.toString() - - // Names and types of all columns are then useful when calling sql queries - def namesAndType = names.inject([]) { result, iter -> - result += " $iter double precision" - }.join(",") - def onlyNamesB = names.inject([]) { result, iter -> - result += "b.$iter" - }.join(",") - def onlyNames = names.join(",") - - datasource.createSpatialIndex(buildingLayer,"the_geom") - - // Intersections between free facades and rsu geometries are calculated - datasource """ DROP TABLE IF EXISTS $buildingFreeExpl; + // ...and for the final level + names.add(getDistribIndicName(BASE_NAME, 'H', listLayersBottom[listLayersBottom.size() - 1], null)) + layerQuery += "CASEWHEN(z_max >= ${listLayersBottom[listLayersBottom.size() - 1]}, " + + "z_max-GREATEST(z_min,${listLayersBottom[listLayersBottom.size() - 1]}), 0) " + + "AS ${names[listLayersBottom.size() - 1]} FROM $buildingFree" + datasource layerQuery.toString() + + // Names and types of all columns are then useful when calling sql queries + def namesAndType = names.inject([]) { result, iter -> + result += " $iter double precision" + }.join(",") + def onlyNamesB = names.inject([]) { result, iter -> + result += "b.$iter" + }.join(",") + def onlyNames = names.join(",") + + datasource.createSpatialIndex(buildingLayer, "the_geom") + + // Intersections between free facades and rsu geometries are calculated + datasource.execute( """ DROP TABLE IF EXISTS $buildingFreeExpl; CREATE TABLE $buildingFreeExpl($ID_COLUMN_RSU INTEGER, the_geom GEOMETRY, $namesAndType) AS (SELECT a.$ID_COLUMN_RSU, ST_INTERSECTION(a.$GEOMETRIC_COLUMN_RSU, ST_TOMULTILINE(b.the_geom)), ${onlyNamesB} FROM $rsu a, $buildingLayer b WHERE a.$GEOMETRIC_COLUMN_RSU && b.the_geom - AND ST_INTERSECTS(a.$GEOMETRIC_COLUMN_RSU, b.the_geom))""".toString() + AND ST_INTERSECTS(a.$GEOMETRIC_COLUMN_RSU, b.the_geom))""") - // Intersections facades are exploded to multisegments - datasource """DROP 
TABLE IF EXISTS $rsuInter; + // Intersections facades are exploded to multisegments + datasource.execute( """DROP TABLE IF EXISTS $rsuInter; CREATE TABLE $rsuInter($ID_COLUMN_RSU INTEGER, the_geom GEOMETRY, $namesAndType) - AS (SELECT $ID_COLUMN_RSU, the_geom, ${onlyNames} FROM ST_EXPLODE('$buildingFreeExpl'))""".toString() - - - // The analysis is then performed for each direction ('numberOfDirection' / 2 because calculation - // is performed for a direction independently of the "toward") - def namesAndTypeDir = [] - def onlyNamesDir = [] - def sumNamesDir = [] - def queryColumns = [] - for (int d = 0; d < numberOfDirection / 2; d++) { - int dirDeg = d * 360 / numberOfDirection - def dirRad = toRadians(dirDeg) - int rangeDeg = 360 / numberOfDirection - def dirRadMid = dirRad + dirMedRad - def dirDegMid = dirDeg + dirMedDeg - // Define the field name for each of the directions and vertical layers - names.each { - namesAndTypeDir += " " + "${getDistribIndicName(it, 'D', dirDeg, dirDeg + rangeDeg)} double precision" - queryColumns += """CASE + AS (SELECT $ID_COLUMN_RSU, the_geom, ${onlyNames} FROM ST_EXPLODE('$buildingFreeExpl'))""") + + + // The analysis is then performed for each direction ('numberOfDirection' / 2 because calculation + // is performed for a direction independently of the "toward") + def namesAndTypeDir = [] + def onlyNamesDir = [] + def sumNamesDir = [] + def queryColumns = [] + for (int d = 0; d < numberOfDirection / 2; d++) { + int dirDeg = d * 360 / numberOfDirection + def dirRad = toRadians(dirDeg) + int rangeDeg = 360 / numberOfDirection + def dirRadMid = dirRad + dirMedRad + def dirDegMid = dirDeg + dirMedDeg + // Define the field name for each of the directions and vertical layers + names.each { + namesAndTypeDir += " " + "${getDistribIndicName(it, 'D', dirDeg, dirDeg + rangeDeg)} double precision" + queryColumns += """CASE WHEN a.azimuth-$dirRadMid>PI()/2 THEN a.$it*a.length*COS(a.azimuth-$dirRadMid-PI()/2)/2 WHEN a.azimuth-$dirRadMid<-PI()/2 THEN a.$it*a.length*COS(a.azimuth-$dirRadMid+PI()/2)/2 ELSE a.$it*a.length*ABS(SIN(a.azimuth-$dirRadMid))/2 END AS ${getDistribIndicName(it, 'D', dirDeg, dirDeg + rangeDeg)}""" - onlyNamesDir += "${it}_D${dirDeg}_${dirDeg + rangeDeg}" - sumNamesDir += "COALESCE(SUM(b.${it}_D${dirDeg}_${dirDeg + rangeDeg}), 0) " + - "AS ${getDistribIndicName(it, 'D', dirDeg, dirDeg + rangeDeg)}" + onlyNamesDir += "${it}_D${dirDeg}_${dirDeg + rangeDeg}" + sumNamesDir += "COALESCE(SUM(b.${it}_D${dirDeg}_${dirDeg + rangeDeg}), 0) " + + "AS ${getDistribIndicName(it, 'D', dirDeg, dirDeg + rangeDeg)}" + } } - } - namesAndTypeDir = namesAndTypeDir.join(",") - queryColumns = queryColumns.join(",") - onlyNamesDir = onlyNamesDir.join(",") - sumNamesDir = sumNamesDir.join(",") + namesAndTypeDir = namesAndTypeDir.join(",") + queryColumns = queryColumns.join(",") + onlyNamesDir = onlyNamesDir.join(",") + sumNamesDir = sumNamesDir.join(",") - def query = "DROP TABLE IF EXISTS $finalIndicator; " + - "CREATE TABLE $finalIndicator AS SELECT a.$ID_COLUMN_RSU," + queryColumns + - " FROM (SELECT $ID_COLUMN_RSU, CASE WHEN ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) >= PI()" + - "THEN ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) - PI() " + - "ELSE ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) END AS azimuth," + - " ST_LENGTH(the_geom) AS length, ${onlyNames} FROM $rsuInter) a" + def query = "DROP TABLE IF EXISTS $finalIndicator; " + + "CREATE TABLE $finalIndicator AS SELECT a.$ID_COLUMN_RSU," + queryColumns + + " FROM 
(SELECT $ID_COLUMN_RSU, CASE WHEN ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) >= PI()" + + "THEN ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) - PI() " + + "ELSE ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom)) END AS azimuth," + + " ST_LENGTH(the_geom) AS length, ${onlyNames} FROM $rsuInter) a" - datasource query.toString() + datasource query.toString() - datasource.createIndex(finalIndicator,ID_COLUMN_RSU) - // Sum area at RSU scale and fill null values with 0 - datasource """ + datasource.createIndex(finalIndicator, ID_COLUMN_RSU) + // Sum area at RSU scale and fill null values with 0 + datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE ${outputTableName} AS SELECT a.$ID_COLUMN_RSU, ${sumNamesDir} FROM $rsu a LEFT JOIN $finalIndicator b ON a.$ID_COLUMN_RSU = b.$ID_COLUMN_RSU - GROUP BY a.$ID_COLUMN_RSU""".toString() + GROUP BY a.$ID_COLUMN_RSU""") - // Remove all temporary tables created - datasource """DROP TABLE IF EXISTS $buildingIntersection, $buildingIntersectionExpl, - $buildingFree, $buildingFreeExpl, $buildingLayer, $rsuInter, $finalIndicator;""".toString() + // Remove all temporary tables created + datasource.dropTable(buildingIntersection, buildingIntersectionExpl, buildingFree, buildingLayer, + buildingFreeExpl, rsuInter, finalIndicator) + } + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the projected facade area distribution at RSU scale", e) + } finally { + datasource.dropTable(buildingIntersection, buildingIntersectionExpl, buildingFree, buildingLayer, + buildingFreeExpl, rsuInter, finalIndicator) } - return outputTableName } /** @@ -594,15 +621,13 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin * * @author Jérémy Bernard */ -String getDistribIndicName(String base_name, String var_type, Integer lev_bot, Integer lev_up){ +String getDistribIndicName(String base_name, String var_type, Integer lev_bot, Integer lev_up) { String name - if (lev_up == null){ + if (lev_up == null) { name = "${base_name}_${var_type}${lev_bot}" - } - else{ + } else { name = "${base_name}_${var_type}${lev_bot}_${lev_up}" } - return name } @@ -633,17 +658,7 @@ String getDistribIndicName(String base_name, String var_type, Integer lev_bot, I */ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String building, List listLayersBottom = [0, 10, 20, 30, 40, 50], String prefixName, - boolean density = true) { - def GEOMETRIC_COLUMN_RSU = "the_geom" - def GEOMETRIC_COLUMN_BU = "the_geom" - def ID_COLUMN_RSU = "id_rsu" - def ID_COLUMN_BU = "id_build" - def HEIGHT_WALL = "height_wall" - def HEIGHT_ROOF = "height_roof" - def BASE_NAME = "roof_area_distribution" - - debug "Executing RSU roof area distribution (and optionally roof density)" - + boolean density = true) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def buildRoofSurfIni = postfix "build_roof_surf_ini" @@ -651,15 +666,25 @@ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String buildi def buildVertRoofAll = postfix "buildVertRoofAll" def buildRoofSurfTot = postfix "build_roof_surf_tot" def optionalTempo = postfix "optionalTempo" + try { + def GEOMETRIC_COLUMN_RSU = "the_geom" + def GEOMETRIC_COLUMN_BU = "the_geom" + def ID_COLUMN_RSU = "id_rsu" + def ID_COLUMN_BU = "id_build" + def HEIGHT_WALL = "height_wall" + def HEIGHT_ROOF = "height_roof" + def BASE_NAME = "roof_area_distribution" - 
datasource.createSpatialIndex(rsu,"the_geom") - datasource.createIndex(rsu,"id_rsu") + debug "Executing RSU roof area distribution (and optionally roof density)" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + datasource.createSpatialIndex(rsu, "the_geom") + datasource.createIndex(rsu, "id_rsu") - // Vertical and non-vertical (tilted and horizontal) roof areas are calculated - datasource """ + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + + // Vertical and non-vertical (tilted and horizontal) roof areas are calculated + datasource.execute(""" DROP TABLE IF EXISTS $buildRoofSurfIni; CREATE TABLE $buildRoofSurfIni AS SELECT $GEOMETRIC_COLUMN_BU, @@ -672,17 +697,17 @@ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String buildi POWER(POWER(ST_AREA($GEOMETRIC_COLUMN_BU),2)+4*ST_AREA($GEOMETRIC_COLUMN_BU)*POWER($HEIGHT_ROOF-$HEIGHT_WALL,2),0.5) AS non_vertical_roof_area, POWER(ST_AREA($GEOMETRIC_COLUMN_BU), 0.5)*($HEIGHT_ROOF-$HEIGHT_WALL) AS vertical_roof_area FROM $building; - """.toString() + """) - // Indexes and spatial indexes are created on rsu and building Tables - datasource """CREATE SPATIAL INDEX IF NOT EXISTS ids_ina ON $buildRoofSurfIni ($GEOMETRIC_COLUMN_BU); - CREATE INDEX IF NOT EXISTS id_ina ON $buildRoofSurfIni ($ID_COLUMN_BU);""".toString() + // Indexes and spatial indexes are created on rsu and building Tables + datasource.execute("""CREATE SPATIAL INDEX IF NOT EXISTS ids_ina ON $buildRoofSurfIni ($GEOMETRIC_COLUMN_BU); + CREATE INDEX IF NOT EXISTS id_ina ON $buildRoofSurfIni ($ID_COLUMN_BU);""") - // Vertical roofs that are potentially in contact with the facade of a building neighbor are identified - // and the corresponding area is estimated (only if the building roof does not overpass the building - // wall of the neighbor) - datasource """ + // Vertical roofs that are potentially in contact with the facade of a building neighbor are identified + // and the corresponding area is estimated (only if the building roof does not overpass the building + // wall of the neighbor) + datasource.execute(""" DROP TABLE IF EXISTS $buildVertRoofInter; CREATE TABLE $buildVertRoofInter(id_build INTEGER, vert_roof_to_remove DOUBLE) AS ( SELECT b.$ID_COLUMN_BU, @@ -694,15 +719,15 @@ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String buildi AND ST_INTERSECTS(a.$GEOMETRIC_COLUMN_BU, b.$GEOMETRIC_COLUMN_BU) AND a.$ID_COLUMN_BU <> b.$ID_COLUMN_BU AND a.z_min >= b.z_max - GROUP BY b.$ID_COLUMN_BU);""".toString() + GROUP BY b.$ID_COLUMN_BU);""") - // Indexes and spatial indexes are created on rsu and building Tables - datasource """CREATE INDEX IF NOT EXISTS id_bu ON $buildVertRoofInter ($ID_COLUMN_BU);""".toString() + // Indexes and spatial indexes are created on rsu and building Tables + datasource.execute("""CREATE INDEX IF NOT EXISTS id_bu ON $buildVertRoofInter ($ID_COLUMN_BU);""") - // Vertical roofs that are potentially in contact with the facade of a building neighbor are identified - // and the corresponding area is estimated (only if the building roof does not overpass the building wall - // of the neighbor) - datasource """ + // Vertical roofs that are potentially in contact with the facade of a building neighbor are identified + // and the corresponding area is estimated (only if the building roof does not overpass the building wall + // of the neighbor) + datasource.execute(""" DROP TABLE IF EXISTS 
$buildVertRoofAll; CREATE TABLE $buildVertRoofAll( id_build INTEGER, @@ -730,19 +755,19 @@ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String buildi IFNULL(b.vert_roof_to_remove,0) FROM $buildRoofSurfIni a LEFT JOIN $buildVertRoofInter b - ON a.$ID_COLUMN_BU=b.$ID_COLUMN_BU);""".toString() + ON a.$ID_COLUMN_BU=b.$ID_COLUMN_BU);""") - // Indexes and spatial indexes are created on rsu and building Tables - datasource """CREATE SPATIAL INDEX IF NOT EXISTS ids_bu ON $buildVertRoofAll (the_geom); + // Indexes and spatial indexes are created on rsu and building Tables + datasource.execute("""CREATE SPATIAL INDEX IF NOT EXISTS ids_bu ON $buildVertRoofAll (the_geom); CREATE INDEX IF NOT EXISTS id_bu ON $buildVertRoofAll (id_build); - CREATE INDEX IF NOT EXISTS id_rsu ON $buildVertRoofAll (id_rsu);""".toString() + CREATE INDEX IF NOT EXISTS id_rsu ON $buildVertRoofAll (id_rsu);""") - //TODO : PEUT-ETRE MIEUX VAUT-IL FAIRE L'INTERSECTION À PART POUR ÉVITER DE LA FAIRE 2 FOIS ICI ? + //TODO : maybe it would be better to compute the intersection separately to avoid computing it twice here? - // Update the roof areas (vertical and non vertical) taking into account the vertical roofs shared with - // the neighbor facade and the roof surfaces that are not in the RSU. Note that half of the facade - // are considered as vertical roofs, the other to "normal roof". - datasource """ + // Update the roof areas (vertical and non vertical) taking into account the vertical roofs shared with + // the neighbor facade and the roof surfaces that are not in the RSU. Note that half of the facade + // is considered as vertical roof, the other half as "normal" roof. + datasource.execute(""" DROP TABLE IF EXISTS $buildRoofSurfTot; CREATE TABLE $buildRoofSurfTot( id_build INTEGER, @@ -766,67 +791,70 @@ String roofAreaDistribution(JdbcDataSource datasource, String rsu, String buildi a.building_total_facade_length)) FROM $buildVertRoofAll a, $rsu b WHERE a.id_rsu=b.$ID_COLUMN_RSU - GROUP BY b.$GEOMETRIC_COLUMN_RSU, a.id_build, a.id_rsu, a.z_max, a.z_min, a.delta_h);""".toString() + GROUP BY b.$GEOMETRIC_COLUMN_RSU, a.id_build, a.id_rsu, a.z_max, a.z_min, a.delta_h);""") - // The roof area is calculated for each level except the last one (> 50 m in the default case) - def finalQuery = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT b.id_rsu, " - def nonVertQuery = "" - def vertQuery = "" - for (i in 1..(listLayersBottom.size() - 1)) { - nonVertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= ${listLayersBottom[i - 1]}, 0, CASEWHEN(" + - "a.z_max <= ${listLayersBottom[i]}, CASEWHEN(a.delta_h=0, a.non_vertical_roof_area, " + - "a.non_vertical_roof_area*(a.z_max-GREATEST(${listLayersBottom[i - 1]},a.z_min))/a.delta_h), " + - "CASEWHEN(a.z_min < ${listLayersBottom[i]}, a.non_vertical_roof_area*(${listLayersBottom[i]}-" + - "GREATEST(${listLayersBottom[i - 1]},a.z_min))/a.delta_h, 0)))),0) AS " + - "${getDistribIndicName('non_vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])}," - vertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= ${listLayersBottom[i - 1]}, 0, CASEWHEN(" + - "a.z_max <= ${listLayersBottom[i]}, CASEWHEN(a.delta_h=0, 0, " + - "a.vertical_roof_area*POWER((a.z_max-GREATEST(${listLayersBottom[i - 1]}," + - "a.z_min))/a.delta_h, 2)), CASEWHEN(a.z_min < ${listLayersBottom[i]}, " + - "CASEWHEN(a.z_min>${listLayersBottom[i - 1]}, a.vertical_roof_area*(1-" + - "POWER((a.z_max-${listLayersBottom[i]})/a.delta_h,2)),a.vertical_roof_area*(" +
"POWER((a.z_max-${listLayersBottom[i - 1]})/a.delta_h,2)-POWER((a.z_max-${listLayersBottom[i]})/" + - "a.delta_h,2))), 0)))),0) AS ${getDistribIndicName('vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])}," - } - // The roof area is calculated for the last level (> 50 m in the default case) - def valueLastLevel = listLayersBottom[listLayersBottom.size() - 1] - nonVertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= $valueLastLevel, 0, CASEWHEN(a.delta_h=0, a.non_vertical_roof_area, " + - "a.non_vertical_roof_area*(a.z_max-GREATEST($valueLastLevel,a.z_min))/a.delta_h))),0) AS " + - "${getDistribIndicName('non_vert_roof_area', 'H', valueLastLevel, null)}," - vertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= $valueLastLevel, 0, CASEWHEN(a.delta_h=0, a.vertical_roof_area, " + - "a.vertical_roof_area*(a.z_max-GREATEST($valueLastLevel,a.z_min))/a.delta_h))),0) " + - "${getDistribIndicName('vert_roof_area', 'H', valueLastLevel, null)}," - - def endQuery = """ FROM $buildRoofSurfTot a RIGHT JOIN $rsu b + // The roof area is calculated for each level except the last one (> 50 m in the default case) + def finalQuery = "DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT b.id_rsu, " + def nonVertQuery = "" + def vertQuery = "" + for (i in 1..(listLayersBottom.size() - 1)) { + nonVertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= ${listLayersBottom[i - 1]}, 0, CASEWHEN(" + + "a.z_max <= ${listLayersBottom[i]}, CASEWHEN(a.delta_h=0, a.non_vertical_roof_area, " + + "a.non_vertical_roof_area*(a.z_max-GREATEST(${listLayersBottom[i - 1]},a.z_min))/a.delta_h), " + + "CASEWHEN(a.z_min < ${listLayersBottom[i]}, a.non_vertical_roof_area*(${listLayersBottom[i]}-" + + "GREATEST(${listLayersBottom[i - 1]},a.z_min))/a.delta_h, 0)))),0) AS " + + "${getDistribIndicName('non_vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])}," + vertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= ${listLayersBottom[i - 1]}, 0, CASEWHEN(" + + "a.z_max <= ${listLayersBottom[i]}, CASEWHEN(a.delta_h=0, 0, " + + "a.vertical_roof_area*POWER((a.z_max-GREATEST(${listLayersBottom[i - 1]}," + + "a.z_min))/a.delta_h, 2)), CASEWHEN(a.z_min < ${listLayersBottom[i]}, " + + "CASEWHEN(a.z_min>${listLayersBottom[i - 1]}, a.vertical_roof_area*(1-" + + "POWER((a.z_max-${listLayersBottom[i]})/a.delta_h,2)),a.vertical_roof_area*(" + + "POWER((a.z_max-${listLayersBottom[i - 1]})/a.delta_h,2)-POWER((a.z_max-${listLayersBottom[i]})/" + + "a.delta_h,2))), 0)))),0) AS ${getDistribIndicName('vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])}," + } + // The roof area is calculated for the last level (> 50 m in the default case) + def valueLastLevel = listLayersBottom[listLayersBottom.size() - 1] + nonVertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= $valueLastLevel, 0, CASEWHEN(a.delta_h=0, a.non_vertical_roof_area, " + + "a.non_vertical_roof_area*(a.z_max-GREATEST($valueLastLevel,a.z_min))/a.delta_h))),0) AS " + + "${getDistribIndicName('non_vert_roof_area', 'H', valueLastLevel, null)}," + vertQuery += " COALESCE(SUM(CASEWHEN(a.z_max <= $valueLastLevel, 0, CASEWHEN(a.delta_h=0, a.vertical_roof_area, " + + "a.vertical_roof_area*(a.z_max-GREATEST($valueLastLevel,a.z_min))/a.delta_h))),0) " + + "${getDistribIndicName('vert_roof_area', 'H', valueLastLevel, null)}," + + def endQuery = """ FROM $buildRoofSurfTot a RIGHT JOIN $rsu b ON a.id_rsu = b.id_rsu GROUP BY b.id_rsu;""" - datasource finalQuery.toString() + nonVertQuery.toString() + vertQuery[0..-2].toString() + endQuery.toString() + 
datasource.execute(finalQuery.toString() + nonVertQuery.toString() + vertQuery[0..-2].toString() + endQuery.toString()) - // Calculate the roof density if needed - if (density) { - def optionalQuery = "ALTER TABLE $outputTableName RENAME TO $optionalTempo;" + - "CREATE INDEX IF NOT EXISTS id ON $optionalTempo USING BTREE($ID_COLUMN_RSU);" + - "DROP TABLE IF EXISTS $outputTableName; " + - "CREATE TABLE $outputTableName AS SELECT a.*, " - def optionalNonVert = "(" - def optionalVert = "(" + // Calculate the roof density if needed + if (density) { + def optionalQuery = "ALTER TABLE $outputTableName RENAME TO $optionalTempo;" + + "CREATE INDEX IF NOT EXISTS id ON $optionalTempo USING BTREE($ID_COLUMN_RSU);" + + "DROP TABLE IF EXISTS $outputTableName; " + + "CREATE TABLE $outputTableName AS SELECT a.*, " + def optionalNonVert = "(" + def optionalVert = "(" - for (i in 1..(listLayersBottom.size() - 1)) { - optionalNonVert += " a.${getDistribIndicName('non_vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])} + " - optionalVert += "a.${getDistribIndicName('vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])} + " + for (i in 1..(listLayersBottom.size() - 1)) { + optionalNonVert += " a.${getDistribIndicName('non_vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])} + " + optionalVert += "a.${getDistribIndicName('vert_roof_area', 'H', listLayersBottom[i - 1], listLayersBottom[i])} + " + } + optionalNonVert += "a.${getDistribIndicName('non_vert_roof_area', 'H', valueLastLevel, null)}) / ST_AREA(b.$GEOMETRIC_COLUMN_RSU)" + optionalVert += "a.${getDistribIndicName('vert_roof_area', 'H', valueLastLevel, null)}) / ST_AREA(b.$GEOMETRIC_COLUMN_RSU)" + optionalQuery += "$optionalNonVert AS NON_VERT_ROOF_DENSITY, $optionalVert AS VERT_ROOF_DENSITY" + + " FROM $optionalTempo a RIGHT JOIN $rsu b ON a.$ID_COLUMN_RSU = b.$ID_COLUMN_RSU;" + + datasource.execute(optionalQuery) } - optionalNonVert += "a.${getDistribIndicName('non_vert_roof_area', 'H', valueLastLevel, null)}) / ST_AREA(b.$GEOMETRIC_COLUMN_RSU)" - optionalVert += "a.${getDistribIndicName('vert_roof_area', 'H', valueLastLevel, null)}) / ST_AREA(b.$GEOMETRIC_COLUMN_RSU)" - optionalQuery += "$optionalNonVert AS VERT_ROOF_DENSITY, $optionalVert AS NON_VERT_ROOF_DENSITY" + - " FROM $optionalTempo a RIGHT JOIN $rsu b ON a.$ID_COLUMN_RSU = b.$ID_COLUMN_RSU;" - datasource optionalQuery.toString() + datasource.dropTable(buildRoofSurfIni, buildVertRoofInter, buildVertRoofAll, buildRoofSurfTot, optionalTempo) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the roof area distribution at RSU scale", e) + } finally { + datasource.dropTable(buildRoofSurfIni, buildVertRoofInter, buildVertRoofAll, buildRoofSurfTot, optionalTempo) } - - datasource """DROP TABLE IF EXISTS $buildRoofSurfIni, $buildVertRoofInter, - $buildVertRoofAll, $buildRoofSurfTot, $optionalTempo;""".toString() - - return outputTableName } /** @@ -870,54 +898,58 @@ String effectiveTerrainRoughnessLength(JdbcDataSource datasource, String rsuTabl String projectedFacadeAreaName, String geometricMeanBuildingHeightName, List listLayersBottom = [0, 10, 20, 30, 40, 50], - int numberOfDirection = 12, String prefixName) { - def GEOMETRIC_COLUMN = "the_geom" - def ID_COLUMN_RSU = id_rsu - def BASE_NAME = "effective_terrain_roughness_length" - - debug "Executing RSU effective terrain roughness length" - - // Processes used for the indicator calculation - // Some local variables are initialized - def names = [] - // The
projection should be performed at the median of the angle interval - def dirRangeDeg = round(360 / numberOfDirection) - + int numberOfDirection = 12, String prefixName) throws Exception { // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names def lambdaTable = postfix "lambdaTable" + try { + def GEOMETRIC_COLUMN = "the_geom" + def ID_COLUMN_RSU = id_rsu + def BASE_NAME = "effective_terrain_roughness_length" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - - def layerSize = listLayersBottom.size() - // The lambda_f indicator is first calculated - def lambdaQuery = "DROP TABLE IF EXISTS $lambdaTable;" + - "CREATE TABLE $lambdaTable AS SELECT $ID_COLUMN_RSU, $geometricMeanBuildingHeightName, (" - for (int i in 1..layerSize) { - //TODO : why an array here and not a variable - names[i - 1] = "${projectedFacadeAreaName}_H${listLayersBottom[i - 1]}_${listLayersBottom[i]}" - if (i == layerSize) { - names[layerSize - 1] = - "${projectedFacadeAreaName}_H${listLayersBottom[layerSize - 1]}" - } - for (int d = 0; d < numberOfDirection / 2; d++) { - def dirDeg = d * 360 / numberOfDirection - lambdaQuery += "${names[i - 1]}_D${dirDeg}_${dirDeg + dirRangeDeg}+" + debug "Executing RSU effective terrain roughness length" + + // Processes used for the indicator calculation + // Some local variables are initialized + def names = [] + // The projection should be performed at the median of the angle interval + def dirRangeDeg = round(360 / numberOfDirection) + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + + def layerSize = listLayersBottom.size() + // The lambda_f indicator is first calculated + def lambdaQuery = "DROP TABLE IF EXISTS $lambdaTable;" + + "CREATE TABLE $lambdaTable AS SELECT $ID_COLUMN_RSU, $geometricMeanBuildingHeightName, (" + for (int i in 1..layerSize) { + //TODO : why an array here and not a variable + names[i - 1] = "${projectedFacadeAreaName}_H${listLayersBottom[i - 1]}_${listLayersBottom[i]}" + if (i == layerSize) { + names[layerSize - 1] = + "${projectedFacadeAreaName}_H${listLayersBottom[layerSize - 1]}" + } + for (int d = 0; d < numberOfDirection / 2; d++) { + def dirDeg = d * 360 / numberOfDirection + lambdaQuery += "${names[i - 1]}_D${dirDeg}_${dirDeg + dirRangeDeg}+" + } } - } - lambdaQuery = lambdaQuery[0..-2] + ")/(${numberOfDirection / 2}*ST_AREA($GEOMETRIC_COLUMN)) " + - "AS lambda_f FROM $rsuTable" - datasource lambdaQuery.toString() + lambdaQuery = lambdaQuery[0..-2] + ")/(${numberOfDirection / 2}*ST_AREA($GEOMETRIC_COLUMN)) " + + "AS lambda_f FROM $rsuTable" + datasource.execute(lambdaQuery.toString()) - // The rugosity z0 is calculated according to the indicator lambda_f (the value of indicator z0 is limited to 3 m) - datasource """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName + // The rugosity z0 is calculated according to the indicator lambda_f (the value of indicator z0 is limited to 3 m) + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT $ID_COLUMN_RSU, CASEWHEN(lambda_f < 0.15, CASEWHEN(lambda_f*$geometricMeanBuildingHeightName>3, 3,lambda_f*$geometricMeanBuildingHeightName), CASEWHEN(0.15*$geometricMeanBuildingHeightName>3,3, 0.15*$geometricMeanBuildingHeightName)) AS $BASE_NAME FROM $lambdaTable; - DROP TABLE IF EXISTS $lambdaTable""".toString() + DROP TABLE IF EXISTS $lambdaTable""") - return 
outputTableName + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the effective terrain roughness length at RSU scale", e) + } finally { + datasource.dropTable(lambdaTable) + } } /** Performs operations on the linear of road within the RSU scale objects. Note that when a road is located at @@ -945,7 +977,7 @@ String effectiveTerrainRoughnessLength(JdbcDataSource datasource, String rsuTabl * @author Jérémy Bernard */ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String roadTable, List operations, int angleRangeSize = 30, - List levelConsiderated = [0], String prefixName) { + List levelConsiderated = [0], String prefixName) throws Exception { def OPS = ["road_direction_distribution", "linear_road_density"] def GEOMETRIC_COLUMN_RSU = "the_geom" def ID_COLUMN_RSU = "id_rsu" @@ -955,10 +987,10 @@ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String r debug "Executing Operations on the linear of road" - datasource.createSpatialIndex(rsuTable,"the_geom") - datasource.createIndex(rsuTable,"id_rsu") - datasource.createSpatialIndex(roadTable,"the_geom") - datasource.createIndex(roadTable,"zindex") + datasource.createSpatialIndex(rsuTable, "the_geom") + datasource.createIndex(rsuTable, "id_rsu") + datasource.createSpatialIndex(roadTable, "the_geom") + datasource.createIndex(roadTable, "zindex") // Test whether the angleRangeSize is a divisor of 180° if (180 % angleRangeSize == 0 && 180 / angleRangeSize > 1) { @@ -981,127 +1013,132 @@ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String r def roadDistTot = postfix "roadDistTot" def roadDensTot = postfix "roadDensTot" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - - // 1. Whatever are the operations to proceed, this step is done the same way - // Only some of the roads are selected according to the level they are located - // Initialize some parameters - def ifZindex = "" - def baseFiltering = "a.$GEOMETRIC_COLUMN_RSU && b.$GEOMETRIC_COLUMN_ROAD AND " + - "ST_INTERSECTS(a.$GEOMETRIC_COLUMN_RSU,b.$GEOMETRIC_COLUMN_ROAD) " - def filtering = baseFiltering - def nameDens = [] - def nameDistrib = [] - def caseQueryDistrib = "" - def caseQueryDens = "" - - if (levelConsiderated != null) { - ifZindex = ", b.$Z_INDEX AS zindex" - filtering = "" - levelConsiderated.each { filtering += "$baseFiltering AND b.$Z_INDEX=$it OR " } - filtering = filtering[0..-4] - } - datasource.execute("""DROP TABLE IF EXISTS $roadInter; + try { + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME + + // 1. 
Whatever are the operations to proceed, this step is done the same way + // Only some of the roads are selected according to the level they are located + // Initialize some parameters + def ifZindex = "" + def baseFiltering = "a.$GEOMETRIC_COLUMN_RSU && b.$GEOMETRIC_COLUMN_ROAD AND " + + "ST_INTERSECTS(a.$GEOMETRIC_COLUMN_RSU,b.$GEOMETRIC_COLUMN_ROAD) " + def filtering = baseFiltering + def nameDens = [] + def nameDistrib = [] + def caseQueryDistrib = "" + def caseQueryDens = "" + + if (levelConsiderated != null) { + ifZindex = ", b.$Z_INDEX AS zindex" + filtering = "" + levelConsiderated.each { filtering += "$baseFiltering AND b.$Z_INDEX=$it OR " } + filtering = filtering[0..-4] + } + datasource.execute("""DROP TABLE IF EXISTS $roadInter; CREATE TABLE $roadInter AS SELECT a.$ID_COLUMN_RSU AS id_rsu, ST_AREA(a.$GEOMETRIC_COLUMN_RSU) AS rsu_area, ST_INTERSECTION(a.$GEOMETRIC_COLUMN_RSU, b.$GEOMETRIC_COLUMN_ROAD) AS the_geom $ifZindex FROM $rsuTable a, $roadTable b - WHERE $filtering;""".toString()) - - // If all roads are considered at the same level... - if (!levelConsiderated) { - nameDens.add("linear_road_density") - caseQueryDens = "SUM(ST_LENGTH(the_geom))/rsu_area AS linear_road_density " - for (int d = angleRangeSize; d <= 180; d += angleRangeSize) { - caseQueryDistrib += "SUM(CASEWHEN(azimuth>=${d - angleRangeSize} AND azimuth<$d, length, 0)) AS " + - "${getRoadDirIndic(d, angleRangeSize, null)}," - nameDistrib.add(getRoadDirIndic(d, angleRangeSize, null)) - } - } - // If only certain levels are considered independently - else { - ifZindex = ", zindex " - levelConsiderated.each { lev -> - caseQueryDens += "SUM(CASEWHEN(zindex = $lev, ST_LENGTH(the_geom), 0))/rsu_area " + - "AS linear_road_density_h${lev.toString().replaceAll('-', 'minus')}," - nameDens.add("linear_road_density_h${lev.toString().replaceAll('-', 'minus')}") + WHERE $filtering;""") + + // If all roads are considered at the same level... + if (!levelConsiderated) { + nameDens.add("linear_road_density") + caseQueryDens = "SUM(ST_LENGTH(the_geom))/rsu_area AS linear_road_density " for (int d = angleRangeSize; d <= 180; d += angleRangeSize) { - caseQueryDistrib += "SUM(CASEWHEN(azimuth>=${d - angleRangeSize} AND azimuth<$d AND " + - "zindex = $lev, length, 0)) AS " + - "${getRoadDirIndic(d, angleRangeSize, lev)}," - nameDistrib.add(getRoadDirIndic(d, angleRangeSize, lev)) + caseQueryDistrib += "SUM(CASEWHEN(azimuth>=${d - angleRangeSize} AND azimuth<$d, length, 0)) AS " + + "${getRoadDirIndic(d, angleRangeSize, null)}," + nameDistrib.add(getRoadDirIndic(d, angleRangeSize, null)) + } + } + // If only certain levels are considered independently + else { + ifZindex = ", zindex " + levelConsiderated.each { lev -> + caseQueryDens += "SUM(CASEWHEN(zindex = $lev, ST_LENGTH(the_geom), 0))/rsu_area " + + "AS linear_road_density_h${lev.toString().replaceAll('-', 'minus')}," + nameDens.add("linear_road_density_h${lev.toString().replaceAll('-', 'minus')}") + for (int d = angleRangeSize; d <= 180; d += angleRangeSize) { + caseQueryDistrib += "SUM(CASEWHEN(azimuth>=${d - angleRangeSize} AND azimuth<$d AND " + + "zindex = $lev, length, 0)) AS " + + "${getRoadDirIndic(d, angleRangeSize, lev)}," + nameDistrib.add(getRoadDirIndic(d, angleRangeSize, lev)) + } } } - } - // 2. 
Depending on the operations to proceed, the queries executed during this step will differ - // If the road direction distribution is calculated, explode the roads into segments in order to calculate - // their length for each azimuth range - if (operations.contains("road_direction_distribution")) { - def queryExpl = "DROP TABLE IF EXISTS $roadExpl;" + - "CREATE TABLE $roadExpl AS SELECT id_rsu, the_geom, " + - "CASEWHEN(ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom))>=pi()," + - "ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(the_geom), " + - "ST_ENDPOINT(the_geom))))-180," + - "ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(the_geom), " + - "ST_ENDPOINT(the_geom))))) AS azimuth," + - "ST_LENGTH(the_geom) AS length $ifZindex " + - "FROM ST_EXPLODE('(SELECT ST_TOMULTISEGMENTS(the_geom)" + - " AS the_geom, id_rsu $ifZindex FROM $roadInter)');" - // Calculate the road direction for each direction and optionally level - def queryDistrib = queryExpl + "CREATE TABLE $roadDistrib AS SELECT id_rsu, " + - caseQueryDistrib[0..-2] + - " FROM $roadExpl GROUP BY id_rsu;" + - "CREATE INDEX IF NOT EXISTS id_d ON $roadDistrib (id_rsu);" + - "DROP TABLE IF EXISTS $roadDistTot; CREATE TABLE $roadDistTot($ID_COLUMN_RSU INTEGER," + - "${nameDistrib.join(" double precision,")} double precision) AS (SELECT a.$ID_COLUMN_RSU," + - "COALESCE(b.${nameDistrib.join(",0),COALESCE(b.")},0) " + - "FROM $rsuTable a LEFT JOIN $roadDistrib b ON a.$ID_COLUMN_RSU=b.id_rsu);" - datasource queryDistrib.toString() - - if (!operations.contains("linear_road_density")) { - datasource """DROP TABLE IF EXISTS $outputTableName; - ALTER TABLE $roadDistTot RENAME TO $outputTableName""".toString() + // 2. Depending on the operations to proceed, the queries executed during this step will differ + // If the road direction distribution is calculated, explode the roads into segments in order to calculate + // their length for each azimuth range + if (operations.contains("road_direction_distribution")) { + def queryExpl = "DROP TABLE IF EXISTS $roadExpl;" + + "CREATE TABLE $roadExpl AS SELECT id_rsu, the_geom, " + + "CASEWHEN(ST_AZIMUTH(ST_STARTPOINT(the_geom), ST_ENDPOINT(the_geom))>=pi()," + + "ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(the_geom), " + + "ST_ENDPOINT(the_geom))))-180," + + "ROUND(DEGREES(ST_AZIMUTH(ST_STARTPOINT(the_geom), " + + "ST_ENDPOINT(the_geom))))) AS azimuth," + + "ST_LENGTH(the_geom) AS length $ifZindex " + + "FROM ST_EXPLODE('(SELECT ST_TOMULTISEGMENTS(the_geom)" + + " AS the_geom, id_rsu $ifZindex FROM $roadInter)');" + // Calculate the road direction for each direction and optionally level + def queryDistrib = queryExpl + "CREATE TABLE $roadDistrib AS SELECT id_rsu, " + + caseQueryDistrib[0..-2] + + " FROM $roadExpl GROUP BY id_rsu;" + + "CREATE INDEX IF NOT EXISTS id_d ON $roadDistrib (id_rsu);" + + "DROP TABLE IF EXISTS $roadDistTot; CREATE TABLE $roadDistTot($ID_COLUMN_RSU INTEGER," + + "${nameDistrib.join(" double precision,")} double precision) AS (SELECT a.$ID_COLUMN_RSU," + + "COALESCE(b.${nameDistrib.join(",0),COALESCE(b.")},0) " + + "FROM $rsuTable a LEFT JOIN $roadDistrib b ON a.$ID_COLUMN_RSU=b.id_rsu);" + datasource.execute(queryDistrib) + + if (!operations.contains("linear_road_density")) { + datasource.execute( """DROP TABLE IF EXISTS $outputTableName; + ALTER TABLE $roadDistTot RENAME TO $outputTableName""") + } } - } - // If the rsu linear density should be calculated - if (operations.contains("linear_road_density")) { - String queryDensity = "DROP TABLE IF EXISTS $roadDens;" + - "CREATE TABLE $roadDens 
AS SELECT id_rsu, " + caseQueryDens[0..-2] + - " FROM $roadInter GROUP BY id_rsu;" + - "CREATE INDEX IF NOT EXISTS id_d ON $roadDens (id_rsu);" + - "DROP TABLE IF EXISTS $roadDensTot; CREATE TABLE $roadDensTot($ID_COLUMN_RSU INTEGER," + - "${nameDens.join(" double,")} double) AS (SELECT a.$ID_COLUMN_RSU," + - "COALESCE(b.${nameDens.join(",0),COALESCE(b.")},0) " + - "FROM $rsuTable a LEFT JOIN $roadDens b ON a.$ID_COLUMN_RSU=b.id_rsu)" - datasource queryDensity - if (!operations.contains("road_direction_distribution")) { - datasource """DROP TABLE IF EXISTS $outputTableName; - ALTER TABLE $roadDensTot RENAME TO $outputTableName""".toString() + // If the rsu linear density should be calculated + if (operations.contains("linear_road_density")) { + String queryDensity = "DROP TABLE IF EXISTS $roadDens;" + + "CREATE TABLE $roadDens AS SELECT id_rsu, " + caseQueryDens[0..-2] + + " FROM $roadInter GROUP BY id_rsu;" + + "CREATE INDEX IF NOT EXISTS id_d ON $roadDens (id_rsu);" + + "DROP TABLE IF EXISTS $roadDensTot; CREATE TABLE $roadDensTot($ID_COLUMN_RSU INTEGER," + + "${nameDens.join(" double,")} double) AS (SELECT a.$ID_COLUMN_RSU," + + "COALESCE(b.${nameDens.join(",0),COALESCE(b.")},0) " + + "FROM $rsuTable a LEFT JOIN $roadDens b ON a.$ID_COLUMN_RSU=b.id_rsu)" + datasource.execute(queryDensity) + if (!operations.contains("road_direction_distribution")) { + datasource.execute( """DROP TABLE IF EXISTS $outputTableName; + ALTER TABLE $roadDensTot RENAME TO $outputTableName""") + } } - } - if (operations.contains("road_direction_distribution") && - operations.contains("linear_road_density")) { - datasource """DROP TABLE if exists $outputTableName; + if (operations.contains("road_direction_distribution") && + operations.contains("linear_road_density")) { + datasource.execute( """DROP TABLE if exists $outputTableName; CREATE INDEX IF NOT EXISTS idx_$roadDistTot ON $roadDistTot (id_rsu); CREATE INDEX IF NOT EXISTS idx_$roadDensTot ON $roadDensTot (id_rsu); CREATE TABLE $outputTableName AS SELECT a.*, b.${nameDens.join(",b.")} FROM $roadDistTot a LEFT JOIN $roadDensTot b - ON a.id_rsu=b.id_rsu""".toString() + ON a.id_rsu=b.id_rsu""") + } + datasource.dropTable(roadInter, roadExpl, roadDistrib, + roadDens, roadDistTot, roadDensTot) + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the linear road operations at RSU scale", e) + } finally { + datasource.dropTable(roadInter, roadExpl, roadDistrib, + roadDens, roadDistTot, roadDensTot) } - datasource """DROP TABLE IF EXISTS $roadInter, $roadExpl, $roadDistrib, - $roadDens, $roadDistTot, $roadDensTot""".toString() - - return outputTableName - } else { - error "One of several operations are not valid." + throw new SQLException("One of several operations are not valid.") } } } else { - error "Cannot compute the indicator. The range size (angleRangeSize) should be a divisor of 180°" + throw new SQLException("Cannot compute the indicator. 
The range size (angleRangeSize) should be a divisor of 180°") } } @@ -1116,14 +1153,13 @@ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String r * * @author Jérémy Bernard */ -String getRoadDirIndic(int d, Integer angleRangeSize, Integer lev){ +String getRoadDirIndic(int d, Integer angleRangeSize, Integer lev) { String name - if(lev == null){ + if (lev == null) { name = "road_direction_distribution_d${d - angleRangeSize}_$d" - } - else{ + } else { name = "road_direction_distribution_h${lev.toString().replaceAll("-", "minus")}" + - "_d${d - angleRangeSize}_$d" + "_d${d - angleRangeSize}_$d" } return name @@ -1153,16 +1189,18 @@ String getRoadDirIndic(int d, Integer angleRangeSize, Integer lev){ * * @author Jérémy Bernard */ -String effectiveTerrainRoughnessClass(JdbcDataSource datasource, String rsu, String id_rsu, String effectiveTerrainRoughnessLength, String prefixName) { - def BASE_NAME = "effective_terrain_roughness_class" +String effectiveTerrainRoughnessClass(JdbcDataSource datasource, String rsu, String id_rsu, + String effectiveTerrainRoughnessLength, String prefixName) throws Exception { + try { + def BASE_NAME = "effective_terrain_roughness_class" - debug "Executing RSU effective terrain roughness class" + debug "Executing RSU effective terrain roughness class" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - // Based on the lookup Table of Davenport - datasource """DROP TABLE IF EXISTS $outputTableName; + // Based on the lookup Table of Davenport + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT $id_rsu, CASEWHEN($effectiveTerrainRoughnessLength<0.0 OR $effectiveTerrainRoughnessLength IS NULL, null, CASEWHEN($effectiveTerrainRoughnessLength<0.00035, 1, @@ -1171,9 +1209,11 @@ String effectiveTerrainRoughnessClass(JdbcDataSource datasource, String rsu, Str CASEWHEN($effectiveTerrainRoughnessLength<0.175, 4, CASEWHEN($effectiveTerrainRoughnessLength<0.375, 5, CASEWHEN($effectiveTerrainRoughnessLength<0.75, 6, - CASEWHEN($effectiveTerrainRoughnessLength<1.5, 7, 8)))))))) AS $BASE_NAME FROM $rsu""".toString() - - return outputTableName + CASEWHEN($effectiveTerrainRoughnessLength<1.5, 7, 8)))))))) AS $BASE_NAME FROM $rsu""") + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the effective terrain roughness classes at RSU scale", e) + } } /** @@ -1214,68 +1254,72 @@ String effectiveTerrainRoughnessClass(JdbcDataSource datasource, String rsu, Str * @author Erwan Bocher */ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, String rsu, String buContiguityColumn, - String buTotalFacadeLengthColumn, float buffDist = 10, String prefixName) { - - def GEOMETRIC_FIELD = "the_geom" - def ID_FIELD_RSU = "id_rsu" - def HEIGHT_WALL = "height_wall" - def BASE_NAME = "extended_free_facade_fraction" - - debug "Executing RSU free facade fraction (for SVF fast)" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + String buTotalFacadeLengthColumn, float buffDist = 10, String prefixName) throws Exception { // Temporary tables are created def extRsuTable = postfix "extRsu" def inclBu = postfix "inclBu" def fullInclBu = postfix "fullInclBu" def notIncBu = postfix "notIncBu" + try { + def GEOMETRIC_FIELD = "the_geom" + 
def ID_FIELD_RSU = "id_rsu" + def HEIGHT_WALL = "height_wall" + def BASE_NAME = "extended_free_facade_fraction" - datasource """DROP TABLE IF EXISTS $extRsuTable; CREATE TABLE $extRsuTable AS SELECT + debug "Executing RSU free facade fraction (for SVF fast)" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, "rsu_" + BASE_NAME + + datasource.execute( """DROP TABLE IF EXISTS $extRsuTable; CREATE TABLE $extRsuTable AS SELECT ST_BUFFER($GEOMETRIC_FIELD, $buffDist, 2) AS $GEOMETRIC_FIELD, - $ID_FIELD_RSU FROM $rsu;""".toString() + $ID_FIELD_RSU FROM $rsu;""") - // The facade area of buildings being entirely included in the RSU buffer is calculated - datasource.createSpatialIndex(extRsuTable,GEOMETRIC_FIELD) - datasource.createIndex(extRsuTable,ID_FIELD_RSU) - datasource.createSpatialIndex(building,GEOMETRIC_FIELD) + // The facade area of buildings being entirely included in the RSU buffer is calculated + datasource.createSpatialIndex(extRsuTable, GEOMETRIC_FIELD) + datasource.createIndex(extRsuTable, ID_FIELD_RSU) + datasource.createSpatialIndex(building, GEOMETRIC_FIELD) - datasource """DROP TABLE IF EXISTS $inclBu; CREATE TABLE $inclBu AS SELECT + datasource.execute( """DROP TABLE IF EXISTS $inclBu; CREATE TABLE $inclBu AS SELECT COALESCE(SUM((1-a.$buContiguityColumn)*a.$buTotalFacadeLengthColumn*a.$HEIGHT_WALL), 0) AS FAC_AREA, b.$ID_FIELD_RSU FROM $building a, $extRsuTable b WHERE a.$GEOMETRIC_FIELD && b.$GEOMETRIC_FIELD and ST_COVERS(b.$GEOMETRIC_FIELD, - a.$GEOMETRIC_FIELD) GROUP BY b.$ID_FIELD_RSU;""".toString() + a.$GEOMETRIC_FIELD) GROUP BY b.$ID_FIELD_RSU;""") - // All RSU are feeded with default value - datasource.createIndex(inclBu,ID_FIELD_RSU) - datasource.createIndex(rsu,ID_FIELD_RSU) + // All RSU are feeded with default value + datasource.createIndex(inclBu, ID_FIELD_RSU) + datasource.createIndex(rsu, ID_FIELD_RSU) - datasource """DROP TABLE IF EXISTS $fullInclBu; CREATE TABLE $fullInclBu AS SELECT + datasource.execute( """DROP TABLE IF EXISTS $fullInclBu; CREATE TABLE $fullInclBu AS SELECT COALESCE(a.FAC_AREA, 0) AS FAC_AREA, b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD, st_area(b.$GEOMETRIC_FIELD) as rsu_buff_area - FROM $inclBu a RIGHT JOIN $extRsuTable b ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""".toString() + FROM $inclBu a RIGHT JOIN $extRsuTable b ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""") - // The facade area of buildings being partially included in the RSU buffer is calculated - datasource """DROP TABLE IF EXISTS $notIncBu; CREATE TABLE $notIncBu AS SELECT + // The facade area of buildings being partially included in the RSU buffer is calculated + datasource.execute( """DROP TABLE IF EXISTS $notIncBu; CREATE TABLE $notIncBu AS SELECT COALESCE(SUM(ST_LENGTH(ST_INTERSECTION(ST_TOMULTILINE(a.$GEOMETRIC_FIELD), b.$GEOMETRIC_FIELD))*a.$HEIGHT_WALL), 0) AS FAC_AREA, b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD FROM $building a, $extRsuTable b - WHERE a.$GEOMETRIC_FIELD && b.$GEOMETRIC_FIELD and ST_OVERLAPS(b.$GEOMETRIC_FIELD, a.$GEOMETRIC_FIELD) GROUP BY b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD;""".toString() + WHERE a.$GEOMETRIC_FIELD && b.$GEOMETRIC_FIELD and ST_OVERLAPS(b.$GEOMETRIC_FIELD, a.$GEOMETRIC_FIELD) GROUP BY b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD;""") - // The facade fraction is calculated - datasource.createIndex(notIncBu,ID_FIELD_RSU) - datasource.createIndex(fullInclBu,ID_FIELD_RSU) + // The facade fraction is calculated + datasource.createIndex(notIncBu, ID_FIELD_RSU) + datasource.createIndex(fullInclBu, ID_FIELD_RSU) - datasource """DROP 
TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS + datasource.execute( """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT COALESCE((a.FAC_AREA + b.FAC_AREA) /(a.FAC_AREA + b.FAC_AREA + a.rsu_buff_area), a.FAC_AREA / (a.FAC_AREA + a.rsu_buff_area)) AS $BASE_NAME, a.$ID_FIELD_RSU, a.$GEOMETRIC_FIELD FROM $fullInclBu a LEFT JOIN $notIncBu b - ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""".toString() - - // Drop intermediate tables - datasource "DROP TABLE IF EXISTS $extRsuTable, $inclBu, $fullInclBu, $notIncBu;".toString() - - return outputTableName + ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""") + + // Drop intermediate tables + datasource.execute( "DROP TABLE IF EXISTS $extRsuTable, $inclBu, $fullInclBu, $notIncBu;") + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the extended free facade fractions at RSU scale", e) + } finally { + datasource.dropTable(extRsuTable, inclBu, fullInclBu, notIncBu) + } } /** @@ -1311,34 +1355,35 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St */ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_zone, String building, String road, String water, String vegetation, - String impervious,String rail, String prefixName) { + String impervious, String rail, String prefixName) throws Exception { //All table names cannot be null or empty if (!building && !road && !water && !vegetation && !impervious) { - return + throw new IllegalArgumentException("Cannot compute the smallest commun geometry on null input") } - def BASE_NAME = "RSU_SMALLEST_COMMUN_GEOMETRY" - - debug "Compute the smallest geometries" - - //To avoid column name duplication - def ID_COLUMN_NAME = postfix "id" - - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - - if (zone && datasource.hasTable(zone)) { - datasource.createIndex(zone,id_zone) - datasource.createSpatialIndex(zone,"the_geom") - def tablesToMerge = [:] - tablesToMerge += ["$zone": "select ST_ExteriorRing(the_geom) as the_geom, ${id_zone} from $zone"] - if (road && datasource.hasTable(road)&& !datasource.isEmpty(road)) { - debug "Preparing table : $road" - datasource.createSpatialIndex(road,"the_geom") - //Separate road features according the zindex - def roadTable_zindex0_buffer = postfix "road_zindex0_buffer" - def road_tmp = postfix "road_zindex0" - - datasource """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; + try { + def BASE_NAME = "RSU_SMALLEST_COMMUN_GEOMETRY" + + debug "Compute the smallest geometries" + + //To avoid column name duplication + def ID_COLUMN_NAME = postfix "id" + + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME + + if (zone && datasource.hasTable(zone)) { + datasource.createIndex(zone, id_zone) + datasource.createSpatialIndex(zone, "the_geom") + def tablesToMerge = [:] + tablesToMerge += ["$zone": "select ST_ExteriorRing(the_geom) as the_geom, ${id_zone} from $zone"] + if (road && datasource.hasTable(road) && !datasource.isEmpty(road)) { + debug "Preparing table : $road" + datasource.createSpatialIndex(road, "the_geom") + //Separate road features according the zindex + def roadTable_zindex0_buffer = postfix "road_zindex0_buffer" + def road_tmp = postfix "road_zindex0" + + datasource.execute( """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; CREATE TABLE $roadTable_zindex0_buffer as SELECT 
ST_CollectionExtract(st_intersection(a.the_geom,b.the_geom),2) AS the_geom, a.WIDTH, b.${id_zone} FROM $road as a, $zone AS b WHERE a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) and a.ZINDEX=0 ; @@ -1347,17 +1392,17 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ ${id_zone} FROM $roadTable_zindex0_buffer AS a group by ${id_zone} ; DROP TABLE IF EXISTS $roadTable_zindex0_buffer; - """.toString() - tablesToMerge += ["$road_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $road_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + """) + tablesToMerge += ["$road_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $road_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - if (rail && datasource.hasTable(rail) && !datasource.isEmpty(rail)) { - debug "Preparing table : $rail" - datasource.createSpatialIndex(rail,"the_geom") - //Separate rail features according the zindex - def railTable_zindex0_buffer = postfix "rail_zindex0_buffer" - def rail_tmp = postfix "rail_zindex0" - datasource """DROP TABLE IF EXISTS $railTable_zindex0_buffer, $rail_tmp; + if (rail && datasource.hasTable(rail) && !datasource.isEmpty(rail)) { + debug "Preparing table : $rail" + datasource.createSpatialIndex(rail, "the_geom") + //Separate rail features according the zindex + def railTable_zindex0_buffer = postfix "rail_zindex0_buffer" + def rail_tmp = postfix "rail_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $railTable_zindex0_buffer, $rail_tmp; CREATE TABLE $railTable_zindex0_buffer as SELECT ST_CollectionExtract(st_intersection(a.the_geom,b.the_geom),3) AS the_geom, a.WIDTH, b.${id_zone} FROM $rail as a ,$zone AS b WHERE a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) and a.ZINDEX=0 ; @@ -1366,158 +1411,158 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ ${id_zone} FROM $railTable_zindex0_buffer AS a GROUP BY ${id_zone}; DROP TABLE IF EXISTS $railTable_zindex0_buffer; - """.toString() - tablesToMerge += ["$rail_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $rail_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + """) + tablesToMerge += ["$rail_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $rail_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - if (vegetation && datasource.hasTable(vegetation)&& !datasource.isEmpty(vegetation)) { - debug "Preparing table : $vegetation" - datasource.createSpatialIndex(vegetation,"the_geom") - def low_vegetation_rsu_tmp = postfix "low_vegetation_rsu_zindex0" - def low_vegetation_tmp = postfix "low_vegetation_zindex0" - def high_vegetation_tmp = postfix "high_vegetation_zindex0" - datasource """DROP TABLE IF EXISTS $low_vegetation_tmp, $low_vegetation_rsu_tmp; + if (vegetation && datasource.hasTable(vegetation) && !datasource.isEmpty(vegetation)) { + debug "Preparing table : $vegetation" + datasource.createSpatialIndex(vegetation, "the_geom") + def low_vegetation_rsu_tmp = postfix "low_vegetation_rsu_zindex0" + def low_vegetation_tmp = postfix "low_vegetation_zindex0" + def high_vegetation_tmp = postfix "high_vegetation_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $low_vegetation_tmp, $low_vegetation_rsu_tmp; CREATE TABLE $low_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='low'; CREATE TABLE $high_vegetation_tmp as 
select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='high'; - """.toString() - tablesToMerge += ["$low_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $low_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - tablesToMerge += ["$high_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $high_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + """) + tablesToMerge += ["$low_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $low_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + tablesToMerge += ["$high_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $high_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - if (water && datasource.hasTable(water)&& !datasource.isEmpty(water)) { - debug "Preparing table : $water" - datasource.createSpatialIndex(water,"the_geom") - def water_tmp = postfix "water_zindex0" - datasource """DROP TABLE IF EXISTS $water_tmp; + if (water && datasource.hasTable(water) && !datasource.isEmpty(water)) { + debug "Preparing table : $water" + datasource.createSpatialIndex(water, "the_geom") + def water_tmp = postfix "water_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $water_tmp; CREATE TABLE $water_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $water AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom)""".toString() - tablesToMerge += ["$water_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $water_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + AND ST_INTERSECTS(a.the_geom, b.the_geom)""") + tablesToMerge += ["$water_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $water_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - if (impervious && datasource.hasTable(impervious)&& !datasource.isEmpty(impervious)) { - debug "Preparing table : $impervious" - datasource.createSpatialIndex(impervious,"the_geom") - def impervious_tmp = postfix "impervious_zindex0" - datasource """DROP TABLE IF EXISTS $impervious_tmp; + if (impervious && datasource.hasTable(impervious) && !datasource.isEmpty(impervious)) { + debug "Preparing table : $impervious" + datasource.createSpatialIndex(impervious, "the_geom") + def impervious_tmp = postfix "impervious_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $impervious_tmp; CREATE TABLE $impervious_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $impervious AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom)""".toString() - tablesToMerge += ["$impervious_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $impervious_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + AND ST_INTERSECTS(a.the_geom, b.the_geom)""") + tablesToMerge += ["$impervious_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $impervious_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - if (building && datasource.hasTable(building)&& !datasource.isEmpty(building)) { - debug "Preparing table : $building" - datasource.createSpatialIndex(building,"the_geom") - def building_tmp = postfix "building_zindex0" - datasource """DROP TABLE IF EXISTS $building_tmp; + if (building && datasource.hasTable(building) && 
!datasource.isEmpty(building)) { + debug "Preparing table : $building" + datasource.createSpatialIndex(building, "the_geom") + def building_tmp = postfix "building_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $building_tmp; CREATE TABLE $building_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $building AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""".toString() - tablesToMerge += ["$building_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $building_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] - } + AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""") + tablesToMerge += ["$building_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $building_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] + } - //Merging all tables in one - debug "Grouping all tables in one..." - if (!tablesToMerge) { - error "Any features to compute surface fraction statistics" - return - } - def tmp_tables = postfix "tmp_tables_zindex0" - datasource """DROP TABLE if exists $tmp_tables; + //Merging all tables in one + debug "Grouping all tables in one..." + if (!tablesToMerge) { + error "Any features to compute surface fraction statistics" + return + } + def tmp_tables = postfix "tmp_tables_zindex0" + datasource.execute( """DROP TABLE if exists $tmp_tables; CREATE TABLE $tmp_tables(the_geom GEOMETRY, ${id_zone} integer) AS ${tablesToMerge.values().join(' union ')}; - """.toString() + """) - //Polygonize the input tables - debug "Generating " + - "minimum polygon areas" - def tmp_point_polygonize = postfix "tmp_point_polygonize_zindex0" - datasource """DROP TABLE IF EXISTS $tmp_point_polygonize; + //Polygonize the input tables + debug "Generating " + + "minimum polygon areas" + def tmp_point_polygonize = postfix "tmp_point_polygonize_zindex0" + datasource.execute( """DROP TABLE IF EXISTS $tmp_point_polygonize; CREATE INDEX ON $tmp_tables($id_zone); CREATE TABLE $tmp_point_polygonize as select EXPLOD_ID as ${ID_COLUMN_NAME}, st_pointonsurface(the_geom) as the_geom , st_area(the_geom) as area , ${id_zone} from st_explode ('(select st_polygonize(st_union(st_force2d( - st_precisionreducer(st_node(st_accum(a.the_geom)), 3)))) as the_geom, ${id_zone} from $tmp_tables as a group by ${id_zone})')""".toString() + st_precisionreducer(st_node(st_accum(a.the_geom)), 3)))) as the_geom, ${id_zone} from $tmp_tables as a group by ${id_zone})')""") - //Create indexes - datasource.createSpatialIndex(tmp_point_polygonize,"the_geom") - datasource.createIndex(tmp_point_polygonize,id_zone) + //Create indexes + datasource.createSpatialIndex(tmp_point_polygonize, "the_geom") + datasource.createIndex(tmp_point_polygonize, id_zone) - def final_polygonize = postfix "final_polygonize_zindex0" - datasource """ + def final_polygonize = postfix "final_polygonize_zindex0" + datasource.execute( """ DROP TABLE IF EXISTS $final_polygonize; CREATE TABLE $final_polygonize as select a.AREA , a.the_geom as the_geom, a.${ID_COLUMN_NAME}, b.${id_zone} from $tmp_point_polygonize as a, $zone as b - where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - - datasource.createSpatialIndex(final_polygonize,"the_geom") - datasource.createIndex(final_polygonize,id_zone) - - def finalMerge = [] - def tmpTablesToDrop = [] - tablesToMerge.each { entry -> - debug "Processing table $entry.key" - def tmptableName = "tmp_stats_$entry.key" - tmpTablesToDrop 
<< tmptableName - if (entry.key.startsWith("high_vegetation")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + + datasource.createSpatialIndex(final_polygonize, "the_geom") + datasource.createIndex(final_polygonize, id_zone) + + def finalMerge = [] + def tmpTablesToDrop = [] + tablesToMerge.each { entry -> + debug "Processing table $entry.key" + def tmptableName = "tmp_stats_$entry.key" + tmpTablesToDrop << tmptableName + if (entry.key.startsWith("high_vegetation")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 1 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - } else if (entry.key.startsWith("low_vegetation")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("low_vegetation")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area,1 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom,b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - } else if (entry.key.startsWith("water")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 0 as high_vegetation, 1 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - } else if (entry.key.startsWith("road")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom,b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("water")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 0 as high_vegetation, 1 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from 
${entry.key} as a, + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("road")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 1 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - } else if (entry.key.startsWith("rail")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("rail")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,1 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - }else if (entry.key.startsWith("impervious")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("impervious")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 1 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") - } else if (entry.key.startsWith("building")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource.createIndex(entry.key,id_zone) - datasource """DROP TABLE IF EXISTS $tmptableName; + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } else if (entry.key.startsWith("building")) { + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.createIndex(entry.key, id_zone) + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 1 as building,0 as rail, b.${ID_COLUMN_NAME}, 
b.${id_zone} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""".toString() - finalMerge.add("SELECT * FROM $tmptableName") + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") + finalMerge.add("SELECT * FROM $tmptableName") + } } - } - if (finalMerge) { - //Do not drop RSU table - tablesToMerge.remove("$zone") - def allInfoTableName = postfix "allInfoTableName" - datasource """DROP TABLE IF EXISTS $allInfoTableName, $tmp_point_polygonize, $final_polygonize, $tmp_tables, $outputTableName; + if (finalMerge) { + //Do not drop RSU table + tablesToMerge.remove("$zone") + def allInfoTableName = postfix "allInfoTableName" + datasource.execute( """DROP TABLE IF EXISTS $allInfoTableName, $tmp_point_polygonize, $final_polygonize, $tmp_tables, $outputTableName; CREATE TABLE $allInfoTableName as ${finalMerge.join(' union all ')}; CREATE INDEX ON $allInfoTableName (${ID_COLUMN_NAME}); CREATE INDEX ON $allInfoTableName (${id_zone}); @@ -1525,10 +1570,9 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ MAX(HIGH_VEGETATION) AS HIGH_VEGETATION, MAX(WATER) AS WATER, MAX(IMPERVIOUS) AS IMPERVIOUS, MAX(ROAD) AS ROAD, MAX(BUILDING) AS BUILDING, MAX(RAIL) AS RAIL, ${id_zone} FROM $allInfoTableName GROUP BY ${ID_COLUMN_NAME}, ${id_zone}; - DROP TABLE IF EXISTS ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${tmpTablesToDrop.join(",")}""".toString() - } - else{ - datasource """DROP TABLE IF EXISTS $outputTableName; + DROP TABLE IF EXISTS ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${tmpTablesToDrop.join(",")}""") + } else { + datasource.execute( """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName(AREA DOUBLE PRECISION, LOW_VEGETATION INTEGER, HIGH_VEGETATION INTEGER, @@ -1537,13 +1581,15 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ ROAD INTEGER, BUILDING INTEGER, RAIL INTEGER, - ${id_zone} INTEGER)""" + ${id_zone} INTEGER);""") + } + } else { + throw new SQLException("""Cannot compute the smallest geometries""") } - - } else { - error """Cannot compute the smallest geometries""" + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the smallest geometries", e) } - return outputTableName } /** @@ -1575,11 +1621,11 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ */ String surfaceFractions(JdbcDataSource datasource, String rsu, String id_rsu, String spatialRelationsTable, - Map superpositions = ["high_vegetation": ["water", "building", "low_vegetation","rail", "road", "impervious"]], + Map superpositions = ["high_vegetation": ["water", "building", "low_vegetation", "rail", "road", "impervious"]], List priorities = ["water", "building", "high_vegetation", "low_vegetation", "rail", "road", "impervious"], - String prefixName) { + String prefixName) throws Exception { def BASE_TABLE_NAME = "RSU_SURFACE_FRACTIONS" - def LAYERS = ["rail","road", "water", "high_vegetation", "low_vegetation", "impervious", "building"] + def LAYERS = ["rail", "road", "water", "high_vegetation", "low_vegetation", "impervious", "building"] debug "Executing RSU surface fractions computation" // The name of the outputTableName is constructed @@ -1587,112 +1633,117 @@ String surfaceFractions(JdbcDataSource datasource, // Temporary tables are created def withoutUndefined 
= postfix "without_undefined" + try { + // Create the indexes on each of the input tables + datasource.createIndex(rsu, id_rsu) + datasource.createIndex(spatialRelationsTable, id_rsu) + + // Need to set priority number for future sorting + def prioritiesMap = [:] + def i = 0 + priorities.each { val -> + prioritiesMap[val] = i + i++ + } - // Create the indexes on each of the input tables - datasource.createIndex(rsu,id_rsu) - datasource.createIndex(spatialRelationsTable,id_rsu) + def query = """DROP TABLE IF EXISTS $withoutUndefined; CREATE TABLE $withoutUndefined AS SELECT b.${id_rsu} """ + def end_query = """ FROM $spatialRelationsTable AS a RIGHT JOIN $rsu b + ON a.${id_rsu}=b.${id_rsu} GROUP BY b.${id_rsu};""" - // Need to set priority number for future sorting - def prioritiesMap = [:] - def i = 0 - priorities.each { val -> - prioritiesMap[val] = i - i++ - } + if (superpositions) { + // Calculates the fraction of overlapped layers according to "superpositionsWithPriorities" + superpositions.each { key, values -> + // Calculating the overlaying layer when it has no overlapped layer + def tempoLayers = LAYERS.minus([key]) - def query = """DROP TABLE IF EXISTS $withoutUndefined; CREATE TABLE $withoutUndefined AS SELECT b.${id_rsu} """ - def end_query = """ FROM $spatialRelationsTable AS a RIGHT JOIN $rsu b - ON a.${id_rsu}=b.${id_rsu} GROUP BY b.${id_rsu};""" + query += ", COALESCE(SUM(CASE WHEN a.$key =1 AND a.${tempoLayers.join(" =0 AND a.")} =0 THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${key}_fraction " - if (superpositions) { - // Calculates the fraction of overlapped layers according to "superpositionsWithPriorities" - superpositions.each { key, values -> - // Calculating the overlaying layer when it has no overlapped layer - def tempoLayers = LAYERS.minus([key]) - - query += ", COALESCE(SUM(CASE WHEN a.$key =1 AND a.${tempoLayers.join(" =0 AND a.")} =0 THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${key}_fraction " - - // Calculate each combination of overlapped layer for the current overlaying layer - def notOverlappedLayers = priorities.minus(values).minus([key]) - // If an non overlapped layer is prioritized, its number should be 0 for the overlapping to happen - def nonOverlappedQuery = "" - def positionOverlapping = prioritiesMap."$key" - if (notOverlappedLayers) { - notOverlappedLayers.each { val -> - if (positionOverlapping > prioritiesMap.get(val)) { - nonOverlappedQuery += " AND a.$val =0 " + // Calculate each combination of overlapped layer for the current overlaying layer + def notOverlappedLayers = priorities.minus(values).minus([key]) + // If an non overlapped layer is prioritized, its number should be 0 for the overlapping to happen + def nonOverlappedQuery = "" + def positionOverlapping = prioritiesMap."$key" + if (notOverlappedLayers) { + notOverlappedLayers.each { val -> + if (positionOverlapping > prioritiesMap.get(val)) { + nonOverlappedQuery += " AND a.$val =0 " + } } } + def var2Zero = [] + def prioritiesWithoutOverlapping = priorities.minus(key) + prioritiesWithoutOverlapping.each { val -> + if (values.contains(val)) { + def var2ZeroQuery = "" + if (var2Zero) { + var2ZeroQuery = " AND a." 
+ var2Zero.join("=0 AND a.") + " =0 " + } + query += ", COALESCE(SUM(CASE WHEN a.$key =1 AND a.$val =1 $var2ZeroQuery $nonOverlappedQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${key}_${val}_fraction " + + } + var2Zero.add(val) + } } + + // Calculates the fraction for each individual layer using the "priorities" table and considering + // already calculated superpositions + def varAlreadyUsedQuery = "" def var2Zero = [] - def prioritiesWithoutOverlapping = priorities.minus(key) - prioritiesWithoutOverlapping.each { val -> - if (values.contains(val)) { - def var2ZeroQuery = "" - if (var2Zero) { - var2ZeroQuery = " AND a." + var2Zero.join("=0 AND a.") + " =0 " + def overlappingLayers = superpositions.keySet() + priorities.each { val -> + def var2ZeroQuery = "" + if (var2Zero) { + var2ZeroQuery = " AND a." + var2Zero.join("=0 AND a.") + " =0 " + } + var2Zero.add(val) + if (!overlappingLayers.contains(val)) { + // Overlapping layers should be set to zero when they arrive after the current layer + // in order of priority + def nonOverlappedQuery = "" + superpositions.each { key, values -> + def positionOverlapping = prioritiesMap.get(key) + if (values.contains(val) & (positionOverlapping > prioritiesMap.get(val))) { + nonOverlappedQuery += " AND a.$key =0 " + } } - query += ", COALESCE(SUM(CASE WHEN a.$key =1 AND a.$val =1 $var2ZeroQuery $nonOverlappedQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${key}_${val}_fraction " + query += ", COALESCE(SUM(CASE WHEN a.$val =1 $var2ZeroQuery $varAlreadyUsedQuery $nonOverlappedQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${val}_fraction " } - var2Zero.add(val) } - } + datasource query.toString() + end_query.toString() - // Calculates the fraction for each individual layer using the "priorities" table and considering - // already calculated superpositions - def varAlreadyUsedQuery = "" - def var2Zero = [] - def overlappingLayers = superpositions.keySet() - priorities.each { val -> - def var2ZeroQuery = "" - if (var2Zero) { - var2ZeroQuery = " AND a." + var2Zero.join("=0 AND a.") + " =0 " - } - var2Zero.add(val) - if (!overlappingLayers.contains(val)) { - // Overlapping layers should be set to zero when they arrive after the current layer - // in order of priority - def nonOverlappedQuery = "" - superpositions.each { key, values -> - def positionOverlapping = prioritiesMap.get(key) - if (values.contains(val) & (positionOverlapping > prioritiesMap.get(val))) { - nonOverlappedQuery += " AND a.$key =0 " - } + } else { + def var2Zero = [] + priorities.each { val -> + def var2ZeroQuery = "" + if (var2Zero) { + var2ZeroQuery = " AND a." + var2Zero.join("=0 AND a.") + " = 0 " } - query += ", COALESCE(SUM(CASE WHEN a.$val =1 $var2ZeroQuery $varAlreadyUsedQuery $nonOverlappedQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${val}_fraction " - + var2Zero.add(val) + query += ", COALESCE(SUM(CASE WHEN a.$val =1 $var2ZeroQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${val}_fraction " } - } - datasource query.toString() + end_query.toString() + datasource query.toString() + end_query.toString() - } else { - def var2Zero = [] - priorities.each { val -> - def var2ZeroQuery = "" - if (var2Zero) { - var2ZeroQuery = " AND a." 
+ var2Zero.join("=0 AND a.") + " = 0 " - } - var2Zero.add(val) - query += ", COALESCE(SUM(CASE WHEN a.$val =1 $var2ZeroQuery THEN a.area ELSE 0 END),0)/st_area(b.the_geom) AS ${val}_fraction " } - datasource query.toString() + end_query.toString() - - } - // Calculates the fraction of land without defined surface - def allCols = datasource.getTable(withoutUndefined).getColumns() - def allFractionCols = allCols.minus(id_rsu.toUpperCase()) - datasource """ DROP TABLE IF EXISTS $outputTableName; + // Calculates the fraction of land without defined surface + def allCols = datasource.getColumnNames(withoutUndefined) + def allFractionCols = allCols.minus(id_rsu.toUpperCase()) + datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT *, 1-(${allFractionCols.join("+")}) AS UNDEFINED_FRACTION - FROM $withoutUndefined""" - - // Drop intermediate tables - datasource "DROP TABLE IF EXISTS $withoutUndefined;".toString() - - //Cache the table name to re-use it - cacheTableName(BASE_TABLE_NAME, outputTableName) - return outputTableName + FROM $withoutUndefined""") + + // Drop intermediate tables + datasource.execute( "DROP TABLE IF EXISTS $withoutUndefined;") + + //Cache the table name to re-use it + cacheTableName(BASE_TABLE_NAME, outputTableName) + return outputTableName + }catch (SQLException e){ + throw new SQLException("Cannot compute surface fractions",e) + }finally { + datasource.dropTable(withoutUndefined) + } } /** @@ -1716,26 +1767,30 @@ String surfaceFractions(JdbcDataSource datasource, */ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTable, String buildingFractionTable, String facDensityColumn, String buFractionColumn, - String idRsu, String prefixName) { - def BASE_NAME = "building_surface_fraction" + String idRsu, String prefixName) throws Exception{ + try { + def BASE_NAME = "building_surface_fraction" - debug "Executing building surface density" + debug "Executing building surface density" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME - // Sum free facade density and building fraction... - datasource."$facadeDensityTable"."$idRsu".createIndex() - datasource."$buildingFractionTable"."$idRsu".createIndex() - datasource """ + // Sum free facade density and building fraction... 
+ datasource.createIndex(facadeDensityTable, idRsu) + datasource.createIndex(buildingFractionTable, idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.$idRsu, a.$buFractionColumn + b.$facDensityColumn AS BUILDING_SURFACE_DENSITY FROM $buildingFractionTable AS a LEFT JOIN $facadeDensityTable AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") - return outputTableName + return outputTableName + }catch (SQLException e){ + throw new SQLException("Cannot compute building surface density at RSU scale", e) + } } /** @@ -1760,7 +1815,7 @@ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTab */ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, String building, String idRsu, List listLayersBottom = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], - boolean cutBuilding=true, String prefixName) { + boolean cutBuilding = true, String prefixName) throws Exception{ def GEOMETRIC_COLUMN_RSU = "the_geom" def GEOMETRIC_COLUMN_BU = "the_geom" def ID_COLUMN_BU = "id_build" @@ -1777,46 +1832,46 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri // To avoid overwriting the output files of this step, a unique identifier is created // Temporary table names - def buildInter =building + def buildInter = building def rsuBuildingArea = postfix "rsu_building_area" def buildFracH = postfix "roof_frac_H" def bufferTable = postfix "buffer_table" - if(cutBuilding) { + if (cutBuilding) { buildInter = postfix "build_inter" // 1. Create the intersection between buildings and RSU polygons - datasource."$building"."$ID_COLUMN_BU".createIndex() - datasource."$rsu"."$idRsu".createIndex() - datasource """ + datasource.createIndex(building,ID_COLUMN_BU) + datasource.createIndex(rsu,idRsu) + datasource.execute(""" DROP TABLE IF EXISTS $buildInter; CREATE TABLE $buildInter AS SELECT a.$ID_COLUMN_BU, a.$idRsu, ST_INTERSECTION(a.$GEOMETRIC_COLUMN_BU, b.$GEOMETRIC_COLUMN_RSU) AS $GEOMETRIC_COLUMN_BU, (a.$HEIGHT_WALL + a.$HEIGHT_ROOF) / 2 AS $BUILD_HEIGHT FROM $building AS a LEFT JOIN $rsu AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") } // 2. Calculate the total building roof area within each RSU datasource.createIndex(buildInter, idRsu) - datasource """ + datasource.execute(""" DROP TABLE IF EXISTS $rsuBuildingArea; CREATE TABLE $rsuBuildingArea AS SELECT $idRsu, SUM(ST_AREA($GEOMETRIC_COLUMN_BU)) AS $BUILDING_AREA FROM $buildInter - GROUP BY $idRsu""".toString() + GROUP BY $idRsu""") // 3. 
Calculate the fraction of roof for each level of the canopy (defined by 'listLayersBottom') except the last - datasource.createIndex(buildInter,BUILD_HEIGHT) - datasource.createIndex(rsuBuildingArea,idRsu) + datasource.createIndex(buildInter, BUILD_HEIGHT) + datasource.createIndex(rsuBuildingArea, idRsu) def tab_H = [:] def indicToJoin = [:] for (i in 1..(listLayersBottom.size() - 1)) { def layer_top = listLayersBottom[i] def layer_bottom = listLayersBottom[i - 1] def indicNameH = getDistribIndicName(BASE_NAME, 'H', layer_bottom, layer_top).toString() - tab_H[i - 1] = "${buildFracH}_$layer_bottom".toString() - datasource """ + tab_H[i - 1] = "${buildFracH}_$layer_bottom" + datasource.execute(""" DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT a.$idRsu, @@ -1827,16 +1882,16 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri FROM $rsuBuildingArea AS a LEFT JOIN $buildInter AS b ON a.$idRsu = b.$idRsu WHERE b.$BUILD_HEIGHT >= $layer_bottom AND b.$BUILD_HEIGHT < $layer_top - GROUP BY b.$idRsu""".toString() + GROUP BY b.$idRsu""") // Fill missing values with 0 - datasource."$bufferTable"."$idRsu".createIndex() - datasource """ + datasource.createIndex(bufferTable,idRsu) + datasource.execute( """ DROP TABLE IF EXISTS ${tab_H[i - 1]}; CREATE TABLE ${tab_H[i - 1]} AS SELECT a.$idRsu, COALESCE(b.$indicNameH, 0) AS $indicNameH FROM $rsu AS a LEFT JOIN $bufferTable AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") // Declare this layer to the layer to join at the end indicToJoin.put(tab_H[i - 1], idRsu) } @@ -1844,8 +1899,8 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri // 4. Calculate the fraction of roof for the last level of the canopy def layer_bottom = listLayersBottom[listLayersBottom.size() - 1] def indicNameH = getDistribIndicName(BASE_NAME, 'H', layer_bottom, null).toString() - tab_H[listLayersBottom.size() - 1] = "${buildFracH}_$layer_bottom".toString() - datasource """ + tab_H[listLayersBottom.size() - 1] = "${buildFracH}_$layer_bottom" + datasource.execute( """ DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT a.$idRsu, @@ -1856,16 +1911,16 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri FROM $rsuBuildingArea AS a LEFT JOIN $buildInter AS b ON a.$idRsu = b.$idRsu WHERE b.$BUILD_HEIGHT >= $layer_bottom - GROUP BY b.$idRsu""".toString() + GROUP BY b.$idRsu""") // Fill missing values with 0 - datasource."$bufferTable"."$idRsu".createIndex() - datasource """ + datasource.createIndex(bufferTable,idRsu) + datasource.execute( """ DROP TABLE IF EXISTS ${tab_H[listLayersBottom.size() - 1]}; CREATE TABLE ${tab_H[listLayersBottom.size() - 1]} AS SELECT a.$idRsu, COALESCE(b.$indicNameH, 0) AS $indicNameH FROM $rsu AS a LEFT JOIN $bufferTable AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") // Declare this layer to the layer to join at the end indicToJoin.put(tab_H[listLayersBottom.size() - 1], idRsu) @@ -1876,8 +1931,8 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri return } - datasource """DROP TABLE IF EXISTS $buildInter, $rsuBuildingArea, $bufferTable, - ${tab_H.values().join(",")}""".toString() + datasource.execute( """DROP TABLE IF EXISTS $buildInter, $rsuBuildingArea, $bufferTable, + ${tab_H.values().join(",")}""") return outputTableName } @@ -1911,7 +1966,7 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri */ String 
frontalAreaIndexDistribution(JdbcDataSource datasource, String building, String rsu, String idRsu, List listLayersBottom = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], - int numberOfDirection = 12, boolean distributionAsIndex = true, String prefixName) { + int numberOfDirection = 12, boolean distributionAsIndex = true, String prefixName) throws Exception{ def GEOMETRIC_FIELD_RSU = "the_geom" def GEOMETRIC_FIELD_BU = "the_geom" def ID_FIELD_BU = "id_build" @@ -1935,22 +1990,22 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, def snap_tolerance = 0.01 // 1. Convert the building polygons into lines and create the intersection with RSU polygons - datasource."$building"."$idRsu".createIndex() - datasource."$rsu"."$idRsu".createIndex() - datasource """ + datasource.createIndex(building,idRsu) + datasource.createIndex(rsu,idRsu) + datasource.execute( """ DROP TABLE IF EXISTS $buildLine; CREATE TABLE $buildLine AS SELECT a.$ID_FIELD_BU, a.$idRsu, ST_CollectionExtract(ST_INTERSECTION(ST_TOMULTILINE(a.$GEOMETRIC_FIELD_BU), b.$GEOMETRIC_FIELD_RSU),2) AS $GEOMETRIC_FIELD_BU, a.$HEIGHT_WALL FROM $building AS a LEFT JOIN $rsu AS b - ON a.$idRsu = b.$idRsu""".toString() + ON a.$idRsu = b.$idRsu""") // 2. Keep only intersected facades within a given distance and calculate their length, height and azimuth - datasource.createSpatialIndex(buildLine,GEOMETRIC_FIELD_BU) - datasource.createIndex(buildLine,idRsu) - datasource.createIndex(buildLine,ID_FIELD_BU) - datasource """ + datasource.createSpatialIndex(buildLine, GEOMETRIC_FIELD_BU) + datasource.createIndex(buildLine, idRsu) + datasource.createIndex(buildLine, ID_FIELD_BU) + datasource.execute( """ DROP TABLE IF EXISTS $allLinesRsu; CREATE TABLE $allLinesRsu AS SELECT -ST_LENGTH($GEOMETRIC_FIELD_BU) AS LENGTH, @@ -1980,7 +2035,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, $HEIGHT_WALL, $idRsu, $ID_FIELD_BU FROM $buildLine)') - WHERE ST_LENGTH($GEOMETRIC_FIELD_BU) > 0;""".toString() + WHERE ST_LENGTH($GEOMETRIC_FIELD_BU) > 0;""") // 3. 
Make the calculations for all directions of each level except the highest one def dirQueryVertFrac = [:] @@ -1989,12 +2044,12 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, def angleRangeDeg = 360 / numberOfDirection def tab_H = [:] def indicToJoin = [:] - datasource."$rsu"."$idRsu".createIndex() + datasource.createIndex(rsu,idRsu) for (i in 1..(listLayersBottom.size() - 1)) { def layer_top = listLayersBottom[i] def layer_bottom = listLayersBottom[i - 1] def deltaH = layer_top - layer_bottom - tab_H[i - 1] = "${buildFracH}_$layer_bottom".toString() + tab_H[i - 1] = "${buildFracH}_$layer_bottom" // Define queries and indic names def dirList = [:] @@ -2003,7 +2058,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, // Indicator name def indicName = "FRONTAL_AREA_INDEX_H${layer_bottom}_${layer_top}_D${k * angleRangeDeg}_${(k + 1) * angleRangeDeg}" // Define query to sum the projected facade for buildings and shared facades - if(distributionAsIndex) { + if (distributionAsIndex) { dirQueryVertFrac[k] = """ CASE WHEN $v > AZIMUTH AND $v-AZIMUTH < PI() THEN CASE WHEN $HEIGHT_WALL >= $layer_top @@ -2019,7 +2074,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, END END AS $indicName""" dirQueryDiv[k] = """COALESCE(SUM(b.$indicName)/ST_AREA(a.$GEOMETRIC_FIELD_RSU), 0) AS $indicName""" - }else{ + } else { dirQueryVertFrac[k] = """ CASE WHEN $v > AZIMUTH AND $v-AZIMUTH < PI() THEN CASE WHEN $HEIGHT_WALL >= $layer_top @@ -2038,23 +2093,23 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, } } // Calculates projected surfaces for buildings and shared facades - datasource """ + datasource.execute( """ DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT $idRsu, ${dirQueryVertFrac.values().join(",")} FROM $allLinesRsu - WHERE $HEIGHT_WALL > $layer_bottom""".toString() + WHERE $HEIGHT_WALL > $layer_bottom""") // Fill missing values with 0 - datasource."$bufferTable"."$idRsu".createIndex() - datasource """ + datasource.createIndex(bufferTable,idRsu) + datasource.execute( """ DROP TABLE IF EXISTS ${tab_H[i - 1]}; CREATE TABLE ${tab_H[i - 1]} AS SELECT a.$idRsu, ${dirQueryDiv.values().join(",")} FROM $rsu AS a LEFT JOIN $bufferTable AS b ON a.$idRsu = b.$idRsu - GROUP BY a.$idRsu""".toString() + GROUP BY a.$idRsu""") // Declare this layer to the layer to join at the end indicToJoin.put(tab_H[i - 1], idRsu) } @@ -2062,9 +2117,9 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, // 4. 
Make the calculations for the last level def layer_bottom = listLayersBottom[listLayersBottom.size() - 1] // Get the maximum building height - def layer_top = datasource.firstRow("SELECT CAST(MAX($HEIGHT_WALL) AS INTEGER) +1 AS MAXH FROM $building".toString()).MAXH + def layer_top = datasource.firstRow("SELECT CAST(MAX($HEIGHT_WALL) AS INTEGER) +1 AS MAXH FROM $building").MAXH def deltaH = layer_top - layer_bottom - tab_H[listLayersBottom.size() - 1] = "${buildFracH}_$layer_bottom".toString() + tab_H[listLayersBottom.size() - 1] = "${buildFracH}_$layer_bottom" // Define queries and indic names def dirList = [:] @@ -2072,7 +2127,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, dirList.each { k, v -> // Indicator name def indicName = "FRONTAL_AREA_INDEX_H${layer_bottom}_${layer_top}_D${k * angleRangeDeg}_${(k + 1) * angleRangeDeg}" - if(!distributionAsIndex){ + if (!distributionAsIndex) { indicName = "FRONTAL_AREA_INDEX_H${layer_bottom}_D${k * angleRangeDeg}_${(k + 1) * angleRangeDeg}" } // Define query to calculate the vertical fraction of projected facade for buildings and shared facades @@ -2087,23 +2142,23 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, dirQueryDiv[k] = """COALESCE(SUM(b.$indicName)/ST_AREA(a.$GEOMETRIC_FIELD_RSU), 0) AS $indicName""" } // Calculates projected surfaces for buildings and shared facades - datasource """ + datasource.execute( """ DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT $idRsu, ${dirQueryVertFrac.values().join(",")} FROM $allLinesRsu - WHERE $HEIGHT_WALL > $layer_bottom""".toString() + WHERE $HEIGHT_WALL > $layer_bottom""") // Fill missing values with 0 - datasource."$bufferTable"."$idRsu".createIndex() - datasource """ + datasource.createIndex(bufferTable,idRsu) + datasource.execute( """ DROP TABLE IF EXISTS ${tab_H[listLayersBottom.size() - 1]}; CREATE TABLE ${tab_H[listLayersBottom.size() - 1]} AS SELECT a.$idRsu, ${dirQueryDiv.values().join(",")} FROM $rsu AS a LEFT JOIN $bufferTable AS b ON a.$idRsu = b.$idRsu - GROUP BY a.$idRsu""".toString() + GROUP BY a.$idRsu""") // Declare this layer to the layer to join at the end indicToJoin.put(tab_H[listLayersBottom.size() - 1], idRsu) @@ -2115,8 +2170,8 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, } // The temporary tables are deleted - datasource """DROP TABLE IF EXISTS $buildLine, $allLinesRsu, - $bufferTable, ${tab_H.values().join(",")}""".toString() + datasource.execute( """DROP TABLE IF EXISTS $buildLine, $allLinesRsu, + $bufferTable, ${tab_H.values().join(",")}""") } return outputTableName @@ -2132,7 +2187,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, * * @author Erwan Bocher, CNRS */ -String rsuPopulation(JdbcDataSource datasource, String rsu, String population, List populationColumns = []) { +String rsuPopulation(JdbcDataSource datasource, String rsu, String population, List populationColumns = []) throws Exception{ def BASE_NAME = "rsu_with_population" def ID_RSU = "id_rsu" def ID_POP = "id_pop" @@ -2143,12 +2198,12 @@ String rsuPopulation(JdbcDataSource datasource, String rsu, String population, L def outputTableName = postfix BASE_NAME //Indexing table - datasource.createSpatialIndex(rsu,"the_geom") - datasource.createSpatialIndex(population,"the_geom") + datasource.createSpatialIndex(rsu, "the_geom") + datasource.createSpatialIndex(population, "the_geom") def popColumns = [] def sum_popColumns = [] if (populationColumns) { 
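+ // Keep only the population columns requested by the caller, leaving out the
+ // geometry and the population identifier, which must not be summed.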
- datasource."$population".getColumns().each { col -> + datasource.getColumnNames(population).each { col -> if (!["the_geom", "id_pop"].contains(col.toLowerCase() ) && populationColumns.contains(col.toLowerCase())) { popColumns << "b.$col" @@ -2170,7 +2225,7 @@ String rsuPopulation(JdbcDataSource datasource, String rsu, String population, L st_intersects(a.the_geom, b.the_geom); create index on $inputRsuTableName_pop ($ID_RSU); create index on $inputRsuTableName_pop ($ID_POP); - """.toString()) + """) def inputRsuTableName_pop_sum = postfix "rsu_pop_sum" def inputRsuTableName_area_sum = postfix "rsu_area_sum" @@ -2186,7 +2241,7 @@ String rsuPopulation(JdbcDataSource datasource, String rsu, String population, L DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.*, ${popColumns.join(",")} from $rsu a LEFT JOIN $inputRsuTableName_pop_sum b on a.$ID_RSU=b.$ID_RSU; - drop table if exists $inputRsuTableName_pop,$inputRsuTableName_pop_sum, $inputRsuTableName_area_sum ;""".toString()) + drop table if exists $inputRsuTableName_pop,$inputRsuTableName_pop_sum, $inputRsuTableName_area_sum ;""") return outputTableName } @@ -2224,7 +2279,7 @@ String rsuPopulation(JdbcDataSource datasource, String rsu, String population, L */ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, String building, String road, String water, String vegetation, - String impervious, List priorities = ["building", "road", "water", "high_vegetation", "low_vegetation", "impervious"]) { + String impervious, List priorities = ["building", "road", "water", "high_vegetation", "low_vegetation", "impervious"]) throws Exception{ if (!id_zone) { error "The id_zone identifier cannot be null or empty" @@ -2246,17 +2301,17 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, def outputTableName = postfix BASE_NAME if (zone && datasource.hasTable(zone)) { - datasource.createIndex(zone,id_zone) - datasource.createSpatialIndex(zone,"the_geom") + datasource.createIndex(zone, id_zone) + datasource.createSpatialIndex(zone, "the_geom") def tablesToMerge = [:] tablesToMerge += ["$zone": "select ST_ExteriorRing(the_geom) as the_geom, ${id_zone} from $zone"] if (road && datasource.hasTable(road) && priorities.contains("road")) { debug "Preparing table : $road" - datasource.createSpatialIndex(road,"the_geom") + datasource.createSpatialIndex(road, "the_geom") //Separate road features according the zindex def roadTable_zindex0_buffer = postfix "road_zindex0_buffer" def road_tmp = postfix "road_zindex0" - datasource """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; + datasource.execute( """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; CREATE TABLE $roadTable_zindex0_buffer as SELECT st_buffer(the_geom, WIDTH::DOUBLE PRECISION/2, 2) AS the_geom, surface as type FROM $road where ZINDEX=0 ; @@ -2264,63 +2319,63 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, CREATE TABLE $road_tmp AS SELECT ST_CollectionExtract(st_intersection(st_union(st_accum(a.the_geom)),b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $roadTable_zindex0_buffer AS a, $zone AS b WHERE a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) GROUP BY b.${id_zone}, a.type; DROP TABLE IF EXISTS $roadTable_zindex0_buffer; - """.toString() + """) tablesToMerge += ["$road_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $road_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } if (vegetation && datasource.hasTable(vegetation)) { debug 
"Preparing table : $vegetation" - datasource.createSpatialIndex(vegetation,"the_geom") + datasource.createSpatialIndex(vegetation, "the_geom") def low_vegetation_tmp = postfix "low_vegetation_zindex0" def high_vegetation_tmp = postfix "high_vegetation_zindex0" if (priorities.contains("low_vegetation")) { - datasource """DROP TABLE IF EXISTS $low_vegetation_tmp; + datasource.execute( """DROP TABLE IF EXISTS $low_vegetation_tmp; CREATE TABLE $low_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='low'; """.toString() + AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='low'; """) tablesToMerge += ["$low_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $low_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } if (priorities.contains("high_vegetation")) { - datasource """DROP TABLE IF EXISTS $high_vegetation_tmp; + datasource.execute( """DROP TABLE IF EXISTS $high_vegetation_tmp; CREATE TABLE $high_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='high'; - """.toString() + """) tablesToMerge += ["$high_vegetation_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $high_vegetation_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } } if (water && datasource.hasTable(water) && priorities.contains("water")) { debug "Preparing table : $water" - datasource.createSpatialIndex(water,"the_geom") + datasource.createSpatialIndex(water, "the_geom") def water_tmp = postfix "water_zindex0" - datasource """DROP TABLE IF EXISTS $water_tmp; + datasource.execute( """DROP TABLE IF EXISTS $water_tmp; CREATE TABLE $water_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, 'water' as type FROM $water AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom)""".toString() + AND ST_INTERSECTS(a.the_geom, b.the_geom)""") tablesToMerge += ["$water_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $water_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } if (impervious && datasource.hasTable(impervious) && priorities.contains("impervious")) { debug "Preparing table : $impervious" - datasource.createSpatialIndex(impervious,"the_geom") + datasource.createSpatialIndex(impervious, "the_geom") def impervious_tmp = postfix "impervious_zindex0" - datasource """DROP TABLE IF EXISTS $impervious_tmp; + datasource.execute( """DROP TABLE IF EXISTS $impervious_tmp; CREATE TABLE $impervious_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, 'impervious' as type FROM $impervious AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom)""".toString() + AND ST_INTERSECTS(a.the_geom, b.the_geom)""") tablesToMerge += ["$impervious_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $impervious_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } if (building && datasource.hasTable(building) && priorities.contains("building")) { debug "Preparing table : $building" - datasource.createSpatialIndex(building,"the_geom") + datasource.createSpatialIndex(building, "the_geom") def building_tmp = postfix "building_zindex0" - 
datasource """DROP TABLE IF EXISTS $building_tmp; + datasource.execute( """DROP TABLE IF EXISTS $building_tmp; CREATE TABLE $building_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $building AS a, $zone AS b WHERE a.the_geom && b.the_geom - AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""".toString() + AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""") tablesToMerge += ["$building_tmp": "select ST_ToMultiLine(the_geom) as the_geom, ${id_zone} from $building_tmp WHERE ST_ISEMPTY(THE_GEOM)=false"] } @@ -2331,24 +2386,24 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, return } def tmp_tables = postfix "tmp_tables_zindex0" - datasource """DROP TABLE if exists $tmp_tables; + datasource.execute( """DROP TABLE if exists $tmp_tables; CREATE TABLE $tmp_tables(the_geom GEOMETRY, ${id_zone} integer) AS ${tablesToMerge.values().join(' union ')}; - """.toString() + """) //Polygonize the input tables debug "Generating " + "minimum polygon areas" def final_polygonize = postfix "tmp_point_polygonize_zindex0" - datasource """DROP TABLE IF EXISTS $final_polygonize; + datasource.execute( """DROP TABLE IF EXISTS $final_polygonize; CREATE INDEX ON $tmp_tables($id_zone); CREATE TABLE $final_polygonize as select CAST((row_number() over()) as Integer) as ${ID_COLUMN_NAME}, the_geom , st_area(the_geom) as area from st_explode ('(select st_polygonize(st_union(st_force2d( - st_precisionreducer(st_node(st_accum(a.the_geom)), 3)))) as the_geom from $tmp_tables as a group by ${id_zone})')""".toString() + st_precisionreducer(st_node(st_accum(a.the_geom)), 3)))) as the_geom from $tmp_tables as a group by ${id_zone})')""") //Create indexes - datasource.createSpatialIndex(final_polygonize,"the_geom") - datasource.createIndex(final_polygonize,ID_COLUMN_NAME) + datasource.createSpatialIndex(final_polygonize, "the_geom") + datasource.createIndex(final_polygonize, ID_COLUMN_NAME) def finalMerge = [] def tmpTablesToDrop = [] @@ -2357,39 +2412,39 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, def tmptableName = "tmp_stats_$entry.key" tmpTablesToDrop << tmptableName if (entry.key.startsWith("high_vegetation")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """DROP TABLE IF EXISTS $tmptableName; + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'high_vegetation' as layer, a.type, ${priorities.findIndexOf { it == "high_vegetation" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""".toString() + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("low_vegetation")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """DROP TABLE IF EXISTS $tmptableName; + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'low_vegetation' as layer, a.type, ${priorities.findIndexOf { it == "low_vegetation" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and 
st_intersects(a.the_geom,st_pointonsurface(b.the_geom))""".toString() + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom,st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("water")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'water' as layer, a.type,${priorities.findIndexOf { it == "water" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""".toString() + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'water' as layer, a.type,${priorities.findIndexOf { it == "water" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, + $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("road")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """DROP TABLE IF EXISTS $tmptableName; + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'road' as layer, a.type,${priorities.findIndexOf { it == "road" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""".toString() + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("impervious")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """DROP TABLE IF EXISTS $tmptableName; + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'impervious' as layer, 'impervious' as type,${priorities.findIndexOf { it == "impervious" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""".toString() + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("building")) { - datasource.createSpatialIndex(entry.key,"the_geom") - datasource """DROP TABLE IF EXISTS $tmptableName; + datasource.createSpatialIndex(entry.key, "the_geom") + datasource.execute( """DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'building' as layer, a.type, ${priorities.findIndexOf { it == "building" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, - $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""".toString() + $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } } @@ -2398,17 +2453,17 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, tablesToMerge.remove("$zone") def 
allInfoTableName = postfix "allInfoTableName" def groupedLandTypes = postfix("grouped_land_type") - datasource """DROP TABLE IF EXISTS $allInfoTableName,$groupedLandTypes , $tmp_tables, $outputTableName; - CREATE TABLE $allInfoTableName as ${finalMerge.join(' union all ')};""".toString() - datasource """ + datasource.execute( """DROP TABLE IF EXISTS $allInfoTableName,$groupedLandTypes , $tmp_tables, $outputTableName; + CREATE TABLE $allInfoTableName as ${finalMerge.join(' union all ')};""") + datasource.execute( """ CREATE INDEX ON $allInfoTableName (${ID_COLUMN_NAME}); CREATE TABLE $groupedLandTypes as select distinct ${ID_COLUMN_NAME}, first_value(type) over(partition by ${ID_COLUMN_NAME} order by priority, area) as type, first_value(layer) over(partition by ${ID_COLUMN_NAME} order by priority, area) as layer FROM $allInfoTableName; - """.toString() - datasource """CREATE INDEX ON $groupedLandTypes ($ID_COLUMN_NAME); + """) + datasource.execute( """CREATE INDEX ON $groupedLandTypes ($ID_COLUMN_NAME); CREATE TABLE $outputTableName as SELECT a.$ID_COLUMN_NAME, a.the_geom, b.* EXCEPT($ID_COLUMN_NAME) FROM $final_polygonize as a left join $groupedLandTypes as b - on a.$ID_COLUMN_NAME= b.$ID_COLUMN_NAME;""".toString() - datasource """DROP TABLE IF EXISTS $final_polygonize, ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${groupedLandTypes}, ${tmpTablesToDrop.join(",")}""".toString() + on a.$ID_COLUMN_NAME= b.$ID_COLUMN_NAME;""") + datasource.execute( """DROP TABLE IF EXISTS $final_polygonize, ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${groupedLandTypes}, ${tmpTablesToDrop.join(",")}""") } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy index b2e2c7ea89..252dc28995 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy @@ -63,35 +63,36 @@ import static org.h2gis.network.functions.ST_ConnectedComponents.getConnectedCom String createTSU(JdbcDataSource datasource, String zone, double area = 1f, String road, String rail, String vegetation, String water, String sea_land_mask, String urban_areas, - double surface_vegetation, double surface_hydro, double surface_urban_areas, String prefixName) { - def BASE_NAME = "rsu" + double surface_vegetation, double surface_hydro, double surface_urban_areas, String prefixName) throws Exception { - debug "Creating the reference spatial units" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME - datasource """DROP TABLE IF EXISTS $outputTableName;""".toString() + def tablesToDrop = [] + try { + def BASE_NAME = "rsu" - def tsuDataPrepared = prepareTSUData(datasource, - zone, road, rail, - vegetation, water, sea_land_mask, urban_areas, surface_vegetation, surface_hydro, surface_urban_areas, prefixName) + debug "Creating the reference spatial units" + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME + datasource.execute("""DROP TABLE IF EXISTS $outputTableName;""") - if (!tsuDataPrepared) { - info "Cannot prepare the data for RSU calculation." - return - } - def outputTsuTableName = Geoindicators.SpatialUnits.createTSU(datasource, tsuDataPrepared, zone, - area, prefixName) - if (!outputTsuTableName) { - info "Cannot compute the RSU." 
- return - } - datasource.dropTable(tsuDataPrepared) + def tsuDataPrepared = prepareTSUData(datasource, + zone, road, rail, + vegetation, water, sea_land_mask, urban_areas, surface_vegetation, surface_hydro, surface_urban_areas, prefixName) - datasource """ALTER TABLE $outputTsuTableName RENAME TO $outputTableName;""".toString() + tablesToDrop << tsuDataPrepared + def outputTsuTableName = Geoindicators.SpatialUnits.createTSU(datasource, tsuDataPrepared, zone, + area, prefixName) + datasource.dropTable(tsuDataPrepared) - debug "Reference spatial units table created" + datasource.execute("""ALTER TABLE $outputTsuTableName RENAME TO $outputTableName;""") - return outputTableName + debug "Reference spatial units table created" + + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot compute the TSU", e) + } finally { + datasource.dropTable(tablesToDrop) + } } /** @@ -105,7 +106,8 @@ String createTSU(JdbcDataSource datasource, String zone, * @param area TSU whose area is less than or equal to this value are removed * @return A database table name and the name of the column ID */ -String createTSU(JdbcDataSource datasource, String inputTableName, String inputzone, double area = 1d, String prefixName) { +String createTSU(JdbcDataSource datasource, String inputTableName, String inputzone, + double area = 1d, String prefixName) throws Exception { def COLUMN_ID_NAME = "id_rsu" def BASE_NAME = "tsu" @@ -114,18 +116,14 @@ String createTSU(JdbcDataSource datasource, String inputTableName, String inputz def outputTableName = prefix prefixName, BASE_NAME if (!inputTableName) { - error "The input data to compute the TSU cannot be null or empty" - return null + throw new IllegalArgumentException("The input data to compute the TSU cannot be null or empty") } def epsg = datasource.getSrid(inputTableName) if (area <= 0) { - error "The area value to filter the TSU must be greater to 0" - return null + throw new IllegalArgumentException("The area value to filter the TSU must be greater than 0") } if (inputzone) { - - //Clip the geometry datasource.createSpatialIndex(inputTableName) datasource.createSpatialIndex(inputzone) @@ -137,7 +135,7 @@ String createTSU(JdbcDataSource datasource, String inputTableName, String inputz """.toString()) //Create the polygons from the TSU lines - String polygons = postfix("polygons") + String polygons = postfix("polygons") datasource.execute(""" DROP TABLE IF EXISTS $polygons; CREATE TABLE $polygons as SELECT @@ -162,7 +160,6 @@ String createTSU(JdbcDataSource datasource, String inputTableName, String inputz } debug "Reference spatial units table created" - return outputTableName } @@ -189,90 +186,89 @@ String createTSU(JdbcDataSource datasource, String inputTableName, String inputz */ String prepareTSUData(JdbcDataSource datasource, String zone, String road, String rail, String vegetation, String water, String sea_land_mask, String urban_areas, - double surface_vegetation, double surface_hydro, double surface_urban_areas, String prefixName = "unified_abstract_model") { + double surface_vegetation, double surface_hydro, + double surface_urban_areas, String prefixName = "unified_abstract_model") + throws Exception { if (surface_vegetation <= 100) { - error("The surface of vegetation must be greater or equal than 100 m²") - return + throw new IllegalArgumentException("The surface of vegetation must be greater than or equal to 100 m²") } if (surface_hydro <= 100) { - error("The surface of water must be greater or equal than 100 m²") - return + throw new
IllegalArgumentException("The surface of water must be greater than or equal to 100 m²") } if (surface_urban_areas <= 100) { - error("The surface of urban areas must be greater or equal than 100 m²") - return + throw new IllegalArgumentException("The surface of urban areas must be greater than or equal to 100 m²") } - def BASE_NAME = "prepared_tsu_data" - - debug "Preparing the abstract model to build the TSU" + try { + def BASE_NAME = "prepared_tsu_data" + debug "Preparing the abstract model to build the TSU" - // The name of the outputTableName is constructed - def outputTableName = prefix prefixName, BASE_NAME + // The name of the outputTableName is constructed + def outputTableName = prefix prefixName, BASE_NAME - // Create temporary table names (for tables that will be removed at the end of the method) - def vegetation_tmp - def hydrographic_tmp + // Create temporary table names (for tables that will be removed at the end of the method) + def vegetation_tmp + def hydrographic_tmp - def queryCreateOutputTable = [:] - def dropTableList = [] + def queryCreateOutputTable = [:] + def dropTableList = [] - def numberZone = datasource.firstRow("SELECT COUNT(*) AS nb FROM $zone".toString()).nb + def numberZone = datasource.firstRow("SELECT COUNT(*) AS nb FROM $zone".toString()).nb - if (numberZone == 1) { - def epsg = datasource."$zone".srid - //Add the land mask - if (sea_land_mask && datasource.hasTable(sea_land_mask)) { - debug "Preparing land mask..." - queryCreateOutputTable += [land_mask_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $sea_land_mask where type ='land')"] - } - if (vegetation && datasource.hasTable(vegetation)) { - debug "Preparing vegetation..." - if (datasource.getColumnNames(vegetation)) { - vegetation_tmp = postfix "vegetation_tmp" - def vegetation_graph = postfix "vegetation_graph" - def subGraphTableNodes = postfix vegetation_graph, "NODE_CC" - def subGraphTableEdges = postfix vegetation_graph, "EDGE_CC" - def subGraphBlocksLow = postfix "subgraphblocks_low" - def subGraphBlocksHigh = postfix "subgraphblocks_high" - - datasource "DROP TABLE IF EXISTS $vegetation_tmp, $vegetation_graph, $subGraphTableNodes, $subGraphTableEdges, $subGraphBlocksLow, $subGraphBlocksHigh".toString() - - datasource.createIndex(vegetation, "ID_VEGET") - datasource.createSpatialIndex(vegetation, "THE_GEOM") - datasource.execute """ + if (numberZone == 1) { + def epsg = datasource.getSrid(zone) + //Add the land mask + if (sea_land_mask && datasource.hasTable(sea_land_mask)) { + debug "Preparing land mask..." + queryCreateOutputTable += [land_mask_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $sea_land_mask where type ='land')"] + } + if (vegetation && datasource.hasTable(vegetation)) { + debug "Preparing vegetation..."
+ if (datasource.getColumnNames(vegetation)) { + vegetation_tmp = postfix "vegetation_tmp" + def vegetation_graph = postfix "vegetation_graph" + def subGraphTableNodes = postfix vegetation_graph, "NODE_CC" + def subGraphTableEdges = postfix vegetation_graph, "EDGE_CC" + def subGraphBlocksLow = postfix "subgraphblocks_low" + def subGraphBlocksHigh = postfix "subgraphblocks_high" + + datasource.execute("DROP TABLE IF EXISTS $vegetation_tmp, $vegetation_graph, $subGraphTableNodes, $subGraphTableEdges, $subGraphBlocksLow, $subGraphBlocksHigh") + + datasource.createIndex(vegetation, "ID_VEGET") + datasource.createSpatialIndex(vegetation, "THE_GEOM") + datasource.execute(""" CREATE TABLE $vegetation_graph (EDGE_ID SERIAL, START_NODE INT, END_NODE INT) AS SELECT CAST((row_number() over()) as Integer), a.ID_VEGET as START_NODE, b.ID_VEGET AS END_NODE FROM $vegetation AS a, $vegetation AS b WHERE a.ID_VEGET <>b.ID_VEGET AND a.the_geom && b.the_geom AND ST_INTERSECTS(b.the_geom,a.the_geom); - """.toString() + """) - //Recherche des clusters - getConnectedComponents(datasource.getConnection(), vegetation_graph, "undirected") + //Search for the connected clusters + getConnectedComponents(datasource.getConnection(), vegetation_graph, "undirected") - //Unify water geometries that share a boundary - debug "Merging spatial clusters..." - //Processing low vegetation - datasource """ + //Unify vegetation geometries that share a boundary + debug "Merging spatial clusters..." + //Processing low vegetation + datasource.execute(""" CREATE INDEX ON $subGraphTableNodes (NODE_ID); CREATE TABLE $subGraphBlocksLow AS SELECT ST_ToMultiLine(ST_UNION(ST_ACCUM(A.THE_GEOM))) AS THE_GEOM FROM $vegetation A, $subGraphTableNodes B WHERE a.ID_VEGET=b.NODE_ID AND a.HEIGHT_CLASS= 'low' GROUP BY B.CONNECTED_COMPONENT HAVING SUM(st_area(A.THE_GEOM)) >= $surface_vegetation;""") - //Processing high vegetation - datasource """ + //Processing high vegetation + datasource.execute(""" CREATE TABLE $subGraphBlocksHigh AS SELECT ST_ToMultiLine(ST_UNION(ST_ACCUM(A.THE_GEOM))) AS THE_GEOM FROM $vegetation A, $subGraphTableNodes B WHERE a.ID_VEGET=b.NODE_ID AND a.HEIGHT_CLASS= 'high' GROUP BY B.CONNECTED_COMPONENT HAVING SUM(st_area(A.THE_GEOM)) >= $surface_vegetation;""") - debug "Creating the vegetation block table..." + debug "Creating the vegetation block table..." - datasource """DROP TABLE IF EXISTS $vegetation_tmp; + datasource.execute("""DROP TABLE IF EXISTS $vegetation_tmp; CREATE TABLE $vegetation_tmp (THE_GEOM GEOMETRY) AS SELECT the_geom FROM $subGraphBlocksLow UNION ALL SELECT the_geom FROM $subGraphBlocksHigh @@ -280,26 +276,26 @@ String prepareTSUData(JdbcDataSource datasource, String zone, String road, Strin LEFT JOIN $subGraphTableNodes b ON a.ID_VEGET = b.NODE_ID WHERE b.NODE_ID IS NULL AND st_area(a.the_geom)>=$surface_vegetation; DROP TABLE $subGraphTableNodes,$subGraphTableEdges, $vegetation_graph, $subGraphBlocksLow, $subGraphBlocksHigh;""") - queryCreateOutputTable += [vegetation_tmp: "(SELECT the_geom FROM $vegetation_tmp)"] - dropTableList.addAll([vegetation_tmp]) + queryCreateOutputTable += [vegetation_tmp: "(SELECT the_geom FROM $vegetation_tmp)"] + dropTableList.addAll([vegetation_tmp]) + } } - } if (water && datasource.hasTable(water)) { - if (datasource.getColumnNames(water).size() > 0) { - //Extract water - debug "Preparing hydrographic..."
- hydrographic_tmp = postfix "hydrographic_tmp" - def water_graph = postfix "water_graphes" - def subGraphTableNodes = postfix water_graph, "NODE_CC" - def subGraphTableEdges = postfix water_graph, "EDGE_CC" - def subGraphBlocks = postfix "subgraphblocks" - - datasource "DROP TABLE IF EXISTS $hydrographic_tmp, $water_graph, $subGraphTableNodes, $subGraphTableEdges, $subGraphBlocks".toString() - - datasource.createIndex(water, "ID_WATER") - datasource.createSpatialIndex(water, "THE_GEOM") - datasource.execute """ + if (water && datasource.hasTable(water)) { + if (datasource.getColumnNames(water).size() > 0) { + //Extract water + debug "Preparing hydrographic..." + hydrographic_tmp = postfix "hydrographic_tmp" + def water_graph = postfix "water_graphes" + def subGraphTableNodes = postfix water_graph, "NODE_CC" + def subGraphTableEdges = postfix water_graph, "EDGE_CC" + def subGraphBlocks = postfix "subgraphblocks" + + datasource.execute("DROP TABLE IF EXISTS $hydrographic_tmp, $water_graph, $subGraphTableNodes, $subGraphTableEdges, $subGraphBlocks") + + datasource.createIndex(water, "ID_WATER") + datasource.createSpatialIndex(water, "THE_GEOM") + datasource.execute(""" CREATE TABLE $water_graph (EDGE_ID SERIAL, START_NODE INT, END_NODE INT) AS SELECT CAST((row_number() over()) as Integer), a.ID_WATER as START_NODE, b.ID_WATER AS END_NODE FROM $water AS a, $water AS b @@ -307,20 +303,20 @@ String prepareTSUData(JdbcDataSource datasource, String zone, String road, Strin AND ST_INTERSECTS(b.the_geom,a.the_geom) and a.ZINDEX=0; """) - //Recherche des clusters - getConnectedComponents(datasource.getConnection(), water_graph, "undirected") + //Search for the connected clusters + getConnectedComponents(datasource.getConnection(), water_graph, "undirected") - //Unify water geometries that share a boundary - debug "Merging spatial clusters..." + //Unify water geometries that share a boundary + debug "Merging spatial clusters..." - datasource """ + datasource.execute(""" CREATE INDEX ON $subGraphTableNodes (NODE_ID); CREATE TABLE $subGraphBlocks AS SELECT ST_ToMultiLine(ST_UNION(ST_ACCUM(A.THE_GEOM))) AS THE_GEOM FROM $water A, $subGraphTableNodes B WHERE a.ID_WATER=b.NODE_ID GROUP BY B.CONNECTED_COMPONENT HAVING SUM(st_area(A.THE_GEOM)) >= $surface_hydro;""") - debug "Creating the water block table..." - datasource """DROP TABLE IF EXISTS $hydrographic_tmp; + debug "Creating the water block table..." + datasource.execute("""DROP TABLE IF EXISTS $hydrographic_tmp; CREATE TABLE $hydrographic_tmp (THE_GEOM GEOMETRY) AS SELECT the_geom FROM $subGraphBlocks UNION ALL SELECT ST_ToMultiLine(a.the_geom) as the_geom FROM $water a @@ -328,37 +324,35 @@ String prepareTSUData(JdbcDataSource datasource, String zone, String road, Strin st_area(a.the_geom)>=$surface_hydro; DROP TABLE $subGraphTableNodes,$subGraphTableEdges, $water_graph, $subGraphBlocks ;""") - queryCreateOutputTable += [hydrographic_tmp: "(SELECT the_geom FROM $hydrographic_tmp)"] - dropTableList.addAll([hydrographic_tmp]) + queryCreateOutputTable += [hydrographic_tmp: "(SELECT the_geom FROM $hydrographic_tmp)"] + dropTableList.addAll([hydrographic_tmp]) + } } - } - if (road && datasource.hasTable(road)) { - debug "Preparing road..."
- if (datasource.getColumnNames(road).size() > 0) { - queryCreateOutputTable += [road_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $road where (zindex=0 or crossing in ('bridge', 'crossing')) " + - "and type not in ('track','service', 'path', 'cycleway', 'steps'))"] + if (road && datasource.hasTable(road)) { + debug "Preparing road..." + if (datasource.getColumnNames(road).size() > 0) { + queryCreateOutputTable += [road_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $road where (zindex=0 or crossing in ('bridge', 'crossing')) " + + "and type not in ('track','service', 'path', 'cycleway', 'steps'))"] + } } - } - if (rail && datasource.hasTable(rail)) { - debug "Preparing rail..." - if (datasource.getColumnNames(rail).size() > 0) { - queryCreateOutputTable += [rail_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $rail where (zindex=0 and usage='main') or (crossing = 'bridge' and usage='main'))"] + if (rail && datasource.hasTable(rail)) { + debug "Preparing rail..." + if (datasource.getColumnNames(rail).size() > 0) { + queryCreateOutputTable += [rail_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $rail where (zindex=0 and usage='main') or (crossing = 'bridge' and usage='main'))"] + } } - } - - if (urban_areas && datasource.hasTable(urban_areas)) { - if (datasource.getColumnNames(urban_areas).size() > 0) { - debug "Preparing urban areas..." - queryCreateOutputTable += [urban_areas_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $urban_areas WHERE st_area(the_geom)>=$surface_urban_areas and type not in ('social_building'))"] + if (urban_areas && datasource.hasTable(urban_areas)) { + if (datasource.getColumnNames(urban_areas).size() > 0) { + debug "Preparing urban areas..." + queryCreateOutputTable += [urban_areas_tmp: "(SELECT ST_ToMultiLine(THE_GEOM) FROM $urban_areas WHERE st_area(the_geom)>=$surface_urban_areas and type not in ('social_building'))"] + } } - } - - // The input table that contains the geometries to be transformed as TSU - debug "Grouping all tables..." - if (queryCreateOutputTable) { - datasource """ + // The input table that contains the geometries to be transformed as TSU + debug "Grouping all tables..." + if (queryCreateOutputTable) { + datasource.execute(""" DROP TABLE if exists $outputTableName; CREATE TABLE $outputTableName(the_geom GEOMETRY) AS ( @@ -367,21 +361,24 @@ String prepareTSUData(JdbcDataSource datasource, String zone, String road, Strin UNION ${queryCreateOutputTable.values().join(' union ')}; DROP TABLE IF EXISTS ${queryCreateOutputTable.keySet().join(' , ')} """) - } else { - datasource """DROP TABLE if exists $outputTableName; + } else { + datasource.execute("""DROP TABLE if exists $outputTableName; CREATE TABLE $outputTableName(the_geom GEOMETRY) AS (SELECT st_setsrid(ST_ToMultiLine(THE_GEOM),$epsg) - FROM $zone);""".toString() + FROM $zone);""") - } - if (dropTableList) { - datasource "DROP TABLE IF EXISTS ${dropTableList.join(',')};".toString() - } - debug "TSU created..." + } + if (dropTableList) { + datasource.execute("DROP TABLE IF EXISTS ${dropTableList.join(',')};") + } + debug "TSU created..." - } else { - error "Cannot compute the TSU. The input zone table must have one row." + } else { + throw new IllegalArgumentException("Cannot compute the TSU. The input zone table must have one row.")
+ } + return outputTableName + } catch (SQLException e) { + throw new SQLException("Cannot prepare the TSU data", e) } - return outputTableName } /** @@ -395,7 +392,8 @@ String prepareTSUData(JdbcDataSource datasource, String zone, String road, Strin * @param outputTableName The name of the output table * @return A database table name and the name of the column ID */ -String createBlocks(JdbcDataSource datasource, String inputTableName, double snappingTolerance = 0.0d, String prefixName = "block") { +String createBlocks(JdbcDataSource datasource, String inputTableName, + double snappingTolerance = 0.0d, String prefixName = "block") throws Exception { def BASE_NAME = "blocks" @@ -455,7 +453,7 @@ String createBlocks(JdbcDataSource datasource, String inputTableName, double sna //Create the blocks debug "Creating the block table..." - def blocks = postfix("blocks") + def blocks = postfix("blocks") datasource.execute("""DROP TABLE IF EXISTS $blocks; CREATE TABLE $blocks as SELECT st_force2d(ST_MAKEVALID(THE_GEOM)) as the_geom FROM $subGraphBlocks @@ -497,7 +495,7 @@ String createBlocks(JdbcDataSource datasource, String inputTableName, double sna * @return outputTableName A table name containing ID from table 1, ID from table 2 and AREA shared by the two objects (if pointOnSurface = false) */ String spatialJoin(JdbcDataSource datasource, String sourceTable, String targetTable, - String idColumnTarget, boolean pointOnSurface = false, Integer nbRelations, String prefixName) { + String idColumnTarget, boolean pointOnSurface = false, Integer nbRelations, String prefixName) throws Exception { def GEOMETRIC_COLUMN_SOURCE = "the_geom" def GEOMETRIC_COLUMN_TARGET = "the_geom" @@ -560,21 +558,19 @@ String spatialJoin(JdbcDataSource datasource, String sourceTable, String targetT * * @author Emmanuel Renault, CNRS, 2020 * */ -String createGrid(JdbcDataSource datasource, Geometry geometry, double deltaX, double deltaY, boolean rowCol = false, String prefixName = "") { +String createGrid(JdbcDataSource datasource, Geometry geometry, double deltaX, + double deltaY, boolean rowCol = false, String prefixName = "") throws Exception { if (rowCol) { if (!deltaX || !deltaY || deltaX < 1 || deltaY < 1) { - debug "Invalid grid size padding. Must be greater or equal than 1" - return + throw new IllegalArgumentException("Invalid grid size padding. It must be greater than or equal to 1") } } else { if (!deltaX || !deltaY || deltaX <= 0 || deltaY <= 0) { - error "Invalid grid size padding. Must be greater than 0" - return + throw new IllegalArgumentException("Invalid grid size padding. It must be greater than 0") } } if (!geometry) { - error "The envelope is null or empty. Cannot compute the grid" - return + throw new IllegalArgumentException("The envelope is null or empty. Cannot compute the grid") }
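// With rowCol set to true, deltaX and deltaY are read as a number of columns
// and rows (hence the >= 1 check above); otherwise they are cell sizes in the
// units of the envelope's reference system (hence the > 0 check). A commented
// usage sketch with hypothetical arguments, assuming a metric SRID:
//   createGrid(datasource, zoneEnvelope, 100, 100, false, "test") // 100 m cells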
def BASENAME = "grid" @@ -617,8 +613,7 @@ String createGrid(JdbcDataSource datasource, Geometry geometry, double deltaX, d preparedStatement.executeBatch() } } catch (SQLException e) { - error("Cannot create the grid with the parameters.\n", e) - return null + throw new SQLException("Cannot create the grid with the parameters.", e) } finally { if (preparedStatement != null) { preparedStatement.close() @@ -641,24 +636,22 @@ String createGrid(JdbcDataSource datasource, Geometry geometry, double deltaX, d * @param distance value to erode (delete) small sprawl areas * @author Erwan Bocher (CNRS) */ -String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, float distance = 100) { +String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, + float distance = 100) throws Exception { //We must compute the grid if (!grid_indicators) { - error("No grid_indicators table to compute the sprawl areas layer") - return + throw new IllegalArgumentException("No grid_indicators table to compute the sprawl areas layer") } if (distance < 0) { - error("Please set a distance greater or equal than 0") - return + throw new IllegalArgumentException("Please set a distance greater than or equal to 0") } if (datasource.getRowCount(grid_indicators) == 0) { - error("No grid cells to compute the sprawl areas layer") - return + throw new IllegalArgumentException("No grid cells to compute the sprawl areas layer") } def gridCols = datasource.getColumnNames(grid_indicators) def lcz_columns_urban = ["LCZ_PRIMARY", "LCZ_WARM"] def lcz_columns = gridCols.intersect(lcz_columns_urban) - if (lcz_columns.size()>0) { + if (lcz_columns.size() > 0) { def outputTableName = postfix("sprawl_areas") if (distance == 0) { datasource.execute("""DROP TABLE IF EXISTS $outputTableName; @@ -678,7 +671,8 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, flo $grid_indicators where lcz_warm>=2 and LCZ_PRIMARY NOT IN (101, 102,103,104,106, 107))') where st_isempty(st_buffer(the_geom, -100,2)) =false""".toString()) - datasource.execute("""CREATE TABLE $outputTableName as SELECT CAST((row_number() over()) as Integer) as id, + + datasource.execute("""CREATE TABLE $outputTableName as SELECT CAST((row_number() over()) as Integer) as id, the_geom FROM ST_EXPLODE('( @@ -692,8 +686,7 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, flo return outputTableName } } - error("No LCZ_PRIMARY column to compute the sprawl areas layer") - return + throw new IllegalArgumentException("No LCZ_PRIMARY column to compute the sprawl areas layer") } /** @@ -702,7 +695,7 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, flo * @param input_polygons a layer that contains polygons * @author Erwan Bocher (CNRS) */ -String inversePolygonsLayer(JdbcDataSource datasource, String input_polygons) { +String inversePolygonsLayer(JdbcDataSource datasource, String input_polygons) throws Exception { def outputTableName = postfix("inverse_geometries") def tmp_extent = postfix("tmp_extent") datasource.execute("""DROP TABLE IF EXISTS $tmp_extent, $outputTableName; @@ -719,33 +712,31 @@ String inversePolygonsLayer(JdbcDataSource datasource, String input_polygons) { } - /** * This method extracts the cool area geometries inside polygons. * A cool area is a continuous geometry defined by vegetation and water fractions.
* * @author Erwan Bocher (CNRS) */ -String extractCoolAreas(JdbcDataSource datasource, String grid_indicators,float distance = 100) { +String extractCoolAreas(JdbcDataSource datasource, String grid_indicators, + float distance = 100) throws Exception { if (!grid_indicators) { - error("No grid_indicators table to extract the cool areas layer") - return + throw new IllegalArgumentException("No grid_indicators table to extract the cool areas layer") } def gridCols = datasource.getColumnNames(grid_indicators) def lcz_columns_urban = ["LCZ_PRIMARY"] def lcz_columns = gridCols.intersect(lcz_columns_urban) - if (lcz_columns.size()>0) { + if (lcz_columns.size() > 0) { def outputTableName = postfix("cool_areas") datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName as SELECT CAST((row_number() over()) as Integer) as id, the_geom FROM ST_EXPLODE('( SELECT ST_UNION(ST_ACCUM(a.THE_GEOM)) AS THE_GEOM FROM $grid_indicators as a where - a.LCZ_PRIMARY in (101, 102, 103,104, 106, 107))') ${distance>0?" where st_isempty(st_buffer(the_geom, -$distance,2)) =false":""}; + a.LCZ_PRIMARY in (101, 102, 103,104, 106, 107))') ${distance > 0 ? " where st_isempty(st_buffer(the_geom, -$distance,2)) =false" : ""}; """.toString()) return outputTableName } - error("No LCZ_PRIMARY column to extract the cool areas") - return + throw new IllegalArgumentException("No LCZ_PRIMARY column to extract the cool areas") } \ No newline at end of file diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy index eb4715e4dc..202f3e7e14 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy @@ -87,7 +87,8 @@ import java.util.zip.GZIPOutputStream String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, String rsuAllIndicators, String normalisationType = "AVG", Map mapOfWeights = ["sky_view_factor" : 1, "aspect_ratio": 1, "building_surface_fraction": 1, "impervious_surface_fraction" : 1, "pervious_surface_fraction": 1, - "height_of_roughness_elements": 1, "terrain_roughness_length": 1], String prefixName) { + "height_of_roughness_elements": 1, "terrain_roughness_length": 1], + String prefixName) throws Exception{ def OPS = ["AVG", "MEDIAN"] def ID_FIELD_RSU = "id_rsu" def CENTER_NAME = "center" @@ -512,12 +513,11 @@ def createRandomForestModel(JdbcDataSource datasource, String trainingTableName, } debug "Create a Random Forest model" - def trainingTable = datasource."$trainingTableName" + def trainingTableColumns = datasource.getColumnNames(trainingTableName) //Check if the column names exists - if (!trainingTable.hasColumn(varToModel)) { - error "The training table should have a column named $varToModel" - return + if (!trainingTableColumns.contains(varToModel)) { + throw new IllegalArgumentException("The training table should have a column named $varToModel".toString()) } // If needed, select only some specific columns for the training in the dataframe def df @@ -529,7 +529,7 @@ def createRandomForestModel(JdbcDataSource datasource, String trainingTableName, df = DataFrame.of(tabFin) } def formula = Formula.lhs(varToModel) - def columnTypes = df.getColumnsTypes() + def columnTypes = df.getColumnNamesTypes() def dfFactorized = df.omitNullRows() // Identify columns being string (thus needed to be factorized)
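// smile's random forest cannot train on raw String vectors, so every string
// column is turned into a nominal (categorical) factor before fitting.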
@@ -592,7 +592,7 @@ def createRandomForestModel(JdbcDataSource datasource, String trainingTableName, * @author Jérémy Bernard */ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVariablesTableName, String pathAndFileName, String idName, - String prefixName) { + String prefixName) throws Exception{ debug "Apply a Random Forest model" File inputModelFile = new File(pathAndFileName) def modelName = FilenameUtils.getBaseName(pathAndFileName) @@ -606,8 +606,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria if (!localInputModelFile.exists()) { FileUtils.copyURLToFile(new URL(modelURL), localInputModelFile) if (!localInputModelFile.exists()) { - error "Cannot find any model file to apply the classification tree" - return null + throw new IllegalArgumentException("Cannot find any model file to apply the classification tree") } } inputModelFile = localInputModelFile; @@ -615,8 +614,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria if (FilenameUtils.isExtension(pathAndFileName, "model")) { modelName = FilenameUtils.getBaseName(pathAndFileName) } else { - error "The extension of the model file must be .model" - return null + throw new IllegalArgumentException("The extension of the model file must be .model") } } def fileInputStream = new FileInputStream(inputModelFile) @@ -640,8 +638,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria putModel(modelName, model) } if (!model) { - error "Cannot find the requiered columns to apply the model" - return + throw new IllegalArgumentException("Cannot find the required columns to apply the model") } // The name of the outputTableName is constructed @@ -667,7 +664,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria } } //Check the column names before building the dataframe and apply the model - def inputColumns = datasource."$explicativeVariablesTableName".getColumnsTypes() + def inputColumns = datasource.getColumnNamesTypes(explicativeVariablesTableName) def allowedColumnNames = modelColumnNames.intersect(inputColumns.keySet()) def notSameColumnNames = allowedColumnNames.size() != modelColumnNames.size() @@ -734,8 +731,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria preparedStatement.executeBatch(); } } catch (SQLException e) { - error("Cannot save the dataframe.\n", e); - return null; + throw new SQLException("Cannot save the dataframe.", e) } finally { outputconnection.setAutoCommit(true); if (preparedStatement != null) { @@ -743,8 +739,7 @@ String applyRandomForestModel(JdbcDataSource datasource, String explicativeVaria } } } catch (SQLException e) { - error("Cannot save the dataframe.\n", e); - return null; + throw new SQLException("Cannot save the dataframe.", e) } return tableName } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicators.groovy index 0a9b581c65..739b1c532a 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicators.groovy @@ -41,7 +41,8 @@ import java.sql.SQLException * @return */ String computeBuildingsIndicators(JdbcDataSource datasource, String building, String road, - List indicatorUse = ["LCZ", "UTRF",
"TEB"], String prefixName = "") { + List indicatorUse = ["LCZ", "UTRF", "TEB"], + String prefixName = "") throws Exception{ info "Start computing building indicators..." @@ -64,10 +65,6 @@ String computeBuildingsIndicators(JdbcDataSource datasource, String building, St } def buildTableGeometryProperties = Geoindicators.GenericIndicators.geometryProperties(datasource, building, ["id_build"], geometryOperations, buildingPrefixName) - if (!buildTableGeometryProperties) { - info "Cannot compute the length,perimeter,area properties of the buildings" - return - } finalTablesToJoin.put(buildTableGeometryProperties, idColumnBu) // building_volume + building_floor_area + building_total_facade_length @@ -81,11 +78,6 @@ String computeBuildingsIndicators(JdbcDataSource datasource, String building, St } def buildTableSizeProperties = Geoindicators.BuildingIndicators.sizeProperties(datasource, building, sizeOperations as List, buildingPrefixName) - if (!buildTableSizeProperties) { - info "Cannot compute the building_volume, building_floor_area, building_total_facade_length " + - "indicators for the buildings" - return - } finalTablesToJoin.put(buildTableSizeProperties, idColumnBu) // For indicators that are useful for UTRF OR for LCZ classification @@ -97,11 +89,6 @@ String computeBuildingsIndicators(JdbcDataSource datasource, String building, St } def buildTableComputeNeighborsProperties = Geoindicators.BuildingIndicators.neighborsProperties(datasource, building, neighborOperations, buildingPrefixName) - if (!buildTableComputeNeighborsProperties) { - info "Cannot compute the building_contiguity, building_common_wall_fraction, " + - "number_building_neighbor indicators for the buildings" - return - } finalTablesToJoin.put(buildTableComputeNeighborsProperties, idColumnBu) if (indicatorUse*.toUpperCase().contains("UTRF")) { @@ -111,59 +98,36 @@ String computeBuildingsIndicators(JdbcDataSource datasource, String building, St "raw_compactness", "perimeter_convexity"], buildingPrefixName) - if (!buildTableFormProperties) { - info "Cannot compute the area_concavity, form_factor, raw_compactness, " + - "perimeter_convexity indicators for the buildings" - return - } finalTablesToJoin.put(buildTableFormProperties, idColumnBu) // building_minimum_building_spacing def buildTableComputeMinimumBuildingSpacing = Geoindicators.BuildingIndicators.minimumBuildingSpacing(datasource, building, 100, buildingPrefixName) - if (!buildTableComputeMinimumBuildingSpacing) { - info "Cannot compute the minimum building spacing indicator" - return - } finalTablesToJoin.put(buildTableComputeMinimumBuildingSpacing, idColumnBu) // building_road_distance def buildTableComputeRoadDistance = Geoindicators.BuildingIndicators.roadDistance(datasource, building, road, 100, buildingPrefixName) - if (!buildTableComputeRoadDistance) { - info "Cannot compute the closest minimum distance to a road at 100 meters." - return - } finalTablesToJoin.put(buildTableComputeRoadDistance, idColumnBu) // Join for building_likelihood def computeJoinNeighbors = Geoindicators.DataUtils.joinTables(datasource, [(buildTableComputeNeighborsProperties): idColumnBu, (building) : idColumnBu], buildingPrefixName + "_neighbors") - if (!computeJoinNeighbors) { - info "Cannot join the number of neighbors of a building." 
- return - } + buildTableJoinNeighbors = computeJoinNeighbors // building_likelihood_large_building def computeLikelihoodLargeBuilding = Geoindicators.BuildingIndicators.likelihoodLargeBuilding(datasource, buildTableJoinNeighbors, "number_building_neighbor", buildingPrefixName) - if (!computeLikelihoodLargeBuilding) { - info "Cannot compute the like likelihood large building indicator." - return - } + def buildTableComputeLikelihoodLargeBuilding = computeLikelihoodLargeBuilding finalTablesToJoin.put(buildTableComputeLikelihoodLargeBuilding, idColumnBu) } } def buildingTableJoin = Geoindicators.DataUtils.joinTables(datasource, finalTablesToJoin, buildingPrefixName) - if (!buildingTableJoin) { - info "Cannot merge all indicator in the table $buildingPrefixName." - return - } // Rename the last table to the right output table name datasource.execute """DROP TABLE IF EXISTS $outputTableName; @@ -185,7 +149,8 @@ String computeBuildingsIndicators(JdbcDataSource datasource, String building, St * * @return */ -String computeBlockIndicators(JdbcDataSource datasource, String inputBuildingTableName, String inputBlockTableName, String prefixName = "") { +String computeBlockIndicators(JdbcDataSource datasource, String inputBuildingTableName, String inputBlockTableName, + String prefixName = "") throws Exception{ def BASE_NAME = "block_indicators" info "Start computing block indicators..." @@ -209,20 +174,12 @@ String computeBlockIndicators(JdbcDataSource datasource, String inputBuildingTab "floor_area": ["SUM"], "volume" : ["SUM"]], blockPrefixName) - if (!computeSimpleStats) { - info "Cannot compute the sum of of the building area, building volume and block floor area." - return - } finalTablesToJoin.put(computeSimpleStats, id_block) //Ratio between the holes area and the blocks area // block_hole_area_density def computeHoleAreaDensity = Geoindicators.BlockIndicators.holeAreaDensity(datasource, inputBlockTableName, blockPrefixName) - if (!computeHoleAreaDensity) { - info "Cannot compute the hole area density." - return - } finalTablesToJoin.put(computeHoleAreaDensity, id_block) //Perkins SKill Score of the distribution of building direction within a block @@ -232,29 +189,17 @@ String computeBlockIndicators(JdbcDataSource datasource, String inputBuildingTab id_block, 15, blockPrefixName) - if (!computePerkinsSkillScoreBuildingDirection) { - info "Cannot compute perkins skill indicator. " - return - } finalTablesToJoin.put(computePerkinsSkillScoreBuildingDirection, id_block) //Block closingness String computeClosingness = Geoindicators.BlockIndicators.closingness(datasource, inputBuildingTableName, inputBlockTableName, blockPrefixName) - if (!computeClosingness) { - info "Cannot compute closingness indicator. " - return - } finalTablesToJoin.put(computeClosingness, id_block) //Block net compactness def computeNetCompactness = Geoindicators.BlockIndicators.netCompactness(datasource, inputBuildingTableName, "volume", "contiguity", blockPrefixName) - if (!computeNetCompactness) { - info "Cannot compute the net compactness indicator. 
" - return - } finalTablesToJoin.put(computeNetCompactness, id_block) //Block mean building height @@ -262,25 +207,17 @@ String computeBlockIndicators(JdbcDataSource datasource, String inputBuildingTab def computeWeightedAggregatedStatistics = Geoindicators.GenericIndicators.weightedAggregatedStatistics(datasource, inputBuildingTableName, inputBlockTableName, id_block, ["height_roof": ["area": ["AVG", "STD"]]], blockPrefixName) - if (!computeWeightedAggregatedStatistics) { - info "Cannot compute the block mean building height and standard deviation building height indicators. " - return - } finalTablesToJoin.put(computeWeightedAggregatedStatistics, id_block) //Merge all in one table def blockTableJoin = Geoindicators.DataUtils.joinTables(datasource, finalTablesToJoin, blockPrefixName) - if (!blockTableJoin) { - info "Cannot merge all tables in $blockPrefixName. " - return - } // Rename the last table to the right output table name datasource.execute """DROP TABLE IF EXISTS $outputTableName; ALTER TABLE ${blockTableJoin} RENAME TO $outputTableName""".toString() // Modify all indicators which do not have the expected name - def listColumnNames = datasource.getTable(outputTableName).columns + def listColumnNames = datasource.getColumnNames(outputTableName) def mapIndic2Change = ["SUM_AREA" : "AREA", "SUM_FLOOR_AREA": "FLOOR_AREA", "SUM_VOLUME": "VOLUME"] def query2ModifyNames = "" @@ -482,7 +419,8 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, "high_vegetation_rail_fraction"], "water_fraction_lcz" : ["water_fraction", "high_vegetation_water_fraction"]], - buildingFractions : ["high_vegetation_building_fraction", "building_fraction"]], String prefixName = "") { + buildingFractions : ["high_vegetation_building_fraction", "building_fraction"]], String prefixName = "") + throws Exception{ info "Start computing RSU indicators..." def to_start = System.currentTimeMillis() @@ -520,19 +458,11 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def superpositionsTable = Geoindicators.RsuIndicators.smallestCommunGeometry(datasource, rsu, "id_rsu", buildingTable, road, water, vegetation, impervious, rail, temporaryPrefName) - if (!superpositionsTable) { - info "Cannot compute the smallest commun geometries" - return - } // Calculate the surface fractions from the commun geom def computeSurfaceFractions = Geoindicators.RsuIndicators.surfaceFractions( datasource, rsu, "id_rsu", superpositionsTable, parameters.surfSuperpositions, parameters.surfPriorities, temporaryPrefName) - if (!computeSurfaceFractions) { - info "Cannot compute the surface fractions" - return - } tablesToDrop << superpositionsTable @@ -563,8 +493,8 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, datasource.execute queryUrbSurfFrac.toString() finalTablesToJoin.put(utrfFractionIndic, columnIdRsu) } else { - error """'utrfSurfFraction' and 'surfSuperpositions' parameters given by the user are not consistent. - Impossible to find the following indicators in the surface fractions table: ${missingElementsUrb.join(", ")}""" + throw new IllegalArgumentException("""'utrfSurfFraction' and 'surfSuperpositions' parameters given by the user are not consistent. 
+ Impossible to find the following indicators in the surface fractions table: ${missingElementsUrb.join(", ")}""".toString()) } } @@ -584,8 +514,8 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, datasource.execute querylczSurfFrac.toString() finalTablesToJoin.put(lczFractionIndic, columnIdRsu) } else { - error """'lczSurfFraction' and 'surfSuperpositions' parameters given by the user are not consistent. - Impossible to find the following indicators in the surface fractions table: ${missingElementsLcz.join(", ")}""" + throw new IllegalArgumentException("""'lczSurfFraction' and 'surfSuperpositions' parameters given by the user are not consistent. + Impossible to find the following indicators in the surface fractions table: ${missingElementsLcz.join(", ")}""".toString()) } } @@ -594,20 +524,12 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def rsuTableTypeProportionLcz = Geoindicators.GenericIndicators.typeProportion(datasource, buildingTable, columnIdRsu, "type", rsu, parameters.buildingAreaTypeAndCompositionLcz, parameters.floorAreaTypeAndCompositionLcz, temporaryPrefName + "_LCZ") - if (!rsuTableTypeProportionLcz) { - info "Cannot compute the building type proportion for the LCZ at the RSU scale" - return - } finalTablesToJoin.put(rsuTableTypeProportionLcz, columnIdRsu) } if (indicatorUse*.toUpperCase().contains("TEB")) { def rsuTableTypeProportionTeb = Geoindicators.GenericIndicators.typeProportion(datasource, buildingTable, columnIdRsu, "type", rsu, parameters.buildingAreaTypeAndCompositionTeb, parameters.floorAreaTypeAndCompositionTeb, temporaryPrefName + "_TEB") - if (!rsuTableTypeProportionTeb) { - info "Cannot compute the building type proportion for TEB at the RSU scale" - return - } finalTablesToJoin.put(rsuTableTypeProportionTeb, columnIdRsu) } @@ -615,10 +537,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, if (indicatorUse*.toUpperCase().contains("UTRF")) { def computeGeometryProperties = Geoindicators.GenericIndicators.geometryProperties(datasource, rsu, [columnIdRsu], ["st_area"], temporaryPrefName) - if (!computeGeometryProperties) { - info "Cannot compute the area of the RSU" - return - } finalTablesToJoin.put(computeGeometryProperties, columnIdRsu) } @@ -628,11 +546,8 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def roofFractionDistributionExact = Geoindicators.RsuIndicators.roofFractionDistributionExact(datasource, rsu, buildingTable, columnIdRsu, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], true, prefixName) - if (roofFractionDistributionExact) { - finalTablesToJoin.put(roofFractionDistributionExact, columnIdRsu) - } else { - info "Cannot compute the roof fraction distribution." 
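Both branches above enforce the same rule: every fraction named in the utrfSurfFraction (or lczSurfFraction) parameter must exist as a column of the surface-fraction table derived from surfSuperpositions, otherwise the run aborts. A sketch of that check, with variable and parameter names assumed from the hunk:

    // sketch only: reconstructed consistency check, not code from the patch
    def required = parameters.utrfSurfFraction.values().flatten()*.toUpperCase()
    def available = datasource.getColumnNames(surfaceFractionTable) // assumed uppercase column names
    def missingElementsUrb = required.findAll { !available.contains(it) }
    if (missingElementsUrb) {
        throw new IllegalArgumentException("""'utrfSurfFraction' and 'surfSuperpositions' parameters given by the user are not consistent.
            Impossible to find the following indicators in the surface fractions table: ${missingElementsUrb.join(", ")}""".toString())
    }

The trailing .toString() converts the interpolated GString into a plain String before it reaches the exception constructor, matching what the patch does for the other thrown messages.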
- } + finalTablesToJoin.put(roofFractionDistributionExact, columnIdRsu) + } // Building free external facade density @@ -640,10 +555,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def rsu_free_ext_density = Geoindicators.RsuIndicators.freeExternalFacadeDensity(datasource, buildingTable, rsu, "contiguity", "total_facade_length", temporaryPrefName) - if (!rsu_free_ext_density) { - info "Cannot compute the free external facade density for the RSU" - return - } intermediateJoin.put(rsu_free_ext_density, columnIdRsu) } @@ -670,10 +581,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def rsuStatisticsUnweighted = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, buildingTable, rsu, columnIdRsu, columnIdBuild, inputVarAndOperations, temporaryPrefName) - if (!rsuStatisticsUnweighted) { - info "Cannot compute the building unweighted statistics at RSU scale" - return - } // Join in an intermediate table (for perviousness fraction) intermediateJoin.put(rsuStatisticsUnweighted, columnIdRsu) @@ -683,11 +590,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, rsu, columnIdRsu, ["height_roof": ["area": ["AVG", "STD"]], "nb_lev" : ["area": ["AVG"]]], temporaryPrefName) - if (!rsuStatisticsWeighted) { - info "Cannot compute the weighted indicators mean, std height building and \n\ - mean volume building." - return - } finalTablesToJoin.put(rsuStatisticsWeighted, columnIdRsu) } @@ -699,10 +601,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, } def linearRoadOperations = Geoindicators.RsuIndicators.linearRoadOperations(datasource, rsu, road, roadOperations, parameters.angleRangeSizeRoDirection, [0], temporaryPrefName) - if (!linearRoadOperations) { - info "Cannot compute the linear road density and road direction distribution" - return - } finalTablesToJoin.put(linearRoadOperations, columnIdRsu) } @@ -714,10 +612,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def roofAreaDist = Geoindicators.RsuIndicators.roofAreaDistribution(datasource, rsu, buildingTable, facadeDensListLayersBottom, temporaryPrefName) - if (!roofAreaDist) { - info "Cannot compute the roof area distribution. " - return - } finalTablesToJoin.put(roofAreaDist, columnIdRsu) } @@ -731,10 +625,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def projFacadeDist = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(datasource, buildingTable, rsu, "id_rsu", facadeDensListLayersBottom, facadeDensNumberOfDirection, temporaryPrefName) - if (!projFacadeDist) { - info "Cannot compute the projected facade distribution. " - return - } intermediateJoin.put(projFacadeDist, columnIdRsu) } @@ -749,10 +639,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, // Create an intermediate join tables to have all needed input fields for future indicator calculation def intermediateJoinTable = Geoindicators.DataUtils.joinTables(datasource, intermediateJoin, "tab4aspratio") - if (!intermediateJoinTable) { - info "Cannot merge the tables used for aspect ratio calculation. 
" - return - } finalTablesToJoin.put(intermediateJoinTable, columnIdRsu) @@ -761,10 +647,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def aspectRatio = Geoindicators.RsuIndicators.aspectRatio(datasource, intermediateJoinTable, "free_external_facade_density", "BUILDING_TOTAL_FRACTION", temporaryPrefName) - if (!aspectRatio) { - info "Cannot compute the aspect ratio calculation " - return - } finalTablesToJoin.put(aspectRatio, columnIdRsu) } @@ -775,10 +657,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, computeExtFF = Geoindicators.RsuIndicators.extendedFreeFacadeFraction(datasource, buildingTable, intermediateJoinTable, "contiguity", "total_facade_length", 10, temporaryPrefName) - if (!computeExtFF) { - info "Cannot compute the SVF calculation. " - return - } datasource.execute """DROP TABLE IF EXISTS $SVF; CREATE TABLE SVF AS SELECT 1-extended_free_facade_fraction AS GROUND_SKY_VIEW_FACTOR, $columnIdRsu FROM ${computeExtFF}; DROP TABLE ${computeExtFF}""".toString() @@ -787,10 +665,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, buildingTable, parameters.svfPointDensity, parameters.svfRayLength, parameters.svfNumberOfDirection, temporaryPrefName) - if (!computeSVF) { - info "Cannot compute the SVF calculation. " - return - } SVF = computeSVF } finalTablesToJoin.put(SVF, columnIdRsu) @@ -803,19 +677,11 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, "projected_facade_area_distribution", "geom_avg_$heightColumnName", facadeDensListLayersBottom, facadeDensNumberOfDirection, temporaryPrefName) - if (!effRoughHeight) { - info "Cannot compute the projected_facade_area_distribution." - return - } finalTablesToJoin.put(effRoughHeight, columnIdRsu) // rsu_terrain_roughness_class if (indicatorUse*.toUpperCase().contains("LCZ")) { def roughClass = Geoindicators.RsuIndicators.effectiveTerrainRoughnessClass(datasource, effRoughHeight, "id_rsu", "effective_terrain_roughness_length", temporaryPrefName) - if (!roughClass) { - info "Cannot compute the roughness class." - return - } finalTablesToJoin.put(roughClass, columnIdRsu) } } @@ -825,10 +691,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, def perkinsDirection = Geoindicators.GenericIndicators.buildingDirectionDistribution(datasource, buildingTable, rsu, columnIdRsu, parameters.angleRangeSizeBuDirection, temporaryPrefName) - if (!perkinsDirection) { - info "Cannot compute the perkins Skill Score building direction distribution." - return - } finalTablesToJoin.put(perkinsDirection, columnIdRsu) } @@ -836,10 +698,6 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, // To avoid duplicate the_geom in the join table, remove it from the intermediate table datasource.execute("ALTER TABLE $intermediateJoinTable DROP COLUMN the_geom;".toString()) def rsuTableJoin = Geoindicators.DataUtils.joinTables(datasource, finalTablesToJoin, outputTableName) - if (!rsuTableJoin) { - info "Cannot merge all tables. 
" - return - } // Modify all indicators which do not have the expected name def listColumnNames = datasource.getColumnNames(outputTableName) @@ -890,7 +748,7 @@ String computeRSUIndicators(JdbcDataSource datasource, String buildingTable, * @return 4 tables: rsu_lcz, rsu_utrf_area, rsu_utrf_floor_area, building_utrf */ Map computeTypologyIndicators(JdbcDataSource datasource, String building_indicators, String block_indicators, - String rsu_indicators, Map parameters, String prefixName) { + String rsu_indicators, Map parameters, String prefixName) throws Exception{ info "Start computing Typology indicators..." def tablesToDrop = [] @@ -903,7 +761,7 @@ Map computeTypologyIndicators(JdbcDataSource datasource, String building_indicat if (!utrfModelName) { runUTRFTypology = false } else if (!modelCheck(utrfModelName)) { - return + throw new IllegalArgumentException("Cannot find UTRF model") } } // Temporary (and output tables) are created @@ -951,12 +809,6 @@ Map computeTypologyIndicators(JdbcDataSource datasource, String building_indicat def classifyLCZ = Geoindicators.TypologyClassification.identifyLczType(datasource, lczIndicTable, rsu_indicators, "AVG", mapOfWeights, prefixName) - - if (!classifyLCZ) { - datasource.execute "DROP TABLE IF EXISTS $lczIndicTable".toString() - info "Cannot compute the LCZ classification." - return - } rsuLcz = classifyLCZ datasource.execute "DROP TABLE IF EXISTS $lczIndicTable".toString() @@ -973,10 +825,6 @@ Map computeTypologyIndicators(JdbcDataSource datasource, String building_indicat if (!datasource.isEmpty(gatheredScales)) { def utrfBuild = Geoindicators.TypologyClassification.applyRandomForestModel(datasource, gatheredScales, utrfModelName, COLUMN_ID_BUILD, prefixName) - if (!utrfBuild) { - error "Cannot apply the urban typology model $utrfModelName" - return - } tablesToDrop << utrfBuild @@ -1119,7 +967,8 @@ Map createUnitsOfAnalysis(JdbcDataSource datasource, String zone, String buildin String water, String sea_land_mask, String urban_areas, String rsu, double surface_vegetation, double surface_hydro, double surface_urban_areas, - double snappingTolerance, List indicatorUse = ["LCZ", "UTRF", "TEB"], String prefixName = "") { + double snappingTolerance, + List indicatorUse = ["LCZ", "UTRF", "TEB"], String prefixName = "") throws Exception{ info "Create the spatial units..." def idRsu = "id_rsu" def tablesToDrop = [] @@ -1129,10 +978,6 @@ Map createUnitsOfAnalysis(JdbcDataSource datasource, String zone, String buildin vegetation, water, sea_land_mask, urban_areas, surface_vegetation, surface_hydro, surface_urban_areas, prefixName) - if (!rsu) { - info "Cannot compute the RSU." - return - } } @@ -1144,19 +989,10 @@ Map createUnitsOfAnalysis(JdbcDataSource datasource, String zone, String buildin if (indicatorUse.contains("UTRF")) { // Create the blocks String createBlocks = Geoindicators.SpatialUnits.createBlocks(datasource, building, snappingTolerance, prefixName) - if (!createBlocks) { - info "Cannot create the blocks." - return - } // Create the relations between RSU and blocks (store in the block table) String createScalesRelationsRsuBl = Geoindicators.SpatialUnits.spatialJoin(datasource, createBlocks, rsu, idRsu, 1, prefixName) - if (!createScalesRelationsRsuBl) { - info "Cannot compute the scales relations between blocks and RSU." 
- return - } tableRsuBlocks = createScalesRelationsRsuBl - // Create the relations between buildings and blocks (store in the buildings table) String createScalesRelationsBlBu = Geoindicators.SpatialUnits.spatialJoin(datasource, building, createBlocks, "id_block", 1, prefixName) @@ -1178,17 +1014,12 @@ Map createUnitsOfAnalysis(JdbcDataSource datasource, String zone, String buildin String createScalesRelationsRsuBlBu = Geoindicators.SpatialUnits.spatialJoin(datasource, inputLowerScaleBuRsu, rsu, idRsu, 1, prefixName) - if (!createScalesRelationsRsuBlBu) { - info "Cannot compute the scales relations between buildings and RSU." - return - } //Replace the building table with a new one that contains the relations between block and RSU datasource.execute("""DROP TABLE IF EXISTS $building; ALTER TABLE $createScalesRelationsRsuBlBu RENAME TO $building; """.toString()) tablesToDrop << createScalesRelationsRsuBlBu datasource.dropTable(tablesToDrop) - return ["building": building, "block" : tableRsuBlocks, "rsu" : rsu] @@ -1364,12 +1195,11 @@ Map getParameters(Map parameters) { Map computeAllGeoIndicators(JdbcDataSource datasource, String zone, String building, String road, String rail, String vegetation, String water, String impervious, String buildingEstimateTableName, String sea_land_mask, String urban_areas, String rsuTable, - Map parameters = [:], String prefixName) { + Map parameters = [:], String prefixName) throws Exception{ Map inputParameters = getParameters() if (parameters) { inputParameters = getParameters(parameters) } - def surface_vegetation = inputParameters.surface_vegetation def surface_hydro = inputParameters.surface_hydro def surface_urban_areas = inputParameters.surface_urban_areas @@ -1398,8 +1228,7 @@ Map computeAllGeoIndicators(JdbcDataSource datasource, String zone, String build snappingTolerance, buildingHeightModelName, prefixName) if (!estimHeight) { - error "Cannot estimate building height" - return + throw new Exception("Cannot estimate building height") } else { buildingTableName = estimHeight.building rsuTableForHeightEst = estimHeight.rsu @@ -1437,10 +1266,6 @@ Map computeAllGeoIndicators(JdbcDataSource datasource, String zone, String build surface_vegetation, surface_hydro, surface_urban_areas, snappingTolerance, indicatorUse, prefixName) - if (!spatialUnitsForCalc) { - error "Cannot create the spatial units" - return null - } buildingForGeoCalc = spatialUnitsForCalc.building blocksForGeoCalc = spatialUnitsForCalc.block rsuForGeoCalc = spatialUnitsForCalc.rsu @@ -1487,10 +1312,6 @@ Map computeAllGeoIndicators(JdbcDataSource datasource, String zone, String build surface_vegetation, surface_hydro, surface_urban_areas, snappingTolerance, indicatorUse, prefixName) - if (!spatialUnits) { - error "Cannot create the spatial units" - return null - } def relationBuildings = spatialUnits.building def relationBlocks = spatialUnits.block rsuTable = spatialUnits.rsu @@ -1499,8 +1320,7 @@ Map computeAllGeoIndicators(JdbcDataSource datasource, String zone, String build relationBuildings, relationBlocks, rsuTable, road, vegetation, water, impervious, rail, inputParameters, prefixName) if (!geoIndicators) { - error "Cannot build the geoindicators" - return + throw new Exception("Cannot build the geoindicators") } else { geoIndicators.put("building", building) return geoIndicators @@ -1522,14 +1342,13 @@ Map estimateBuildingHeight(JdbcDataSource datasource, String zone, String buildi String water, String impervious, String building_estimate, String sea_land_mask, String urban_areas, 
String rsu, double surface_vegetation, double surface_hydro, double surface_urban_areas, - double snappingTolerance, String buildingHeightModelName, String prefixName = "") { + double snappingTolerance, String buildingHeightModelName, String prefixName = "") throws Exception{ if (!building_estimate) { - error "To estimate the building height a table that contains the list of building to estimate must be provided" - return + throw new IllegalArgumentException("To estimate the building height a table that contains the list of buildings to estimate must be provided") } if (!modelCheck(buildingHeightModelName)) { - return + throw new IllegalArgumentException("Cannot find the building height model") } info "Geoclimate will try to estimate the building heights with the model $buildingHeightModelName." @@ -1539,10 +1358,6 @@ Map estimateBuildingHeight(JdbcDataSource datasource, String zone, String buildi water, sea_land_mask, urban_areas, rsu, surface_vegetation, surface_hydro, surface_urban_areas, snappingTolerance, ["UTRF"], prefixName) - if (!spatialUnits) { - error "Cannot create the spatial units" - return null - } def relationBuildings = spatialUnits.building def relationBlocks = spatialUnits.block def rsuTable = spatialUnits.rsu @@ -1552,10 +1367,6 @@ Map estimateBuildingHeight(JdbcDataSource datasource, String zone, String buildi relationBuildings, relationBlocks, rsuTable, road, vegetation, water, impervious, rail, getParameters([indicatorUse: ["UTRF"]]), prefixName) - if (!geoIndicatorsEstH) { - error "Cannot build the geoindicators to estimate the building height" - return - } datasource.dropTable(relationBlocks) @@ -1603,10 +1414,6 @@ Map estimateBuildingHeight(JdbcDataSource datasource, String zone, String buildi //Apply RF model buildEstimatedHeight = Geoindicators.TypologyClassification.applyRandomForestModel(datasource, gatheredScales, buildingHeightModelName, "id_build", prefixName) - if (!buildEstimatedHeight) { - error "Cannot apply the building height model $buildingHeightModelName" - return - } //Update the abstract building table info "Replace the input building table by the estimated height" @@ -1666,7 +1473,7 @@ Map computeGeoclimateIndicators(JdbcDataSource datasource, String zone, String b "mapOfWeights" : ["sky_view_factor" : 1, "aspect_ratio": 1, "building_surface_fraction": 1, "impervious_surface_fraction" : 1, "pervious_surface_fraction": 1, "height_of_roughness_elements": 1, "terrain_roughness_length": 1], - "utrfModelName": "", "nbEstimatedBuildHeight": 0], String prefixName = "") { + "utrfModelName": "", "nbEstimatedBuildHeight": 0], String prefixName = "") throws Exception{ info "Start computing the geoindicators..."
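A convention emerges from these hunks (an observation on the patch, not a documented API guarantee): IllegalArgumentException flags bad caller input, such as a missing table or model name, while plain Exception or SQLException flags a failure during processing. A small helper in the same spirit, assuming the hasTable method used elsewhere in this diff:

    // sketch: precondition helper following the patch's apparent convention
    static String requireTable(JdbcDataSource datasource, String tableName) {
        if (!tableName || !datasource.hasTable(tableName)) {
            throw new IllegalArgumentException("A valid input table is required, got: $tableName".toString())
        }
        return tableName
    }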
def start = System.currentTimeMillis() @@ -1674,19 +1481,11 @@ Map computeGeoclimateIndicators(JdbcDataSource datasource, String zone, String b //Compute building indicators String buildingIndicators = computeBuildingsIndicators(datasource, buildingsWithRelations, road, indicatorUse, prefixName) - if (!buildingIndicators) { - error "Cannot compute the building indicators" - return null - } //Compute block indicators String blockIndicators = null if (indicatorUse*.toUpperCase().contains("UTRF")) { blockIndicators = computeBlockIndicators(datasource, buildingIndicators, blocksWithRelations, prefixName) - if (!blockIndicators) { - error "Cannot compute the block indicators" - return null - } } //Compute RSU indicators @@ -1694,19 +1493,12 @@ Map computeGeoclimateIndicators(JdbcDataSource datasource, String zone, String b rsu, vegetation, road, water, impervious, rail, parameters, prefixName) - if (!rsuIndicators) { - error "Cannot compute the RSU indicators" - return null - } // Compute the typology indicators (LCZ and UTRF) Map computeTypologyIndicators = Geoindicators.WorkflowGeoIndicators.computeTypologyIndicators(datasource, buildingIndicators, blockIndicators, rsuIndicators, parameters, prefixName) - if (!computeTypologyIndicators) { - info "Cannot compute the Typology indicators." - return - } + info "All geoindicators have been computed" def rsuLcz = computeTypologyIndicators.rsu_lcz def utrfArea = computeTypologyIndicators.rsu_utrf_area @@ -1779,7 +1571,7 @@ String rasterizeIndicators(JdbcDataSource datasource, String building, String road, String vegetation, String water, String impervious, String rsu_lcz, String rsu_utrf_area, String rsu_utrf_floor_area, String sea_land_mask, - String prefixName = "") { + String prefixName = "") throws Exception{ if (!list_indicators) { info "The list of indicator names cannot be null or empty" return @@ -1820,11 +1612,11 @@ String rasterizeIndicators(JdbcDataSource datasource, "GREATEST", true, true, "lcz") // Rename the standard indicators into names consistent with the current method (LCZ type...) 
- datasource """ ALTER TABLE $resultsDistrib RENAME COLUMN EXTREMUM_COL TO LCZ_PRIMARY; + datasource.execute(""" ALTER TABLE $resultsDistrib RENAME COLUMN EXTREMUM_COL TO LCZ_PRIMARY; ALTER TABLE $resultsDistrib RENAME COLUMN UNIQUENESS_VALUE TO LCZ_UNIQUENESS_VALUE; ALTER TABLE $resultsDistrib RENAME COLUMN EQUALITY_VALUE TO LCZ_EQUALITY_VALUE; ALTER TABLE $resultsDistrib RENAME COLUMN EXTREMUM_COL2 TO LCZ_SECONDARY; - ALTER TABLE $resultsDistrib RENAME COLUMN EXTREMUM_VAL TO MIN_DISTANCE;""".toString() + ALTER TABLE $resultsDistrib RENAME COLUMN EXTREMUM_VAL TO MIN_DISTANCE;""") // Need to replace the string LCZ values by an integer datasource.createIndex(resultsDistrib, "lcz_primary") @@ -1867,11 +1659,8 @@ String rasterizeIndicators(JdbcDataSource datasource, String upperScaleAreaStatistics = Geoindicators.GenericIndicators.upperScaleAreaStatistics(datasource, grid, grid_column_identifier, rsu_utrf_area, indicatorName, "AREA_TYPO_MAJ", false, "utrf_area") - if (upperScaleAreaStatistics) { - indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) - } else { - info "Cannot aggregate the Urban Typology at grid scale" - } + indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) + } if (list_indicators_upper.contains("UTRF_FLOOR_AREA_FRACTION") && rsu_utrf_floor_area) { @@ -1879,11 +1668,8 @@ String rasterizeIndicators(JdbcDataSource datasource, def upperScaleAreaStatistics = Geoindicators.GenericIndicators.upperScaleAreaStatistics(datasource, grid, grid_column_identifier, rsu_utrf_floor_area, indicatorName, "FLOOR_AREA_TYPO_MAJ", false, "utrf_floor_area") - if (upperScaleAreaStatistics) { - indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) - } else { - info "Cannot aggregate the Urban Typology at grid scale" - } + indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) + } // If any surface fraction calculation is needed, create the priority list containing only needed fractions @@ -1935,9 +1721,8 @@ String rasterizeIndicators(JdbcDataSource datasource, surfaceFractionsProcess = Geoindicators.RsuIndicators.surfaceFractions( datasource, grid, grid_column_identifier, superpositionsTableGrid, [:], priorities_tmp, prefixName) - if (surfaceFractionsProcess) { - indicatorTablesToJoin.put(surfaceFractionsProcess, grid_column_identifier) - } + indicatorTablesToJoin.put(surfaceFractionsProcess, grid_column_identifier) + tablesToDrop << superpositionsTableGrid } else { info "Cannot compute the surface fractions at grid scale" @@ -1950,18 +1735,10 @@ String rasterizeIndicators(JdbcDataSource datasource, createScalesRelationsGridBl = Geoindicators.SpatialUnits.spatialJoin(datasource, building, grid, grid_column_identifier, null, prefixName) - if (!createScalesRelationsGridBl) { - info "Cannot compute the scales relations between buildings and grid cells." - return - } computeBuildingStats = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(datasource, createScalesRelationsGridBl, grid, grid_column_identifier, grid_column_identifier, unweightedBuildingIndicators, prefixName) - if (!computeBuildingStats) { - info "Cannot compute the building statistics on grid cells." 
- return - } indicatorTablesToJoin.put(computeBuildingStats, grid_column_identifier) } @@ -1970,17 +1747,10 @@ String rasterizeIndicators(JdbcDataSource datasource, if (weightedBuildingIndicators) { //Cut the building to compute exact fractions buildingCutted = cutBuilding(datasource, grid, building) - if (!buildingCutted) { - info "Cannot split the building with the grid to compute the weighted statistics" - return - } def computeWeightedAggregStat = Geoindicators.GenericIndicators.weightedAggregatedStatistics(datasource, buildingCutted, grid, grid_column_identifier, weightedBuildingIndicators, prefixName) - if (!computeWeightedAggregStat) { - info "Cannot compute the weighted aggregated statistics on grid cells." - return - } + indicatorTablesToJoin.put(computeWeightedAggregStat, grid_column_identifier) } @@ -1988,10 +1758,6 @@ String rasterizeIndicators(JdbcDataSource datasource, if (list_indicators_upper.contains("BUILDING_TYPE_FRACTION") && building) { if (!buildingCutted) { buildingCutted = cutBuilding(datasource, grid, building) - if (!buildingCutted) { - info "Cannot split the building with the grid to compute the weighted statistics" - return - } } def indicatorName = "TYPE" def upperScaleAreaStatistics = Geoindicators.GenericIndicators.upperScaleAreaStatistics(datasource, grid, @@ -1999,14 +1765,10 @@ String rasterizeIndicators(JdbcDataSource datasource, buildingCutted, indicatorName, indicatorName, false, "building_type_fraction") - if (upperScaleAreaStatistics) { indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) tablesToDrop << upperScaleAreaStatistics - } else { - info "Cannot aggregate the building type at grid scale" - } - } + } if ((list_indicators_upper.intersect(["FREE_EXTERNAL_FACADE_DENSITY", "ASPECT_RATIO", "BUILDING_SURFACE_DENSITY"]) && building)) { if (!createScalesRelationsGridBl) { @@ -2014,10 +1776,6 @@ String rasterizeIndicators(JdbcDataSource datasource, createScalesRelationsGridBl = Geoindicators.SpatialUnits.spatialJoin(datasource, building, grid, grid_column_identifier, null, prefixName) - if (!createScalesRelationsGridBl) { - info "Cannot compute the scales relations between buildings and grid cells." - return - } } def freeFacadeDensityExact = Geoindicators.RsuIndicators.freeExternalFacadeDensityExact(datasource, createScalesRelationsGridBl, grid, grid_column_identifier, prefixName) @@ -2049,19 +1807,12 @@ String rasterizeIndicators(JdbcDataSource datasource, if (list_indicators_upper.contains("BUILDING_HEIGHT_DIST") && building) { if (!buildingCutted) { buildingCutted = cutBuilding(datasource, grid, building) - if (!buildingCutted) { - info "Cannot split the building with the grid to compute the building height distribution" - return - } } def roofFractionDistributionExact = Geoindicators.RsuIndicators.roofFractionDistributionExact(datasource, grid, buildingCutted, grid_column_identifier, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], false, prefixName) - if (roofFractionDistributionExact) { - indicatorTablesToJoin.put(roofFractionDistributionExact, grid_column_identifier) - } else { - info "Cannot compute the roof fraction distribution." 
- } + indicatorTablesToJoin.put(roofFractionDistributionExact, grid_column_identifier) + } if (list_indicators_upper.contains("FRONTAL_AREA_INDEX") && building) { @@ -2071,19 +1822,11 @@ String rasterizeIndicators(JdbcDataSource datasource, createScalesRelationsGridBl = Geoindicators.SpatialUnits.spatialJoin(datasource, building, grid, grid_column_identifier, null, prefixName) - if (!createScalesRelationsGridBl) { - info "Cannot compute the scales relations between buildings and grid cells." - return - } } def frontalAreaIndexDistribution = Geoindicators.RsuIndicators.frontalAreaIndexDistribution(datasource, createScalesRelationsGridBl, grid, grid_column_identifier, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], 12, true, prefixName) - if (frontalAreaIndexDistribution) { - indicatorTablesToJoin.put(frontalAreaIndexDistribution, grid_column_identifier) - } else { - info "Cannot compute the frontal area index." - } + indicatorTablesToJoin.put(frontalAreaIndexDistribution, grid_column_identifier) } } @@ -2116,16 +1859,14 @@ String rasterizeIndicators(JdbcDataSource datasource, tesselatedSeaLandTab, seaLandTypeField, seaLandTypeField, prefixName) tablesToDrop << tesselatedSeaLandTab - if (upperScaleAreaStatistics) { - // Modify columns name to postfix with "_FRACTION" - datasource """ + + // Modify columns name to postfix with "_FRACTION" + datasource """ ALTER TABLE ${upperScaleAreaStatistics} RENAME COLUMN TYPE_LAND TO LAND_FRACTION; ALTER TABLE ${upperScaleAreaStatistics} RENAME COLUMN TYPE_SEA TO SEA_FRACTION; ALTER TABLE ${upperScaleAreaStatistics} DROP COLUMN THE_GEOM;""" - indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) - } else { - info "Cannot compute the sea land fractions." - } + indicatorTablesToJoin.put(upperScaleAreaStatistics, grid_column_identifier) + } }else{ //Update the final table @@ -2141,19 +1882,11 @@ String rasterizeIndicators(JdbcDataSource datasource, createScalesRelationsGridBl = Geoindicators.SpatialUnits.spatialJoin(datasource, building, grid, grid_column_identifier, null, prefixName) - if (!createScalesRelationsGridBl) { - info "Cannot compute the scales relations between buildings and grid cells." - return - } } def svf_fraction = Geoindicators.RsuIndicators.groundSkyViewFactor(datasource, grid, grid_column_identifier, createScalesRelationsGridBl, 0.008, 100, 60, "grid") - if (svf_fraction) { - datasource """ ALTER TABLE ${svf_fraction} RENAME COLUMN GROUND_SKY_VIEW_FACTOR TO SVF""".toString() - indicatorTablesToJoin.put(svf_fraction, grid_column_identifier) - } else { - info "Cannot compute the sky view factor." - } + datasource """ ALTER TABLE ${svf_fraction} RENAME COLUMN GROUND_SKY_VIEW_FACTOR TO SVF""".toString() + indicatorTablesToJoin.put(svf_fraction, grid_column_identifier) tablesToDrop << svf_fraction } @@ -2166,17 +1899,10 @@ String rasterizeIndicators(JdbcDataSource datasource, createScalesRelationsGridBl = Geoindicators.SpatialUnits.spatialJoin(datasource, building, grid, grid_column_identifier, null, prefixName) - if (!createScalesRelationsGridBl) { - info "Cannot compute the scales relations between buildings and grid cells." - return - } } def frontalAreaIndexDistribution = Geoindicators.RsuIndicators.frontalAreaIndexDistribution(datasource, createScalesRelationsGridBl, grid, grid_column_identifier, - facadeDensListLayersBottom, facadeDensNumberOfDirection, false, prefixName,) - if (!frontalAreaIndexDistribution) { - info "Cannot compute the frontal area index at grid scale." 
- } + facadeDensListLayersBottom, facadeDensNumberOfDirection, false, prefixName) def tablesToJoin = [:] tablesToJoin.put(frontalAreaIndexDistribution, grid_column_identifier) @@ -2186,11 +1912,6 @@ String rasterizeIndicators(JdbcDataSource datasource, tablesToDrop << frontalAreaIndexDistribution def grid_for_roughness = Geoindicators.DataUtils.joinTables(datasource, tablesToJoin, "grid_for_roughness") - if (!grid_for_roughness) { - info "Cannot prepare the grid roughness indicators." - return - } - tablesToDrop << grid_for_roughness def effRoughHeight = Geoindicators.RsuIndicators.effectiveTerrainRoughnessLength(datasource, grid_for_roughness, @@ -2198,10 +1919,6 @@ String rasterizeIndicators(JdbcDataSource datasource, "frontal_area_index", "geom_avg_$heightColumnName", facadeDensListLayersBottom, facadeDensNumberOfDirection, prefixName) - if (!effRoughHeight) { - info "Cannot compute the the effective terrain roughness length at grid scale." - return - } indicatorTablesToJoin.put(effRoughHeight, grid_column_identifier) tablesToDrop << effRoughHeight @@ -2209,22 +1926,13 @@ String rasterizeIndicators(JdbcDataSource datasource, if (list_indicators_upper.contains("TERRAIN_ROUGHNESS_CLASS")) { def roughClass = Geoindicators.RsuIndicators.effectiveTerrainRoughnessClass(datasource, effRoughHeight, grid_column_identifier, "effective_terrain_roughness_length", prefixName) - if (!roughClass) { - info "Cannot compute the roughness class." - return - } indicatorTablesToJoin.put(roughClass, grid_column_identifier) tablesToDrop << roughClass } } - //Join all indicators at grid scale def joinGrids = Geoindicators.DataUtils.joinTables(datasource, indicatorTablesToJoin, grid_indicators_table) - if (!joinGrids) { - info "Cannot merge all indicators in grid table $grid_indicators_table." 
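rasterizeIndicators accumulates one table per indicator family, keyed by the grid cell id, and merges them in a single join at the end; since joinTables is expected to throw under this patch, the null guard on joinGrids being removed here is redundant. A condensed sketch using the names visible in the surrounding hunks:

    def indicatorTablesToJoin = [:]
    indicatorTablesToJoin.put(computeBuildingStats, grid_column_identifier)
    indicatorTablesToJoin.put(svf_fraction, grid_column_identifier)
    // one join at the end; a failure now propagates as an exception
    String joinGrids = Geoindicators.DataUtils.joinTables(datasource, indicatorTablesToJoin, grid_indicators_table)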
- return - } //Compute the aspect_ratio if (list_indicators_upper.contains("ASPECT_RATIO") && building) { @@ -2259,7 +1967,7 @@ String rasterizeIndicators(JdbcDataSource datasource, * @param building * @return */ -String cutBuilding(JdbcDataSource datasource, String grid, String building) { +String cutBuilding(JdbcDataSource datasource, String grid, String building) throws Exception{ String buildingCutted = postfix("building_cutted") datasource.createSpatialIndex(grid) datasource.createSpatialIndex(building) @@ -2275,7 +1983,7 @@ String cutBuilding(JdbcDataSource datasource, String grid, String building) { """.toString()) } catch (SQLException ex) { - buildingCutted = null + throw new SQLException("Cannot cut the building", ex) } return buildingCutted } @@ -2293,7 +2001,7 @@ String cutBuilding(JdbcDataSource datasource, String grid, String building) { String createGrid(JdbcDataSource datasource, Geometry envelope, int x_size, int y_size, - int srid, boolean rowCol = false) { + int srid, boolean rowCol = false) throws Exception{ //Start to compute the grid def grid_table_name = Geoindicators.SpatialUnits.createGrid(datasource, envelope, x_size, y_size, rowCol) if (grid_table_name) { @@ -2318,20 +2026,19 @@ String createGrid(JdbcDataSource datasource, * @param epsg srid code of the output table * @return The name of the final buildings table */ -String formatEstimatedBuilding(JdbcDataSource datasource, String inputTableName, int epsg, float h_lev_min = 3) { +String formatEstimatedBuilding(JdbcDataSource datasource, String inputTableName, int epsg, float h_lev_min = 3) throws Exception{ def outputTableName = postfix "INPUT_BUILDING_REFORMATED_" info 'Re-formating building layer' def outputEstimateTableName = "" - datasource """ + datasource.execute(""" DROP TABLE if exists ${outputTableName}; CREATE TABLE ${outputTableName} (THE_GEOM GEOMETRY(POLYGON, $epsg), id_build INTEGER, ID_SOURCE VARCHAR, HEIGHT_WALL FLOAT, HEIGHT_ROOF FLOAT, NB_LEV INTEGER, TYPE VARCHAR, MAIN_USE VARCHAR, ZINDEX INTEGER, ID_BLOCK INTEGER, ID_RSU INTEGER); - """ + """) if (inputTableName) { def queryMapper = "SELECT " - def inputSpatialTable = datasource."$inputTableName" - if (inputSpatialTable.rowCount > 0) { - def columnNames = inputSpatialTable.columns + if (datasource.getRowCount(inputTableName) > 0) { + def columnNames = datasource.getColumnNames(inputTableName) queryMapper += " ${columnNames.join(",")} FROM $inputTableName" datasource.withBatch(100) { stmt -> datasource.eachRow(queryMapper) { row -> @@ -2429,10 +2136,9 @@ static Map formatHeightsAndNbLevels(def heightWall, def heightRoof, def nbLevels * @param modelName * @return true if the model exists or if we can download it on the repository */ -static boolean modelCheck(String modelName) { +static boolean modelCheck(String modelName) throws Exception{ if (!modelName) { - error "Cannot find any model file" - return + throw new IllegalArgumentException("Cannot find any model file") } File inputModelFile = new File(modelName) def baseNameModel = FilenameUtils.getBaseName(modelName) @@ -2444,14 +2150,12 @@ static boolean modelCheck(String modelName) { if (!localInputModelFile.exists()) { FileUtils.copyURLToFile(new URL(modelURL), localInputModelFile) if (!localInputModelFile.exists()) { - error "Cannot find any model file" - return + throw new IllegalArgumentException("Cannot find any model file") } } } else { if (!FilenameUtils.isExtension(modelName, "model")) { - error "The extension of the model file must be .model" - return + throw new IllegalArgumentException("The extension of the model file must be .model")
} } return true @@ -2469,10 +2173,10 @@ static boolean modelCheck(String modelName) { * @param distance the erode and dilate the geometries * @return the sprawl_areas layer plus new distance columns on the input grid_indicators */ -Map sprawlIndicators(JdbcDataSource datasource, String grid_indicators, String id_grid, List list_indicators, float distance) { +Map sprawlIndicators(JdbcDataSource datasource, String grid_indicators, String id_grid, List list_indicators, + float distance) throws Exception{ if (!list_indicators) { - info "The list of indicator names cannot be null or empty" - return + throw new IllegalArgumentException("The list of indicator names cannot be null or empty") } //Concert the list of indicators to upper case diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowUtilities.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowUtilities.groovy index efb4391879..264e8ee219 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowUtilities.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowUtilities.groovy @@ -153,7 +153,7 @@ def saveToAscGrid(def outputTable, def subFolder, def filePrefix, JdbcDataSource if (outputTable && h2gis_datasource.hasTable(outputTable)) { def env if (!reproject) { - env = h2gis_datasource.getSpatialTable(outputTable).getExtent().getEnvelopeInternal() + env = h2gis_datasource.getExtent(outputTable).getEnvelopeInternal() } else { def geom = h2gis_datasource.firstRow("SELECT st_transform(ST_EXTENT(the_geom), $outputSRID) as geom from $outputTable".toString()).geom if (geom) { @@ -170,8 +170,7 @@ def saveToAscGrid(def outputTable, def subFolder, def filePrefix, JdbcDataSource double dy = env.getMaxY() - ymin def x_size = dy / nbrows - def IndicsTable = h2gis_datasource."$outputTable" - List columnNames = IndicsTable.columns + List columnNames = h2gis_datasource.getColumnNames(outputTable) columnNames.remove("THE_GEOM") columnNames.remove("ID_GRID") columnNames.remove("ID_COL") diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy index 6c9483d0e9..f0b37282a8 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy @@ -26,6 +26,7 @@ import org.junit.jupiter.api.io.TempDir import org.orbisgis.geoclimate.Geoindicators import static org.junit.jupiter.api.Assertions.assertEquals +import static org.junit.jupiter.api.Assertions.assertThrows import static org.orbisgis.data.H2GIS.open class BlockIndicatorsTests { @@ -102,4 +103,9 @@ class BlockIndicatorsTests { h2GIS.eachRow("SELECT * FROM test_block_closingness") { sum += it.closingness } assert 450 == sum } + + @Test + void holeAreaDensityExceptionTest() { + assertThrows(Exception.class, ()->Geoindicators.BlockIndicators.holeAreaDensity(h2GIS, "myblock_table", "test")) + } } \ No newline at end of file diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/DataUtilsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/DataUtilsTests.groovy index 2c41f9d229..8efa27ba27 100644 ---
a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/DataUtilsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/DataUtilsTests.groovy @@ -59,13 +59,10 @@ class DataUtilsTests { def p = Geoindicators.DataUtils.joinTables(h2GIS, [tablea: "ida", tableb: "idb", tablec: "idc"], "test") - assert p - - def table = h2GIS."${p}" - assert "IDA,NAME,LAB,LOCATION" == table.columns.join(",") - assert 1 == table.rowCount + assert "IDA,NAME,LAB,LOCATION" == h2GIS.getColumnNames(p).join(",") + assert 1 == h2GIS.getRowCount(p) - table.eachRow { assert it.lab.equals('CNRS') && it.location.equals('Vannes') } + h2GIS.getTable(p).eachRow { assert it.get("LAB").equals('CNRS') && it.get("LOCATION").equals('Vannes') } } @Test @@ -74,9 +71,9 @@ class DataUtilsTests { [tablea: "ida", tableb: "idb", tablec: "idc"], "test", true) assert p - def table = h2GIS."${p}" - assert "TABLEA_IDA,TABLEA_NAME,TABLEB_LAB,TABLEC_LOCATION" == table.columns.join(",") - assert 1 == table.rowCount + def table = h2GIS.getTable(p) + assert "TABLEA_IDA,TABLEA_NAME,TABLEB_LAB,TABLEC_LOCATION" == table.getColumnNames().join(",") + assert 1 == table.getRowCount() table.eachRow { assert it.tableb_lab.equals('CNRS') && it.tablec_location.equals('Vannes') } } @@ -89,7 +86,7 @@ class DataUtilsTests { directory) assert p - assert 1 == h2GIS.table(h2GIS.load(directory + File.separator + "tablegeom.fgb", true)).rowCount - assert 1 == h2GIS.table(h2GIS.load(directory + File.separator + "tablea.csv", true)).rowCount + assert 1 == h2GIS.getRowCount(h2GIS.load(directory + File.separator + "tablegeom.fgb", true)) + assert 1 == h2GIS.getRowCount(h2GIS.load(directory + File.separator + "tablea.csv", true)) } } diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy index 4a70d7d4ea..8a6f7a175b 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy @@ -168,7 +168,7 @@ class GenericIndicatorsTests { "test") assert p assert p == "test_geometry_properties" - h2GIS.test_geometry_properties.eachRow { + h2GIS.getTable(p).eachRow { row -> assert row.the_geom assert row.issimple @@ -364,11 +364,10 @@ class GenericIndicatorsTests { FROM building_test a, rsu_test b WHERE id_build < 4;""" // Test 1 - def p = Geoindicators.GenericIndicators.typeProportion(h2GIS, + assertThrows(Exception.class, ()-> Geoindicators.GenericIndicators.typeProportion(h2GIS, "tempo_build", "id_rsu", "type", "rsu_test", - null, null, "") - assertNull(p) + null, null, "")) } @Test @@ -398,7 +397,7 @@ class GenericIndicatorsTests { "RSU", ["AVG", "STD"], "test") assert gatheredScales1 - def finalColRsu = h2GIS."$gatheredScales1".columns.collect { it.toLowerCase() } + def finalColRsu = h2GIS.getColumnNames(gatheredScales1).collect { it.toLowerCase() } assertEquals colRsu.sort(), finalColRsu.sort() } @@ -432,7 +431,7 @@ class GenericIndicatorsTests { "tempo_rsu", "BUILDING", ["AVG", "STD"], "test") assert gatheredScales2 - def finalColBuild = h2GIS."$gatheredScales2".columns.collect { it.toLowerCase() } + def finalColBuild = h2GIS.getColumnNames(gatheredScales2).collect { it.toLowerCase() } assertEquals colBuild.sort(), finalColBuild.sort() } @@ -490,7 +489,7 @@ class GenericIndicatorsTests { assertNotNull(upperStats) def 
nb_indicators = h2GIS.rows "SELECT distinct ${indicatorName} AS nb FROM $indicatorTableName" - def columns = upperStats.getColumns() + def columns = upperStats.getColumnNames() columns.remove("ID_GRID") columns.remove("THE_GEOM") assertEquals(nb_indicators.size(), columns.size()) diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy index fd04fdc4f7..dfc8d1adfc 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy @@ -23,6 +23,7 @@ import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.TempDir +import org.orbisgis.data.H2GIS import org.orbisgis.geoclimate.Geoindicators import static org.junit.jupiter.api.Assertions.* @@ -33,7 +34,7 @@ class RsuIndicatorsTests { @TempDir static File folder - private static def h2GIS + private static H2GIS h2GIS @BeforeAll static void beforeAll() { @@ -176,8 +177,8 @@ class RsuIndicatorsTests { @Test void projectedFacadeAreaDistributionTest() { // Only the first 5 first created buildings are selected for the tests - h2GIS "DROP TABLE IF EXISTS tempo_build, test_rsu_projected_facade_area_distribution;" + - " CREATE TABLE tempo_build AS SELECT * FROM building_test WHERE id_build < 6" + h2GIS.execute("DROP TABLE IF EXISTS tempo_build, test_rsu_projected_facade_area_distribution;" + + " CREATE TABLE tempo_build AS SELECT * FROM building_test WHERE id_build < 6") def listLayersBottom = [0, 10, 20, 30, 40, 50] def numberOfDirection = 4 diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy index 436be71dbc..dcc68e6045 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy @@ -87,7 +87,7 @@ class SpatialUnitsTests { assertNotNull(outputTableGeoms) - assert h2GIS."$outputTableGeoms" + assert h2GIS.hasTable(outputTableGeoms) def outputTable = Geoindicators.SpatialUnits.createTSU(h2GIS, outputTableGeoms, "", "tsu") assert h2GIS.getSpatialTable(outputTable).save(new File(folder, "tsu.shp").getAbsolutePath(), true) @@ -196,7 +196,7 @@ class SpatialUnitsTests { assertNotNull(outputTableGeoms) - assert h2GIS."$outputTableGeoms" + assert h2GIS.hasTable(outputTableGeoms) def outputTable = Geoindicators.SpatialUnits.createTSU(h2GIS, outputTableGeoms, "", "tsu") def countRows = h2GIS.firstRow "select count(*) as numberOfRows from $outputTable" @@ -224,7 +224,7 @@ class SpatialUnitsTests { def box = wktReader.read('POLYGON((-5 -5, 5 -5, 5 5, -5 5, -5 -5))') def outputTable = Geoindicators.SpatialUnits.createGrid(postGIS, box, 1, 1) assert outputTable - assert postGIS."$outputTable" + assert postGIS.hasTable(outputTable) def countRows = postGIS.firstRow "select count(*) as numberOfRows from $outputTable" assert 100 == countRows.numberOfRows } diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy index ff97027a2f..80b96098b5 100644 --- 
a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy @@ -28,6 +28,7 @@ import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.Disabled import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.TempDir +import org.orbisgis.data.H2GIS import org.orbisgis.data.dataframe.DataFrame import org.orbisgis.geoclimate.Geoindicators import smile.classification.DataFrameClassifier @@ -43,11 +44,11 @@ class TypologyClassificationTests { @TempDir static File folder - private static def h2GIS + private static H2GIS h2GIS @BeforeAll static void beforeAll() { - h2GIS = open(folder.getAbsolutePath() + File.separator + "typologyClassificationTests;AUTO_SERVER=TRUE") + h2GIS = H2GIS.open(folder.getAbsolutePath() + File.separator + "typologyClassificationTests;AUTO_SERVER=TRUE") } @Test @@ -58,7 +59,7 @@ class TypologyClassificationTests { "AVG", "test") assertNotNull(pavg) def results = [:] - h2GIS."$pavg".eachRow { row -> + h2GIS.getTable(pavg).eachRow { row -> def id = row.id_rsu results[id] = [:] results[id]["LCZ_PRIMARY"] = row.LCZ_PRIMARY @@ -101,9 +102,9 @@ class TypologyClassificationTests { "buff_rsu_test_all_indics_for_lcz", "MEDIAN", "test") assertNotNull(pmed) - assert h2GIS."$pmed".columns.contains("THE_GEOM") + assert h2GIS.getColumnNames(pmed).contains("THE_GEOM") - h2GIS."$pmed".eachRow { + h2GIS.getTable(pmed).eachRow { row -> if (row.id_rsu == 1) { assert 1 == row.LCZ_PRIMARY @@ -165,7 +166,7 @@ class TypologyClassificationTests { def uuid = UUID.randomUUID().toString().replaceAll("-", "_") String savePath = new File(folder, "geoclimate_rf_${uuid}.model").getAbsolutePath() - def trainingTable = h2GIS.table(h2GIS.load(trainingURL, trainingTableName, true)) + def trainingTable = h2GIS.getTable(h2GIS.load(trainingURL, trainingTableName, true)) assert trainingTable // Variable to model @@ -175,7 +176,7 @@ class TypologyClassificationTests { def colsToRemove = ["PK2", "THE_GEOM", "PK"] // Remove unnecessary column - h2GIS "ALTER TABLE $trainingTableName DROP COLUMN ${colsToRemove.join(",")};" + h2GIS.execute("ALTER TABLE $trainingTableName DROP COLUMN ${colsToRemove.join(",")};") //Reload the table due to the schema modification trainingTable.reload() @@ -244,7 +245,7 @@ class TypologyClassificationTests { def namesStr = names.join(",") assert namesStr - def columns = trainingTable.columns + def columns = trainingTable.getColumnNames() assert columns columns = columns.minus(var2model) assert columns @@ -284,7 +285,7 @@ class TypologyClassificationTests { h2GIS.load(indicatorsPath, indicatorsTable) // Replace the id_rsu (coming from a specific city) by the id (coming from the true values of LCZ) - def allColumns = h2GIS.getTable(indicatorsTable).columns + def allColumns = h2GIS.getColumnNames(indicatorsTable) allColumns.remove("ID_RSU") allColumns.remove("ID") @@ -418,7 +419,7 @@ class TypologyClassificationTests { //Reload the table due to the schema modification h2GIS.getTable("tempo").reload() - def columns = h2GIS.getTable("tempo").getColumns() + def columns = h2GIS.getColumnNames("tempo") columns = columns.minus(var2model) h2GIS """ DROP TABLE IF EXISTS $trainingTableName; @@ -485,7 +486,7 @@ class TypologyClassificationTests { //Reload the table due to the schema modification h2GIS.getTable("tempo").reload() - def columns = h2GIS.getTable("tempo").getColumns() + def columns = h2GIS.getColumnNames("tempo") columns = 
columns.minus(var2model) h2GIS """ DROP TABLE IF EXISTS $trainingTableName; @@ -560,7 +561,7 @@ class TypologyClassificationTests { //Reload the table due to the schema modification h2GIS.getTable("tempo").reload() - def columns = h2GIS.getTable("tempo").getColumns() + def columns = h2GIS.getColumnNames("tempo") columns = columns.minus(var2model) h2GIS """ DROP TABLE IF EXISTS $trainingTableName; @@ -637,7 +638,7 @@ class TypologyClassificationTests { //Reload the table due to the schema modification h2GIS.getTable("tempo").reload() - def columns = h2GIS.getTable("tempo").getColumns() + def columns = h2GIS.getColumnNames("tempo") columns = columns.minus(var2model) h2GIS """ DROP TABLE IF EXISTS $trainingTableName; diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy index 12c71ac478..bce3a96b15 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy @@ -198,14 +198,14 @@ class WorkflowGeoIndicatorsTest { datasource.save(geoIndicatorsCompute_i.rsu_indicators, "/tmp/rsu.geojson" , true) assertNotNull(geoIndicatorsCompute_i) checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) List expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() List expectListRsu = expectListRsuTempo.toUnique() - List realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + List realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) @@ -213,14 +213,14 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) - def dfRsu = DataFrame.of(datasource."$geoIndicatorsCompute_i.rsu_indicators") + def dfRsu = DataFrame.of(datasource.getTable(geoIndicatorsCompute_i.rsu_indicators)) assertEquals dfRsu.nrows(), dfRsu.omitNullRows().nrows() - def dfBuild = DataFrame.of(datasource."$geoIndicatorsCompute_i.building_indicators") + def dfBuild = DataFrame.of(datasource.getTable(geoIndicatorsCompute_i.building_indicators)) dfBuild = dfBuild.drop("ID_RSU") assertEquals dfBuild.nrows(), dfBuild.omitNullRows().nrows() - def dfBlock = DataFrame.of(datasource."$geoIndicatorsCompute_i.block_indicators") + def dfBlock = DataFrame.of(datasource.getTable(geoIndicatorsCompute_i.block_indicators)) dfBlock = dfBlock.drop("ID_RSU") assertEquals dfBlock.nrows(), dfBlock.omitNullRows().nrows() @@ 
-241,7 +241,6 @@ class WorkflowGeoIndicatorsTest { def sum_fafrac_teb = datasource.firstRow("SELECT AVG(${listFloorBuildTypTeb.join("+")}) AS SUM_FRAC FROM ${"$geoIndicatorsCompute_i.rsu_indicators"} WHERE BUILDING_DIRECTION_UNIQUENESS <> -1") assertEquals sum_fafrac_teb.SUM_FRAC, 1.0, 0.01 } - } @Test @@ -262,11 +261,10 @@ class WorkflowGeoIndicatorsTest { checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) if (indicatorUse.contains("UTRF")) { - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) // Check that the sum of proportion (or building area) for each RSU is equal to 1 - def utrfArea = datasource."$geoIndicatorsCompute_i.rsu_utrf_area" - def colUtrfArea = utrfArea.getColumns() + def colUtrfArea = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_utrf_area) colUtrfArea = colUtrfArea.minus(["ID_RSU", "THE_GEOM", "TYPO_MAJ", "TYPO_SECOND", "UNIQUENESS_VALUE"]) def countSumAreaEqual1 = datasource.firstRow("""SELECT COUNT(*) AS NB FROM ${geoIndicatorsCompute_i.rsu_utrf_area} @@ -277,8 +275,7 @@ class WorkflowGeoIndicatorsTest { assertEquals countSumAreaRemove0.NB, countSumAreaEqual1.NB // Check that the sum of proportion (or building floor area) for each RSU is equal to 1 - def utrfFloorArea = datasource."$geoIndicatorsCompute_i.rsu_utrf_floor_area" - def colUtrfFloorArea = utrfFloorArea.getColumns() + def colUtrfFloorArea = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_utrf_floor_area) // Test that the TYPO_SECOND is inside the RSU UTRF table assertEquals 1, colUtrfFloorArea.count("TYPO_SECOND") @@ -293,7 +290,7 @@ class WorkflowGeoIndicatorsTest { assertEquals countSumFloorAreaRemove0.NB, countSumFloorAreaEqual1.NB // Check that all buildings being in the zone have a value different than 0 (0 being no value) - def dfBuild = DataFrame.of(datasource."$geoIndicatorsCompute_i.building_utrf") + def dfBuild = DataFrame.of(datasource.getTable(geoIndicatorsCompute_i.building_utrf)) def nbNull = datasource.firstRow("""SELECT COUNT(*) AS NB FROM ${geoIndicatorsCompute_i.building_utrf} WHERE I_TYPO = 'unknown'""") @@ -303,7 +300,7 @@ class WorkflowGeoIndicatorsTest { def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { @@ -313,7 +310,7 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } @@ -338,13 +335,13 @@ class WorkflowGeoIndicatorsTest { checkRSUIndicators(datasource, 
geoIndicatorsCompute_i.rsu_indicators) if (indicatorUse.contains("UTRF")) { - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) } def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) @@ -353,7 +350,7 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } @@ -379,13 +376,13 @@ class WorkflowGeoIndicatorsTest { checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) if (indicatorUse.contains("UTRF")) { - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) } def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) @@ -394,7 +391,7 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } @@ -419,13 +416,13 @@ class WorkflowGeoIndicatorsTest { checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) if (indicatorUse.contains("UTRF")) { - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + 
assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) } def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) @@ -434,7 +431,7 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } @@ -463,13 +460,13 @@ class WorkflowGeoIndicatorsTest { checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) if (indicatorUse.contains("UTRF")) { - assertEquals(listUrbTyp.Bu.sort(), datasource.getTable(geoIndicatorsCompute_i.building_indicators).columns.sort()) - assertEquals(listUrbTyp.Bl.sort(), datasource.getTable(geoIndicatorsCompute_i.block_indicators).columns.sort()) + assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) + assertEquals(listUrbTyp.Bl.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.block_indicators).sort()) } def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) @@ -478,7 +475,7 @@ class WorkflowGeoIndicatorsTest { def expectListLczTempo = listColLcz expectListLczTempo = expectListLczTempo + listColBasic def expectListLcz = expectListLczTempo.sort() - assertEquals(expectListLcz, datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort()) + assertEquals(expectListLcz, datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort()) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } @@ -503,13 +500,13 @@ class WorkflowGeoIndicatorsTest { def expectListRsuTempo = listColBasic + listColCommon expectListRsuTempo = (expectListRsuTempo + indicatorUse.collect { listNames[it] }).flatten() def expectListRsu = expectListRsuTempo.toUnique() - def realListRsu = datasource.getTable(geoIndicatorsCompute_i.rsu_indicators).columns + def realListRsu = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_indicators) // We test that there is no missing indicators in the RSU table for (i in expectListRsu) { assertTrue realListRsu.contains(i) } if (indicatorUse.contains("LCZ")) { - assertEquals("ID_RSU,LCZ_EQUALITY_VALUE,LCZ_PRIMARY,LCZ_SECONDARY,LCZ_UNIQUENESS_VALUE,MIN_DISTANCE,THE_GEOM", datasource.getTable(geoIndicatorsCompute_i.rsu_lcz).columns.sort().join(",")) + assertEquals("ID_RSU,LCZ_EQUALITY_VALUE,LCZ_PRIMARY,LCZ_SECONDARY,LCZ_UNIQUENESS_VALUE,MIN_DISTANCE,THE_GEOM", 
datasource.getColumnNames(geoIndicatorsCompute_i.rsu_lcz).sort().join(",")) } else { assertEquals(null, geoIndicatorsCompute_i.rsu_lcz) } diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy index 1c8eae62ad..78dff61ec1 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy @@ -45,7 +45,8 @@ import java.util.regex.Pattern * @return outputTableName The name of the final buildings table * @return outputEstimatedTableName The name of the table containing the state of estimation for each building */ -Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone = "", String urban_areas = "", int h_lev_min = 3, String jsonFilename = "") { +Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone = "", + String urban_areas = "", int h_lev_min = 3, String jsonFilename = "") throws Exception{ if (!h_lev_min) { h_lev_min = 3 } @@ -71,10 +72,9 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone def typeAndLevel = parametersMap.level def queryMapper = "SELECT " def columnToMap = parametersMap.columns - def inputSpatialTable = datasource."$building" - if (inputSpatialTable.rowCount > 0) { + if (datasource.getRowCount(building)> 0) { def heightPattern = Pattern.compile("((?:\\d+\\/|(?:\\d+|^|\\s)\\.)?\\d+)\\s*([^\\s\\d+\\-.,:;^\\/]+(?:\\^\\d+(?:\$|(?=[\\s:;\\/])))?(?:\\/[^\\s\\d+\\-.,:;^\\/]+(?:\\^\\d+(?:\$|(?=[\\s:;\\/])))?)*)?", Pattern.CASE_INSENSITIVE) - def columnNames = inputSpatialTable.columns + def columnNames = datasource.getColumnNames(building) columnNames.remove("THE_GEOM") queryMapper += columnsMapper(columnNames, columnToMap) queryMapper += " , st_force2D(a.the_geom) as the_geom FROM $building as a " @@ -188,8 +188,6 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone } } } - - //Improve building type using the urban areas table if (urban_areas) { datasource.createSpatialIndex(outputTableName, "the_geom") @@ -286,7 +284,7 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone * @return outputTableName The name of the final roads table */ String formatRoadLayer( - JdbcDataSource datasource, String road, String zone = "", String jsonFilename = "") { + JdbcDataSource datasource, String road, String zone = "", String jsonFilename = "") throws Exception{ debug('Formating road layer') def outputTableName = postfix "INPUT_ROAD" datasource """ @@ -437,7 +435,7 @@ String formatRoadLayer( * @param jsonFilename name of the json formatted file containing the filtering parameters * @return outputTableName The name of the final rails table */ -String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "", String jsonFilename = "") { +String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "", String jsonFilename = "") throws Exception{ debug('Rails transformation starts') def outputTableName = "INPUT_RAILS_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """ drop table if exists $outputTableName; @@ -525,7 +523,7 @@ String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "" * @param jsonFilename name of the json formatted file containing the filtering parameters * @return outputTableName The name of the final vegetation table */ -String 
formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "", String jsonFilename = "") { +String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "", String jsonFilename = "") throws Exception{ debug('Vegetation transformation starts') def outputTableName = postfix "INPUT_VEGET" datasource """ @@ -600,7 +598,7 @@ String formatVegetationLayer(JdbcDataSource datasource, String vegetation, Strin * @param zone an envelope to reduce the study area * @return outputTableName The name of the final hydro table */ -String formatWaterLayer(JdbcDataSource datasource, String water, String zone = "") { +String formatWaterLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception{ debug('Hydro transformation starts') def outputTableName = "INPUT_HYDRO_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """Drop table if exists $outputTableName; @@ -656,7 +654,7 @@ String formatWaterLayer(JdbcDataSource datasource, String water, String zone = " * @param zone an envelope to reduce the study area * @return outputTableName The name of the final impervious table */ -String formatImperviousLayer(JdbcDataSource datasource, String impervious, String zone = "", String jsonFilename = "") { +String formatImperviousLayer(JdbcDataSource datasource, String impervious, String zone = "", String jsonFilename = "") throws Exception{ debug('Impervious transformation starts') def outputTableName = "INPUT_IMPERVIOUS_${UUID.randomUUID().toString().replaceAll("-", "_")}" debug(impervious) @@ -1027,7 +1025,7 @@ static Map parametersMapping(def file, def altResourceStream) { * @param zone an envelope to reduce the study area * @return outputTableName The name of the final urban areas table */ -String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zone = "", String jsonFilename = "") { +String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zone = "", String jsonFilename = "") throws Exception{ debug('Urban areas transformation starts') def outputTableName = "INPUT_URBAN_AREAS_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """Drop table if exists $outputTableName; @@ -1105,7 +1103,7 @@ String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zo * @param water The name of the input water table to improve sea extraction * @return outputTableName The name of the final buildings table */ -String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zone = "", String water = "") { +String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zone = "", String water = "") throws Exception{ String outputTableName = postfix "INPUT_SEA_LAND_MASK_" datasource.execute """Drop table if exists $outputTableName; CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id serial, type varchar);""".toString() diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy index 82dc1b3a99..387d1bcdd6 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy @@ -44,10 +44,10 @@ import org.orbisgis.geoclimate.osmtools.utils.Utilities * railTableName, vegetationTableName, hydroTableName, zone, zoneEnvelopeTableName and urbanAreasTableName. 
* Note that the GIS tables are projected in a local utm projection */ -Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, float distance = 0, boolean downloadAllOSMData = true) { +Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, float distance = 0, + boolean downloadAllOSMData = true) throws Exception{ if (datasource == null) { - error('The datasource cannot be null') - return null + throw new Exception('The datasource cannot be null') } if (zoneToExtract) { def outputZoneTable = "ZONE_${UUID.randomUUID().toString().replaceAll("-", "_")}" @@ -55,16 +55,14 @@ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, f def GEOMETRY_TYPE = "GEOMETRY" Geometry geom = Utilities.getArea(zoneToExtract) if (!geom) { - error("Cannot find an area from the place name ${zoneToExtract}") - return null + throw new Exception("Cannot find an area from the place name ${zoneToExtract}") } if (geom instanceof Polygon) { GEOMETRY_TYPE = "POLYGON" } else if (geom instanceof MultiPolygon) { GEOMETRY_TYPE = "MULTIPOLYGON" } else { - error("Invalid geometry to extract the OSM data ${geom.getGeometryType()}") - return null + throw new Exception("Invalid geometry to extract the OSM data ${geom.getGeometryType()}") } /** @@ -118,15 +116,14 @@ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, f zone_envelope: outputZoneEnvelopeTable, coastline : results.coastline] } else { - error "Cannot load the OSM file ${extract}" + throw new Exception("Cannot load the OSM file ${extract}".toString()) } } else { - error "Cannot execute the overpass query $query" + throw new Exception("Cannot execute the overpass query $query".toString()) } } else { - error "The zone to extract cannot be null or empty" - return null + throw new Exception("The zone to extract cannot be null or empty") } } @@ -139,7 +136,7 @@ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, f * @return The name of the resulting GIS tables : buildingTableName, roadTableName, * railTableName, vegetationTableName, hydroTableName, imperviousTableName */ -Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1) { +Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1) throws Exception{ return createGISLayers( datasource, osmFilePath, null, epsg) } @@ -152,10 +149,10 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1 * @return The name of the resulting GIS tables : buildingTableName, roadTableName, * railTableName, vegetationTableName, hydroTableName, imperviousTableName */ -Map createGISLayers(JdbcDataSource datasource, String osmFilePath, org.locationtech.jts.geom.Geometry geometry, int epsg = -1) { +Map createGISLayers(JdbcDataSource datasource, String osmFilePath, + org.locationtech.jts.geom.Geometry geometry, int epsg = -1) throws Exception{ if (epsg <= -1) { - error "Invalid epsg code $epsg" - return null + throw new Exception("Invalid epsg code $epsg".toString()) } def prefix = "OSM_DATA_${UUID.randomUUID().toString().replaceAll("-", "_")}" debug "Loading" diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy index 0b0cf1c200..66f81cbf02 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy @@ -136,7 +136,7 @@ import java.sql.SQLException * Meteorological 
Society 93, no. 12 (2012): 1879-1900. * */ -Map workflow(def input) { +Map workflow(def input) throws Exception { //OSM workflow parameters Map parameters = null if (input) { @@ -144,25 +144,21 @@ Map workflow(def input) { //Check if it's a path to a file def configFile = new File(input) if (!configFile.isFile()) { - error "The configuration file doesn't exist" - return + throw new Exception("The configuration file doesn't exist") } if (!FileUtilities.isExtensionWellFormated(configFile, "json")) { - error "The configuration file must be a json file" - return + throw new Exception("The configuration file must be a json file") } parameters = Geoindicators.WorkflowUtilities.readJSON(configFile) } else if (input instanceof Map) { parameters = input } } else { - error "The input parameters cannot be null or empty.\n Please set a path to a configuration file or " + - "a map with all required parameters" - return + throw new Exception("The input parameters cannot be null or empty.\n Please set a path to a configuration file or " + + "a map with all required parameters") } if (!parameters) { - error "Wrong input parameters" - return + throw new Exception("Wrong input parameters") } debug "Reading file parameters" @@ -183,13 +179,11 @@ Map workflow(def input) { if (!tmp_folder_db.exists()) { if (!tmp_folder_db.mkdir()) { h2gis_folder = null - error "You don't have permission to write in the folder $h2gis_folder \n" + - "Please check the folder." - return + throw new Exception("You don't have permission to write in the folder $h2gis_folder \n" + + "Please check the folder.") } } else if (!tmp_folder_db.isDirectory()) { - error "Invalid output folder $h2gis_folder." - return + throw new Exception("Invalid output folder $h2gis_folder.") } databaseFolder = h2gis_folder } @@ -216,24 +210,21 @@ Map workflow(def input) { } } if (!inputParameters) { - error "Cannot find any input parameters." - return + throw new Exception("Cannot find any input parameters.") } def locations = inputParameters.locations as Set if (!locations) { - error "Please set at least one OSM location (place name or bounding box)." 
- return + throw new Exception("Please set at least one OSM location (place name or bounding box).") } def downloadAllOSMData = inputParameters.get("all") if (!downloadAllOSMData) { downloadAllOSMData = true } else if (!downloadAllOSMData in Boolean) { - error "The all parameter must be a boolean value" - return null + throw new Exception("The all parameter must be a boolean value") } - def osm_date= inputParameters.get("date") + def osm_date = inputParameters.get("date") def osm_size_area = inputParameters.get("area") @@ -241,15 +232,13 @@ Map workflow(def input) { //Default size in km² osm_size_area = 1000 } else if (osm_size_area < 0) { - error "The area of the bounding box to be extracted from OSM must be greater than 0 km²" - return null + throw new Exception("The area of the bounding box to be extracted from OSM must be greater than 0 km²") } def overpass_timeout = inputParameters.get("timeout") if (!overpass_timeout) { overpass_timeout = 900 } else if (overpass_timeout <= 180) { - error "The timeout value must be greater than the default value : 180 s" - return null + throw new Exception("The timeout value must be greater than the default value: 180 s") } def overpass_maxsize = inputParameters.get("maxsize") @@ -257,8 +246,7 @@ if (!overpass_maxsize) { overpass_maxsize = 536870912 } else if (overpass_maxsize <= 536870912) { - error "The maxsize value must be greater than the default value : 536870912 (512 MB)" - return null + throw new Exception("The maxsize value must be greater than the default value: 536870912 (512 MB)") } //Change the endpoint to get the overpass data @@ -273,8 +261,7 @@ if (!deleteOSMFile) { deleteOSMFile = false } else if (!Boolean.valueOf(deleteOSMFile)) { - error "The delete option must be false or true" - return null + throw new Exception("The delete option must be false or true") } def outputWorkflowTableNames = ["building_indicators", @@ -303,7 +290,7 @@ //Get processing parameters def processing_parameters = extractProcessingParameters(parameters.get("parameters")) if (!processing_parameters) { - return + throw new Exception("Invalid processing parameters") } def outputDatasource @@ -320,13 +307,11 @@ if (!deleteOutputData) { deleteOutputData = true } else if (!deleteOutputData in Boolean) { - error "The delete parameter must be a boolean value" - return null + throw new Exception("The delete parameter must be a boolean value") } outputSRID = outputParameter.get("srid") if (outputSRID && outputSRID <= 0) { - error "The output srid must be greater or equal than 0" - return null + throw new Exception("The output srid must be greater than 0") } if (outputFolder) { //Check if we can write in the output folder @@ -337,13 +322,11 @@ if (!file_outputFolder.exists()) { if (file_outputFolder.mkdir()) { file_outputFolder = null - error "You don't have permission to write in the folder $outputFolder \n" + - "Please check the folder." - return + throw new Exception("You don't have permission to write in the folder $outputFolder \n" + + "Please check the folder.".toString()) } } else if (!file_outputFolder.isDirectory()) { - error "Invalid output folder $file_outputFolder."
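/*
 * A minimal sketch of the new calling contract for workflow(...): it takes
 * either the path to a .json configuration file or a Map of the same shape,
 * and invalid parameters now surface as thrown exceptions instead of null
 * returns. Only keys this function actually reads are used (locations, area,
 * timeout, maxsize, all); the nesting under "input" and all values are
 * illustrative assumptions:
 */
try {
    Map results = workflow([
            input: [
                    locations: ["Redon"],  // illustrative place name; a bounding box also works
                    area     : 1000,       // max bbox size in km² (1000 is the default)
                    timeout  : 900,        // Overpass timeout, must be > 180 s
                    maxsize  : 1073741824, // Overpass maxsize, must be > 536870912 (512 MB)
                    all      : true        // download all supported OSM layers
            ]
    ])
} catch (Exception e) {
    warn("OSM workflow aborted: ${e.getLocalizedMessage()}")
}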
- return + throw new Exception("Invalid output folder $file_outputFolder.".toString()) } } if (outputDataBase) { @@ -357,17 +340,11 @@ Map workflow(def input) { if (locations && locations in Collection) { def h2gis_datasource = H2GIS.open(h2gis_properties) if (!h2gis_datasource) { - error "Cannot load the local H2GIS database to run Geoclimate" - return + throw new Exception("Cannot load the local H2GIS database to run Geoclimate") } - def logTableZones = postfix("log_zones") Map osmprocessing = osm_processing(h2gis_datasource, processing_parameters, locations.findAll { it }, file_outputFolder, outputFileTables, - outputDatasource, outputTables, outputSRID, downloadAllOSMData, deleteOutputData, deleteOSMFile, logTableZones, osm_size_area, - overpass_timeout, overpass_maxsize, osm_date) - if (!osmprocessing) { - h2gis_datasource.save(logTableZones,"${file_outputFolder.getAbsolutePath() + File.separator}logzones.fgb", true) - return null - } + outputDatasource, outputTables, outputSRID, downloadAllOSMData, deleteOutputData, deleteOSMFile, osm_size_area, + overpass_timeout, overpass_maxsize, osm_date, databaseFolder) if (delete_h2gis) { def localCon = h2gis_datasource.getConnection() if (localCon) { @@ -375,14 +352,13 @@ Map workflow(def input) { DeleteDbFiles.execute(databaseFolder, databaseName, true) debug "The local H2GIS database : ${databasePath} has been deleted" } else { - error "Cannot delete the local H2GIS database : ${databasePath} " + throw new Exception("Cannot delete the local H2GIS database : ${databasePath}".toString()) } } return osmprocessing } else { - error "Invalid OSM area from $locations" - return null + throw new Exception("Invalid OSM area from $locations".toString()) } } @@ -403,313 +379,267 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d File outputFolder, def ouputTableFiles, def output_datasource, def outputTableNames, def outputSRID, def downloadAllOSMData, def deleteOutputData, def deleteOSMFile, - def logTableZones, def bbox_size, - def overpass_timeout, def overpass_maxsize,def overpass_date) { + def bbox_size, + def overpass_timeout, def overpass_maxsize, def overpass_date, String databaseFolder) throws Exception { //Store the zone identifier and the names of the tables def outputTableNamesResult = [:] - //Create the table to log on the processed zone - h2gis_datasource.execute """DROP TABLE IF EXISTS $logTableZones; - CREATE TABLE $logTableZones (the_geom GEOMETRY(GEOMETRY, 4326), - location VARCHAR, info VARCHAR, version VARCHAR, build_number VARCHAR);""".toString() int nbAreas = id_zones.size() info "$nbAreas osm areas will be processed" id_zones.each { id_zone -> - //Extract the zone table and read its SRID - def zones = extractOSMZone(h2gis_datasource, id_zone, processing_parameters.distance, bbox_size) - if (zones) { - id_zone = id_zone in Collection ? 
id_zone.join('_') : id_zone - def utm_zone_table = zones.utm_zone_table - def utm_extended_bbox_table = zones.utm_extended_bbox_table - if (h2gis_datasource.getRowCount(utm_zone_table) == 0) { - error "Cannot find any geometry to define the zone to extract the OSM data" - return - } - def srid = zones.utm_srid - def reproject = false - if (outputSRID) { - if (outputSRID != srid) { - reproject = true + //Store the current OSM zone geometry; it can be null + Geometry osm_zone_geometry = null + try { + //Extract the zone table and read its SRID + def zones = extractOSMZone(h2gis_datasource, id_zone, processing_parameters.distance, bbox_size) + if (zones) { + osm_zone_geometry = zones.osm_geometry + id_zone = id_zone in Collection ? id_zone.join('_') : id_zone + def utm_zone_table = zones.utm_zone_table + def utm_extended_bbox_table = zones.utm_extended_bbox_table + def srid = zones.utm_srid + def reproject = false + if (outputSRID) { + if (outputSRID != srid) { + reproject = true + } + } else { + outputSRID = srid } - } else { - outputSRID = srid - } - //Prepare OSM extraction from the osm_envelope_extented - //TODO set key values ? - def osm_date="" - if(overpass_date){ - osm_date = "[date:\"$overpass_date\"]" - } - def query = "[timeout:$overpass_timeout][maxsize:$overpass_maxsize]$osm_date" + OSMTools.Utilities.buildOSMQuery(zones.osm_envelope_extented, null, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) - - if (downloadAllOSMData) { - //Create a custom OSM query to download all requiered data. It will take more time and resources - //because much more OSM elements will be returned - def keysValues = ["building", "railway", "amenity", - "leisure", "highway", "natural", - "landuse", "landcover", - "vegetation", "waterway", "area", "aeroway", "area:aeroway", "tourism", "sport", "power"] - query = "[timeout:$overpass_timeout][maxsize:$overpass_maxsize]$osm_date" + OSMTools.Utilities.buildOSMQueryWithAllData(zones.osm_envelope_extented, keysValues, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) - } - - def extract = OSMTools.Loader.extract(query) - if (extract) { + //Prepare OSM extraction from the osm_envelope_extented + //TODO set key values ? + def osm_date = "" + if (overpass_date) { + osm_date = "[date:\"$overpass_date\"]" + } + def query = "[timeout:$overpass_timeout][maxsize:$overpass_maxsize]$osm_date" + OSMTools.Utilities.buildOSMQuery(zones.osm_envelope_extented, null, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) + + if (downloadAllOSMData) { + //Create a custom OSM query to download all required data. It will take more time and resources + //because much more OSM elements will be returned
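/*
 * A minimal sketch of the Overpass header shared by both query branches
 * above; the date value is illustrative and the query body is appended by
 * buildOSMQuery or buildOSMQueryWithAllData:
 */
def overpass_timeout = 900                       // seconds, the default
def overpass_maxsize = 536870912                 // bytes, the default (512 MB)
def osm_date = "[date:\"2023-01-01T00:00:00Z\"]" // empty string when no date is set
def header = "[timeout:$overpass_timeout][maxsize:$overpass_maxsize]$osm_date"
// query = header + OSMTools.Utilities.buildOSMQuery(zones.osm_envelope_extented, null,
//         OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION)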
+ def keysValues = ["building", "railway", "amenity", + "leisure", "highway", "natural", + "landuse", "landcover", + "vegetation", "waterway", "area", "aeroway", "area:aeroway", "tourism", "sport", "power"] + query = "[timeout:$overpass_timeout][maxsize:$overpass_maxsize]$osm_date" + OSMTools.Utilities.buildOSMQueryWithAllData(zones.osm_envelope_extented, keysValues, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) + } + def extract = OSMTools.Loader.extract(query) //We must build the GIS layers on the extended bbox area Geometry utm_extented_geom = h2gis_datasource.getExtent(utm_extended_bbox_table) utm_extented_geom.setSRID(srid) Map gisLayersResults = OSM.InputDataLoading.createGISLayers(h2gis_datasource, extract, utm_extented_geom, srid) - if (gisLayersResults) { - if (deleteOSMFile) { - if (new File(extract).delete()) { - debug "The osm file ${extract}has been deleted" - } + + if (deleteOSMFile) { + if (new File(extract).delete()) { + debug "The osm file ${extract} has been deleted" } - def rsu_indicators_params = processing_parameters.rsu_indicators - def grid_indicators_params = processing_parameters.grid_indicators - def road_traffic = processing_parameters.road_traffic - def worldpop_indicators = processing_parameters.worldpop_indicators + } + def rsu_indicators_params = processing_parameters.rsu_indicators + def grid_indicators_params = processing_parameters.grid_indicators + def road_traffic = processing_parameters.road_traffic + def worldpop_indicators = processing_parameters.worldpop_indicators - info "Formating OSM GIS layers" - //Format urban areas - String urbanAreasTable = OSM.InputDataFormatting.formatUrbanAreas(h2gis_datasource, gisLayersResults.urban_areas, utm_extended_bbox_table) + info "Formatting OSM GIS layers" + //Format urban areas + String urbanAreasTable = OSM.InputDataFormatting.formatUrbanAreas(h2gis_datasource, gisLayersResults.urban_areas, utm_extended_bbox_table) - info "Urban areas formatted" - /* - * Do not filter the data when formatting becausethe job is already done when extracting osm data * - */ - Map formatBuilding = OSM.InputDataFormatting.formatBuildingLayer( - h2gis_datasource, gisLayersResults.building, - null, urbanAreasTable, - processing_parameters.hLevMin) + info "Urban areas formatted" + /* + * Do not filter the data when formatting because the job is already done when extracting osm data * + */ + Map formatBuilding = OSM.InputDataFormatting.formatBuildingLayer( + h2gis_datasource, gisLayersResults.building, + null, urbanAreasTable, + processing_parameters.hLevMin) - info "Building formatted" - def buildingTableName = formatBuilding.building - def buildingEstimateTableName = formatBuilding.building_estimated + info "Building formatted" + def buildingTableName = formatBuilding.building + def buildingEstimateTableName = formatBuilding.building_estimated - String railTableName = OSM.InputDataFormatting.formatRailsLayer(h2gis_datasource, gisLayersResults.rail, null) + String railTableName = OSM.InputDataFormatting.formatRailsLayer(h2gis_datasource, gisLayersResults.rail, null) - info "Rail formatted" + info "Rail formatted" - String vegetationTableName = OSM.InputDataFormatting.formatVegetationLayer(h2gis_datasource, gisLayersResults.vegetation, utm_extended_bbox_table) + String vegetationTableName = OSM.InputDataFormatting.formatVegetationLayer(h2gis_datasource, gisLayersResults.vegetation, utm_extended_bbox_table) - info "Vegetation formatted" + info
"Vegetation formatted" - String hydrographicTableName = OSM.InputDataFormatting.formatWaterLayer( - h2gis_datasource, gisLayersResults.water, - utm_extended_bbox_table) + String hydrographicTableName = OSM.InputDataFormatting.formatWaterLayer( + h2gis_datasource, gisLayersResults.water, + utm_extended_bbox_table) - info "Water formatted" + info "Water formatted" - String imperviousTableName = OSM.InputDataFormatting.formatImperviousLayer( - h2gis_datasource, gisLayersResults.impervious, utm_extended_bbox_table) + String imperviousTableName = OSM.InputDataFormatting.formatImperviousLayer( + h2gis_datasource, gisLayersResults.impervious, utm_extended_bbox_table) - info "Impervious formatted" + info "Impervious formatted" - //Sea/Land mask - String seaLandMaskTableName = OSM.InputDataFormatting.formatSeaLandMask( - h2gis_datasource, gisLayersResults.coastline, utm_extended_bbox_table, hydrographicTableName) + //Sea/Land mask + String seaLandMaskTableName = OSM.InputDataFormatting.formatSeaLandMask( + h2gis_datasource, gisLayersResults.coastline, utm_extended_bbox_table, hydrographicTableName) - info "Sea/Land formatted" + info "Sea/Land formatted" - if(h2gis_datasource.getRowCount(seaLandMaskTableName)>0){ + if (h2gis_datasource.getRowCount(seaLandMaskTableName) > 0) { //Select the water and sea features h2gis_datasource.execute """Drop table if exists $hydrographicTableName; CREATE TABLE $hydrographicTableName as select the_geom, id as id_water, cast(0 as integer) as zindex, type from $seaLandMaskTableName where type in ('water', 'sea') """.toString() - } - - //Format road - String roadTableName = OSM.InputDataFormatting.formatRoadLayer( - h2gis_datasource, gisLayersResults.road, - utm_extended_bbox_table) - - info "Road formatted" - - info "All layers have been formatted" - - //Drop the intermediate GIS layers - h2gis_datasource.dropTable(gisLayersResults.values().toArray(new String[0])) - - //Add the GIS layers to the list of results - def results = [:] - results.put("zone", utm_zone_table) - results.put("road", roadTableName) - results.put("rail", railTableName) - results.put("water", hydrographicTableName) - results.put("vegetation", vegetationTableName) - results.put("impervious", imperviousTableName) - results.put("urban_areas", urbanAreasTable) - results.put("building", buildingTableName) - results.put("sea_land_mask", seaLandMaskTableName) - results.put("building_height_missing", buildingEstimateTableName) - - //Compute traffic flow - if (road_traffic) { - String format_traffic = Geoindicators.RoadIndicators.build_road_traffic(h2gis_datasource, roadTableName) - results.put("road_traffic", format_traffic) - } + } - //Compute the RSU indicators - if (rsu_indicators_params.indicatorUse) { - String estimateHeight = rsu_indicators_params."estimateHeight" ? 
"BUILDING_HEIGHT_OSM_RF_2_2.model" : "" - rsu_indicators_params.put("utrfModelName", "UTRF_OSM_RF_2_2.model") - rsu_indicators_params.put("buildingHeightModelName", estimateHeight) - Map geoIndicators = Geoindicators.WorkflowGeoIndicators.computeAllGeoIndicators( - h2gis_datasource, utm_zone_table, - buildingTableName, roadTableName, - railTableName, vegetationTableName, - hydrographicTableName, imperviousTableName, - buildingEstimateTableName, - seaLandMaskTableName, - urbanAreasTable,"", - rsu_indicators_params, - processing_parameters.prefixName) - if (!geoIndicators) { - error "Cannot build the geoindicators for the zone $id_zone" - h2gis_datasource.execute(""" - INSERT INTO $logTableZones VALUES(st_geomfromtext('${zones.geometry}',4326) , - '$id_zone', 'Error computing geoindicators', - '${Geoindicators.version()}', - '${Geoindicators.buildNumber()}' )""".toString()) - return - } else { - results.putAll(geoIndicators) - } - } + //Format road + String roadTableName = OSM.InputDataFormatting.formatRoadLayer( + h2gis_datasource, gisLayersResults.road, + utm_extended_bbox_table) + + info "Road formatted" + + info "All layers have been formatted" + + //Drop the intermediate GIS layers + h2gis_datasource.dropTable(gisLayersResults.values().toArray(new String[0])) + + //Add the GIS layers to the list of results + def results = [:] + results.put("zone", utm_zone_table) + results.put("road", roadTableName) + results.put("rail", railTableName) + results.put("water", hydrographicTableName) + results.put("vegetation", vegetationTableName) + results.put("impervious", imperviousTableName) + results.put("urban_areas", urbanAreasTable) + results.put("building", buildingTableName) + results.put("sea_land_mask", seaLandMaskTableName) + results.put("building_height_missing", buildingEstimateTableName) + + //Compute traffic flow + if (road_traffic) { + String format_traffic = Geoindicators.RoadIndicators.build_road_traffic(h2gis_datasource, roadTableName) + results.put("road_traffic", format_traffic) + } - //Extract and compute population indicators for the specified year - //This data can be used by the grid_indicators process - if (worldpop_indicators) { - def bbox = [zones.osm_envelope_extented.getMinY() as Float, zones.osm_envelope_extented.getMinX() as Float, - zones.osm_envelope_extented.getMaxY() as Float, zones.osm_envelope_extented.getMaxX() as Float] - String coverageId = "wpGlobal:ppp_2020" - String worldPopFile = WorldPopTools.Extract.extractWorldPopLayer(coverageId, bbox) - if (worldPopFile) { - String worldPopTableName = WorldPopTools.Extract.importAscGrid(h2gis_datasource, worldPopFile, srid, coverageId.replaceAll(":", "_")) - if (worldPopTableName) { - results.put("population", worldPopTableName) - String buildingWithPop = Geoindicators.BuildingIndicators.buildingPopulation(h2gis_datasource, results.building, worldPopTableName, ["pop"]) - h2gis_datasource.dropTable(worldPopTableName) - if (!buildingWithPop) { - info "Cannot compute any population data at building level" - } - else{ - h2gis_datasource.dropTable(results.building) - //Update the building table with the population data - results.put("building", buildingWithPop) - } + //Compute the RSU indicators + if (rsu_indicators_params.indicatorUse) { + String estimateHeight = rsu_indicators_params."estimateHeight" ? 
"BUILDING_HEIGHT_OSM_RF_2_2.model" : "" + rsu_indicators_params.put("utrfModelName", "UTRF_OSM_RF_2_2.model") + rsu_indicators_params.put("buildingHeightModelName", estimateHeight) + Map geoIndicators = Geoindicators.WorkflowGeoIndicators.computeAllGeoIndicators( + h2gis_datasource, utm_zone_table, + buildingTableName, roadTableName, + railTableName, vegetationTableName, + hydrographicTableName, imperviousTableName, + buildingEstimateTableName, + seaLandMaskTableName, + urbanAreasTable, "", + rsu_indicators_params, + processing_parameters.prefixName) + results.putAll(geoIndicators) + } + //Extract and compute population indicators for the specified year + //This data can be used by the grid_indicators process + if (worldpop_indicators) { + def bbox = [zones.osm_envelope_extented.getMinY() as Float, zones.osm_envelope_extented.getMinX() as Float, + zones.osm_envelope_extented.getMaxY() as Float, zones.osm_envelope_extented.getMaxX() as Float] + String coverageId = "wpGlobal:ppp_2020" + String worldPopFile = WorldPopTools.Extract.extractWorldPopLayer(coverageId, bbox) + if (worldPopFile) { + String worldPopTableName = WorldPopTools.Extract.importAscGrid(h2gis_datasource, worldPopFile, srid, coverageId.replaceAll(":", "_")) + if (worldPopTableName) { + results.put("population", worldPopTableName) + String buildingWithPop = Geoindicators.BuildingIndicators.buildingPopulation(h2gis_datasource, results.building, worldPopTableName, ["pop"]) + h2gis_datasource.dropTable(worldPopTableName) + if (!buildingWithPop) { + info "Cannot compute any population data at building level" } else { - info "Cannot import the worldpop asc file $worldPopFile" - info "Create a default empty worldpop table" - def outputTableWorldPopName = postfix "world_pop" - h2gis_datasource.execute("""drop table if exists $outputTableWorldPopName; - create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $srid), ID_POP INTEGER, POP FLOAT);""".toString()) - results.put("population", outputTableWorldPopName) + h2gis_datasource.dropTable(results.building) + //Update the building table with the population data + results.put("building", buildingWithPop) } } else { - info "Cannot find the population grid $coverageId \n Create a default empty worldpop table" + info "Cannot import the worldpop asc file $worldPopFile" + info "Create a default empty worldpop table" def outputTableWorldPopName = postfix "world_pop" h2gis_datasource.execute("""drop table if exists $outputTableWorldPopName; - create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $srid), ID_POP INTEGER, POP FLOAT);""".toString()) + create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $srid), ID_POP INTEGER, POP FLOAT);""".toString()) results.put("population", outputTableWorldPopName) } + + } else { + info "Cannot find the population grid $coverageId \n Create a default empty worldpop table" + def outputTableWorldPopName = postfix "world_pop" + h2gis_datasource.execute("""drop table if exists $outputTableWorldPopName; + create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $srid), ID_POP INTEGER, POP FLOAT);""".toString()) + results.put("population", outputTableWorldPopName) } - def noise_indicators = processing_parameters.noise_indicators - - def geomEnv; - if (noise_indicators) { - if (noise_indicators.ground_acoustic) { - geomEnv = h2gis_datasource.getSpatialTable(utm_zone_table).getExtent() - def outputTable = Geoindicators.SpatialUnits.createGrid(h2gis_datasource, geomEnv, 200, 200) - if (outputTable) { - String ground_acoustic 
= Geoindicators.NoiseIndicators.groundAcousticAbsorption(h2gis_datasource, outputTable, "id_grid", - results.building, roadTableName, hydrographicTableName, - vegetationTableName, imperviousTableName) - if (ground_acoustic) { - results.put("ground_acoustic", ground_acoustic) - } - h2gis_datasource.execute("DROP TABLE IF EXISTS $outputTable".toString()) - } - } - } - //Default - def outputGrid = "fgb" - if (grid_indicators_params) { - info("Start computing grid_indicators") - if (!geomEnv) { - geomEnv = h2gis_datasource.getSpatialTable(utm_zone_table).getExtent() - } - outputGrid = grid_indicators_params.output - def x_size = grid_indicators_params.x_size - def y_size = grid_indicators_params.y_size - String grid = Geoindicators.WorkflowGeoIndicators.createGrid(h2gis_datasource, geomEnv, - x_size, y_size, srid, grid_indicators_params.rowCol) - if (grid) { - String rasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(h2gis_datasource, grid, - grid_indicators_params.indicators, - grid_indicators_params.lcz_lod, - results.building, roadTableName, vegetationTableName, - hydrographicTableName, imperviousTableName, - results.rsu_lcz, - results.rsu_utrf_area, - results.rsu_utrf_floor_area, - seaLandMaskTableName, - processing_parameters.prefixName) - if (rasterizedIndicators) { - h2gis_datasource.dropTable(grid) - results.put("grid_indicators", rasterizedIndicators) - def sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource,rasterizedIndicators, "id_grid", grid_indicators_params.indicators, - Math.max(x_size,y_size).floatValue()) - if(sprawl_indic){ - results.put("sprawl_areas", sprawl_indic.sprawl_areas) - results.put("grid_indicators", sprawl_indic.grid_indicators) - } - info("End computing grid_indicators") - } - } else { - info "Cannot create a grid to aggregate the indicators" - h2gis_datasource.execute("""INSERT INTO $logTableZones - VALUES(st_geomfromtext('${zones.osm_geometry}',4326) ,'$id_zone', 'Error computing the grid indicators' - '${Geoindicators.version()}', - '${Geoindicators.buildNumber()}')""".toString()) + } + def noise_indicators = processing_parameters.noise_indicators + + def geomEnv; + if (noise_indicators) { + if (noise_indicators.ground_acoustic) { + geomEnv = h2gis_datasource.getSpatialTable(utm_zone_table).getExtent() + def outputTable = Geoindicators.SpatialUnits.createGrid(h2gis_datasource, geomEnv, 200, 200) + + String ground_acoustic = Geoindicators.NoiseIndicators.groundAcousticAbsorption(h2gis_datasource, outputTable, "id_grid", + results.building, roadTableName, hydrographicTableName, + vegetationTableName, imperviousTableName) + if (ground_acoustic) { + results.put("ground_acoustic", ground_acoustic) } - } - - if (outputFolder && ouputTableFiles) { - saveOutputFiles(h2gis_datasource, id_zone, results, ouputTableFiles, outputFolder, "osm_", outputSRID, reproject, deleteOutputData, outputGrid) + h2gis_datasource.execute("DROP TABLE IF EXISTS $outputTable".toString()) } - if (output_datasource) { - saveTablesInDatabase(output_datasource, h2gis_datasource, outputTableNames, results, id_zone, srid, outputSRID, reproject) + } + //Default + def outputGrid = "fgb" + if (grid_indicators_params) { + info("Start computing grid_indicators") + if (!geomEnv) { + geomEnv = h2gis_datasource.getSpatialTable(utm_zone_table).getExtent() } + outputGrid = grid_indicators_params.output + def x_size = grid_indicators_params.x_size + def y_size = grid_indicators_params.y_size + String grid = 
Geoindicators.WorkflowGeoIndicators.createGrid(h2gis_datasource, geomEnv, + x_size, y_size, srid, grid_indicators_params.rowCol) + String rasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(h2gis_datasource, grid, + grid_indicators_params.indicators, + grid_indicators_params.lcz_lod, + results.building, roadTableName, vegetationTableName, + hydrographicTableName, imperviousTableName, + results.rsu_lcz, + results.rsu_utrf_area, + results.rsu_utrf_floor_area, + seaLandMaskTableName, + processing_parameters.prefixName) + if (rasterizedIndicators) { + h2gis_datasource.dropTable(grid) + results.put("grid_indicators", rasterizedIndicators) + def sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource, rasterizedIndicators, "id_grid", grid_indicators_params.indicators, + Math.max(x_size, y_size).floatValue()) + if (sprawl_indic) { + results.put("sprawl_areas", sprawl_indic.sprawl_areas) + results.put("grid_indicators", sprawl_indic.grid_indicators) + } + info("End computing grid_indicators") + } + } - outputTableNamesResult.put(id_zone in Collection ? id_zone.join("_") : id_zone, results.findAll { it.value != null }) - - h2gis_datasource.dropTable(Geoindicators.getCachedTableNames()) - - } else { - h2gis_datasource.execute("""INSERT INTO $logTableZones - VALUES(st_geomfromtext('${zones.osm_geometry}',4326) ,'$id_zone', 'Error loading the OSM file', - '${Geoindicators.version()}', - '${Geoindicators.buildNumber()}')""".toString()) - error "Cannot load the OSM file ${extract}" - return + if (outputFolder && ouputTableFiles) { + saveOutputFiles(h2gis_datasource, id_zone, results, ouputTableFiles, outputFolder, "osm_", + outputSRID, reproject, deleteOutputData, outputGrid) } - } else { - //Log in table - h2gis_datasource.execute("""INSERT INTO $logTableZones - VALUES(st_geomfromtext('${zones.osm_geometry}',4326) ,'$id_zone', 'Error to extract the data with OverPass' - ,'${Geoindicators.version()}', '${Geoindicators.buildNumber()}')""".toString()) - error "Cannot execute the overpass query $query" - return + if (output_datasource) { + saveTablesInDatabase(output_datasource, h2gis_datasource, outputTableNames, results, id_zone, srid, outputSRID, reproject) + } + outputTableNamesResult.put(id_zone in Collection ? id_zone.join("_") : id_zone, results.findAll { it.value != null }) + h2gis_datasource.dropTable(Geoindicators.getCachedTableNames()) } - } else { - //Log in table - h2gis_datasource.execute("""INSERT INTO $logTableZones - VALUES(null,'$id_zone', 'Error to extract the zone with Nominatim', - '${Geoindicators.version()}', - '${Geoindicators.buildNumber()}')""".toString()) - return + } catch (Exception e) { + saveLogZoneTable(h2gis_datasource, databaseFolder, id_zone in Collection ? id_zone.join("_") : id_zone, osm_zone_geometry, e.getLocalizedMessage()) + //Swallow the exception and process the remaining zones + warn("The zone $id_zone has not been processed. Please check the log table to get more information.")
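/*
 * A condensed sketch of the per-zone error isolation introduced above: each
 * zone runs in its own try/catch, failures are logged through
 * saveLogZoneTable (defined just below) and the loop moves on to the next
 * zone. processZone is a hypothetical placeholder for the body shown above:
 */
id_zones.each { id_zone ->
    try {
        processZone(id_zone)   // hypothetical stand-in for the zone processing body
    } catch (Exception e) {
        saveLogZoneTable(h2gis_datasource, databaseFolder,
                id_zone.toString(), null, e.getLocalizedMessage())
        warn("The zone $id_zone has not been processed.")
    }
}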
} } if (outputTableNamesResult) { @@ -717,6 +647,35 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d } } +/** + * Method to log a message in a table and save it to a file + * @param dataSource + * @param databaseFolder + * @param id_zone + * @param osm_geometry + * @param message + * @throws Exception + */ +void saveLogZoneTable(JdbcDataSource dataSource, String databaseFolder, String id_zone, Geometry osm_geometry, String message) throws Exception { + def logTableZones = postfix("log_zones") + //Create the table to log on the processed zone + dataSource.execute("""DROP TABLE IF EXISTS $logTableZones; + CREATE TABLE $logTableZones (the_geom GEOMETRY(GEOMETRY, 4326), + location VARCHAR, info VARCHAR, version VARCHAR, build_number VARCHAR);""") + if (osm_geometry == null) { + dataSource.execute("""INSERT INTO $logTableZones + VALUES(null,'$id_zone', '$message', + '${Geoindicators.version()}', + '${Geoindicators.buildNumber()}')""") + } else { + dataSource.execute("""INSERT INTO $logTableZones + VALUES(st_geomfromtext('${osm_geometry}',4326) ,'$id_zone', '$message', + '${Geoindicators.version()}', + '${Geoindicators.buildNumber()}')""") + } + dataSource.save(logTableZones, databaseFolder + File.separator + "log_zones_" + id_zone + ".fgb", true) +} + /** * Extract the OSM zone and its envelope area from Nominatim API * @@ -730,23 +689,21 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d * osm_geometry the geometry that represents the processed zone in lat/lon * utm_srid the UTM srid code */ -def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_size) { +def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_size) throws Exception{ def outputZoneTable = "ZONE_${UUID.randomUUID().toString().replaceAll("-", "_")}".toString() def outputZoneEnvelopeTable = "ZONE_ENVELOPE_${UUID.randomUUID().toString().replaceAll("-", "_")}".toString() if (zoneToExtract) { def GEOMETRY_TYPE = "GEOMETRY" Geometry geom = OSMTools.Utilities.getArea(zoneToExtract) if (!geom) { - error("Cannot find an area from the location ${zoneToExtract}") - return null + throw new Exception("Cannot find an area from the location ${zoneToExtract}".toString()) } if (geom instanceof Polygon) { GEOMETRY_TYPE = "POLYGON" } else if (geom instanceof MultiPolygon) { GEOMETRY_TYPE = "MULTIPOLYGON" } else { - error("Invalid geometry to extract the OSM data ${geom.getGeometryType()}") - return null + throw new Exception("Invalid geometry to extract the OSM data ${geom.getGeometryType()}".toString()) } /** @@ -764,9 +721,8 @@ //Check the size of the bbox if ((source_geom_utm.getEnvelopeInternal().getArea() / 1.0E+6) >= bbox_size) { - error("The size of the OSM BBOX is greated than the limit : ${bbox_size} in km².\n" + - "Please increase the area parameter if you want to skip this limit.") - return null + throw new Exception("The size of the OSM BBOX is greater than the limit : ${bbox_size} in km².\n" + + "Please increase the area parameter if you want to skip this limit.".toString()) } def lat_lon_bbox_geom_extended = geom.getFactory().toGeometry(lat_lon_bbox_extended) lat_lon_bbox_geom_extended.setSRID(4326) @@ -782,15 +738,13 @@ return ["utm_zone_table" : outputZoneTable, "utm_extended_bbox_table": outputZoneEnvelopeTable, - "osm_envelope_extented" : lat_lon_bbox_extended, - "osm_geometry" : geom, -
"utm_srid" : epsg + "osm_envelope_extented" : lat_lon_bbox_extended, + "osm_geometry" : geom, + "utm_srid" : epsg ] } else { - error "The zone to extract cannot be null or empty" - return null + throw new Exception("The zone to extract cannot be null or empty") } - return null } @@ -801,24 +755,24 @@ def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_siz * @param processing_parameters the file parameters * @return a filled map of parameters */ -def extractProcessingParameters(def processing_parameters) { +def extractProcessingParameters(def processing_parameters) throws Exception{ def defaultParameters = [distance: 0f, prefixName: "", hLevMin : 3] - def rsu_indicators_default = [indicatorUse : [], - svfSimplified : true, - surface_vegetation: 10000f, - surface_hydro : 2500f, - surface_urban_areas : 10000f, - snappingTolerance : 0.01f, - mapOfWeights : ["sky_view_factor" : 4, - "aspect_ratio" : 3, - "building_surface_fraction" : 8, - "impervious_surface_fraction" : 0, - "pervious_surface_fraction" : 0, - "height_of_roughness_elements": 6, - "terrain_roughness_length" : 0.5], - estimateHeight : true, - utrfModelName : "UTRF_OSM_RF_2_2.model"] + def rsu_indicators_default = [indicatorUse : [], + svfSimplified : true, + surface_vegetation : 10000f, + surface_hydro : 2500f, + surface_urban_areas: 10000f, + snappingTolerance : 0.01f, + mapOfWeights : ["sky_view_factor" : 4, + "aspect_ratio" : 3, + "building_surface_fraction" : 8, + "impervious_surface_fraction" : 0, + "pervious_surface_fraction" : 0, + "height_of_roughness_elements": 6, + "terrain_roughness_length" : 0.5], + estimateHeight : true, + utrfModelName : "UTRF_OSM_RF_2_2.model"] defaultParameters.put("rsu_indicators", rsu_indicators_default) if (processing_parameters) { @@ -846,8 +800,7 @@ def extractProcessingParameters(def processing_parameters) { if (allowedOutputRSUIndicators) { rsu_indicators_default.indicatorUse = indicatorUseP } else { - error "Please set a valid list of RSU indicator names in ${allowedOutputRSUIndicators}" - return + throw new Exception("Please set a valid list of RSU indicator names in ${allowedOutputRSUIndicators}".toString()) } } else { rsu_indicators_default.indicatorUse = [] @@ -881,8 +834,7 @@ def extractProcessingParameters(def processing_parameters) { if (mapOfWeightsP && mapOfWeightsP in Map) { Map defaultmapOfWeights = rsu_indicators_default.mapOfWeights if ((defaultmapOfWeights + mapOfWeightsP).size() != defaultmapOfWeights.size()) { - error("The number of mapOfWeights parameters must contain exactly the parameters ${defaultmapOfWeights.keySet().join(",")}") - return + throw new Exception(("The number of mapOfWeights parameters must contain exactly the parameters ${defaultmapOfWeights.keySet().join(",")}".toString())) } else { rsu_indicators_default.mapOfWeights = mapOfWeightsP } @@ -899,19 +851,17 @@ def extractProcessingParameters(def processing_parameters) { def list_indicators = grid_indicators.indicators if (x_size && y_size) { if (x_size <= 0 || y_size <= 0) { - error "Invalid grid size padding. Must be greater that 0" - return + throw new Exception( "Invalid grid size padding. 
Must be greater that 0") } if (!list_indicators) { - error "The list of indicator names cannot be null or empty" - return + throw new Exception( "The list of indicator names cannot be null or empty") } def allowed_grid_indicators = ["BUILDING_FRACTION", "BUILDING_HEIGHT", "BUILDING_POP", "BUILDING_TYPE_FRACTION", "WATER_FRACTION", "VEGETATION_FRACTION", - "ROAD_FRACTION", "IMPERVIOUS_FRACTION", "UTRF_AREA_FRACTION","UTRF_FLOOR_AREA_FRACTION", + "ROAD_FRACTION", "IMPERVIOUS_FRACTION", "UTRF_AREA_FRACTION", "UTRF_FLOOR_AREA_FRACTION", "LCZ_FRACTION", "LCZ_PRIMARY", "FREE_EXTERNAL_FACADE_DENSITY", "BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", "BUILDING_HEIGHT_DIST", "FRONTAL_AREA_INDEX", "SEA_LAND_FRACTION", "ASPECT_RATIO", "SVF", - "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS","SPRAWL_AREAS", + "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "SPRAWL_AREAS", "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"] def allowedOutputIndicators = allowed_grid_indicators.intersect(list_indicators*.toUpperCase()) if (allowedOutputIndicators) { @@ -942,9 +892,8 @@ def extractProcessingParameters(def processing_parameters) { } def lcz_lod = grid_indicators.lcz_lod if (lcz_lod && lcz_lod in Integer) { - if (lcz_lod < 0 && lcz_lod >10) { - error "The number of level of details to aggregate the LCZ must be between 0 and 10" - return + if (lcz_lod < 0 && lcz_lod > 10) { + throw new Exception( "The number of level of details to aggregate the LCZ must be between 0 and 10") } grid_indicators_tmp.put("lcz_lod", lcz_lod) } @@ -954,8 +903,7 @@ def extractProcessingParameters(def processing_parameters) { } defaultParameters.put("grid_indicators", grid_indicators_tmp) } else { - error "Please set a valid list of indicator names in ${allowed_grid_indicators}" - return + throw new Exception( "Please set a valid list of indicator names in ${allowed_grid_indicators}") } } } @@ -998,7 +946,8 @@ def extractProcessingParameters(def processing_parameters) { * @param outputGrid file format of the grid * @return */ -def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFiles, def ouputFolder, def subFolderName, def outputSRID, def reproject, def deleteOutputData, def outputGrid) { +def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFiles, def ouputFolder, def subFolderName, def outputSRID, + def reproject, def deleteOutputData, def outputGrid) throws Exception{ //Create a subfolder to store each results def folderName = id_zone in Collection ? 
id_zone.join("_") : id_zone def subFolder = new File(ouputFolder.getAbsolutePath() + File.separator + subFolderName + folderName) @@ -1033,7 +982,8 @@ def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFi * @param reproject the output table * @return */ -def saveTablesInDatabase(JdbcDataSource output_datasource, JdbcDataSource h2gis_datasource, def outputTableNames, def h2gis_tables, def id_zone, def inputSRID, def outputSRID, def reproject) { +def saveTablesInDatabase(JdbcDataSource output_datasource, JdbcDataSource h2gis_datasource, def outputTableNames, + def h2gis_tables, def id_zone, def inputSRID, def outputSRID, def reproject) throws Exception{ //Export building indicators indicatorTableBatchExportTable(output_datasource, outputTableNames.building_indicators, id_zone, h2gis_datasource, h2gis_tables.building_indicators , "WHERE ID_RSU IS NOT NULL", inputSRID, outputSRID, reproject) @@ -1136,7 +1086,9 @@ def saveTablesInDatabase(JdbcDataSource output_datasource, JdbcDataSource h2gis_ * @param outputSRID srid code used to reproject the output table * @return */ -def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, def reproject) { +def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, + def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, + def filter, def inputSRID, def outputSRID, def reproject) throws Exception{ if (output_table) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { if (output_datasource.hasTable(output_table)) { @@ -1146,10 +1098,10 @@ def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, def out int BATCH_MAX_SIZE = 100 ITable inputRes = prepareTableOutput(h2gis_table_to_save, filter, inputSRID, h2gis_datasource, output_table, outputSRID, output_datasource) if (inputRes) { - def outputColumns = output_datasource.getTable(output_table).getColumnsTypes() + def outputColumns = output_datasource.getColumnNamesTypes(output_table) def outputconnection = output_datasource.getConnection() try { - def inputColumns = inputRes.getColumnsTypes(); + def inputColumns = inputRes.getColumnNamesTypes(); //We check if the number of columns is not the same //If there is more columns in the input table we alter the output table def outPutColumnsNames = outputColumns.keySet() @@ -1255,7 +1207,9 @@ def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, def out * @param outputSRID srid code used to reproject the output table * @return */ -def indicatorTableBatchExportTable(def output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, def reproject) { +def indicatorTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, + def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, + def reproject) throws Exception{ if (output_table) { if (h2gis_table_to_save) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { @@ -1266,11 +1220,11 @@ def indicatorTableBatchExportTable(def output_datasource, def output_table, def int BATCH_MAX_SIZE = 100; ITable inputRes = prepareTableOutput(h2gis_table_to_save, filter, inputSRID, h2gis_datasource, output_table, outputSRID, output_datasource) if (inputRes) { - def outputColumns = output_datasource.getTable(output_table).getColumnsTypes(); + def outputColumns = 
output_datasource.getColumnNamesTypes(output_table) outputColumns.remove("gid") def outputconnection = output_datasource.getConnection() try { - def inputColumns = inputRes.getColumnsTypes(); + def inputColumns = inputRes.getColumnNamesTypes() //We check if the number of columns is not the same //If there is more columns in the input table we alter the output table def outPutColumnsNames = outputColumns.keySet() @@ -1379,12 +1333,13 @@ def indicatorTableBatchExportTable(def output_datasource, def output_table, def * @param output_datasource * @return */ -def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, H2GIS h2gis_datasource, def output_table, def outputSRID, def output_datasource) { +def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, H2GIS h2gis_datasource, + def output_table, def outputSRID, def output_datasource) throws Exception{ def targetTableSrid = output_datasource.getSpatialTable(output_table).srid if (filter) { if (outputSRID == 0) { if (inputSRID == targetTableSrid) { - inputRes = h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() + return h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() } else { if (targetTableSrid == 0 && inputSRID == 0) { return h2gis_datasource.getTable(h2gis_table_to_save).filter(filter).getTable() @@ -1452,10 +1407,9 @@ def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, H2GIS * @return */ Map buildGeoclimateLayers(JdbcDataSource datasource, Object zoneToExtract, - float distance = 500, int hLevMin = 3) { + float distance = 500, int hLevMin = 3) throws Exception{ if (datasource == null) { - error "Cannot access to the database to store the osm data" - return + throw new Exception("Cannot access the database to store the OSM data") } debug "Building OSM GIS layers" diff --git a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy index 707679e3cf..a8e51356ed 100644 --- a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy +++ b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy @@ -464,7 +464,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { "terrain_roughness_class" : 1]] ] ] - OSM.workflow(osm_parmeters) + assertThrows(Exception.class, ()->OSM.workflow(osm_parmeters)) } @Test @@ -570,7 +570,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { "svfSimplified": true] ] ] - OSM.workflow(osm_parmeters) + assertThrows(Exception.class, ()->OSM.workflow(osm_parmeters)) } diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy index 14fc2f9e5b..4b035a5239 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy @@ -49,14 +49,12 @@ import static org.orbisgis.geoclimate.osmtools.utils.OSMElement.* * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) { +Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) throws Exception{ if (!datasource) { - error("No datasource provided.") - return + throw new Exception("No datasource provided.") } if (!filterArea) { - error("Filter area not defined") - return + throw new Exception("Filter area not defined") } def outputZoneTable =
postfix "ZONE" def outputZoneEnvelopeTable = postfix "ZONE_ENVELOPE" @@ -71,18 +69,17 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) { geom = Utilities.geometryFromValues(filterArea) } else { - error "The filter area must be an Envelope or a Polygon" - return + throw new Exception("The filter area must be an Envelope or a Polygon") } def epsg = DEFAULT_SRID def env = org.h2gis.utilities.GeographyUtilities.expandEnvelopeByMeters(geom.getEnvelopeInternal(), distance) //Create table to store the geometry and the envelope of the extracted area - datasource """ + datasource.execute(""" CREATE TABLE $outputZoneTable (the_geom GEOMETRY(POLYGON, $epsg)); INSERT INTO $outputZoneTable VALUES (ST_GEOMFROMTEXT('${geom}', $epsg)); - """.toString() + """) def geometryFactory = new GeometryFactory() def geomEnv = geometryFactory.toGeometry(env) @@ -102,10 +99,10 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) { prefix : osmTablesPrefix, epsg : epsg] } else { - error "Cannot load the OSM data from the area $filterArea" + throw new Exception("Cannot load the OSM data from the area $filterArea".toString()) } } else { - error "Cannot download OSM data from the area $filterArea" + throw new Exception("Cannot download OSM data from the area $filterArea".toString()) } } @@ -123,14 +120,12 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) { * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) { +Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) throws Exception{ if (!placeName) { - error("Cannot find an area from a void place name.") - return + throw new Exception("Cannot find an area from a void place name.") } if (!datasource) { - error("No datasource provided.") - return + throw new Exception("No datasource provided.") } def formatedPlaceName = placeName.trim().replaceAll("([\\s|,|\\-|])+", "_") def outputZoneTable = postfix "ZONE_$formatedPlaceName" @@ -141,32 +136,29 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) { Map nominatimRes = OSMTools.Utilities.getNominatimData(placeName); if(!nominatimRes){ - error("Cannot find an area from the place name $placeName") - return + throw new Exception("Cannot find an area from the place name $placeName".toString()) } def geom = nominatimRes["geom"] if (!geom) { - error("Cannot find an area from the place name $placeName") - return + throw new Exception("Cannot find an area from the place name $placeName".toString()) } if (distance < 0) { - error("Cannot use a negative distance") - return + throw new Exception("Cannot use a negative distance") } def env = org.h2gis.utilities.GeographyUtilities.expandEnvelopeByMeters(geom.getEnvelopeInternal(), distance) //Create table to store the geometry and the envelope of the extracted area - datasource """ + datasource.execute(""" CREATE TABLE $outputZoneTable (the_geom GEOMETRY(POLYGON, $epsg), ID_ZONE VARCHAR); INSERT INTO $outputZoneTable VALUES (ST_GEOMFROMTEXT('${geom}', $epsg), '$placeName'); - """ + """) def geometryFactory = new GeometryFactory() def geomEnv = geometryFactory.toGeometry(env) - datasource """ + datasource.execute(""" CREATE TABLE $outputZoneEnvelopeTable (the_geom GEOMETRY(POLYGON, $epsg), ID_ZONE VARCHAR); INSERT INTO $outputZoneEnvelopeTable VALUES (ST_GEOMFROMTEXT('$geomEnv',$epsg), '$placeName'); - """ + """) def query = 
OSMTools.Utilities.buildOSMQuery(geomEnv, [], NODE, WAY, RELATION) String extract = extract(query) @@ -178,11 +170,11 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) { envelope: outputZoneEnvelopeTable, prefix : osmTablesPrefix] } else { - error "Cannot load the OSM data from the place $placeName" + throw new Exception("Cannot load the OSM data from the place $placeName".toString()) } } else { - error "Cannot download OSM data from the place $placeName" + throw new Exception("Cannot download OSM data from the place $placeName".toString()) } } @@ -194,11 +186,10 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) { * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -String extract(String overpassQuery) { +String extract(String overpassQuery) throws Exception{ info "Extract the OSM data" if (!overpassQuery) { - error "The query should not be null or empty." - return + throw new Exception("The query should not be null or empty.") } def bboxUrl = OSMTools.Utilities.utf8ToUrl(overpassQuery); //hash the query to cache it @@ -213,8 +204,7 @@ String extract(String overpassQuery) { info "The OSM file has been downloaded at ${osmFilePath}." } else { outputOSMFile.delete() - error "Cannot extract the OSM data for the query $overpassQuery" - return + throw new Exception("Cannot extract the OSM data for the query $overpassQuery") } } } else { @@ -226,8 +216,7 @@ String extract(String overpassQuery) { info "The OSM file has been downloaded at ${osmFilePath}." } else { outputOSMFile.delete() - error "Cannot extract the OSM data for the query $overpassQuery" - return + throw new Exception("Cannot extract the OSM data for the query $overpassQuery") } } } @@ -246,26 +235,22 @@ String extract(String overpassQuery) { * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePath) { +boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePath) throws Exception{ if (!datasource) { - error "Please set a valid database connection." - return false + throw new Exception("Please set a valid database connection.") } if (!osmTablesPrefix || !Pattern.compile('^[a-zA-Z0-9_]*$').matcher(osmTablesPrefix).matches()) { - error "Please set a valid table prefix." - return false + throw new Exception("Please set a valid table prefix.") } if (!osmFilePath) { - error "Please set a valid osm file path." - return false + throw new Exception("Please set a valid osm file path.") } def osmFile = new File(osmFilePath) if (!osmFile.exists()) { - error "The input OSM file does not exist." - return false + throw new Exception("The input OSM file does not exist.") } info "Load the OSM file in the database." @@ -273,8 +258,7 @@ boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePa info "The input OSM file has been loaded in the database." 
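// Illustrative caller-side sketch (the prefix and file path below are hypothetical):
// because load() reports failures by throwing rather than returning false, callers
// wrap the call instead of testing a boolean result, e.g.
//   try {
//       OSMTools.Loader.load(ds, "OSM_EXAMPLE", "/tmp/area.osm")
//   } catch (Exception e) {
//       warn("OSM import failed: ${e.getMessage()}")
//   }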
//We must check if there is some data at least one tag if (datasource.getRowCount("${osmTablesPrefix}_node".toString())==0) { - error "The downloaded OSM file doesn't contain any data.\n Please check the file ${osmFile} to see what happens.".toString() - return false + throw new Exception("The downloaded OSM file doesn't contain any data.\n Please check the file ${osmFile} to see what happens.".toString()) } return true } diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy index 123c3952b9..1a35e830bc 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy @@ -44,7 +44,8 @@ import static org.orbisgis.geoclimate.osmtools.utils.GeometryTypes.POLYGONS * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Lesaux (UBS LAB-STICC) */ -String toPoints(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], Geometry geometry) { +String toPoints(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], + Geometry geometry) { String outputTableName = postfix "OSM_POINTS" def pointsNodes = OSMTools.TransformUtils.extractNodesAsPoints(datasource, osmTablesPrefix, epsgCode, outputTableName, tags, columnsToKeep, geometry) if (pointsNodes) { diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy index 60c0bbe66a..399f9f0c25 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy @@ -124,8 +124,8 @@ String toPolygonOrLine(GeometryTypes type, JdbcDataSource datasource, String osm if (outputWay && outputRelation) { //Merge ways and relations - def columnsWays = datasource.getTable(outputWay).columns - def columnsRelations = datasource.getTable(outputRelation).columns + def columnsWays = datasource.getColumnNames(outputWay) + def columnsRelations = datasource.getColumnNames(outputRelation) def allColumns = arrayUnion(true, columnsWays, columnsRelations) def leftSelect = "" def rightSelect = "" @@ -248,26 +248,21 @@ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, * @author Elisabeth Lesaux (UBS LAB-STICC) */ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode, - String outputNodesPoints, def tags, def columnsToKeep, Geometry geometry) { + String outputNodesPoints, def tags, def columnsToKeep, Geometry geometry) throws Exception{ if (!datasource) { - error("The datasource should not be null") - return false + throw new Exception("The datasource should not be null") } if (osmTablesPrefix == null) { - error "Invalid null OSM table prefix" - return false + throw new Exception("Invalid null OSM table prefix") } if (epsgCode == -1) { - error "Invalid EPSG code" - return false + throw new Exception("Invalid EPSG code") } if (tags == null) { - error "The tag list cannot be null" - return + throw new Exception("The tag list cannot be null") } if (outputNodesPoints == null) { - error "Invalid null output node points table name" - return false + throw new Exception("Invalid null output node points table name") } def tableNode = "${osmTablesPrefix}_node" 
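// Working tables produced by the raw OSM import for this prefix: "${osmTablesPrefix}_node"
// holds the node geometries and "${osmTablesPrefix}_node_tag" (below) their key/value tags.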
def tableNodeTag = "${osmTablesPrefix}_node_tag" @@ -516,14 +511,12 @@ def createTagList(JdbcDataSource datasource, def selectTableQuery, List columnsT * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Lesaux (UBS LAB-STICC) */ -def buildIndexes(JdbcDataSource datasource, String osmTablesPrefix) { +def buildIndexes(JdbcDataSource datasource, String osmTablesPrefix) throws Exception{ if (!datasource) { - error "The datasource should not be null." - return false + throw new Exception("The datasource should not be null.") } if (!osmTablesPrefix) { - error "The osmTablesPrefix should not be null or empty." - return false + throw new Exception("The osmTablesPrefix should not be null or empty.") } datasource.execute """ CREATE INDEX IF NOT EXISTS ${osmTablesPrefix}_node_index ON ${osmTablesPrefix}_node(id_node); diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy index a3570cce19..27904f7a73 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy @@ -47,32 +47,28 @@ import static java.nio.charset.StandardCharsets.UTF_8 * * @return a New geometry. */ -Map getNominatimData(def placeName) { +Map getNominatimData(def placeName) throws Exception{ if (!placeName) { - error "The place name should not be null or empty." - return null + throw new Exception("The place name should not be null or empty.") } def outputOSMFile = File.createTempFile("nominatim_osm", ".geojson") if (!executeNominatimQuery(placeName, outputOSMFile)) { if (!outputOSMFile.delete()) { warn "Unable to delete the file '$outputOSMFile'." } - warn "Unable to execute the Nominatim query." - return null + throw new Exception("Unable to execute the Nominatim query.") } def jsonRoot = new JsonSlurper().parse(outputOSMFile) if (jsonRoot == null) { - error "Cannot find any data from the place $placeName." - return null + throw new Exception("Cannot find any data from the place $placeName.".toString()) } if (jsonRoot.features.size() == 0) { - error "Cannot find any features from the place $placeName." if (!outputOSMFile.delete()) { warn "Unable to delete the file '$outputOSMFile'." } - return null + throw new Exception("Cannot find any features from the place $placeName.".toString()) } GeometryFactory geometryFactory = new GeometryFactory() @@ -183,20 +179,18 @@ static Geometry getArea(def location) { * * @return A polygon. */ -Polygon parsePolygon(def coordinates, GeometryFactory geometryFactory) { +Polygon parsePolygon(def coordinates, GeometryFactory geometryFactory) throws Exception{ if (!coordinates in Collection || !coordinates || !coordinates[0] in Collection || !coordinates[0] || !coordinates[0][0] in Collection || !coordinates[0][0]) { - error "The given coordinate should be an array of an array of an array of coordinates (3D array)." - return null + throw new Exception("The given coordinate should be an array of an array of an array of coordinates (3D array).") } def ring try { ring = geometryFactory.createLinearRing(arrayToCoordinate(coordinates[0])) } catch (IllegalArgumentException e) { - error e.getMessage() - return null + throw new Exception(e) } if (coordinates.size() == 1) { return geometryFactory.createPolygon(ring) @@ -239,14 +233,12 @@ static Coordinate[] arrayToCoordinate(def coordinates) { * @return True if the file has been downloaded, false otherwise. 
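* @throws Exception if the query is null, if the output target is not a File, or if the Nominatim request fails.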
* */ -boolean executeNominatimQuery(def query, def outputOSMFile) { +boolean executeNominatimQuery(def query, def outputOSMFile) throws Exception{ if (!query) { - error "The Nominatim query should not be null." - return false + throw new Exception("The Nominatim query should not be null.") } if (!(outputOSMFile instanceof File)) { - error "The OSM file should be an instance of File" - return false + throw new Exception("The OSM file should be an instance of File") } def endPoint = System.getProperty("NOMINATIM_ENDPOINT"); if (!endPoint) { @@ -281,8 +273,7 @@ boolean executeNominatimQuery(def query, def outputOSMFile) { outputOSMFile << connection.inputStream return true } else { - error "Cannot execute the Nominatim query." - return false + throw new Exception("Cannot execute the Nominatim query.") } } @@ -297,10 +288,9 @@ boolean executeNominatimQuery(def query, def outputOSMFile) { * * @return OSM bbox. */ -String toBBox(Geometry geometry) { +String toBBox(Geometry geometry) throws Exception{ if (!geometry) { - error "Cannot convert to an overpass bounding box." - return null + throw new Exception("Cannot convert to an overpass bounding box.") } def env = geometry.getEnvelopeInternal() return "(bbox:${env.getMinY()},${env.getMinX()},${env.getMaxY()},${env.getMaxX()})".toString() @@ -317,19 +307,16 @@ String toBBox(Geometry geometry) { * * @return The OSM polygon. */ -String toPoly(Geometry geometry) { +String toPoly(Geometry geometry) throws Exception{ if (!geometry) { - error "Cannot convert to an overpass poly filter." - return null + throw new Exception("Cannot convert to an overpass poly filter.") } if (!(geometry instanceof Polygon)) { - error "The input geometry must be polygon." - return null + throw new Exception("The input geometry must be a polygon.") } def poly = (Polygon) geometry if (poly.isEmpty()) { - error "The input geometry must be polygon." - return null + throw new Exception("The input polygon cannot be empty.") } Coordinate[] coordinates = poly.getExteriorRing().getCoordinates() def polyStr = "(poly:\"" @@ -352,7 +339,7 @@ String toPoly(Geometry geometry) { * * @return A string representation of the OSM query. */ -String buildOSMQuery(Envelope envelope, def keys=null) { +String buildOSMQuery(Envelope envelope, def keys=null) throws Exception{ return buildOSMQuery(envelope, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -369,10 +356,9 @@ String buildOSMQuery(Envelope envelope, def keys=null) { * * @return A string representation of the OSM query. */ - String buildOSMQuery(Envelope envelope, def keys, OSMElement... osmElement) { + String buildOSMQuery(Envelope envelope, def keys, OSMElement... osmElement) throws Exception{ if (!envelope) { - error "Cannot create the overpass query from the bbox $envelope." - return null + throw new Exception("Cannot create the overpass query from the bbox $envelope.".toString()) } def query = "[bbox:${envelope.getMinY()},${envelope.getMinX()},${envelope.getMaxY()},${envelope.getMaxX()}];\n(\n" osmElement.each { i -> @@ -398,7 +384,7 @@ String buildOSMQuery(Envelope envelope, def keys=null) { * * @return A string representation of the OSM query.
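* @throws Exception if the coordinate list is null or does not describe a valid bbox.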
*/ -String buildOSMQuery(List latLonCoordinates, def keys=null) { +String buildOSMQuery(List latLonCoordinates, def keys=null) throws Exception{ return buildOSMQuery(latLonCoordinates, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -413,15 +399,13 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) { * * @return A string representation of the OSM query. */ - String buildOSMQuery(List latLonCoordinates, def keys, OSMElement... osmElement) { + String buildOSMQuery(List latLonCoordinates, def keys, OSMElement... osmElement) throws Exception{ if (!latLonCoordinates) { - error "Cannot create the overpass query from the bbox $latLonCoordinates." - return null + throw new Exception("Cannot create the overpass query from the bbox $latLonCoordinates.".toString()) } Geometry geom = OSMTools.Utilities.geometryFromValues(latLonCoordinates) if(geom==null) { - error "Invalid BBOX" - return null + throw new Exception("Invalid BBOX") } return buildOSMQuery(geom.getEnvelopeInternal(), keys, osmElement) } @@ -439,10 +423,9 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) { * * @return A string representation of the OSM query. */ - String buildOSMQueryWithAllData(Envelope envelope, def keys, OSMElement... osmElement) { + String buildOSMQueryWithAllData(Envelope envelope, def keys, OSMElement... osmElement) throws Exception{ if (!envelope) { - error "Cannot create the overpass query from the bbox $envelope." - return null + throw new Exception("Cannot create the overpass query from the bbox $envelope.".toString()) } def query = "[bbox:${envelope.getMinY()},${envelope.getMinX()},${envelope.getMaxY()},${envelope.getMaxX()}];\n((\n" osmElement.each { i -> @@ -469,7 +452,7 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) { * * @return A string representation of the OSM query. */ -String buildOSMQuery(Polygon polygon, def keys=null) { +String buildOSMQuery(Polygon polygon, def keys=null) throws Exception{ return buildOSMQuery(polygon, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -485,14 +468,12 @@ String buildOSMQuery(Polygon polygon, def keys=null) { * * @return A string representation of the OSM query. */ -String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) { +String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) throws Exception{ if (polygon == null) { - error "Cannot create the overpass query from a null polygon." - return null + throw new Exception("Cannot create the overpass query from a null polygon.") } if (polygon.isEmpty()) { - error "Cannot create the overpass query from an empty polygon." - return null + throw new Exception("Cannot create the overpass query from an empty polygon.") } Envelope envelope = polygon.getEnvelopeInternal() def query = "[bbox:${envelope.getMinY()},${envelope.getMinX()},${envelope.getMaxY()},${envelope.getMaxX()}];\n(\n" @@ -528,10 +509,9 @@ String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) { * * @return A Map of parameters. 
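* @throws Exception if the given file is null or empty.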
*/ -Map readJSONParameters(def jsonFile) { +Map readJSONParameters(def jsonFile) throws Exception{ if (!jsonFile) { - error "The given file should not be null" - return null + throw new Exception("The given file should not be null") } def file if (jsonFile instanceof InputStream) { diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy index 920af3cc4e..169a8064de 100644 --- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy +++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy @@ -83,8 +83,8 @@ class LoaderTest extends AbstractOSMToolsTest { */ @Test void badFromAreaTest() { - assertNull OSMTools.Loader.fromArea(ds, null) - assertNull OSMTools.Loader.fromArea(ds, "A string") + assertThrows(Exception.class, ()-> OSMTools.Loader.fromArea(ds, null)) + assertThrows(Exception.class, ()-> OSMTools.Loader.fromArea(ds, "A string")) } /** @@ -108,14 +108,14 @@ class LoaderTest extends AbstractOSMToolsTest { def zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 1, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") + assertTrue zone.getColumnNames().contains("THE_GEOM") zone.next() assertEquals "POLYGON ((-3.076869 48.733493, -3.076869 48.733995, -3.075829 48.733995, -3.075829 48.733493, -3.076869 48.733493))", zone.getGeometry(1).toText() def zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 1, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") + assertTrue zoneEnv.getColumnNames().contains("THE_GEOM") zoneEnv.next() assertEquals "POLYGON ((-3.076869 48.733493, -3.076869 48.733995, -3.075829 48.733995, -3.075829 48.733493, -3.076869 48.733493))", zoneEnv.getGeometry(1).toText() @@ -136,14 +136,14 @@ class LoaderTest extends AbstractOSMToolsTest { zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 1, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") + assertTrue zone.getColumnNames().contains("THE_GEOM") zone.next() assertEquals "POLYGON ((-3.076869 48.733493, -3.076869 48.733995, -3.075829 48.733995, -3.075829 48.733493, -3.076869 48.733493))", zone.getGeometry(1).toText() zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 1, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") + assertTrue zoneEnv.getColumnNames().contains("THE_GEOM") zoneEnv.next() assertEquals "POLYGON ((-3.076869 48.733493, -3.076869 48.733995, -3.075829 48.733995, -3.075829 48.733493, -3.076869 48.733493))", zoneEnv.getGeometry(1).toText() @@ -175,14 +175,14 @@ class LoaderTest extends AbstractOSMToolsTest { def zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 1, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") + assertTrue zone.getColumnNames().contains("THE_GEOM") zone.next() assertEquals wktReader.read("POLYGON ((-3.084508 48.790598, -3.084508 48.7918, -3.082228 48.7918, -3.082228 48.790598, -3.084508 48.790598))"), zone.getGeometry(1) def zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 1, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") + assertTrue zoneEnv.getColumnNames().contains("THE_GEOM") zoneEnv.next() assertEquals wktReader.read("POLYGON ((-3.0981436889553313 48.78161484715881, -3.0981436889553313 48.8007831528412, -3.068592311044669 
48.8007831528412, -3.068592311044669 48.78161484715881, -3.0981436889553313 48.78161484715881))"), zoneEnv.getGeometry(1) @@ -201,14 +201,14 @@ class LoaderTest extends AbstractOSMToolsTest { zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 1, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") + assertTrue zone.getColumnNames().contains("THE_GEOM") zone.next() assertEquals wktReader.read("POLYGON ((-3.084508 48.790598, -3.084508 48.7918, -3.082228 48.7918, -3.082228 48.790598, -3.084508 48.790598))"), zone.getGeometry(1) zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 1, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") + assertTrue zoneEnv.getColumnNames().contains("THE_GEOM") zoneEnv.next() assertEquals wktReader.read("POLYGON ((-3.0981436889553313 48.78161484715881, -3.0981436889553313 48.8007831528412, -3.068592311044669 48.8007831528412, -3.068592311044669 48.78161484715881, -3.0981436889553313 48.78161484715881))"), zoneEnv.getGeometry(1) } @@ -231,16 +231,18 @@ class LoaderTest extends AbstractOSMToolsTest { def zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 2, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") - assertTrue zone.columns.contains("ID_ZONE") + def columns = zone.getColumnNames() + assertTrue columns.contains("THE_GEOM") + assertTrue columns.contains("ID_ZONE") zone.next() assertNotNull zone.getGeometry(1) def zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 2, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") - assertTrue zoneEnv.columns.contains("ID_ZONE") + columns = zoneEnv.getColumnNames() + assertTrue columns.contains("THE_GEOM") + assertTrue columns.contains("ID_ZONE") zoneEnv.next() assertEquals "POLYGON ((-3.0790622 48.7298266, -3.0790622 48.7367393, -3.0739517 48.7367393, -3.0739517 48.7298266, -3.0790622 48.7298266))", zoneEnv.getGeometry(1).toText() assertEquals "Lezoen, Plourivo", zoneEnv.getString(2) @@ -255,23 +257,24 @@ class LoaderTest extends AbstractOSMToolsTest { def placeName = " The place Name -toFind " def dist = 5 def formattedPlaceName = "The_place_Name_toFind_" - Map r = OSMTools.Loader.fromPlace(ds, placeName, dist) - assertNull(r) + assertThrows(Exception.class, ()->OSMTools.Loader.fromPlace(ds, placeName, dist)) - r = OSMTools.Loader.fromPlace(ds, "Lezoen, Plourivo", dist) + def r = OSMTools.Loader.fromPlace(ds, "Lezoen, Plourivo", dist) def zone = ds.getSpatialTable(r.zone) assertEquals 1, zone.rowCount assertEquals 2, zone.getColumnCount() - assertTrue zone.columns.contains("THE_GEOM") - assertTrue zone.columns.contains("ID_ZONE") + def columns = zone.getColumnNames() + assertTrue columns.contains("THE_GEOM") + assertTrue columns.contains("ID_ZONE") zone.next() assertNotNull zone.getGeometry(1) def zoneEnv = ds.getSpatialTable(r.envelope) assertEquals 1, zoneEnv.rowCount assertEquals 2, zoneEnv.getColumnCount() - assertTrue zoneEnv.columns.contains("THE_GEOM") - assertTrue zoneEnv.columns.contains("ID_ZONE") + columns = zoneEnv.getColumnNames() + assertTrue columns.contains("THE_GEOM") + assertTrue columns.contains("ID_ZONE") zoneEnv.next() assertEquals "POLYGON ((-3.079130303738262 48.729781684235796, -3.079130303738262 48.73678421576421, -3.073883596261738 48.73678421576421, -3.073883596261738 48.729781684235796, -3.079130303738262 48.729781684235796))", zoneEnv.getGeometry(1).toText() assertEquals "Lezoen, Plourivo",
zoneEnv.getString(2) @@ -284,10 +287,10 @@ class LoaderTest extends AbstractOSMToolsTest { void badFromPlaceTest() { def placeName = " The place Name -toFind " def dist = -5 - assertNull OSMTools.Loader.fromPlace(ds, placeName, dist) - assertNull OSMTools.Loader.fromPlace(ds, placeName, -1) - assertNull OSMTools.Loader.fromPlace(ds, null) - assertNull OSMTools.Loader.fromPlace(null, placeName) + assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, placeName, dist)) + assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, placeName, -1)) + assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, null)) + assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(null, placeName)) } /** @@ -308,9 +311,9 @@ class LoaderTest extends AbstractOSMToolsTest { */ @Test void badExtractTest() { - assertNull OSMTools.Loader.extract(null) + assertThrows(Exception.class, ()-> OSMTools.Loader.extract(null)) badOverpassQueryOverride() - assertNull OSMTools.Loader.extract("toto") + assertThrows(Exception.class, ()-> OSMTools.Loader.extract("toto")) } /** @@ -326,17 +329,17 @@ class LoaderTest extends AbstractOSMToolsTest { def prefix = uuid().toUpperCase() //Null dataSource - assertFalse OSMTools.Loader.load(null, prefix, osmFile.absolutePath) + assertThrows(Exception.class, ()-> OSMTools.Loader.load(null, prefix, osmFile.absolutePath)) //Null prefix - assertFalse OSMTools.Loader.load(ds, null, osmFile.absolutePath) + assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, null, osmFile.absolutePath)) //Bad prefix - assertFalse OSMTools.Loader.load(ds, "(╯°□°)╯︵ ┻━┻", osmFile.absolutePath) + assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, "(╯°□°)╯︵ ┻━┻", osmFile.absolutePath)) //Null path - assertFalse OSMTools.Loader.load(ds, prefix, null) + assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, prefix, null)) //Unexisting path - assertFalse OSMTools.Loader.load(ds, prefix, "ᕕ(ᐛ)ᕗ") + assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, prefix, "ᕕ(ᐛ)ᕗ")) } /** @@ -368,7 +371,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertEquals 5, nodeTable.rowCount def arrayNode = ["ID_NODE", "THE_GEOM", "ELE", "USER_NAME", "UID", "VISIBLE", "VERSION", "CHANGESET", "LAST_UPDATE", "NAME"] as String[] - assertArrayEquals(arrayNode, nodeTable.columns as String[]) + assertArrayEquals(arrayNode, nodeTable.getColumnNames() as String[]) nodeTable.eachRow { row -> switch (row.row) { case 1: @@ -446,7 +449,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull nodeMemberTable assertEquals 2, nodeMemberTable.rowCount def arrayNodeMember = ["ID_RELATION", "ID_NODE", "ROLE", "NODE_ORDER"] as String[] - assertArrayEquals(arrayNodeMember, nodeMemberTable.columns as String[]) + assertArrayEquals(arrayNodeMember, nodeMemberTable.getColumnNames() as String[]) nodeMemberTable.eachRow { row -> switch (row.row) { case 1: @@ -471,7 +474,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull nodeTagTable assertEquals 2, nodeTagTable.rowCount def arrayNodeTag = ["ID_NODE", "TAG_KEY", "TAG_VALUE"] as String[] - assertArrayEquals(arrayNodeTag, nodeTagTable.columns as String[]) + assertArrayEquals(arrayNodeTag, nodeTagTable.getColumnNames() as String[]) nodeTagTable.eachRow { row -> switch (row.row) { case 1: @@ -496,7 +499,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertEquals 1, wayTable.rowCount def arrayWay = ["ID_WAY", "USER_NAME", "UID", "VISIBLE", "VERSION", "CHANGESET", "LAST_UPDATE", "NAME"] as String[] - assertArrayEquals(arrayWay, 
wayTable.columns as String[]) + assertArrayEquals(arrayWay, wayTable.getColumnNames() as String[]) wayTable.eachRow { row -> switch (row.row) { case 1: @@ -519,7 +522,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull wayMemberTable assertEquals 1, wayMemberTable.rowCount def arrayWayMember = ["ID_RELATION", "ID_WAY", "ROLE", "WAY_ORDER"] as String[] - assertArrayEquals(arrayWayMember, wayMemberTable.columns as String[]) + assertArrayEquals(arrayWayMember, wayMemberTable.getColumnNames() as String[]) wayMemberTable.eachRow { row -> switch (row.row) { case 1: @@ -538,7 +541,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull wayTagTable assertEquals 1, wayTagTable.rowCount def arrayWayTag = ["ID_WAY", "TAG_KEY", "TAG_VALUE"] as String[] - assertArrayEquals(arrayWayTag, wayTagTable.columns as String[]) + assertArrayEquals(arrayWayTag, wayTagTable.getColumnNames() as String[]) wayTagTable.eachRow { row -> switch (row.row) { case 1: @@ -556,7 +559,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull wayNodeTable assertEquals 3, wayNodeTable.rowCount def arrayWayNode = ["ID_WAY", "ID_NODE", "NODE_ORDER"] as String[] - assertArrayEquals(arrayWayNode, wayNodeTable.columns as String[]) + assertArrayEquals(arrayWayNode, wayNodeTable.getColumnNames() as String[]) wayNodeTable.eachRow { row -> switch (row.row) { case 1: @@ -586,7 +589,7 @@ class LoaderTest extends AbstractOSMToolsTest { assertEquals 1, relationTable.rowCount def arrayRelation = ["ID_RELATION", "USER_NAME", "UID", "VISIBLE", "VERSION", "CHANGESET", "LAST_UPDATE"] as String[] - assertArrayEquals(arrayRelation, relationTable.columns as String[]) + assertArrayEquals(arrayRelation, relationTable.getColumnNames() as String[]) relationTable.eachRow { row -> switch (row.row) { case 1: @@ -608,14 +611,14 @@ class LoaderTest extends AbstractOSMToolsTest { assertNotNull relationMemberTable assertEquals 0, relationMemberTable.rowCount def arrayRelationMember = ["ID_RELATION", "ID_SUB_RELATION", "ROLE", "RELATION_ORDER"] as String[] - assertArrayEquals(arrayRelationMember, relationMemberTable.columns as String[]) + assertArrayEquals(arrayRelationMember, relationMemberTable.getColumnNames() as String[]) //Test on RELATION_TAG table def relationTagTable = ds.getTable(tableArray[9]) assertNotNull relationTagTable assertEquals 2, relationTagTable.rowCount def arrayRelationTag = ["ID_RELATION", "TAG_KEY", "TAG_VALUE"] as String[] - assertArrayEquals(arrayRelationTag, relationTagTable.columns as String[]) + assertArrayEquals(arrayRelationTag, relationTagTable.getColumnNames() as String[]) relationTagTable.eachRow { row -> switch (row.row) { case 1: diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy index 14e9ae1943..ff6a397a8c 100644 --- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy +++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy @@ -59,13 +59,13 @@ class TransformTest extends AbstractOSMToolsTest { def epsgCode = 2453 def tags = [] def columnsToKeep = [] - assertNull OSMTools.Transform.toPoints(null, prefix, epsgCode, tags, columnsToKeep) + assertThrows(Exception.class, () -> OSMTools.Transform.toPoints(null, prefix, epsgCode, tags, columnsToKeep)) - assertNull OSMTools.Transform.toPoints(ds, prefix, -1, tags, columnsToKeep) + assertThrows(Exception.class, () -> OSMTools.Transform.toPoints(ds, prefix, -1, tags, 
columnsToKeep)) - assertNotNull OSMTools.Transform.toPoints(ds, prefix, epsgCode, tags, columnsToKeep) + assertNotNull(OSMTools.Transform.toPoints(ds, prefix, epsgCode, tags, columnsToKeep)) - assertNull OSMTools.Transform.toPoints(ds, prefix, epsgCode, null, null) + assertThrows(Exception.class, () -> OSMTools.Transform.toPoints(ds, prefix, epsgCode, null, null)) } /** @@ -334,7 +334,7 @@ class TransformTest extends AbstractOSMToolsTest { def tags = [building: "house"] //Test column to keep absent def result = OSMTools.Transform.extractWaysAsPolygons(ds, prefix, epsgCode, tags, ["landscape"]) - assertFalse ds.getTable(result).columns.contains("landscape") + assertFalse ds.getColumnNames(result).contains("landscape") } /** @@ -364,7 +364,7 @@ class TransformTest extends AbstractOSMToolsTest { def prefix = "OSM_" + uuid() def epsgCode = 2453 def tags = [building: "house"] - def columnsToKeep = ["building","water"] + def columnsToKeep = ["building", "water"] createData(ds, prefix) @@ -602,7 +602,7 @@ class TransformTest extends AbstractOSMToolsTest { "retail", "industrial" ]] - outputTableName = OSMTools.Transform.toPolygons(ds, prefix, 4326, tags) + outputTableName = OSMTools.Transform.toPolygons(ds, prefix, 4326, tags) assertEquals 6, ds.firstRow("select count(*) as count from ${outputTableName}").count as int assertEquals 4, ds.firstRow("select count(*) as count from ${outputTableName} where \"landuse\"='residential'").count as int @@ -615,7 +615,7 @@ class TransformTest extends AbstractOSMToolsTest { assertTrue OSMTools.Loader.load(ds, prefix, new File(this.class.getResource("san_diegeo_complex_polygon.osm").toURI()).getAbsolutePath()) String outputTableName = OSMTools.Transform.toPolygons(ds, prefix, 4326, ["leisure"], ["leisure"], true) - assertEquals(0,ds.firstRow("SELECT COUNT(*) as count FROM $outputTableName where st_isvalid(the_geom)=false").count) + assertEquals(0, ds.firstRow("SELECT COUNT(*) as count FROM $outputTableName where st_isvalid(the_geom)=false").count) } @Test @@ -624,7 +624,7 @@ class TransformTest extends AbstractOSMToolsTest { assertTrue OSMTools.Loader.load(ds, prefix, new File(this.class.getResource("san_diego_invalid_polygon.osm").toURI()).getAbsolutePath()) String outputTableName = OSMTools.Transform.toPolygons(ds, prefix, 4326, ["leisure"], ["leisure"], true) - assertEquals(0,ds.firstRow("SELECT COUNT(*) as count FROM $outputTableName where st_isvalid(the_geom)=false").count) + assertEquals(0, ds.firstRow("SELECT COUNT(*) as count FROM $outputTableName where st_isvalid(the_geom)=false").count) } @Test @@ -830,7 +830,6 @@ class TransformTest extends AbstractOSMToolsTest { String outputTableName = OSMTools.Transform.toPolygons(h2GIS, prefix, tags, columns) assertNotNull(outputTableName) h2GIS.getTable(outputTableName).save("/tmp/results.shp", true) - } } } @@ -838,13 +837,13 @@ class TransformTest extends AbstractOSMToolsTest { @Test void buildAllPolygons() { - def bbox =[47.647353,-2.090192,47.649413,-2.087274] + def bbox = [47.647353, -2.090192, 47.649413, -2.087274] def query = OSMTools.Utilities.buildOSMQuery(bbox) if (!query.isEmpty()) { def extract = OSMTools.Loader.extract(query) if (extract) { def prefix = "OSM" - assertTrue OSMTools.Loader.load(ds, prefix,extract) + assertTrue OSMTools.Loader.load(ds, prefix, extract) //Create building layer def tags = ["amenity", "landuse", "railway", "water"] String outputTableName = OSMTools.Transform.toPolygons(ds, prefix, 4326, tags) @@ -860,9 +859,9 @@ class TransformTest extends AbstractOSMToolsTest { @Test void 
testTransformForDebug() { H2GIS h2GIS = H2GIS.open("/tmp/geoclimate;AUTO_SERVER=TRUE") - Map r = OSMTools.Loader.fromArea(h2GIS, [48.733493,-3.076869,48.733995,-3.075829]) + Map r = OSMTools.Loader.fromArea(h2GIS, [48.733493, -3.076869, 48.733995, -3.075829]) println(r) - def lines = OSMTools.Transform.toPolygons(h2GIS, r.prefix,4326, [], []) + def lines = OSMTools.Transform.toPolygons(h2GIS, r.prefix, 4326, [], []) h2GIS.save(lines, "/tmp/building.fgb") } diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy index bc08c7b3d5..9d4d81ec9f 100644 --- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy +++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy @@ -264,12 +264,9 @@ class TransformUtilsTest extends AbstractOSMToolsTest { @Test void badBuildIndexesTest() { def osmTable = "toto" - LOGGER.warn("An error will be thrown next") - assertFalse OSMTools.TransformUtils.buildIndexes(h2gis, null) - LOGGER.warn("An error will be thrown next") - assertFalse OSMTools.TransformUtils.buildIndexes(null, null) - LOGGER.warn("An error will be thrown next") - assertFalse OSMTools.TransformUtils.buildIndexes(null, osmTable) + assertThrows(Exception.class,()->OSMTools.TransformUtils.buildIndexes(h2gis, null)) + assertThrows(Exception.class, ()->OSMTools.TransformUtils.buildIndexes(null, null)) + assertThrows(Exception.class, ()->OSMTools.TransformUtils.buildIndexes(null, osmTable)) } /** @@ -297,55 +294,38 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${ OSMTools.TransformUtils.buildIndexes(h2gis, osmTablesPrefix) - assertNotNull h2gis.getTable("${osmTablesPrefix}_node") - assertNotNull h2gis.getTable("${osmTablesPrefix}_node")."id_node" - assertTrue h2gis.getTable("${osmTablesPrefix}_node")."id_node".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_node") - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_node")."id_node" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_node")."id_node".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_node")."node_order" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_node")."node_order".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_node")."id_way" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_node")."id_way".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_way") - assertNotNull h2gis.getTable("${osmTablesPrefix}_way")."id_way" - assertTrue h2gis.getTable("${osmTablesPrefix}_way")."id_way".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_way")."not_taken_into_account" - assertFalse h2gis.getTable("${osmTablesPrefix}_way")."not_taken_into_account".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_tag") - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_tag")."tag_key" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_tag")."tag_key".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_tag")."id_way" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_tag")."id_way".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_tag")."tag_value" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_tag")."tag_value".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_tag") - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_tag")."tag_key" - assertTrue 
h2gis.getTable("${osmTablesPrefix}_relation_tag")."tag_key".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_tag")."id_relation" - assertTrue h2gis.getTable("${osmTablesPrefix}_relation_tag")."id_relation".indexed - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_tag")."tag_value" - assertTrue h2gis.getTable("${osmTablesPrefix}_relation_tag")."tag_value".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation") - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation")."id_relation" - assertTrue h2gis.getTable("${osmTablesPrefix}_relation")."id_relation".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_member") - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_member")."id_relation" - assertTrue h2gis.getTable("${osmTablesPrefix}_way_member")."id_relation".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_not_taken_into_account") - assertNotNull h2gis.getTable("${osmTablesPrefix}_way_not_taken_into_account")."id_way" - assertFalse h2gis.getTable("${osmTablesPrefix}_way_not_taken_into_account")."id_way".indexed - - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_not_taken_into_account") - assertNotNull h2gis.getTable("${osmTablesPrefix}_relation_not_taken_into_account")."id_relation" - assertFalse h2gis.getTable("${osmTablesPrefix}_relation_not_taken_into_account")."id_relation".indexed + assertTrue h2gis.hasTable("${osmTablesPrefix}_node") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_node","id_node") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_way_node") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node","id_node") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node","node_order") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_way") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way","id_way") + assertFalse h2gis.isIndexed("${osmTablesPrefix}_way","not_taken_into_account") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_way_tag") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","tag_key") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","id_way") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","tag_value") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_relation_tag") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","tag_key") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","id_relation") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","tag_value") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_relation") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation","id_relation") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_way_member") + assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_member","id_relation") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_way_not_taken_into_account") + assertFalse h2gis.isIndexed("${osmTablesPrefix}_way_not_taken_into_account","id_way") + + assertTrue h2gis.hasTable("${osmTablesPrefix}_relation_not_taken_into_account") + assertFalse h2gis.isIndexed("${osmTablesPrefix}_relation_not_taken_into_account","id_relation") } /** @@ -407,14 +387,10 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${ loadDataForNodeExtraction(h2gis, prefix) - LOGGER.warn("An error will be thrown next") - assertFalse OSMTools.TransformUtils.extractNodesAsPoints(null, prefix, epsgCode, outTable, tags, columnsToKeep) - LOGGER.warn("An error will be thrown next") - assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, null, epsgCode, 
@@ -407,14 +387,10 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
 
         loadDataForNodeExtraction(h2gis, prefix)
 
-        LOGGER.warn("An error will be thrown next")
-        assertFalse OSMTools.TransformUtils.extractNodesAsPoints(null, prefix, epsgCode, outTable, tags, columnsToKeep)
-        LOGGER.warn("An error will be thrown next")
-        assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, null, epsgCode, outTable, tags, columnsToKeep)
-        LOGGER.warn("An error will be thrown next")
-        assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, -1, outTable, tags, columnsToKeep)
-        LOGGER.warn("An error will be thrown next")
-        assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, null, tags, columnsToKeep)
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(null, prefix, epsgCode, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, null, epsgCode, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, -1, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, null, tags, columnsToKeep))
 
         assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, outTable, [house: "false", path: 'false'], null)
     }
@@ -485,7 +461,7 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
         assertTrue OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, outTable, tags, columnsToKeep)
         def table = h2gis.getTable(outTable)
         assertNotNull table
-        def columns = table.columns
+        def columns = table.getColumnNames()
         assertEquals 3, columns.size()
         assertEquals 3, columns.intersect(["ID_NODE", "THE_GEOM", "key1"]).size()
         assertFalse columns.contains("WATER")
@@ -514,7 +490,7 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
 
         def table = h2gis.getTable("output")
         assertNotNull table
-        def columns = table.columns
+        def columns = table.getColumnNames()
         assertEquals 4, columns.size()
         assertEquals 4, columns.intersect(["ID_NODE", "THE_GEOM", "build", "key1"]).size()
         assertFalse columns.contains("WATER")
@@ -545,7 +521,7 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
         assertTrue OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, outTable, [], [])
         def table = h2gis.getTable("output")
         assertNotNull table
-        def columns = table.columns
+        def columns = table.getColumnNames()
         assertEquals 14, columns.size()
         assertEquals 14, columns.intersect(["ID_NODE", "THE_GEOM", "build", "building", "material", "road", "key", "key1", "key2",
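[Review note] The dynamic `table.columns` property is replaced everywhere by the explicit `getColumnNames()` accessor. Since it returns a plain list of names, the existing `size()`/`intersect()`/`contains()` checks carry over unchanged; a short sketch of the new pattern:

```groovy
def table = h2gis.getTable("output")
assertNotNull table
def columns = table.getColumnNames()  // plain list of column names, no column wrappers
assertEquals 4, columns.intersect(["ID_NODE", "THE_GEOM", "build", "key1"]).size()
```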
diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
index 15b6e06e92..06a53455cc 100644
--- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
+++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
@@ -172,11 +172,11 @@ class UtilitiesTest extends AbstractOSMToolsTest {
         def poly1 = []
         poly1 << outer
 
-        assertNull OSMTools.Utilities.parsePolygon(null, new GeometryFactory())
-        assertNull OSMTools.Utilities.parsePolygon([], new GeometryFactory())
-        assertNull OSMTools.Utilities.parsePolygon([[]], new GeometryFactory())
-        assertNull OSMTools.Utilities.parsePolygon([[null]], new GeometryFactory())
-        assertNull OSMTools.Utilities.parsePolygon(poly1, new GeometryFactory())
+        assertThrows(Exception.class, () -> OSMTools.Utilities.parsePolygon(null, new GeometryFactory()))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.parsePolygon([], new GeometryFactory()))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.parsePolygon([[]], new GeometryFactory()))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.parsePolygon([[null]], new GeometryFactory()))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.parsePolygon(poly1, new GeometryFactory()))
     }
 
     /**
@@ -230,7 +230,7 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badToBBoxTest() {
-        assertNull OSMTools.Utilities.toBBox(null)
+        assertThrows(Exception.class, () -> OSMTools.Utilities.toBBox(null))
     }
 
     /**
@@ -255,9 +255,9 @@ class UtilitiesTest extends AbstractOSMToolsTest {
     @Test
     void badToPolyTest() {
         def factory = new GeometryFactory()
-        assertNull OSMTools.Utilities.toPoly(null)
-        assertNull OSMTools.Utilities.toPoly(factory.createPoint(new Coordinate(0.0, 0.0)))
-        assertNull OSMTools.Utilities.toPoly(factory.createPolygon())
+        assertThrows(Exception.class, () -> OSMTools.Utilities.toPoly(null))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.toPoly(factory.createPoint(new Coordinate(0.0, 0.0))))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.toPoly(factory.createPolygon()))
     }
 
     /**
@@ -339,7 +339,7 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badBuildOSMQueryFromEnvelopeTest() {
-        assertNull OSMTools.Utilities.buildOSMQuery((Envelope) null, ["building"], OSMElement.NODE)
+        assertThrows(Exception.class, () -> OSMTools.Utilities.buildOSMQuery((Envelope) null, ["building"], OSMElement.NODE))
     }
 
     /**
@@ -399,8 +399,8 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badBuildOSMQueryFromPolygonTest() {
-        assertNull OSMTools.Utilities.buildOSMQuery((Polygon) null, ["building"], OSMElement.NODE)
-        assertNull OSMTools.Utilities.buildOSMQuery(new GeometryFactory().createPolygon(), ["building"], OSMElement.NODE)
+        assertThrows(Exception.class, () -> OSMTools.Utilities.buildOSMQuery((Polygon) null, ["building"], OSMElement.NODE))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.buildOSMQuery(new GeometryFactory().createPolygon(), ["building"], OSMElement.NODE))
     }
 
    /**
@@ -427,8 +427,8 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badReadJSONParametersTest() {
-        assertNull OSMTools.Utilities.readJSONParameters(null)
-        assertNull OSMTools.Utilities.readJSONParameters("")
+        assertThrows(Exception.class, () -> OSMTools.Utilities.readJSONParameters(null))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.readJSONParameters(""))
         assertNull OSMTools.Utilities.readJSONParameters("toto")
         assertNull OSMTools.Utilities.readJSONParameters("target")
         assertNull OSMTools.Utilities.readJSONParameters(new File(UtilitiesTest.getResource("bad_json_params.json").toURI()).absolutePath)
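[Review note] `readJSONParameters` ends up with a mixed contract: a `null` or empty argument now throws, while input that cannot be resolved to valid parameter JSON still returns `null`, as the remaining `assertNull` lines pin down. Callers probing optional files therefore still need a null check; a sketch (the path is illustrative):

```groovy
def params = OSMTools.Utilities.readJSONParameters("/tmp/osm_params.json")
if (params == null) {
    // input exists but is not valid parameter JSON: soft failure, no exception
}
```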
diff --git a/pom.xml b/pom.xml
index ce758274c9..4cd5ba532a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -42,7 +42,7 @@
         2.2.1
         2.2.224
         1.7.1
-        2.1.0
+        2.1.1-SNAPSHOT
         2.0.10
         4.0.17
         4.6.3
diff --git a/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy b/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
index ba59f001f0..f53c8ca7dc 100644
--- a/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
+++ b/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
@@ -84,7 +84,8 @@ String extractWorldPopLayer(String coverageId, List bbox) {
 * @param tableName the name of table that contains the grid data in the database
 * @return the name of the imported table
 */
-String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int epsg = 4326, String tableName = "world_pop") {
+String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int epsg = 4326, String tableName = "world_pop")
+        throws Exception {
     info "Import the the world pop asc file"
     // The name of the outputTableName is constructed
     def outputTableWorldPopName = postfix tableName
@@ -105,9 +106,9 @@ String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int eps
                 PK AS ID_POP, Z as POP from $importTable;
                 drop table if exists $importTable""".toString())
             } catch (Exception ex) {
-                info "Cannot find any worldpop data on the requested area"
                 datasource.execute("""drop table if exists $outputTableWorldPopName;
                 create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $epsg), ID_POP INTEGER, POP FLOAT);""".toString())
+                throw new Exception("Cannot find any worldpop data on the requested area")
             }
         } else {
             try {
@@ -115,9 +116,9 @@ String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int eps
                 datasource.execute("""ALTER TABLE $outputTableWorldPopName RENAME COLUMN PK TO ID_POP;
                 ALTER TABLE $outputTableWorldPopName RENAME COLUMN Z TO POP;""".toString())
             } catch (Exception ex) {
-                info "Cannot find any worldpop data on the requested area"
                 datasource.execute("""drop table if exists $outputTableWorldPopName;
                 create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $epsg), ID_POP INTEGER, POP FLOAT);""".toString())
+                throw new Exception("Cannot find any worldpop data on the requested area")
             }
         }
         return outputTableWorldPopName
diff --git a/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy b/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
index ce2567403e..33e7735013 100644
--- a/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
+++ b/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
@@ -96,8 +96,8 @@ class WorldPopExtractTest {
             assertTrue new File(outputFilePath).exists()
             String outputTableWorldPopName = WorldPopTools.Extract.importAscGrid(h2GIS, outputFilePath)
             assertNotNull outputTableWorldPopName
-            assertEquals(720, h2GIS.getSpatialTable(outputTableWorldPopName).rowCount)
-            assertEquals(["ID_POP", "THE_GEOM", "POP"], h2GIS.getTable(outputTableWorldPopName).columns)
+            assertEquals(720, h2GIS.getRowCount(outputTableWorldPopName))
+            assertEquals(["ID_POP", "THE_GEOM", "POP"], h2GIS.getColumnNames(outputTableWorldPopName))
         }
     }
 }
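[Review note] With `importAscGrid` now declared `throws Exception` and re-throwing instead of logging when no WorldPop data covers the area, callers must be prepared to catch; note the empty fallback table is still created before the throw. A usage sketch with illustrative paths (in practice the `.asc` file comes from `extractWorldPopLayer`):

```groovy
try {
    // Hypothetical call: import a previously downloaded WorldPop grid
    String popTable = WorldPopTools.Extract.importAscGrid(h2GIS, "/tmp/wpop.asc")
    println "Imported ${h2GIS.getRowCount(popTable)} population cells"
} catch (Exception e) {
    // No data on the requested area, or the grid could not be read
    println "WorldPop import failed: ${e.message}"
}
```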