diff --git a/bdtopo/pom.xml b/bdtopo/pom.xml index 5be9c3092a..9d12147833 100644 --- a/bdtopo/pom.xml +++ b/bdtopo/pom.xml @@ -1,4 +1,5 @@ - + org.orbisgis.geoclimate geoclimate-parent diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy index d8b48b2abc..62320a27a7 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/AbstractBDTopoWorkflow.groovy @@ -28,7 +28,6 @@ import org.orbisgis.data.H2GIS import org.orbisgis.data.api.dataset.ITable import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators - import org.orbisgis.geoclimate.worldpoptools.WorldPopTools import java.sql.Connection @@ -291,7 +290,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { * @param message * @throws Exception */ - void saveLogZoneTable(JdbcDataSource dataSource,String databaseFolder, String location, String message) throws Exception { + void saveLogZoneTable(JdbcDataSource dataSource, String databaseFolder, String location, String message) throws Exception { def logTableZones = postfix("log_zones") //Create the table to log on the processed zone dataSource.execute("""DROP TABLE IF EXISTS $logTableZones; @@ -310,7 +309,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { '${Geoindicators.version()}', '${Geoindicators.buildNumber()}')""") } - dataSource.save(logTableZones, databaseFolder+File.separator+"log_zones_"+id_zone+".fgb", true ) + dataSource.save(logTableZones, databaseFolder + File.separator + "log_zones_" + id_zone + ".fgb", true) } /** @@ -349,7 +348,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { def linkDataFromFolder(def inputFolder, def inputWorkflowTableNames, H2GIS h2gis_datasource, def inputSRID) throws Exception { def folder = new File(inputFolder) - if(!folder.exists()){ + if (!folder.exists()) { throw new Exception("The input folder doesn't exist") } if (folder.isDirectory()) { @@ -558,8 +557,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { "ROAD_FRACTION", "IMPERVIOUS_FRACTION", "UTRF_AREA_FRACTION", "UTRF_FLOOR_AREA_FRACTION", "LCZ_FRACTION", "LCZ_PRIMARY", "FREE_EXTERNAL_FACADE_DENSITY", "BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", "BUILDING_HEIGHT_DIST", "FRONTAL_AREA_INDEX", "SEA_LAND_FRACTION", "ASPECT_RATIO", - "SVF", "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "SPRAWL_AREAS", - "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"] + "SVF", "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "URBAN_SPRAWL_AREAS", + "URBAN_SPRAWL_DISTANCES", "URBAN_SPRAWL_COOL_DISTANCE"] def allowedOutputIndicators = allowed_grid_indicators.intersect(list_indicators*.toUpperCase()) if (allowedOutputIndicators) { //Update the RSU indicators list according the grid indicators @@ -650,7 +649,8 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { "road_traffic", "population", "ground_acoustic", - "sprawl_areas"] + "urban_sprawl_areas", + "urban_cool_areas"] } @@ -860,7 +860,7 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { x_size, y_size, srid, grid_indicators_params.rowCol) if (gridTableName) { String rasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(h2gis_datasource, gridTableName, - grid_indicators_params.indicators, grid_indicators_params.lcz_lod, + grid_indicators_params.indicators, results.building, 
results.road, results.vegetation, results.water, results.impervious, results.rsu_lcz, results.rsu_utrf_area, "", "", @@ -871,7 +871,10 @@ abstract class AbstractBDTopoWorkflow extends BDTopoUtils { Map sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource, rasterizedIndicators, "id_grid", grid_indicators_params.indicators, Math.max(x_size, y_size).floatValue()) if (sprawl_indic) { - results.put("sprawl_areas", sprawl_indic.sprawl_areas) + results.put("urban_sprawl_areas", sprawl_indic.urban_sprawl_areas) + if (sprawl_indic.urban_cool_areas) { + results.put("urban_cool_areas", sprawl_indic.urban_cool_areas) + } results.put("grid_indicators", sprawl_indic.grid_indicators) } info("End computing grid_indicators") diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy index b2d110ebd9..b7b11332ca 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopo.groovy @@ -18,9 +18,6 @@ * */ package org.orbisgis.geoclimate.bdtopo - -import org.orbisgis.geoclimate.utils.LoggerUtils - /** * Main module for BDTopo processing tasks */ @@ -143,7 +140,7 @@ abstract class BDTopo extends BDTopoUtils { * Meteorological Society 93, no. 12 (2012): 1879-1900. * */ - static Map workflow(def input, int version) throws Exception{ + static Map workflow(def input, int version) throws Exception { if (version == 2) { BDTopoV2Workflow bdtopo_v2_workflow = new BDTopoV2Workflow() return bdtopo_v2_workflow.execute(input) diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy index 71eab77603..fab7d26d20 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV2Workflow.groovy @@ -39,7 +39,7 @@ import java.sql.SQLException @Override Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables, - Object inputSRID, H2GIS h2gis_datasource) throws Exception{ + Object inputSRID, H2GIS h2gis_datasource) throws Exception { def commune_location = inputTables.commune if (!commune_location) { throw new Exception("The commune table must be specified to run Geoclimate") @@ -71,7 +71,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec if (commune_srid == 0 && inputSRID) { commune_srid = inputSRID } else if (commune_srid <= 0) { - throw new Exception( "Cannot find a SRID value for the layer commune.\n" + + throw new Exception("Cannot find a SRID value for the layer commune.\n" + "Please set a valid OGC prj or use the parameter srid to force it.") } @@ -80,11 +80,11 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec //Check if code is a string or a bbox //The zone is a osm bounding box represented by ymin,xmin , ymax,xmax, if (code in Collection) { - if(code.size()==3){ - if(code[2]<100){ + if (code.size() == 3) { + if (code[2] < 100) { throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } - code = BDTopoUtils.bbox(code[0], code[1],code[2]) + code = BDTopoUtils.bbox(code[0], code[1], code[2]) } String inputTableName = """(SELECT ST_INTERSECTION(st_setsrid(the_geom, $commune_srid), ST_MakeEnvelope(${code[1]},${code[0]},${code[3]},${code[2]}, $commune_srid)) 
as the_geom, CODE_INSEE from $commune_location where @@ -139,7 +139,7 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec if (inputTables.troncon_voie_ferree) { //Extract troncon_voie_ferree - def inputTableName = "(SELECT ID, st_setsrid(the_geom, $commune_srid) as the_geom, NATURE, LARGEUR, NB_VOIES, POS_SOL, FRANCHISST FROM ${inputTables.troncon_voie_ferree} WHERE st_setsrid(the_geom, $commune_srid) && 'SRID=$commune_srid;$geomToExtract'::GEOMETRY AND ST_INTERSECTS(st_setsrid(the_geom, $commune_srid), 'SRID=$commune_srid;$geomToExtract'::GEOMETRY))" + def inputTableName = "(SELECT ID, st_setsrid(the_geom, $commune_srid) as the_geom, NATURE, LARGEUR, NB_VOIES, POS_SOL, FRANCHISST FROM ${inputTables.troncon_voie_ferree} WHERE st_setsrid(the_geom, $commune_srid) && 'SRID=$commune_srid;$geomToExtract'::GEOMETRY AND ST_INTERSECTS(st_setsrid(the_geom, $commune_srid), 'SRID=$commune_srid;$geomToExtract'::GEOMETRY))" outputTableName = "TRONCON_VOIE_FERREE" debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 1000) @@ -233,7 +233,7 @@ int getVersion() { } @Override -Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception{ +Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception { if (!hLevMin) { hLevMin = 3 } @@ -290,7 +290,7 @@ Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hL @Override def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTables, - int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception{ + int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception { def formatting_geom = "the_geom" if (sourceSRID == 0 && sourceSRID != inputSRID) { formatting_geom = "st_setsrid(the_geom, $inputSRID) as the_geom" @@ -303,11 +303,11 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl //Check if code is a string or a bbox //The zone is a osm bounding box represented by ymin,xmin , ymax,xmax, if (location in Collection) { - if(location.size()==3){ - if(location[2]<100){ + if (location.size() == 3) { + if (location[2] < 100) { throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } - location = BDTopoUtils.bbox(location[0], location[1],location[2]) + location = BDTopoUtils.bbox(location[0], location[1], location[2]) } debug "Loading in the H2GIS database $outputTableName" h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT @@ -317,7 +317,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl debug "Loading in the H2GIS database $outputTableName" h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT $formatting_geom, CODE_INSEE FROM ${inputTables.commune} WHERE CODE_INSEE='$location' or lower(nom)='${location.toLowerCase()}'""".toString()) - }else{ + } else { throw new Exception("Invalid location data type. 
Please set a text value or a collection of coordinates to specify a bbox") } def count = h2gis_datasource.getRowCount(outputTableName) diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy index ca7e664327..b2f97c753f 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/BDTopoV3Workflow.groovy @@ -39,7 +39,7 @@ import java.sql.SQLException @Override def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTables, - int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception{ + int sourceSRID, int inputSRID, H2GIS h2gis_datasource) throws Exception { def formatting_geom = "the_geom" if (sourceSRID == 0 && sourceSRID != inputSRID) { formatting_geom = "st_setsrid(the_geom, $inputSRID) as the_geom" @@ -54,34 +54,31 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl if (location in Collection) { debug "Loading in the H2GIS database $outputTableName" def communeColumns = h2gis_datasource.getColumnNames(inputTables.commune) - if(communeColumns.contains("INSEE_COM")) { - if(location.size()==3){ - if(location[2]<100){ + if (communeColumns.contains("INSEE_COM")) { + if (location.size() == 3) { + if (location[2] < 100) { throw new IllegalArgumentException("The distance to create a bbox from a point must be greater than 100 meters") } - location = BDTopoUtils.bbox(location[0], location[1],location[2]) + location = BDTopoUtils.bbox(location[0], location[1], location[2]) } h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT ST_INTERSECTION(the_geom, ST_MakeEnvelope(${location[1]},${location[0]},${location[3]},${location[2]}, $sourceSRID)) as the_geom, INSEE_COM AS CODE_INSEE from ${inputTables.commune} where the_geom && ST_MakeEnvelope(${location[1]},${location[0]},${location[3]},${location[2]}, $sourceSRID) """.toString()) - } - else { + } else { throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } } else if (location instanceof String) { debug "Loading in the H2GIS database $outputTableName" def communeColumns = h2gis_datasource.getColumnNames(inputTables.commune) - if(communeColumns.contains("INSEE_COM")){ - h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT $formatting_geom, + if (communeColumns.contains("INSEE_COM")) { + h2gis_datasource.execute("""DROP TABLE IF EXISTS $outputTableName ; CREATE TABLE $outputTableName as SELECT $formatting_geom, INSEE_COM AS CODE_INSEE FROM ${inputTables.commune} WHERE INSEE_COM='$location' or lower(nom)='${location.toLowerCase()}'""".toString()) - } - else { + } else { throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } - } - else { + } else { throw new IllegalArgumentException("Invalid location") } def count = h2gis_datasource.getRowCount(outputTableName) @@ -215,7 +212,7 @@ def filterLinkedShapeFiles(def location, float distance, LinkedHashMap inputTabl @Override Integer loadDataFromPostGIS(Object input_database_properties, Object code, Object distance, Object inputTables, Object inputSRID, - H2GIS h2gis_datasource) throws Exception{ + H2GIS h2gis_datasource) throws Exception { def commune_location = inputTables.commune if (!commune_location) { throw new Exception("The commune table must be specified 
to run Geoclimate") @@ -259,12 +256,12 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec //The zone is a osm bounding box represented by ymin,xmin , ymax,xmax, if (code in Collection) { def communeColumns = h2gis_datasource.getColumnNames(commune_location) - if(communeColumns.contains("INSEE_COM")) { - if(code.size()==3){ - if(code[2]<100){ + if (communeColumns.contains("INSEE_COM")) { + if (code.size() == 3) { + if (code[2] < 100) { throw new Exception("The distance to create a bbox from a point must be greater than 100 meters") } - code = BDTopoUtils.bbox(code[0], code[1],code[2]) + code = BDTopoUtils.bbox(code[0], code[1], code[2]) } String inputTableName = """(SELECT ST_INTERSECTION(st_setsrid(the_geom, $commune_srid), ST_MakeEnvelope(${code[1]},${code[0]},${code[3]},${code[2]}, $commune_srid)) as the_geom, INSEE_COM as CODE_INSEE from $commune_location where @@ -273,20 +270,19 @@ Integer loadDataFromPostGIS(Object input_database_properties, Object code, Objec st_intersects(st_setsrid(the_geom, $commune_srid), ST_MakeEnvelope(${code[1]},${code[0]},${code[3]},${code[2]}, $commune_srid)))""".toString() debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 100) - }else { + } else { throw new Exception("Cannot find a column insee_com or code_insee to filter the commune") } } else if (code instanceof String) { def communeColumns = h2gis_datasource.getColumnNames(commune_location) - if(communeColumns.contains("insee_com")){ + if (communeColumns.contains("insee_com")) { String inputTableName = "(SELECT st_setsrid(the_geom, $commune_srid) as the_geom, INSEE_COM as CODE_INSEE FROM $commune_location WHERE INSEE_COM='$code' or lower(nom)='${code.toLowerCase()}')" debug "Loading in the H2GIS database $outputTableName" IOMethods.exportToDataBase(sourceConnection, inputTableName, h2gis_datasource.getConnection(), outputTableName, -1, 1000) - } - else { + } else { throw new Exception("Cannot find a column insee_com to filter the commune") } - }else{ + } else { throw new Exception("Invalid location data type. 
Please set a text value or a collection of coordinates to specify a bbox") } def count = h2gis_datasource.getRowCount(outputTableName) @@ -427,7 +423,7 @@ int getVersion() { } @Override -Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception{ +Map formatLayers(JdbcDataSource datasource, Map layers, float distance, float hLevMin) throws Exception { if (!hLevMin) { hLevMin = 3 } diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy index bb58795a78..9b8bef0955 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataFormatting.groovy @@ -38,7 +38,7 @@ import org.orbisgis.geoclimate.Geoindicators * @return The name of the final buildings table */ String formatBuildingLayer(JdbcDataSource datasource, String building, String zone = "", - String urban_areas = "", float h_lev_min = 3) throws Exception{ + String urban_areas = "", float h_lev_min = 3) throws Exception { if (!h_lev_min) { h_lev_min = 3 } @@ -101,147 +101,147 @@ String formatBuildingLayer(JdbcDataSource datasource, String building, String zo ] def building_type_level = [ - "building": 1, - "house": 1, - "detached": 1, - "residential": 1, - "apartments": 1, - "bungalow": 0, - "historic": 0, - "monument": 0, - "ruins": 0, - "castle": 1, - "agricultural": 0, - "farm": 0, - "farm_auxiliary": 0, - "barn": 0, - "greenhouse": 0, - "silo": 0, - "commercial": 2, - "industrial": 0, - "sport": 0, - "sports_centre": 0, - "grandstand": 0, - "transport": 0, - "train_station": 0, - "toll_booth": 0, - "toll": 0, - "terminal": 0, - "airport_terminal": 0, - "healthcare": 1, - "education": 1, - "entertainment_arts_culture": 0, - "sustenance": 1, - "military": 0, - "religious": 0, - "chapel": 0, - "church": 0, - "government": 1, - "townhall": 1, - "office": 1, - "emergency": 0, - "hotel": 2, - "hospital": 2, - "parking": 1, - "slight_construction": 0, - "water_tower" : 0, - "fortress": 0, - "abbey": 0, - "cathedral": 0, - "mosque": 0, - "musalla": 0, - "temple": 0, - "synagogue": 0, - "shrine": 0, - "place_of_worship": 0, - "wayside_shrine": 0, - "swimming_pool" : 0, - "fitness_centre": 1, - "horse_riding": 0, - "ice_rink" : 0, - "pitch": 0, - "stadium": 0, - "track": 0, - "sports_hall": 0, - "ammunition": 0, - "bunker": 0, - "barracks": 1, - "casemate": 0, - "station": 0, - "government_office": 1, - "stable": 0, - "sty": 0, - "cowshed": 0, - "digester": 0, - "farmhouse": 0, - "bank": 1, - "bureau_de_change": 0, - "boat_rental": 0, - "car_rental" : 0, - "internet_cafe": 0, - "kiosk": 0, - "money_transfer": 0, - "market": 0, - "marketplace": 0, - "pharmacy" : 0, - "post_office" : 1, - "retail": 0, - "shop" : 0, - "store": 0, - "supermarket": 0, - "warehouse": 0, - "factory": 0, - "gas" : 0, - "heating_station": 0, - "oil_mill" : 0, - "oil": 0, - "wellsite": 0, - "well_cluster": 0, - "grain_silo": 0, - "villa": 1, - "dormitory": 1, - "condominium": 1, - "sheltered_housing": 0, - "workers_dormitory" :0, - "terrace": 1, - "transportation": 0, - "hangar": 0, - "tower": 1, - "control_tower": 0, - "aeroway": 1, - "roundhouse": 0, - "social_facility": 1, - "college": 1, - "kindergarten": 0, - "school": 0, - "university": 1, - "cinema": 1, - "arts_centre": 0, - "brothel": 1, - "casino": 0, - "community_centre": 0, - "conference_centre": 1, - "events_venue": 1, - "exhibition_centre": 0, - 
"gambling": 0, - "music_venue": 0, - "nightclub": 0, - "planetarium": 0, - "social_centre": 0, - "studio": 0, - "theatre": 0, - "library": 1, - "museum": 0, - "aquarium": 0, - "gallery": 0, - "information": 0, - "restaurant": 0, - "bar": 0, - "cafe": 0, - "fast_food": 0, - "ice_cream": 0, - "pub": 0, - "attraction": 0 + "building" : 1, + "house" : 1, + "detached" : 1, + "residential" : 1, + "apartments" : 1, + "bungalow" : 0, + "historic" : 0, + "monument" : 0, + "ruins" : 0, + "castle" : 1, + "agricultural" : 0, + "farm" : 0, + "farm_auxiliary" : 0, + "barn" : 0, + "greenhouse" : 0, + "silo" : 0, + "commercial" : 2, + "industrial" : 0, + "sport" : 0, + "sports_centre" : 0, + "grandstand" : 0, + "transport" : 0, + "train_station" : 0, + "toll_booth" : 0, + "toll" : 0, + "terminal" : 0, + "airport_terminal" : 0, + "healthcare" : 1, + "education" : 1, + "entertainment_arts_culture": 0, + "sustenance" : 1, + "military" : 0, + "religious" : 0, + "chapel" : 0, + "church" : 0, + "government" : 1, + "townhall" : 1, + "office" : 1, + "emergency" : 0, + "hotel" : 2, + "hospital" : 2, + "parking" : 1, + "slight_construction" : 0, + "water_tower" : 0, + "fortress" : 0, + "abbey" : 0, + "cathedral" : 0, + "mosque" : 0, + "musalla" : 0, + "temple" : 0, + "synagogue" : 0, + "shrine" : 0, + "place_of_worship" : 0, + "wayside_shrine" : 0, + "swimming_pool" : 0, + "fitness_centre" : 1, + "horse_riding" : 0, + "ice_rink" : 0, + "pitch" : 0, + "stadium" : 0, + "track" : 0, + "sports_hall" : 0, + "ammunition" : 0, + "bunker" : 0, + "barracks" : 1, + "casemate" : 0, + "station" : 0, + "government_office" : 1, + "stable" : 0, + "sty" : 0, + "cowshed" : 0, + "digester" : 0, + "farmhouse" : 0, + "bank" : 1, + "bureau_de_change" : 0, + "boat_rental" : 0, + "car_rental" : 0, + "internet_cafe" : 0, + "kiosk" : 0, + "money_transfer" : 0, + "market" : 0, + "marketplace" : 0, + "pharmacy" : 0, + "post_office" : 1, + "retail" : 0, + "shop" : 0, + "store" : 0, + "supermarket" : 0, + "warehouse" : 0, + "factory" : 0, + "gas" : 0, + "heating_station" : 0, + "oil_mill" : 0, + "oil" : 0, + "wellsite" : 0, + "well_cluster" : 0, + "grain_silo" : 0, + "villa" : 1, + "dormitory" : 1, + "condominium" : 1, + "sheltered_housing" : 0, + "workers_dormitory" : 0, + "terrace" : 1, + "transportation" : 0, + "hangar" : 0, + "tower" : 1, + "control_tower" : 0, + "aeroway" : 1, + "roundhouse" : 0, + "social_facility" : 1, + "college" : 1, + "kindergarten" : 0, + "school" : 0, + "university" : 1, + "cinema" : 1, + "arts_centre" : 0, + "brothel" : 1, + "casino" : 0, + "community_centre" : 0, + "conference_centre" : 1, + "events_venue" : 1, + "exhibition_centre" : 0, + "gambling" : 0, + "music_venue" : 0, + "nightclub" : 0, + "planetarium" : 0, + "social_centre" : 0, + "studio" : 0, + "theatre" : 0, + "library" : 1, + "museum" : 0, + "aquarium" : 0, + "gallery" : 0, + "information" : 0, + "restaurant" : 0, + "bar" : 0, + "cafe" : 0, + "fast_food" : 0, + "ice_cream" : 0, + "pub" : 0, + "attraction" : 0 ] @@ -373,7 +373,7 @@ static String[] getTypeAndUse(def main_type, def main_use, def types_and_uses) { * @param road The name of the raw roads table in the DB * @return The name of the final roads table */ -String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") throws Exception{ +String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") throws Exception { debug('Formating road layer') def outputTableName = postfix "ROAD" datasource.execute(""" @@ -522,8 +522,8 @@ String 
formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") if (!qualified_road_zindex) { qualified_road_zindex = 1 } - }else if(road_crossing=='NC'){ - qualified_crossing=null + } else if (road_crossing == 'NC') { + qualified_crossing = null } def road_sens = row.DIRECTION @@ -578,7 +578,7 @@ String formatRoadLayer(JdbcDataSource datasource, String road, String zone = "") * @param water The name of the raw hydro table in the DB * @return The name of the final hydro table */ -String formatHydroLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception{ +String formatHydroLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception { debug('Hydro transformation starts') def outputTableName = postfix("HYDRO") datasource.execute """Drop table if exists $outputTableName; @@ -600,31 +600,31 @@ String formatHydroLayer(JdbcDataSource datasource, String water, String zone = " } def water_types = - ["Aqueduc" : "aqueduct", - "Canal" : "canal", - "Delta" : "bay", - "Ecoulement canalisé" : "canal", - "Ecoulement endoréique" : "water", - "Ecoulement hyporhéique": "water", - "Ecoulement karstique" : "water", - "Ecoulement naturel" : "water", - "Ecoulement phréatique" : "water", - "Estuaire" : "bay", - "Inconnue" : "water", - "Lac" : "lake", - "Lagune" : "lagoon", "Mangrove": "mangrove", - "Mare" : "pond", - "Plan d'eau de gravière": "pond", - "Plan d'eau de mine": "basin", "Ravine": "water", - "Réservoir-bassin" : "basin", - "Réservoir-bassin d'orage": "basin", + ["Aqueduc" : "aqueduct", + "Canal" : "canal", + "Delta" : "bay", + "Ecoulement canalisé" : "canal", + "Ecoulement endoréique" : "water", + "Ecoulement hyporhéique" : "water", + "Ecoulement karstique" : "water", + "Ecoulement naturel" : "water", + "Ecoulement phréatique" : "water", + "Estuaire" : "bay", + "Inconnue" : "water", + "Lac" : "lake", + "Lagune" : "lagoon", "Mangrove": "mangrove", + "Mare" : "pond", + "Plan d'eau de gravière" : "pond", + "Plan d'eau de mine" : "basin", "Ravine": "water", + "Réservoir-bassin" : "basin", + "Réservoir-bassin d'orage" : "basin", "Réservoir-bassin piscicole": "basin", - "Retenue" : "basin", - "Retenuebarrage": "basin", - "Retenue-bassin portuaire": "basin", - "Retenue-digue": "basin", - "Surface d'eau" :"water", - "Bassin" :"basin" + "Retenue" : "basin", + "Retenuebarrage" : "basin", + "Retenue-bassin portuaire" : "basin", + "Retenue-digue" : "basin", + "Surface d'eau" : "water", + "Bassin" : "basin" ] int rowcount = 1 @@ -659,7 +659,7 @@ String formatHydroLayer(JdbcDataSource datasource, String water, String zone = " * @param rail The name of the raw rails table in the DB * @return The name of the final rails table */ -String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "") throws Exception{ +String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "") throws Exception { debug('Rails transformation starts') def outputTableName = postfix("RAILS") datasource.execute """ drop table if exists $outputTableName; @@ -756,7 +756,7 @@ String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "" * @param vegetation The name of the raw vegetation table in the DB * @return The name of the final vegetation table */ -String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "") throws Exception{ +String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "") throws Exception { debug('Vegetation transformation starts') def 
outputTableName = postfix "VEGET" datasource.execute(""" @@ -862,7 +862,7 @@ String formatVegetationLayer(JdbcDataSource datasource, String vegetation, Strin * @param impervious The name of the impervious table in the DB * @return outputTableName The name of the final impervious table */ -String formatImperviousLayer(H2GIS datasource, String impervious) throws Exception{ +String formatImperviousLayer(H2GIS datasource, String impervious) throws Exception { debug('Impervious layer') def outputTableName = postfix("IMPERVIOUS") datasource.execute """ drop table if exists $outputTableName; @@ -916,8 +916,8 @@ String formatImperviousLayer(H2GIS datasource, String impervious) throws Excepti * * @return */ -String setAliasOnColumns(String tableName){ - return columnNames.inject([]) { result, iter -> +String setAliasOnColumns(String tableName) { + return columnNames.inject([]) { result, iter -> result += "a.$iter" }.join(",") } diff --git a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy index 21a54262e3..571fe889d6 100644 --- a/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy +++ b/bdtopo/src/main/groovy/org/orbisgis/geoclimate/bdtopo/InputDataLoading.groovy @@ -63,7 +63,7 @@ def loadV2( "terrain_sport" : "", "construction_surfacique": "", "surface_route" : "", "surface_activite": "", "piste_aerodrome" : "", "reservoir": "", "zone_vegetation": ""], - float distance = 1000) throws Exception{ + float distance = 1000) throws Exception { debug('Import the BDTopo data') @@ -93,9 +93,9 @@ def loadV2( srid = currentSrid } else { if (currentSrid == 0) { - throw new IllegalArgumentException( "The process has been stopped since the table $name has a no SRID") + throw new IllegalArgumentException("The process has been stopped since the table $name has a no SRID") } else if (currentSrid > 0 && srid != currentSrid) { - throw new IllegalArgumentException( "The process has been stopped since the table $name has a different SRID from the others") + throw new IllegalArgumentException("The process has been stopped since the table $name has a different SRID from the others") } } } @@ -108,7 +108,7 @@ def loadV2( // If the COMMUNE table does not exist or is empty, then the process is stopped if (!tablesExist.get("commune")) { - throw new IllegalArgumentException( 'The process has been stopped since the table commune does not exist or is empty') + throw new IllegalArgumentException('The process has been stopped since the table commune does not exist or is empty') } // If the following tables does not exists, we create corresponding empty tables @@ -360,16 +360,16 @@ Map loadV3(JdbcDataSource datasource, "piste_d_aerodrome" : "", "reservoir": "", "construction_surfacique": "", "equipement_de_transport": "", "troncon_de_route" : "", "troncon_de_voie_ferree": "", "surface_hydrographique": "", "zone_de_vegetation": "", "aerodrome": "", "limite_terre_mer": ""], - float distance = 1000) throws Exception{ + float distance = 1000) throws Exception { if (!layers) { - throw new IllegalArgumentException( "Please set a valid list of layers") + throw new IllegalArgumentException("Please set a valid list of layers") } debug('Import the BDTopo data') def commune = layers.commune // If the Commune table is empty, then the process is stopped if (!commune) { - throw new IllegalArgumentException( 'The process has been stopped since the table Commnune is empty') + throw new 
IllegalArgumentException('The process has been stopped since the table Commnune is empty') } // ------------------------------------------------------------------------------- @@ -391,9 +391,9 @@ Map loadV3(JdbcDataSource datasource, srid = currentSrid } else { if (currentSrid == 0) { - throw new IllegalArgumentException( "The process has been stopped since the table $name has a no SRID") + throw new IllegalArgumentException("The process has been stopped since the table $name has a no SRID") } else if (currentSrid > 0 && srid != currentSrid) { - throw new IllegalArgumentException( "The process has been stopped since the table $name has a different SRID from the others") + throw new IllegalArgumentException("The process has been stopped since the table $name has a different SRID from the others") } } } @@ -402,7 +402,7 @@ Map loadV3(JdbcDataSource datasource, } if (!tablesExist.get("commune")) { - throw new IllegalArgumentException( 'The process has been stopped since the table zone does not exist or is empty') + throw new IllegalArgumentException('The process has been stopped since the table zone does not exist or is empty') } // ------------------------------------------------------------------------------- diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy index e65b528b43..e82ccf99ca 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowAbstractTest.groovy @@ -20,8 +20,6 @@ package org.orbisgis.geoclimate.bdtopo import org.junit.jupiter.api.Test -import org.junit.jupiter.api.io.CleanupMode -import org.junit.jupiter.api.io.TempDir import org.locationtech.jts.geom.Envelope import org.locationtech.jts.geom.Geometry import org.orbisgis.data.H2GIS @@ -31,7 +29,6 @@ import static org.junit.jupiter.api.Assertions.* abstract class WorkflowAbstractTest { - /** * Get the version of the workflow * @return @@ -61,8 +58,8 @@ abstract class WorkflowAbstractTest { abstract ArrayList getFileNames() - Map getResultFiles(String dataFolder){ - if(dataFolder){ + Map getResultFiles(String dataFolder) { + if (dataFolder) { def files = [:] new File(dataFolder).eachFileRecurse groovy.io.FileType.FILES, { file -> if (file.name.toLowerCase().endsWith(".fgb")) { @@ -347,7 +344,7 @@ abstract class WorkflowAbstractTest { "delete": false ], "input" : [ - "folder": dataFolder, + "folder" : dataFolder, "locations": [2000, 2001, 2002]], "output" : [ "folder": ["path" : folder.absolutePath, @@ -361,7 +358,7 @@ abstract class WorkflowAbstractTest { ] ] ] - assertThrows(Exception.class, ()->BDTopo.workflow(bdTopoParameters, getVersion())) + assertThrows(Exception.class, () -> BDTopo.workflow(bdTopoParameters, getVersion())) } @Test @@ -433,5 +430,5 @@ abstract class WorkflowAbstractTest { //Test road assertTrue(h2gis.firstRow("""SELECT count(*) as count from ${tableNames.road} where TYPE is not null;""".toString()).count > 0) assertTrue(h2gis.firstRow("""SELECT count(*) as count from ${tableNames.road} where WIDTH is not null or WIDTH>0 ;""".toString()).count > 0) - } + } } \ No newline at end of file diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy index 64a2d27920..5125c6f6bc 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy 
+++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowDebugTest.groovy @@ -58,23 +58,23 @@ class WorkflowDebugTest { */ def input = [ "locations": locations, - "database": [ - "user" : user, - "password" : password, - "url" : url, - "tables" : ["commune" : "ign_bdtopo_2018.commune", - "bati_indifferencie" : "ign_bdtopo_2018.bati_indifferencie", - "bati_industriel" : "ign_bdtopo_2018.bati_industriel", - "bati_remarquable" : "ign_bdtopo_2018.bati_remarquable", - "route" : "ign_bdtopo_2018.route", - "troncon_voie_ferree" : "ign_bdtopo_2018.troncon_voie_ferree", - "surface_eau" : "ign_bdtopo_2018.surface_eau", - "zone_vegetation" : "ign_bdtopo_2018.zone_vegetation", - "terrain_sport" : "ign_bdtopo_2018.terrain_sport", - "construction_surfacique": "ign_bdtopo_2018.construction_surfacique", - "surface_route" : "ign_bdtopo_2018.surface_route", - "surface_activite" : "ign_bdtopo_2018.surface_activite", - "piste_aerodrome" : "ign_bdtopo_2018.piste_aerodrome"] + "database" : [ + "user" : user, + "password": password, + "url" : url, + "tables" : ["commune" : "ign_bdtopo_2018.commune", + "bati_indifferencie" : "ign_bdtopo_2018.bati_indifferencie", + "bati_industriel" : "ign_bdtopo_2018.bati_industriel", + "bati_remarquable" : "ign_bdtopo_2018.bati_remarquable", + "route" : "ign_bdtopo_2018.route", + "troncon_voie_ferree" : "ign_bdtopo_2018.troncon_voie_ferree", + "surface_eau" : "ign_bdtopo_2018.surface_eau", + "zone_vegetation" : "ign_bdtopo_2018.zone_vegetation", + "terrain_sport" : "ign_bdtopo_2018.terrain_sport", + "construction_surfacique": "ign_bdtopo_2018.construction_surfacique", + "surface_route" : "ign_bdtopo_2018.surface_route", + "surface_activite" : "ign_bdtopo_2018.surface_activite", + "piste_aerodrome" : "ign_bdtopo_2018.piste_aerodrome"] ]] @@ -137,7 +137,7 @@ class WorkflowDebugTest { @Test void testIntegrationFolderInput() { def input_data = "/home/decide/Data/URBIO/Donnees_brutes/BD_TOPO/BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D076_2024-03-15" - def locations = ["76005","76020"] + def locations = ["76005", "76020"] String directory = "/tmp/bdtopo3" File dirFile = new File(directory) dirFile.delete() @@ -156,9 +156,9 @@ class WorkflowDebugTest { "output" : [ "folder": ["path": directory]], "parameters" : - ["distance" : 0, - rsu_indicators : [ - "indicatorUse": ["LCZ", "UTRF", "TEB"] + ["distance" : 0, + rsu_indicators: [ + "indicatorUse": ["LCZ", "UTRF", "TEB"] ], /*"grid_indicators": [ diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy index 49ed68ec67..4f05288ba6 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/WorkflowParametersTest.groovy @@ -23,7 +23,6 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.CleanupMode import org.junit.jupiter.api.io.TempDir -import static org.junit.jupiter.api.Assertions.assertNull import static org.junit.jupiter.api.Assertions.assertThrows class WorkflowParametersTest { @@ -69,7 +68,7 @@ class WorkflowParametersTest { "terrain_roughness_class" : 1]] ] ] - assertThrows(Exception.class, ()->BDTopo.v2(bdTopoParameters) ) - assertThrows(Exception.class, ()-> BDTopo.v3(bdTopoParameters)) + assertThrows(Exception.class, () -> BDTopo.v2(bdTopoParameters)) + assertThrows(Exception.class, () -> BDTopo.v3(bdTopoParameters)) } } diff --git 
a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v2/WorkflowBDTopoV2Test.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v2/WorkflowBDTopoV2Test.groovy index b674e51a90..7dae05314c 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v2/WorkflowBDTopoV2Test.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v2/WorkflowBDTopoV2Test.groovy @@ -23,6 +23,7 @@ import org.junit.jupiter.api.io.CleanupMode import org.junit.jupiter.api.io.TempDir import org.orbisgis.data.H2GIS import org.orbisgis.geoclimate.bdtopo.WorkflowAbstractTest + import static org.junit.jupiter.api.Assertions.assertEquals import static org.junit.jupiter.api.Assertions.assertTrue @@ -58,15 +59,15 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { @Override void checkFormatData() { Map resultFiles = getResultFiles(folder.absolutePath) - H2GIS h2GIS = H2GIS.open(folder.getAbsolutePath()+File.separator+"bdtopo_${getVersion()}_format") + H2GIS h2GIS = H2GIS.open(folder.getAbsolutePath() + File.separator + "bdtopo_${getVersion()}_format") resultFiles.each { - h2GIS.load( it.value, it.key,true) + h2GIS.load(it.value, it.key, true) } //Check the data //Building int count = h2GIS.getRowCount("building") - List cols = [ "ID_BUILD", "ID_SOURCE", "HEIGHT_WALL", "HEIGHT_ROOF", "NB_LEV", "TYPE", "MAIN_USE", "ROOF_SHAPE","ZINDEX", "THE_GEOM"] - assertTrue h2GIS.getColumnNames("building").intersect(cols).size()==cols.size() + List cols = ["ID_BUILD", "ID_SOURCE", "HEIGHT_WALL", "HEIGHT_ROOF", "NB_LEV", "TYPE", "MAIN_USE", "ROOF_SHAPE", "ZINDEX", "THE_GEOM"] + assertTrue h2GIS.getColumnNames("building").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where HEIGHT_WALL = 0 OR HEIGHT_ROOF = 0 OR NB_LEV = 0").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where TYPE IS NOT NULL OR MAIN_USE IS NOT NULL").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where ZINDEX BETWEEN -4 AND 4").count) @@ -74,8 +75,8 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Road count = h2GIS.getRowCount("road") - cols = [ "ID_ROAD", "ID_SOURCE", "WIDTH","TYPE", "SURFACE", "SIDEWALK", "CROSSING","MAXSPEED", "DIRECTION", "ZINDEX", "THE_GEOM"] - assertTrue h2GIS.getColumnNames("road").intersect(cols).size()==cols.size() + cols = ["ID_ROAD", "ID_SOURCE", "WIDTH", "TYPE", "SURFACE", "SIDEWALK", "CROSSING", "MAXSPEED", "DIRECTION", "ZINDEX", "THE_GEOM"] + assertTrue h2GIS.getColumnNames("road").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where WIDTH = 0 ").count) assertEquals(22, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where crossing in ('bridge', 'crossing')").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where TYPE IS NOT NULL OR SIDEWALK is not null").count) @@ -88,8 +89,8 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Rail count = h2GIS.getRowCount("rail") cols = ["THE_GEOM", "ID_RAIL", - "ID_SOURCE" , "TYPE" ,"CROSSING", "ZINDEX" , "WIDTH", "USAGE"] - assertTrue h2GIS.getColumnNames("rail").intersect(cols).size()==cols.size() + "ID_SOURCE", "TYPE", "CROSSING", "ZINDEX", "WIDTH", "USAGE"] + assertTrue h2GIS.getColumnNames("rail").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where WIDTH = 0 ").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where type is not 
null").count) assertEquals(2, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where crossing is not null").count) @@ -100,7 +101,7 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Vegetation count = h2GIS.getRowCount("vegetation") cols = ["THE_GEOM", "ID_VEGET", "ID_SOURCE", "TYPE", "HEIGHT_CLASS", "ZINDEX"] - assertTrue h2GIS.getColumnNames("vegetation").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("vegetation").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where type is not null").count) assertEquals(670, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where height_class ='high'").count) assertEquals(2, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where height_class = 'low'").count) @@ -111,7 +112,7 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Water count = h2GIS.getRowCount("water") cols = ["THE_GEOM", "ID_WATER", "ID_SOURCE", "TYPE", "ZINDEX"] - assertTrue h2GIS.getColumnNames("water").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("water").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where ZINDEX BETWEEN 0 AND 1 ").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) @@ -120,7 +121,7 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Impervious areas count = h2GIS.getRowCount("impervious") cols = ["THE_GEOM", "ID_IMPERVIOUS", "TYPE"] - assertTrue h2GIS.getColumnNames("impervious").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("impervious").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM impervious where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM impervious where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) @@ -128,7 +129,7 @@ class WorkflowBDTopoV2Test extends WorkflowAbstractTest { //Urban areas count = h2GIS.getRowCount("urban_areas") cols = ["THE_GEOM", "ID_URBAN", "ID_SOURCE", "TYPE"] - assertTrue h2GIS.getColumnNames("urban_areas").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("urban_areas").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM urban_areas where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM urban_areas where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) diff --git a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v3/WorkflowBDTopoV3Test.groovy b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v3/WorkflowBDTopoV3Test.groovy index a6ee12691c..96c9b47abe 100644 --- a/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v3/WorkflowBDTopoV3Test.groovy +++ b/bdtopo/src/test/groovy/org/orbisgis/geoclimate/bdtopo/v3/WorkflowBDTopoV3Test.groovy @@ -52,15 +52,15 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { @Override void checkFormatData() { Map resultFiles = getResultFiles(folder.absolutePath) - H2GIS h2GIS = H2GIS.open(folder.getAbsolutePath()+File.separator+"bdtopo_${getVersion()}_format") + H2GIS h2GIS = H2GIS.open(folder.getAbsolutePath() + File.separator + "bdtopo_${getVersion()}_format") resultFiles.each { - h2GIS.load( 
it.value, it.key,true) + h2GIS.load(it.value, it.key, true) } //Check the data //Building int count = h2GIS.getRowCount("building") - List cols = [ "ID_BUILD", "ID_SOURCE", "HEIGHT_WALL", "HEIGHT_ROOF", "NB_LEV", "TYPE", "MAIN_USE", "ROOF_SHAPE","ZINDEX", "THE_GEOM"] - assertTrue h2GIS.getColumnNames("building").intersect(cols).size()==cols.size() + List cols = ["ID_BUILD", "ID_SOURCE", "HEIGHT_WALL", "HEIGHT_ROOF", "NB_LEV", "TYPE", "MAIN_USE", "ROOF_SHAPE", "ZINDEX", "THE_GEOM"] + assertTrue h2GIS.getColumnNames("building").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where HEIGHT_WALL = 0 OR HEIGHT_ROOF = 0 OR NB_LEV = 0").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where TYPE IS NOT NULL OR MAIN_USE IS NOT NULL").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM building where ZINDEX BETWEEN -4 AND 4").count) @@ -69,8 +69,8 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { //Road count = h2GIS.getRowCount("road") - cols = [ "ID_ROAD", "ID_SOURCE", "WIDTH","TYPE", "SURFACE", "SIDEWALK", "CROSSING","MAXSPEED", "DIRECTION", "ZINDEX", "THE_GEOM"] - assertTrue h2GIS.getColumnNames("road").intersect(cols).size()==cols.size() + cols = ["ID_ROAD", "ID_SOURCE", "WIDTH", "TYPE", "SURFACE", "SIDEWALK", "CROSSING", "MAXSPEED", "DIRECTION", "ZINDEX", "THE_GEOM"] + assertTrue h2GIS.getColumnNames("road").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where WIDTH = 0 ").count) assertEquals(22, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where crossing is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM road where TYPE IS NOT NULL OR SIDEWALK is not null").count) @@ -84,8 +84,8 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { count = h2GIS.getRowCount("rail") cols = ["THE_GEOM", "ID_RAIL", - "ID_SOURCE" , "TYPE" ,"CROSSING", "ZINDEX" , "WIDTH", "USAGE"] - assertTrue h2GIS.getColumnNames("rail").intersect(cols).size()==cols.size() + "ID_SOURCE", "TYPE", "CROSSING", "ZINDEX", "WIDTH", "USAGE"] + assertTrue h2GIS.getColumnNames("rail").intersect(cols).size() == cols.size() assertEquals(0, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where WIDTH = 0 ").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where type is not null").count) assertEquals(2, h2GIS.firstRow("SELECT COUNT(*) as count FROM rail where crossing is not null").count) @@ -96,7 +96,7 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { //Vegetation count = h2GIS.getRowCount("vegetation") cols = ["THE_GEOM", "ID_VEGET", "ID_SOURCE", "TYPE", "HEIGHT_CLASS", "ZINDEX"] - assertTrue h2GIS.getColumnNames("vegetation").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("vegetation").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where type is not null").count) assertEquals(670, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where height_class ='high'").count) assertEquals(2, h2GIS.firstRow("SELECT COUNT(*) as count FROM vegetation where height_class = 'low'").count) @@ -107,7 +107,7 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { //Water count = h2GIS.getRowCount("water") cols = ["THE_GEOM", "ID_WATER", "ID_SOURCE", "TYPE", "ZINDEX"] - assertTrue h2GIS.getColumnNames("water").intersect(cols).size()==cols.size() + assertTrue 
h2GIS.getColumnNames("water").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where ZINDEX BETWEEN 0 AND 1 ").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM water where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) @@ -116,7 +116,7 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { //Impervious areas count = h2GIS.getRowCount("impervious") cols = ["THE_GEOM", "ID_IMPERVIOUS", "TYPE"] - assertTrue h2GIS.getColumnNames("impervious").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("impervious").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM impervious where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM impervious where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) @@ -124,7 +124,7 @@ class WorkflowBDTopoV3Test extends WorkflowAbstractTest { //Urban areas count = h2GIS.getRowCount("urban_areas") cols = ["THE_GEOM", "ID_URBAN", "ID_SOURCE", "TYPE"] - assertTrue h2GIS.getColumnNames("urban_areas").intersect(cols).size()==cols.size() + assertTrue h2GIS.getColumnNames("urban_areas").intersect(cols).size() == cols.size() assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM urban_areas where type is not null").count) assertEquals(count, h2GIS.firstRow("SELECT COUNT(*) as count FROM urban_areas where ST_ISEMPTY(THE_GEOM)=false OR THE_GEOM IS NOT NULL").count) diff --git a/common-utils/pom.xml b/common-utils/pom.xml index 3927c5828a..5ec994b03d 100644 --- a/common-utils/pom.xml +++ b/common-utils/pom.xml @@ -1,5 +1,6 @@ - + 4.0.0 diff --git a/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/AbstractScript.groovy b/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/AbstractScript.groovy index 6669f2a5c3..56bcb97a38 100644 --- a/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/AbstractScript.groovy +++ b/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/AbstractScript.groovy @@ -27,7 +27,7 @@ abstract class AbstractScript extends Script { static String uuid() { UUID.randomUUID().toString().replaceAll("-", "_") } void info(def message) { - LoggerUtils.info( message.toString()) + LoggerUtils.info(message.toString()) } void warn(def message) { diff --git a/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/LoggerUtils.groovy b/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/LoggerUtils.groovy index 167346048b..6334e570b3 100644 --- a/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/LoggerUtils.groovy +++ b/common-utils/src/main/groovy/org/orbisgis/geoclimate/utils/LoggerUtils.groovy @@ -1,7 +1,6 @@ package org.orbisgis.geoclimate.utils import org.slf4j.Logger -import org.slf4j.LoggerFactory /** * GeoClimate is a geospatial processing toolbox for environmental and climate studies @@ -26,20 +25,20 @@ class LoggerUtils { static Logger logger - static LOGLEVEL_KEY="org.orbisgis.geoclimate.loglevel" + static LOGLEVEL_KEY = "org.orbisgis.geoclimate.loglevel" - private LoggerUtils() throws IOException{ + private LoggerUtils() throws IOException { String level = System.getProperty(LOGLEVEL_KEY) - if(level){ + if (level) { setLoggerLevel(level) - }else{ + } else { setLoggerLevel("info") } - logger =org.slf4j.LoggerFactory.getLogger("GeoClimate") + logger = 
org.slf4j.LoggerFactory.getLogger("GeoClimate") } - private static Logger getLogger(){ - if(logger == null){ + private static Logger getLogger() { + if (logger == null) { try { new LoggerUtils() } catch (IOException e) { @@ -50,7 +49,7 @@ class LoggerUtils { } static info(def message) { - getLogger().info( message.toString()) + getLogger().info(message.toString()) } static warn(def message) { diff --git a/geoclimate-cli/pom.xml b/geoclimate-cli/pom.xml index 6f468138e9..9498848a0c 100644 --- a/geoclimate-cli/pom.xml +++ b/geoclimate-cli/pom.xml @@ -1,5 +1,6 @@ - + org.orbisgis.geoclimate geoclimate-parent diff --git a/geoclimate-cli/src/main/groovy/org/orbisgis/geoclimate/Geoclimate.groovy b/geoclimate-cli/src/main/groovy/org/orbisgis/geoclimate/Geoclimate.groovy index 8555f4fad3..aa368dbff2 100644 --- a/geoclimate-cli/src/main/groovy/org/orbisgis/geoclimate/Geoclimate.groovy +++ b/geoclimate-cli/src/main/groovy/org/orbisgis/geoclimate/Geoclimate.groovy @@ -19,7 +19,7 @@ */ package org.orbisgis.geoclimate -import org.orbisgis.geoclimate.bdtopo.BDTopo + import org.orbisgis.geoclimate.utils.LoggerUtils import picocli.CommandLine diff --git a/geoindicators/pom.xml b/geoindicators/pom.xml index 048dfb4559..6a8225e9f2 100644 --- a/geoindicators/pom.xml +++ b/geoindicators/pom.xml @@ -1,5 +1,6 @@ - + 4.0.0 diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy index adf96302d4..20bf902ed2 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicators.groovy @@ -84,7 +84,7 @@ String holeAreaDensity(JdbcDataSource datasource, String blockTable, String pref * @author Jérémy Bernard * @author Erwan Bocher */ -String netCompactness(JdbcDataSource datasource, String building, String buildingVolumeField, String buildingContiguityField, String prefixName) throws Exception{ +String netCompactness(JdbcDataSource datasource, String building, String buildingVolumeField, String buildingContiguityField, String prefixName) throws Exception { def GEOMETRY_FIELD_BU = "the_geom" def ID_COLUMN_BL = "id_block" def HEIGHT_WALL = "height_wall" @@ -177,6 +177,6 @@ String closingness(JdbcDataSource datasource, String correlationTableName, Strin datasource.execute(query) return outputTableName } catch (SQLException e) { - throw new SQLException("Cannot compute the closingness for the building blocks",e) + throw new SQLException("Cannot compute the closingness for the building blocks", e) } } \ No newline at end of file diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy index ea07b9d45b..f093099421 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicators.groovy @@ -336,7 +336,7 @@ String buildingDirectionDistribution(JdbcDataSource datasource, String buildingT SELECT $inputIdUp, LEN_L, LEN_H, CASEWHEN(ANG_L>=180, ANG_L-180, ANG_L) AS ANG_L, CASEWHEN(ANG_H>180, ANG_H-180, ANG_H) AS ANG_H FROM $build_dir360""") - datasource.execute( "CREATE INDEX ON $build_dir180 ($inputIdUp)") + datasource.execute("CREATE INDEX ON $build_dir180 ($inputIdUp)") // The query aiming to 
create the building direction distribution is created def sqlQueryDist = "DROP TABLE IF EXISTS $build_dir_dist; CREATE TABLE $build_dir_dist AS SELECT " diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy index 4745767587..fc2009ecf2 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GeoindicatorsExtensionModule.groovy @@ -33,6 +33,6 @@ import org.orbisgis.data.H2GIS * @param filePath path for the file * @return */ -static String save(Geometry geometry, H2GIS h2GIS, String filePath) throws Exception{ +static String save(Geometry geometry, H2GIS h2GIS, String filePath) throws Exception { return h2GIS.save("(SELECT ST_GEOMFROMTEXT('${geometry}',${geometry.getSRID()}) as the_geom, CAST(1 as integer) as id)", filePath, true) } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy index 39c3d28a23..7e0a4ad091 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicators.groovy @@ -21,6 +21,8 @@ package org.orbisgis.geoclimate.geoindicators import groovy.transform.BaseScript import org.locationtech.jts.geom.Geometry +import org.locationtech.jts.geom.prep.PreparedGeometry +import org.locationtech.jts.geom.prep.PreparedGeometryFactory import org.locationtech.jts.operation.distance.IndexedFacetDistance import org.orbisgis.data.jdbc.JdbcDataSource import org.orbisgis.geoclimate.Geoindicators @@ -306,20 +308,20 @@ String gridDistances(JdbcDataSource datasource, String input_polygons, String gr CREATE TABLE $outputTableName (THE_GEOM GEOMETRY,$id_grid INT, DISTANCE FLOAT); """.toString()) - datasource.createSpatialIndex(input_polygons) datasource.createSpatialIndex(grid) - datasource.withBatch(100) { stmt -> datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> Geometry geom = row.the_geom if (geom) { + PreparedGeometry prepGEom = PreparedGeometryFactory.prepare(geom) IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid - where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom and - st_intersects(ST_GEOMFROMTEXT('${geom}',$epsg) , ST_POINTONSURFACE(the_geom))""".toString()) { cell -> + where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom """.toString()) { cell -> Geometry cell_geom = cell.the_geom - double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) - stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${cell_geom}',$epsg), ${cell.id},${distance})".toString() + if (prepGEom.intersects(cell_geom.getCentroid())) { + double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) + stmt.addBatch "insert into $outputTableName values(ST_GEOMFROMTEXT('${cell_geom}',$epsg), ${cell.id},${distance})".toString() + } } } } @@ -328,21 +330,20 @@ String gridDistances(JdbcDataSource datasource, String input_polygons, String gr datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName ($id_grid INT, DISTANCE FLOAT); """.toString()) - - 
datasource.createSpatialIndex(input_polygons) datasource.createSpatialIndex(grid) - datasource.withBatch(100) { stmt -> datasource.eachRow("SELECT the_geom from $input_polygons".toString()) { row -> Geometry geom = row.the_geom if (geom) { + PreparedGeometry prepGEom = PreparedGeometryFactory.prepare(geom) IndexedFacetDistance indexedFacetDistance = new IndexedFacetDistance(geom) datasource.eachRow("""SELECT the_geom, ${id_grid} as id from $grid - where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom and - st_intersects(ST_GEOMFROMTEXT('${geom}',$epsg) , ST_POINTONSURFACE(the_geom))""".toString()) { cell -> - Geometry cell_geom = cell.the_geom - double distance = indexedFacetDistance.distance(cell_geom.getCentroid()) - stmt.addBatch "insert into $outputTableName values(${cell.id},${distance})".toString() + where ST_GEOMFROMTEXT('${geom}',$epsg) && the_geom""".toString()) { cell -> + Geometry cell_geom = cell.the_geom.getCentroid() + if (prepGEom.intersects(cell_geom)) { + double distance = indexedFacetDistance.distance(cell_geom) + stmt.addBatch "insert into $outputTableName values(${cell.id},${distance})".toString() + } } } } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy index 6e63d51abc..2e54968d70 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/PopulationIndicators.groovy @@ -82,7 +82,7 @@ String formatPopulationTable(JdbcDataSource datasource, String populationTable, Map multiScalePopulation(JdbcDataSource datasource, String populationTable, List populationColumns = [], String buildingTable, String rsuTable, String gridTable) throws Exception { if (populationTable && populationColumns) { - def tablesToDrop=[] + def tablesToDrop = [] try { def prefixName = "pop" if (buildingTable) { @@ -90,7 +90,7 @@ Map multiScalePopulation(JdbcDataSource datasource, String populationTable, List populationTable, populationColumns) datasource.execute("""DROP TABLE IF EXISTS $buildingTable; ALTER TABLE ${buildingPop} RENAME TO $buildingTable""".toString()) - tablesToDrop< @@ -100,9 +100,9 @@ Map multiScalePopulation(JdbcDataSource datasource, String populationTable, List buildingTable, rsuTable, "id_rsu", "id_rsu", unweightedBuildingIndicators, prefixName) - tablesToDrop< 0) { + if (datasource.getRowCount(road) > 0) { def columnNames = datasource.getColumnNames(road) columnNames.remove("THE_GEOM") def flatListColumns = columnNames.inject([]) { result, iter -> @@ -173,7 +173,7 @@ String build_road_traffic(JdbcDataSource datasource, String road, String zone = COMMENT ON COLUMN ${outputTableName}."SLOPE" IS 'Slope (in %) of the road section.'; COMMENT ON COLUMN ${outputTableName}."DIRECTION" IS 'Define the direction of the road section. 
1 = one way road section and the traffic goes in the same way that the slope definition you have used, 2 = one way road section and the traffic goes in the inverse way that the slope definition you have used, 3 = bi-directional traffic flow, the flow is split into two components and correct half for uphill and half for downhill'""") } - }catch (SQLException e){ + } catch (SQLException e) { throw new SQLException("Cannot compute the road traffic", e) } } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy index 8779cd3800..f1acd23dfb 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicators.groovy @@ -134,7 +134,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building // 1. Convert the building polygons into lines and create the intersection with RSU polygons datasource.createIndex(building, idRsu) datasource.createIndex(rsu, idRsu) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $buildLine; CREATE TABLE $buildLine AS SELECT a.$ID_FIELD_BU, a.$idRsu, ST_AREA(b.$GEOMETRIC_FIELD_RSU) AS $RSU_AREA, @@ -147,7 +147,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building datasource.createSpatialIndex(buildLine, GEOMETRIC_FIELD_BU) datasource.createIndex(buildLine, idRsu) datasource.createIndex(buildLine, ID_FIELD_BU) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $sharedLineRsu; CREATE TABLE $sharedLineRsu AS SELECT SUM(ST_LENGTH( ST_INTERSECTION(a.$GEOMETRIC_FIELD_BU, @@ -165,7 +165,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building // 3. Calculates the building facade area within each RSU datasource.createIndex(buildLine, idRsu) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $buildLineRsu; CREATE TABLE $buildLineRsu AS SELECT $idRsu, MIN($RSU_AREA) AS $RSU_AREA, @@ -176,7 +176,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building // 4. Calculates the free facade density by RSU (subtract 3 and 2 and divide by RSU area) datasource.createIndex(buildLineRsu, idRsu) datasource.createIndex(sharedLineRsu, idRsu) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $onlyBuildRsu; CREATE TABLE $onlyBuildRsu AS SELECT a.$idRsu, @@ -193,7 +193,7 @@ String freeExternalFacadeDensityExact(JdbcDataSource datasource, String building // 5. 
Join RSU having no buildings and set their value to 0 datasource.createIndex(onlyBuildRsu, idRsu) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.$idRsu, @@ -277,7 +277,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, def to_start = System.currentTimeMillis() // Create the geometries of buildings and RSU holes included within each RSU - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $rsuDiff, $multiptsRSU, $multiptsRSUtot, $rsuDiffTot,$pts_RANG,$pts_order,$ptsRSUtot, $svfPts, $outputTableName; CREATE TABLE $rsuDiff AS (SELECT CASE WHEN ST_ISEMPTY(st_difference(a.$GEOMETRIC_COLUMN_RSU, st_makevalid(ST_ACCUM(b.$GEOMETRIC_COLUMN_BU)))) @@ -289,7 +289,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, b.$GEOMETRIC_COLUMN_BU) GROUP BY a.$ID_COLUMN_RSU); """) - datasource.execute( """ + datasource.execute(""" CREATE INDEX ON $rsuDiff ($ID_COLUMN_RSU); CREATE TABLE $rsuDiffTot AS SELECT b.$ID_COLUMN_RSU, case when a.$ID_COLUMN_RSU is null then b.the_geom else a.the_geom end as the_geom @@ -300,7 +300,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, // located outside buildings (and RSU holes) and the size of the grid mesh used to sample each RSU // (based on the building density + 10%) - if the building density exceeds 90%, // the LCZ 7 building density is then set to 90%) - datasource.execute( """CREATE TABLE $multiptsRSU AS SELECT $ID_COLUMN_RSU, THE_GEOM + datasource.execute("""CREATE TABLE $multiptsRSU AS SELECT $ID_COLUMN_RSU, THE_GEOM FROM ST_EXPLODE('(SELECT $ID_COLUMN_RSU, case when LEAST(TRUNC($pointDensity*c.rsu_area_free),100)=0 @@ -312,7 +312,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, FROM st_explode(''(select * from $rsuDiffTot)'') where st_area(the_geom)>0) as c)');""") // Need to identify specific points for buildings being RSU (slightly away from the wall on each facade) - datasource.execute( """ CREATE TABLE $multiptsRSUtot + datasource.execute(""" CREATE TABLE $multiptsRSUtot AS SELECT $ID_COLUMN_RSU, THE_GEOM FROM ST_EXPLODE('(SELECT $ID_COLUMN_RSU, ST_LocateAlong(THE_GEOM, 0.5, 0.01) AS THE_GEOM FROM $rsuDiffTot @@ -323,7 +323,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, datasource.createSpatialIndex(multiptsRSUtot, "the_geom") // The SVF calculation is performed at point scale - datasource.execute( """ + datasource.execute(""" CREATE TABLE $svfPts AS SELECT a.$ID_COLUMN_RSU, ST_SVF(ST_GEOMETRYN(a.the_geom,1), ST_ACCUM(ST_UPDATEZ(b.$GEOMETRIC_COLUMN_BU, b.$HEIGHT_WALL)), @@ -335,7 +335,7 @@ String groundSkyViewFactor(JdbcDataSource datasource, String rsu, String id_rsu, datasource.createIndex(svfPts, ID_COLUMN_RSU) // The result of the SVF calculation is averaged at RSU scale - datasource.execute( """ + datasource.execute(""" CREATE TABLE $outputTableName($ID_COLUMN_RSU integer, $BASE_NAME double) AS (SELECT a.$ID_COLUMN_RSU, CASE WHEN AVG(b.SVF) is not null THEN AVG(b.SVF) ELSE 1 END FROM $rsu a @@ -387,7 +387,7 @@ String aspectRatio(JdbcDataSource datasource, String rsuTable, String rsuFreeExt // The name of the outputTableName is constructed def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT CASE WHEN $rsuBuildingDensityColumn = 1 @@ -463,7 
+463,7 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin def names = [] // Common party walls between buildings are calculated - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $buildingIntersection; CREATE TABLE $buildingIntersection( the_geom GEOMETRY, id_build_a INTEGER, id_build_b INTEGER, z_max DOUBLE, z_min DOUBLE) AS SELECT ST_CollectionExtract(t.the_geom,2), t.id_build_a , t.id_build_b , t.z_max , t.z_min @@ -487,7 +487,7 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin // buildings). // Facades of isolated buildings are unioned to free facades of non-isolated buildings which are // unioned to free intersection facades. To each facade is affected its corresponding free height - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $buildingFree; CREATE TABLE $buildingFree (the_geom GEOMETRY, z_max double precision, z_min double precision) AS (SELECT ST_TOMULTISEGMENTS(a.the_geom) as the_geom, a.$HEIGHT_WALL as z_max, 0 as z_min @@ -530,7 +530,7 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin datasource.createSpatialIndex(buildingLayer, "the_geom") // Intersections between free facades and rsu geometries are calculated - datasource.execute( """ DROP TABLE IF EXISTS $buildingFreeExpl; + datasource.execute(""" DROP TABLE IF EXISTS $buildingFreeExpl; CREATE TABLE $buildingFreeExpl($ID_COLUMN_RSU INTEGER, the_geom GEOMETRY, $namesAndType) AS (SELECT a.$ID_COLUMN_RSU, ST_INTERSECTION(a.$GEOMETRIC_COLUMN_RSU, ST_TOMULTILINE(b.the_geom)), ${onlyNamesB} FROM $rsu a, $buildingLayer b @@ -539,7 +539,7 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin // Intersections facades are exploded to multisegments - datasource.execute( """DROP TABLE IF EXISTS $rsuInter; + datasource.execute("""DROP TABLE IF EXISTS $rsuInter; CREATE TABLE $rsuInter($ID_COLUMN_RSU INTEGER, the_geom GEOMETRY, $namesAndType) AS (SELECT $ID_COLUMN_RSU, the_geom, ${onlyNames} FROM ST_EXPLODE('$buildingFreeExpl'))""") @@ -588,7 +588,7 @@ String projectedFacadeAreaDistribution(JdbcDataSource datasource, String buildin datasource.createIndex(finalIndicator, ID_COLUMN_RSU) // Sum area at RSU scale and fill null values with 0 - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE ${outputTableName} AS SELECT a.$ID_COLUMN_RSU, ${sumNamesDir} @@ -1093,7 +1093,7 @@ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String r datasource.execute(queryDistrib) if (!operations.contains("linear_road_density")) { - datasource.execute( """DROP TABLE IF EXISTS $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; ALTER TABLE $roadDistTot RENAME TO $outputTableName""") } } @@ -1110,13 +1110,13 @@ String linearRoadOperations(JdbcDataSource datasource, String rsuTable, String r "FROM $rsuTable a LEFT JOIN $roadDens b ON a.$ID_COLUMN_RSU=b.id_rsu)" datasource.execute(queryDensity) if (!operations.contains("road_direction_distribution")) { - datasource.execute( """DROP TABLE IF EXISTS $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; ALTER TABLE $roadDensTot RENAME TO $outputTableName""") } } if (operations.contains("road_direction_distribution") && operations.contains("linear_road_density")) { - datasource.execute( """DROP TABLE if exists $outputTableName; + datasource.execute("""DROP TABLE if exists $outputTableName; CREATE INDEX IF NOT EXISTS 
idx_$roadDistTot ON $roadDistTot (id_rsu); CREATE INDEX IF NOT EXISTS idx_$roadDensTot ON $roadDensTot (id_rsu); CREATE TABLE $outputTableName AS SELECT a.*, @@ -1272,7 +1272,7 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St // The name of the outputTableName is constructed def outputTableName = prefix prefixName, "rsu_" + BASE_NAME - datasource.execute( """DROP TABLE IF EXISTS $extRsuTable; CREATE TABLE $extRsuTable AS SELECT + datasource.execute("""DROP TABLE IF EXISTS $extRsuTable; CREATE TABLE $extRsuTable AS SELECT ST_BUFFER($GEOMETRIC_FIELD, $buffDist, 2) AS $GEOMETRIC_FIELD, $ID_FIELD_RSU FROM $rsu;""") @@ -1281,7 +1281,7 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St datasource.createIndex(extRsuTable, ID_FIELD_RSU) datasource.createSpatialIndex(building, GEOMETRIC_FIELD) - datasource.execute( """DROP TABLE IF EXISTS $inclBu; CREATE TABLE $inclBu AS SELECT + datasource.execute("""DROP TABLE IF EXISTS $inclBu; CREATE TABLE $inclBu AS SELECT COALESCE(SUM((1-a.$buContiguityColumn)*a.$buTotalFacadeLengthColumn*a.$HEIGHT_WALL), 0) AS FAC_AREA, b.$ID_FIELD_RSU FROM $building a, $extRsuTable b WHERE a.$GEOMETRIC_FIELD && b.$GEOMETRIC_FIELD and ST_COVERS(b.$GEOMETRIC_FIELD, a.$GEOMETRIC_FIELD) GROUP BY b.$ID_FIELD_RSU;""") @@ -1290,12 +1290,12 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St datasource.createIndex(inclBu, ID_FIELD_RSU) datasource.createIndex(rsu, ID_FIELD_RSU) - datasource.execute( """DROP TABLE IF EXISTS $fullInclBu; CREATE TABLE $fullInclBu AS SELECT + datasource.execute("""DROP TABLE IF EXISTS $fullInclBu; CREATE TABLE $fullInclBu AS SELECT COALESCE(a.FAC_AREA, 0) AS FAC_AREA, b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD, st_area(b.$GEOMETRIC_FIELD) as rsu_buff_area FROM $inclBu a RIGHT JOIN $extRsuTable b ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""") // The facade area of buildings being partially included in the RSU buffer is calculated - datasource.execute( """DROP TABLE IF EXISTS $notIncBu; CREATE TABLE $notIncBu AS SELECT + datasource.execute("""DROP TABLE IF EXISTS $notIncBu; CREATE TABLE $notIncBu AS SELECT COALESCE(SUM(ST_LENGTH(ST_INTERSECTION(ST_TOMULTILINE(a.$GEOMETRIC_FIELD), b.$GEOMETRIC_FIELD))*a.$HEIGHT_WALL), 0) AS FAC_AREA, b.$ID_FIELD_RSU, b.$GEOMETRIC_FIELD FROM $building a, $extRsuTable b @@ -1305,7 +1305,7 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St datasource.createIndex(notIncBu, ID_FIELD_RSU) datasource.createIndex(fullInclBu, ID_FIELD_RSU) - datasource.execute( """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT COALESCE((a.FAC_AREA + b.FAC_AREA) /(a.FAC_AREA + b.FAC_AREA + a.rsu_buff_area), a.FAC_AREA / (a.FAC_AREA + a.rsu_buff_area)) AS $BASE_NAME, @@ -1313,7 +1313,7 @@ String extendedFreeFacadeFraction(JdbcDataSource datasource, String building, St ON a.$ID_FIELD_RSU = b.$ID_FIELD_RSU;""") // Drop intermediate tables - datasource.execute( "DROP TABLE IF EXISTS $extRsuTable, $inclBu, $fullInclBu, $notIncBu;") + datasource.execute("DROP TABLE IF EXISTS $extRsuTable, $inclBu, $fullInclBu, $notIncBu;") return outputTableName } catch (SQLException e) { throw new SQLException("Cannot compute the extended free facade fractions at RSU scale", e) @@ -1383,7 +1383,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ def roadTable_zindex0_buffer = postfix 
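The final SELECT of extendedFreeFacadeFraction above computes the fraction as the facade area of buildings fully covered by the buffered RSU plus the facade area of buildings crossing its boundary, divided by those same facade areas plus the buffered RSU ground area, falling back to the first term alone when no building crosses the boundary. A quick numeric illustration of that formula (the values below are made up):

// Illustrative values only (square metres)
double facIncluded = 1200.0   // FAC_AREA of buildings fully covered by the buffered RSU
double facPartial = 300.0     // FAC_AREA of buildings only crossing the buffer boundary
double rsuBuffArea = 25000.0  // st_area of the buffered RSU geometry

double fraction = (facIncluded + facPartial) / (facIncluded + facPartial + rsuBuffArea)
assert Math.abs(fraction - 0.0566) < 1e-3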
"road_zindex0_buffer" def road_tmp = postfix "road_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; + datasource.execute("""DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; CREATE TABLE $roadTable_zindex0_buffer as SELECT ST_CollectionExtract(st_intersection(a.the_geom,b.the_geom),2) AS the_geom, a.WIDTH, b.${id_zone} FROM $road as a, $zone AS b WHERE a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) and a.ZINDEX=0 ; @@ -1402,7 +1402,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ //Separate rail features according the zindex def railTable_zindex0_buffer = postfix "rail_zindex0_buffer" def rail_tmp = postfix "rail_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $railTable_zindex0_buffer, $rail_tmp; + datasource.execute("""DROP TABLE IF EXISTS $railTable_zindex0_buffer, $rail_tmp; CREATE TABLE $railTable_zindex0_buffer as SELECT ST_CollectionExtract(st_intersection(a.the_geom,b.the_geom),3) AS the_geom, a.WIDTH, b.${id_zone} FROM $rail as a ,$zone AS b WHERE a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) and a.ZINDEX=0 ; @@ -1421,7 +1421,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ def low_vegetation_rsu_tmp = postfix "low_vegetation_rsu_zindex0" def low_vegetation_tmp = postfix "low_vegetation_zindex0" def high_vegetation_tmp = postfix "high_vegetation_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $low_vegetation_tmp, $low_vegetation_rsu_tmp; + datasource.execute("""DROP TABLE IF EXISTS $low_vegetation_tmp, $low_vegetation_rsu_tmp; CREATE TABLE $low_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='low'; @@ -1437,7 +1437,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ debug "Preparing table : $water" datasource.createSpatialIndex(water, "the_geom") def water_tmp = postfix "water_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $water_tmp; + datasource.execute("""DROP TABLE IF EXISTS $water_tmp; CREATE TABLE $water_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $water AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom)""") @@ -1448,7 +1448,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ debug "Preparing table : $impervious" datasource.createSpatialIndex(impervious, "the_geom") def impervious_tmp = postfix "impervious_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $impervious_tmp; + datasource.execute("""DROP TABLE IF EXISTS $impervious_tmp; CREATE TABLE $impervious_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone} FROM $impervious AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom)""") @@ -1459,7 +1459,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ debug "Preparing table : $building" datasource.createSpatialIndex(building, "the_geom") def building_tmp = postfix "building_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $building_tmp; + datasource.execute("""DROP TABLE IF EXISTS $building_tmp; CREATE TABLE $building_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS 
the_geom, b.${id_zone} FROM $building AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""") @@ -1473,7 +1473,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ return } def tmp_tables = postfix "tmp_tables_zindex0" - datasource.execute( """DROP TABLE if exists $tmp_tables; + datasource.execute("""DROP TABLE if exists $tmp_tables; CREATE TABLE $tmp_tables(the_geom GEOMETRY, ${id_zone} integer) AS ${tablesToMerge.values().join(' union ')}; """) @@ -1481,7 +1481,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ debug "Generating " + "minimum polygon areas" def tmp_point_polygonize = postfix "tmp_point_polygonize_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $tmp_point_polygonize; + datasource.execute("""DROP TABLE IF EXISTS $tmp_point_polygonize; CREATE INDEX ON $tmp_tables($id_zone); CREATE TABLE $tmp_point_polygonize as select EXPLOD_ID as ${ID_COLUMN_NAME}, st_pointonsurface(the_geom) as the_geom , st_area(the_geom) as area , ${id_zone} @@ -1493,7 +1493,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ datasource.createIndex(tmp_point_polygonize, id_zone) def final_polygonize = postfix "final_polygonize_zindex0" - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $final_polygonize; CREATE TABLE $final_polygonize as select a.AREA , a.the_geom as the_geom, a.${ID_COLUMN_NAME}, b.${id_zone} from $tmp_point_polygonize as a, $zone as b @@ -1511,48 +1511,48 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ if (entry.key.startsWith("high_vegetation")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 1 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("low_vegetation")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area,1 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom,b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("water")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 0 as high_vegetation, 1 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, + datasource.execute("""CREATE TABLE $tmptableName AS SELECT b.area,0 as low_vegetation, 0 as high_vegetation, 1 as water, 0 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from 
${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("road")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 1 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("rail")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 0 as building,1 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("impervious")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 1 as impervious, 0 as road, 0 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("building")) { datasource.createSpatialIndex(entry.key, "the_geom") datasource.createIndex(entry.key, id_zone) - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT b.area, 0 as low_vegetation, 0 as high_vegetation, 0 as water, 0 as impervious, 0 as road, 1 as building,0 as rail, b.${ID_COLUMN_NAME}, b.${id_zone} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, b.the_geom) AND a.${id_zone} =b.${id_zone}""") finalMerge.add("SELECT * FROM $tmptableName") @@ -1562,7 +1562,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ //Do not drop RSU table tablesToMerge.remove("$zone") def allInfoTableName = postfix "allInfoTableName" - datasource.execute( """DROP TABLE IF EXISTS $allInfoTableName, $tmp_point_polygonize, $final_polygonize, $tmp_tables, $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $allInfoTableName, $tmp_point_polygonize, $final_polygonize, $tmp_tables, $outputTableName; CREATE TABLE $allInfoTableName as ${finalMerge.join(' union all ')}; CREATE INDEX ON $allInfoTableName (${ID_COLUMN_NAME}); CREATE INDEX ON $allInfoTableName (${id_zone}); @@ -1572,7 +1572,7 @@ String smallestCommunGeometry(JdbcDataSource datasource, String zone, String id_ 
MAX(BUILDING) AS BUILDING, MAX(RAIL) AS RAIL, ${id_zone} FROM $allInfoTableName GROUP BY ${ID_COLUMN_NAME}, ${id_zone}; DROP TABLE IF EXISTS ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${tmpTablesToDrop.join(",")}""") } else { - datasource.execute( """DROP TABLE IF EXISTS $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName(AREA DOUBLE PRECISION, LOW_VEGETATION INTEGER, HIGH_VEGETATION INTEGER, @@ -1728,20 +1728,20 @@ String surfaceFractions(JdbcDataSource datasource, // Calculates the fraction of land without defined surface def allCols = datasource.getColumnNames(withoutUndefined) def allFractionCols = allCols.minus(id_rsu.toUpperCase()) - datasource.execute( """ DROP TABLE IF EXISTS $outputTableName; + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT *, 1-(${allFractionCols.join("+")}) AS UNDEFINED_FRACTION FROM $withoutUndefined""") // Drop intermediate tables - datasource.execute( "DROP TABLE IF EXISTS $withoutUndefined;") + datasource.execute("DROP TABLE IF EXISTS $withoutUndefined;") //Cache the table name to re-use it cacheTableName(BASE_TABLE_NAME, outputTableName) return outputTableName - }catch (SQLException e){ - throw new SQLException("Cannot compute surface fractions",e) - }finally { + } catch (SQLException e) { + throw new SQLException("Cannot compute surface fractions", e) + } finally { datasource.dropTable(withoutUndefined) } } @@ -1767,7 +1767,7 @@ String surfaceFractions(JdbcDataSource datasource, */ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTable, String buildingFractionTable, String facDensityColumn, String buFractionColumn, - String idRsu, String prefixName) throws Exception{ + String idRsu, String prefixName) throws Exception { try { def BASE_NAME = "building_surface_fraction" @@ -1779,7 +1779,7 @@ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTab // Sum free facade density and building fraction... datasource.createIndex(facadeDensityTable, idRsu) datasource.createIndex(buildingFractionTable, idRsu) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT a.$idRsu, @@ -1788,7 +1788,7 @@ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTab ON a.$idRsu = b.$idRsu""") return outputTableName - }catch (SQLException e){ + } catch (SQLException e) { throw new SQLException("Cannot compute building surface density at RSU scale", e) } } @@ -1815,7 +1815,7 @@ String buildingSurfaceDensity(JdbcDataSource datasource, String facadeDensityTab */ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, String building, String idRsu, List listLayersBottom = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], - boolean cutBuilding = true, String prefixName) throws Exception{ + boolean cutBuilding = true, String prefixName) throws Exception { def GEOMETRIC_COLUMN_RSU = "the_geom" def GEOMETRIC_COLUMN_BU = "the_geom" def ID_COLUMN_BU = "id_build" @@ -1840,8 +1840,8 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri if (cutBuilding) { buildInter = postfix "build_inter" // 1. 
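The pattern used by smallestCommunGeometry in the hunks above is: explode the merged line-work into minimal polygons, keep one point on surface per polygon, flag that point against each thematic layer as 0/1 columns, union the flag tables, then collapse them with MAX(...) GROUP BY polygon id so every minimal area ends up with a single row of presence flags. A reduced sketch of the flag-and-collapse step, assuming an H2GIS connection named h2GIS and two hypothetical tables min_polys(id, the_geom) and water_layer(the_geom):

h2GIS.execute("""
    DROP TABLE IF EXISTS water_flag, min_poly_flags;
    -- 1/0 flag: does the representative point of the minimal polygon fall in a water geometry?
    CREATE TABLE water_flag AS
        SELECT b.id, 1 AS water
        FROM water_layer a, min_polys b
        WHERE a.the_geom && b.the_geom
          AND ST_INTERSECTS(a.the_geom, ST_POINTONSURFACE(b.the_geom));
    -- Collapse to one row per minimal polygon, missing flags becoming 0
    CREATE TABLE min_poly_flags AS
        SELECT a.id, MAX(COALESCE(b.water, 0)) AS water
        FROM min_polys a LEFT JOIN water_flag b ON a.id = b.id
        GROUP BY a.id;
""")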
Create the intersection between buildings and RSU polygons - datasource.createIndex(building,ID_COLUMN_BU) - datasource.createIndex(rsu,idRsu) + datasource.createIndex(building, ID_COLUMN_BU) + datasource.createIndex(rsu, idRsu) datasource.execute(""" DROP TABLE IF EXISTS $buildInter; CREATE TABLE $buildInter @@ -1884,8 +1884,8 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri WHERE b.$BUILD_HEIGHT >= $layer_bottom AND b.$BUILD_HEIGHT < $layer_top GROUP BY b.$idRsu""") // Fill missing values with 0 - datasource.createIndex(bufferTable,idRsu) - datasource.execute( """ + datasource.createIndex(bufferTable, idRsu) + datasource.execute(""" DROP TABLE IF EXISTS ${tab_H[i - 1]}; CREATE TABLE ${tab_H[i - 1]} AS SELECT a.$idRsu, @@ -1900,7 +1900,7 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri def layer_bottom = listLayersBottom[listLayersBottom.size() - 1] def indicNameH = getDistribIndicName(BASE_NAME, 'H', layer_bottom, null).toString() tab_H[listLayersBottom.size() - 1] = "${buildFracH}_$layer_bottom" - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT a.$idRsu, @@ -1913,8 +1913,8 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri WHERE b.$BUILD_HEIGHT >= $layer_bottom GROUP BY b.$idRsu""") // Fill missing values with 0 - datasource.createIndex(bufferTable,idRsu) - datasource.execute( """ + datasource.createIndex(bufferTable, idRsu) + datasource.execute(""" DROP TABLE IF EXISTS ${tab_H[listLayersBottom.size() - 1]}; CREATE TABLE ${tab_H[listLayersBottom.size() - 1]} AS SELECT a.$idRsu, @@ -1931,7 +1931,7 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri return } - datasource.execute( """DROP TABLE IF EXISTS $buildInter, $rsuBuildingArea, $bufferTable, + datasource.execute("""DROP TABLE IF EXISTS $buildInter, $rsuBuildingArea, $bufferTable, ${tab_H.values().join(",")}""") return outputTableName @@ -1966,7 +1966,7 @@ String roofFractionDistributionExact(JdbcDataSource datasource, String rsu, Stri */ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, String rsu, String idRsu, List listLayersBottom = [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], - int numberOfDirection = 12, boolean distributionAsIndex = true, String prefixName) throws Exception{ + int numberOfDirection = 12, boolean distributionAsIndex = true, String prefixName) throws Exception { def GEOMETRIC_FIELD_RSU = "the_geom" def GEOMETRIC_FIELD_BU = "the_geom" def ID_FIELD_BU = "id_build" @@ -1990,9 +1990,9 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, def snap_tolerance = 0.01 // 1. 
Convert the building polygons into lines and create the intersection with RSU polygons - datasource.createIndex(building,idRsu) - datasource.createIndex(rsu,idRsu) - datasource.execute( """ + datasource.createIndex(building, idRsu) + datasource.createIndex(rsu, idRsu) + datasource.execute(""" DROP TABLE IF EXISTS $buildLine; CREATE TABLE $buildLine AS SELECT a.$ID_FIELD_BU, a.$idRsu, @@ -2005,7 +2005,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, datasource.createSpatialIndex(buildLine, GEOMETRIC_FIELD_BU) datasource.createIndex(buildLine, idRsu) datasource.createIndex(buildLine, ID_FIELD_BU) - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $allLinesRsu; CREATE TABLE $allLinesRsu AS SELECT -ST_LENGTH($GEOMETRIC_FIELD_BU) AS LENGTH, @@ -2044,7 +2044,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, def angleRangeDeg = 360 / numberOfDirection def tab_H = [:] def indicToJoin = [:] - datasource.createIndex(rsu,idRsu) + datasource.createIndex(rsu, idRsu) for (i in 1..(listLayersBottom.size() - 1)) { def layer_top = listLayersBottom[i] def layer_bottom = listLayersBottom[i - 1] @@ -2093,7 +2093,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, } } // Calculates projected surfaces for buildings and shared facades - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT $idRsu, @@ -2101,8 +2101,8 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, FROM $allLinesRsu WHERE $HEIGHT_WALL > $layer_bottom""") // Fill missing values with 0 - datasource.createIndex(bufferTable,idRsu) - datasource.execute( """ + datasource.createIndex(bufferTable, idRsu) + datasource.execute(""" DROP TABLE IF EXISTS ${tab_H[i - 1]}; CREATE TABLE ${tab_H[i - 1]} AS SELECT a.$idRsu, @@ -2142,7 +2142,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, dirQueryDiv[k] = """COALESCE(SUM(b.$indicName)/ST_AREA(a.$GEOMETRIC_FIELD_RSU), 0) AS $indicName""" } // Calculates projected surfaces for buildings and shared facades - datasource.execute( """ + datasource.execute(""" DROP TABLE IF EXISTS $bufferTable; CREATE TABLE $bufferTable AS SELECT $idRsu, @@ -2150,8 +2150,8 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, FROM $allLinesRsu WHERE $HEIGHT_WALL > $layer_bottom""") // Fill missing values with 0 - datasource.createIndex(bufferTable,idRsu) - datasource.execute( """ + datasource.createIndex(bufferTable, idRsu) + datasource.execute(""" DROP TABLE IF EXISTS ${tab_H[listLayersBottom.size() - 1]}; CREATE TABLE ${tab_H[listLayersBottom.size() - 1]} AS SELECT a.$idRsu, @@ -2170,7 +2170,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, } // The temporary tables are deleted - datasource.execute( """DROP TABLE IF EXISTS $buildLine, $allLinesRsu, + datasource.execute("""DROP TABLE IF EXISTS $buildLine, $allLinesRsu, $bufferTable, ${tab_H.values().join(",")}""") } @@ -2187,7 +2187,7 @@ String frontalAreaIndexDistribution(JdbcDataSource datasource, String building, * * @author Erwan Bocher, CNRS */ -String rsuPopulation(JdbcDataSource datasource, String rsu, String population, List populationColumns = []) throws Exception{ +String rsuPopulation(JdbcDataSource datasource, String rsu, String population, List populationColumns = []) throws Exception { def BASE_NAME = "rsu_with_population" def ID_RSU = "id_rsu" def 
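The repeated "Fill missing values with 0" steps in these hunks join each per-height-band result back onto the full RSU list, so that RSUs with no building in the band still get an explicit 0 instead of disappearing from the output. The statements are truncated in this diff, but the usual H2GIS idiom they rely on looks like the following generic sketch (not the exact query), assuming tables rsu(id_rsu) and band_frac(id_rsu, frac):

h2GIS.execute("""
    DROP TABLE IF EXISTS band_frac_filled;
    CREATE TABLE band_frac_filled AS
        SELECT a.id_rsu, COALESCE(b.frac, 0) AS frac
        FROM rsu a
        LEFT JOIN band_frac b ON a.id_rsu = b.id_rsu;
""")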
ID_POP = "id_pop" @@ -2279,7 +2279,7 @@ String rsuPopulation(JdbcDataSource datasource, String rsu, String population, L */ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, String building, String road, String water, String vegetation, - String impervious, List priorities = ["building", "road", "water", "high_vegetation", "low_vegetation", "impervious"]) throws Exception{ + String impervious, List priorities = ["building", "road", "water", "high_vegetation", "low_vegetation", "impervious"]) throws Exception { if (!id_zone) { error "The id_zone identifier cannot be null or empty" @@ -2311,7 +2311,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, //Separate road features according the zindex def roadTable_zindex0_buffer = postfix "road_zindex0_buffer" def road_tmp = postfix "road_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; + datasource.execute("""DROP TABLE IF EXISTS $roadTable_zindex0_buffer, $road_tmp; CREATE TABLE $roadTable_zindex0_buffer as SELECT st_buffer(the_geom, WIDTH::DOUBLE PRECISION/2, 2) AS the_geom, surface as type FROM $road where ZINDEX=0 ; @@ -2329,7 +2329,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, def low_vegetation_tmp = postfix "low_vegetation_zindex0" def high_vegetation_tmp = postfix "high_vegetation_zindex0" if (priorities.contains("low_vegetation")) { - datasource.execute( """DROP TABLE IF EXISTS $low_vegetation_tmp; + datasource.execute("""DROP TABLE IF EXISTS $low_vegetation_tmp; CREATE TABLE $low_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='low'; """) @@ -2337,7 +2337,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, } if (priorities.contains("high_vegetation")) { - datasource.execute( """DROP TABLE IF EXISTS $high_vegetation_tmp; + datasource.execute("""DROP TABLE IF EXISTS $high_vegetation_tmp; CREATE TABLE $high_vegetation_tmp as select ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $vegetation AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.height_class='high'; @@ -2350,7 +2350,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, debug "Preparing table : $water" datasource.createSpatialIndex(water, "the_geom") def water_tmp = postfix "water_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $water_tmp; + datasource.execute("""DROP TABLE IF EXISTS $water_tmp; CREATE TABLE $water_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, 'water' as type FROM $water AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom)""") @@ -2361,7 +2361,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, debug "Preparing table : $impervious" datasource.createSpatialIndex(impervious, "the_geom") def impervious_tmp = postfix "impervious_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $impervious_tmp; + datasource.execute("""DROP TABLE IF EXISTS $impervious_tmp; CREATE TABLE $impervious_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, 'impervious' as type FROM $impervious AS a, $zone AS b WHERE a.the_geom && 
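In groundLayer, ground-level road polygons are derived from the centreline network by buffering each surface-level segment (ZINDEX = 0) by half of its WIDTH attribute before intersecting with the zone. A standalone sketch of that preparation step, assuming an H2GIS connection named h2GIS and a hypothetical road(the_geom, width, zindex) table:

h2GIS.execute("""
    DROP TABLE IF EXISTS road_ground;
    -- Only surface-level segments; buffer by half the carriageway width (2 quadrant segments)
    CREATE TABLE road_ground AS
        SELECT ST_BUFFER(the_geom, width::DOUBLE PRECISION / 2, 2) AS the_geom
        FROM road
        WHERE zindex = 0;
""")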
b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom)""") @@ -2372,7 +2372,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, debug "Preparing table : $building" datasource.createSpatialIndex(building, "the_geom") def building_tmp = postfix "building_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $building_tmp; + datasource.execute("""DROP TABLE IF EXISTS $building_tmp; CREATE TABLE $building_tmp AS SELECT ST_CollectionExtract(st_intersection(a.the_geom, b.the_geom),3) AS the_geom, b.${id_zone}, a.type FROM $building AS a, $zone AS b WHERE a.the_geom && b.the_geom AND ST_INTERSECTS(a.the_geom, b.the_geom) and a.zindex=0""") @@ -2386,7 +2386,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, return } def tmp_tables = postfix "tmp_tables_zindex0" - datasource.execute( """DROP TABLE if exists $tmp_tables; + datasource.execute("""DROP TABLE if exists $tmp_tables; CREATE TABLE $tmp_tables(the_geom GEOMETRY, ${id_zone} integer) AS ${tablesToMerge.values().join(' union ')}; """) @@ -2394,7 +2394,7 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, debug "Generating " + "minimum polygon areas" def final_polygonize = postfix "tmp_point_polygonize_zindex0" - datasource.execute( """DROP TABLE IF EXISTS $final_polygonize; + datasource.execute("""DROP TABLE IF EXISTS $final_polygonize; CREATE INDEX ON $tmp_tables($id_zone); CREATE TABLE $final_polygonize as select CAST((row_number() over()) as Integer) as ${ID_COLUMN_NAME}, the_geom , st_area(the_geom) as area @@ -2413,36 +2413,36 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, tmpTablesToDrop << tmptableName if (entry.key.startsWith("high_vegetation")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'high_vegetation' as layer, a.type, ${priorities.findIndexOf { it == "high_vegetation" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("low_vegetation")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'low_vegetation' as layer, a.type, ${priorities.findIndexOf { it == "low_vegetation" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom,st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("water")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'water' as layer, a.type,${priorities.findIndexOf { it == "water" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, + datasource.execute("""CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area,'water' as layer, a.type,${priorities.findIndexOf { it == "water" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") 
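groundLayer resolves overlaps just below by ranking, for every minimal polygon, all candidate layers by their position in the priorities list and then by area, keeping only the first one with a FIRST_VALUE window function. A reduced sketch of that idiom, assuming an H2GIS connection named h2GIS and a hypothetical candidates(id, layer, type, priority, area) table:

h2GIS.execute("""
    DROP TABLE IF EXISTS dominant_layer;
    -- One row per polygon id: the layer/type with the lowest priority index, ties broken by area
    CREATE TABLE dominant_layer AS
        SELECT DISTINCT id,
               FIRST_VALUE(type)  OVER (PARTITION BY id ORDER BY priority, area) AS type,
               FIRST_VALUE(layer) OVER (PARTITION BY id ORDER BY priority, area) AS layer
        FROM candidates;
""")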
finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("road")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'road' as layer, a.type,${priorities.findIndexOf { it == "road" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("impervious")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'impervious' as layer, 'impervious' as type,${priorities.findIndexOf { it == "impervious" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") } else if (entry.key.startsWith("building")) { datasource.createSpatialIndex(entry.key, "the_geom") - datasource.execute( """DROP TABLE IF EXISTS $tmptableName; + datasource.execute("""DROP TABLE IF EXISTS $tmptableName; CREATE TABLE $tmptableName AS SELECT st_area(a.the_geom) as area, 'building' as layer, a.type, ${priorities.findIndexOf { it == "building" }} as priority, b.${ID_COLUMN_NAME} from ${entry.key} as a, $final_polygonize as b where a.the_geom && b.the_geom and ST_intersects(a.the_geom, st_pointonsurface(b.the_geom))""") finalMerge.add("SELECT * FROM $tmptableName") @@ -2453,17 +2453,17 @@ String groundLayer(JdbcDataSource datasource, String zone, String id_zone, tablesToMerge.remove("$zone") def allInfoTableName = postfix "allInfoTableName" def groupedLandTypes = postfix("grouped_land_type") - datasource.execute( """DROP TABLE IF EXISTS $allInfoTableName,$groupedLandTypes , $tmp_tables, $outputTableName; + datasource.execute("""DROP TABLE IF EXISTS $allInfoTableName,$groupedLandTypes , $tmp_tables, $outputTableName; CREATE TABLE $allInfoTableName as ${finalMerge.join(' union all ')};""") - datasource.execute( """ + datasource.execute(""" CREATE INDEX ON $allInfoTableName (${ID_COLUMN_NAME}); CREATE TABLE $groupedLandTypes as select distinct ${ID_COLUMN_NAME}, first_value(type) over(partition by ${ID_COLUMN_NAME} order by priority, area) as type, first_value(layer) over(partition by ${ID_COLUMN_NAME} order by priority, area) as layer FROM $allInfoTableName; """) - datasource.execute( """CREATE INDEX ON $groupedLandTypes ($ID_COLUMN_NAME); + datasource.execute("""CREATE INDEX ON $groupedLandTypes ($ID_COLUMN_NAME); CREATE TABLE $outputTableName as SELECT a.$ID_COLUMN_NAME, a.the_geom, b.* EXCEPT($ID_COLUMN_NAME) FROM $final_polygonize as a left join $groupedLandTypes as b on a.$ID_COLUMN_NAME= b.$ID_COLUMN_NAME;""") - datasource.execute( """DROP TABLE IF EXISTS $final_polygonize, ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${groupedLandTypes}, ${tmpTablesToDrop.join(",")}""") + datasource.execute("""DROP TABLE IF EXISTS $final_polygonize, ${tablesToMerge.keySet().join(' , ')}, ${allInfoTableName}, ${groupedLandTypes}, ${tmpTablesToDrop.join(",")}""") } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy 
b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy index 252dc28995..62fcb6b422 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnits.groovy @@ -637,7 +637,7 @@ String createGrid(JdbcDataSource datasource, Geometry geometry, double deltaX, * @author Erwan Bocher (CNRS) */ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, - float distance = 100) throws Exception { + float distance = 50) throws Exception { //We must compute the grid if (!grid_indicators) { throw new IllegalArgumentException("No grid_indicators table to compute the sprawl areas layer") @@ -649,17 +649,14 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, throw new IllegalArgumentException("No grid cells to compute the sprawl areas layer") } def gridCols = datasource.getColumnNames(grid_indicators) - def lcz_columns_urban = ["LCZ_PRIMARY", "LCZ_WARM"] - def lcz_columns = gridCols.intersect(lcz_columns_urban) - if (lcz_columns.size() > 0) { + if (gridCols.contains("LCZ_PRIMARY")) { def outputTableName = postfix("sprawl_areas") if (distance == 0) { datasource.execute("""DROP TABLE IF EXISTS $outputTableName; create table $outputTableName as select CAST((row_number() over()) as Integer) as id, st_removeholes(the_geom) as the_geom from ST_EXPLODE('( select st_union(st_accum(the_geom)) as the_geom from - $grid_indicators where lcz_warm>=2 - and LCZ_PRIMARY NOT IN (101, 102,103,104,106, 107))')""".toString()) + $grid_indicators where LCZ_PRIMARY NOT IN (101, 102,103,104,106, 107))') WHERE the_geom is not null or st_isempty(the_geom) = false""".toString()) return outputTableName } else { def tmp_sprawl = postfix("sprawl_tmp") @@ -668,9 +665,9 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, create table $tmp_sprawl as select CAST((row_number() over()) as Integer) as id, st_removeholes(the_geom) as the_geom from ST_EXPLODE('( select st_union(st_accum(the_geom)) as the_geom from - $grid_indicators where lcz_warm>=2 - and LCZ_PRIMARY NOT IN (101, 102,103,104,106, 107))') - where st_isempty(st_buffer(the_geom, -100,2)) =false""".toString()) + $grid_indicators where + LCZ_PRIMARY NOT IN (101, 102,103,104,106, 107))') + where st_area(st_buffer(the_geom, -$distance,2)) > 1""".toString()) datasource.execute("""CREATE TABLE $outputTableName as SELECT CAST((row_number() over()) as Integer) as id, the_geom @@ -680,7 +677,8 @@ String computeSprawlAreas(JdbcDataSource datasource, String grid_indicators, st_removeholes(st_buffer(st_union(st_accum(st_buffer(st_removeholes(the_geom),$distance, ''quad_segs=2 endcap=flat join=mitre mitre_limit=2''))), -$distance, ''quad_segs=2 endcap=flat join=mitre mitre_limit=2'')) as the_geom - FROM ST_EXPLODE(''$tmp_sprawl'') )') ; + FROM ST_EXPLODE(''$tmp_sprawl'') )') where (the_geom is not null or + st_isempty(the_geom) = false) and st_area(st_buffer(the_geom, -$distance,2)) >${distance * distance}; DROP TABLE IF EXISTS $tmp_sprawl; """.toString()) return outputTableName @@ -705,36 +703,62 @@ String inversePolygonsLayer(JdbcDataSource datasource, String input_polygons) th FROM ST_EXPLODE('( select st_difference(a.the_geom, st_accum(b.the_geom)) as the_geom from $tmp_extent as a, $input_polygons - as b where st_dimension(b.the_geom)=2)'); + as b where st_dimension(b.the_geom)=2)') where st_isempty(the_geom) = false or the_geom is not null; DROP TABLE IF 
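computeSprawlAreas now keeps a patch only if it survives an inward buffer: st_area(st_buffer(the_geom, -distance, 2)) has to stay above a threshold, which discards slivers narrower than roughly twice the distance, and nearby patches are then merged by the outward-then-inward mitred buffer shown above. A reduced sketch of the sliver test on its own, assuming an H2GIS connection named h2GIS, a hypothetical patches(the_geom) table and a 50 m distance:

def distance = 50
h2GIS.execute("""
    DROP TABLE IF EXISTS wide_patches;
    -- Keep only patches that still have area after shrinking by the distance:
    -- anything narrower than about 2 * distance erodes away and is dropped
    CREATE TABLE wide_patches AS
        SELECT the_geom
        FROM patches
        WHERE ST_AREA(ST_BUFFER(the_geom, -$distance, 2)) > 1;
""".toString())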
EXISTS $tmp_extent; """.toString()) return outputTableName } +/** + * This method is used to compute the difference between an input layer of polygons and the bounding box + * of the input layer. + * @param input_polygons a layer that contains polygons + * @param polygons_to_remove the polygons to remove in the input_polygons table + * @author Erwan Bocher (CNRS) + */ +String inversePolygonsLayer(JdbcDataSource datasource, String input_polygons, String polygons_to_remove) throws Exception { + def outputTableName = postfix("inverse_geometries") + datasource.createSpatialIndex(input_polygons) + datasource.createSpatialIndex(polygons_to_remove) + datasource.execute("""DROP TABLE IF EXISTS $outputTableName; + CREATE TABLE $outputTableName as + SELECT CAST((row_number() over()) as Integer) as id, the_geom + FROM + ST_EXPLODE('( + select st_difference(a.the_geom, st_accum(b.the_geom)) as the_geom from $input_polygons as a, $polygons_to_remove + as b where a.the_geom && b.the_geom and st_intersects(a.the_geom, st_pointonsurface(b.the_geom)) group by a.the_geom)') + where the_geom is not null or st_isempty(the_geom) = false; + """.toString()) + return outputTableName +} + /** - * This methods allows to extract the cool area geometries inside polygons - * A cool area is continous geometry defined by vegetation and water fractions. + * This methods allows to extract the cool area geometries inside a set of geometries, + * defined in a polygon mask table + * A cool area is a continous geometry defined by the LCZ 101, 102, 103,104, 106 and 107. + * * * @author Erwan Bocher (CNRS) */ -String extractCoolAreas(JdbcDataSource datasource, String grid_indicators, - float distance = 100) throws Exception { - if (!grid_indicators) { +String extractCoolAreas(JdbcDataSource datasource, String grid_indicators,String polygons_mask, + float distance = 50) throws Exception { + if (!grid_indicators || !polygons_mask) { throw new IllegalArgumentException("No grid_indicators table to extract the cool areas layer") } def gridCols = datasource.getColumnNames(grid_indicators) - def lcz_columns_urban = ["LCZ_PRIMARY"] - def lcz_columns = gridCols.intersect(lcz_columns_urban) - - if (lcz_columns.size() > 0) { + if (gridCols.contains("LCZ_PRIMARY")) { + datasource.createSpatialIndex(polygons_mask) + datasource.createSpatialIndex(grid_indicators) def outputTableName = postfix("cool_areas") datasource.execute(""" DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName as SELECT CAST((row_number() over()) as Integer) as id, the_geom FROM ST_EXPLODE('( - SELECT ST_UNION(ST_ACCUM(a.THE_GEOM)) AS THE_GEOM FROM $grid_indicators as a + SELECT ST_UNION(ST_ACCUM(a.THE_GEOM)) AS THE_GEOM FROM $grid_indicators as a, $polygons_mask as b where - a.LCZ_PRIMARY in (101, 102, 103,104, 106, 107))') ${distance > 0 ? " where st_isempty(st_buffer(the_geom, -$distance,2)) =false" : ""}; + a.LCZ_PRIMARY in (101, 102, 103,104, 106, 107) and + a.the_geom && b.the_geom and st_intersects(st_pointonsurface(a.the_geom), b.the_geom))') ${distance > 0 ? 
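Taken together, the reworked helpers are meant to be chained: compute the urban sprawl envelope from the LCZ grid, then extract the vegetation and water cool patches (LCZ 101, 102, 103, 104, 106 and 107) that sit inside it. A usage sketch, under the assumption that these SpatialUnits functions are reachable through the Geoindicators facade like the other modules in this patch and that a grid_indicators table carrying LCZ_PRIMARY already exists in the database:

import org.orbisgis.data.H2GIS
import org.orbisgis.geoclimate.Geoindicators

// Assumed setup: an H2GIS database containing a grid_indicators table with LCZ_PRIMARY
def h2GIS = H2GIS.open("/tmp/geoclimate_sprawl_demo")

String sprawl = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, "grid_indicators", 50f)
if (sprawl) {
    // Cool areas are searched only inside the sprawl mask, with the same 50 m filtering distance
    String coolAreas = Geoindicators.SpatialUnits.extractCoolAreas(h2GIS, "grid_indicators", sprawl, 50f)
    println "urban sprawl table: $sprawl, cool areas table: $coolAreas"
}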
+ " where (the_geom is not null or st_isempty(the_geom) = false) and st_area(st_buffer(the_geom, -$distance,2)) >${distance * distance}" : ""}; """.toString()) return outputTableName } diff --git a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy index 202f3e7e14..fe9fd5975c 100644 --- a/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy +++ b/geoindicators/src/main/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassification.groovy @@ -88,7 +88,7 @@ String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, Strin Map mapOfWeights = ["sky_view_factor" : 1, "aspect_ratio": 1, "building_surface_fraction": 1, "impervious_surface_fraction" : 1, "pervious_surface_fraction": 1, "height_of_roughness_elements": 1, "terrain_roughness_length": 1], - String prefixName) throws Exception{ + String prefixName) throws Exception { def OPS = ["AVG", "MEDIAN"] def ID_FIELD_RSU = "id_rsu" def CENTER_NAME = "center" @@ -101,7 +101,7 @@ String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, Strin // List of possible operations if (OPS.contains(normalisationType)) { - def tablesToDrop =[] + def tablesToDrop = [] def centerValue = [:] def variabilityValue = [:] def queryRangeNorm = "" @@ -172,9 +172,9 @@ String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, Strin "LCZ9": 9] // I. Rural LCZ types are classified according to a "manual" decision tree - datasource.createIndex(rsuAllIndicators,"BUILDING_FRACTION_LCZ") - datasource.createIndex(rsuAllIndicators,"ASPECT_RATIO") - datasource.createIndex(rsuAllIndicators,ID_FIELD_RSU) + datasource.createIndex(rsuAllIndicators, "BUILDING_FRACTION_LCZ") + datasource.createIndex(rsuAllIndicators, "ASPECT_RATIO") + datasource.createIndex(rsuAllIndicators, ID_FIELD_RSU) datasource """ DROP TABLE IF EXISTS $ruralLCZ; @@ -203,11 +203,11 @@ String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, Strin WHERE (BUILDING_FRACTION_LCZ < 0.1 OR BUILDING_FRACTION_LCZ IS NULL) AND ASPECT_RATIO < 0.1;""".toString() - datasource.createIndex(ruralLCZ,ID_FIELD_RSU) - datasource.createIndex(ruralLCZ,"IMPERVIOUS_FRACTION_LCZ") - datasource.createIndex(ruralLCZ,"PERVIOUS_FRACTION_LCZ") + datasource.createIndex(ruralLCZ, ID_FIELD_RSU) + datasource.createIndex(ruralLCZ, "IMPERVIOUS_FRACTION_LCZ") + datasource.createIndex(ruralLCZ, "PERVIOUS_FRACTION_LCZ") datasource.createIndex(ruralLCZ, "HIGH_ALL_VEGETATION") - datasource.createIndex(ruralLCZ,"ALL_VEGETATION") + datasource.createIndex(ruralLCZ, "ALL_VEGETATION") datasource """DROP TABLE IF EXISTS $classifiedRuralLCZ; CREATE TABLE $classifiedRuralLCZ AS SELECT $ID_FIELD_RSU, @@ -228,7 +228,7 @@ String identifyLczType(JdbcDataSource datasource, String rsuLczIndicators, Strin null AS LCZ_EQUALITY_VALUE FROM $ruralLCZ""".toString() - tablesToDrop< 1) { - tablesToDrop< diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy index f0b37282a8..89de37f827 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/BlockIndicatorsTests.groovy @@ -106,6 +106,6 @@ class BlockIndicatorsTests { @Test void 
holeAreaDensityExceptionTest() { - assertThrows(Exception.class, ()->Geoindicators.BlockIndicators.holeAreaDensity(h2GIS, "myblock_table", "test")) + assertThrows(Exception.class, () -> Geoindicators.BlockIndicators.holeAreaDensity(h2GIS, "myblock_table", "test")) } } \ No newline at end of file diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy index 8a6f7a175b..2e031c3fc0 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GenericIndicatorsTests.groovy @@ -364,7 +364,7 @@ class GenericIndicatorsTests { FROM building_test a, rsu_test b WHERE id_build < 4;""" // Test 1 - assertThrows(Exception.class, ()-> Geoindicators.GenericIndicators.typeProportion(h2GIS, + assertThrows(Exception.class, () -> Geoindicators.GenericIndicators.typeProportion(h2GIS, "tempo_build", "id_rsu", "type", "rsu_test", null, null, "")) @@ -436,7 +436,7 @@ class GenericIndicatorsTests { } - @Test + @Test void gatherScalesTest3() { h2GIS """ DROP TABLE IF EXISTS tempo_block, tempo_build, tempo_rsu; @@ -481,7 +481,7 @@ class GenericIndicatorsTests { assertEquals(4, h2GIS.getSpatialTable(gridProcess).getRowCount()) def upperScaleAreaStatistics = Geoindicators.GenericIndicators.upperScaleAreaStatistics(h2GIS, - gridProcess, "id_grid", indicatorTableName,indicatorName, indicatorName, "agg") + gridProcess, "id_grid", indicatorTableName, indicatorName, indicatorName, "agg") assertNotNull(upperScaleAreaStatistics) diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicatorsTests.groovy index 52a6484765..6f0192472c 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/GridIndicatorsTests.groovy @@ -6,7 +6,6 @@ import org.junit.jupiter.api.Disabled import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.TempDir import org.orbisgis.data.H2GIS -import org.orbisgis.data.POSTGIS import org.orbisgis.geoclimate.Geoindicators import static org.junit.jupiter.api.Assertions.assertEquals @@ -55,17 +54,17 @@ class GridIndicatorsTests { UPDATE grid SET lcz_primary= 2 WHERE id_row = 7 AND id_col = 8; UPDATE grid SET lcz_primary= 2 WHERE id_row = 7 AND id_col = 9; """.toString()) - String grid_scale = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS, "grid","id_grid", 2) + String grid_scale = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS, "grid", "id_grid", 2) def values = h2GIS.firstRow("SELECT * EXCEPT(THE_GEOM) FROM $grid_scale WHERE id_row = 2 AND id_col = 2 ".toString()) - def expectedValues = [ID_COL:2, ID_ROW:2, ID_GRID:10, LCZ_PRIMARY:2, LCZ_PRIMARY_N:104, LCZ_PRIMARY_NE:104, LCZ_PRIMARY_E:104, LCZ_PRIMARY_SE:104, LCZ_PRIMARY_S:104, LCZ_PRIMARY_SW:104, LCZ_PRIMARY_W:104, LCZ_PRIMARY_NW:104, LCZ_WARM:1, ID_ROW_LOD_1:1, ID_COL_LOD_1:0, LCZ_WARM_LOD_1:1, LCZ_COOL_LOD_1:8, LCZ_PRIMARY_LOD_1:104, LCZ_PRIMARY_N_LOD_1:104, LCZ_PRIMARY_NE_LOD_1:2, LCZ_PRIMARY_E_LOD_1:104, LCZ_PRIMARY_SE_LOD_1:null, LCZ_PRIMARY_S_LOD_1:null, LCZ_PRIMARY_SW_LOD_1:null, LCZ_PRIMARY_W_LOD_1:null, LCZ_PRIMARY_NW_LOD_1:null, LCZ_WARM_N_LOD_1:null, LCZ_WARM_NE_LOD_1:4, LCZ_WARM_E_LOD_1:null, 
LCZ_WARM_SE_LOD_1:null, LCZ_WARM_S_LOD_1:null, LCZ_WARM_SW_LOD_1:null, LCZ_WARM_W_LOD_1:null, LCZ_WARM_NW_LOD_1:null, ID_ROW_LOD_2:1, ID_COL_LOD_2:1, LCZ_WARM_LOD_2:10, LCZ_COOL_LOD_2:71, LCZ_PRIMARY_LOD_2:104, LCZ_PRIMARY_N_LOD_2:null, LCZ_PRIMARY_NE_LOD_2:null, LCZ_PRIMARY_E_LOD_2:null, LCZ_PRIMARY_SE_LOD_2:null, LCZ_PRIMARY_S_LOD_2:null, LCZ_PRIMARY_SW_LOD_2:null, LCZ_PRIMARY_W_LOD_2:null, LCZ_PRIMARY_NW_LOD_2:null, LCZ_WARM_N_LOD_2:null, LCZ_WARM_NE_LOD_2:null, LCZ_WARM_E_LOD_2:null, LCZ_WARM_SE_LOD_2:null, LCZ_WARM_S_LOD_2:null, LCZ_WARM_SW_LOD_2:null, LCZ_WARM_W_LOD_2:null, LCZ_WARM_NW_LOD_2:null] + def expectedValues = [ID_COL: 2, ID_ROW: 2, ID_GRID: 10, LCZ_PRIMARY: 2, LCZ_PRIMARY_N: 104, LCZ_PRIMARY_NE: 104, LCZ_PRIMARY_E: 104, LCZ_PRIMARY_SE: 104, LCZ_PRIMARY_S: 104, LCZ_PRIMARY_SW: 104, LCZ_PRIMARY_W: 104, LCZ_PRIMARY_NW: 104, LCZ_WARM: 1, ID_ROW_LOD_1: 1, ID_COL_LOD_1: 0, LCZ_WARM_LOD_1: 1, LCZ_COOL_LOD_1: 8, LCZ_PRIMARY_LOD_1: 104, LCZ_PRIMARY_N_LOD_1: 104, LCZ_PRIMARY_NE_LOD_1: 2, LCZ_PRIMARY_E_LOD_1: 104, LCZ_PRIMARY_SE_LOD_1: null, LCZ_PRIMARY_S_LOD_1: null, LCZ_PRIMARY_SW_LOD_1: null, LCZ_PRIMARY_W_LOD_1: null, LCZ_PRIMARY_NW_LOD_1: null, LCZ_WARM_N_LOD_1: null, LCZ_WARM_NE_LOD_1: 4, LCZ_WARM_E_LOD_1: null, LCZ_WARM_SE_LOD_1: null, LCZ_WARM_S_LOD_1: null, LCZ_WARM_SW_LOD_1: null, LCZ_WARM_W_LOD_1: null, LCZ_WARM_NW_LOD_1: null, ID_ROW_LOD_2: 1, ID_COL_LOD_2: 1, LCZ_WARM_LOD_2: 10, LCZ_COOL_LOD_2: 71, LCZ_PRIMARY_LOD_2: 104, LCZ_PRIMARY_N_LOD_2: null, LCZ_PRIMARY_NE_LOD_2: null, LCZ_PRIMARY_E_LOD_2: null, LCZ_PRIMARY_SE_LOD_2: null, LCZ_PRIMARY_S_LOD_2: null, LCZ_PRIMARY_SW_LOD_2: null, LCZ_PRIMARY_W_LOD_2: null, LCZ_PRIMARY_NW_LOD_2: null, LCZ_WARM_N_LOD_2: null, LCZ_WARM_NE_LOD_2: null, LCZ_WARM_E_LOD_2: null, LCZ_WARM_SE_LOD_2: null, LCZ_WARM_S_LOD_2: null, LCZ_WARM_SW_LOD_2: null, LCZ_WARM_W_LOD_2: null, LCZ_WARM_NW_LOD_2: null] assertTrue(values == expectedValues) values = h2GIS.firstRow("SELECT * EXCEPT(THE_GEOM) FROM $grid_scale WHERE id_row = 5 AND id_col = 5 ".toString()) - expectedValues = [ID_COL:5, ID_ROW:5, ID_GRID:40, LCZ_PRIMARY:102, LCZ_PRIMARY_N:2, LCZ_PRIMARY_NE:2, LCZ_PRIMARY_E:2, LCZ_PRIMARY_SE:104, LCZ_PRIMARY_S:104, LCZ_PRIMARY_SW:104, LCZ_PRIMARY_W:104, LCZ_PRIMARY_NW:2, LCZ_WARM:4, ID_ROW_LOD_1:2, ID_COL_LOD_1:1, LCZ_WARM_LOD_1:4, LCZ_COOL_LOD_1:5, LCZ_PRIMARY_LOD_1:2, LCZ_PRIMARY_N_LOD_1:104, LCZ_PRIMARY_NE_LOD_1:2, LCZ_PRIMARY_E_LOD_1:104, LCZ_PRIMARY_SE_LOD_1:104, LCZ_PRIMARY_S_LOD_1:104, LCZ_PRIMARY_SW_LOD_1:104, LCZ_PRIMARY_W_LOD_1:104, LCZ_PRIMARY_NW_LOD_1:104, LCZ_WARM_N_LOD_1:null, LCZ_WARM_NE_LOD_1:5, LCZ_WARM_E_LOD_1:null, LCZ_WARM_SE_LOD_1:null, LCZ_WARM_S_LOD_1:null, LCZ_WARM_SW_LOD_1:1, LCZ_WARM_W_LOD_1:null, LCZ_WARM_NW_LOD_1:null, ID_ROW_LOD_2:1, ID_COL_LOD_2:1, LCZ_WARM_LOD_2:10, LCZ_COOL_LOD_2:71, LCZ_PRIMARY_LOD_2:104, LCZ_PRIMARY_N_LOD_2:null, LCZ_PRIMARY_NE_LOD_2:null, LCZ_PRIMARY_E_LOD_2:null, LCZ_PRIMARY_SE_LOD_2:null, LCZ_PRIMARY_S_LOD_2:null, LCZ_PRIMARY_SW_LOD_2:null, LCZ_PRIMARY_W_LOD_2:null, LCZ_PRIMARY_NW_LOD_2:null, LCZ_WARM_N_LOD_2:null, LCZ_WARM_NE_LOD_2:null, LCZ_WARM_E_LOD_2:null, LCZ_WARM_SE_LOD_2:null, LCZ_WARM_S_LOD_2:null, LCZ_WARM_SW_LOD_2:null, LCZ_WARM_W_LOD_2:null, LCZ_WARM_NW_LOD_2:null] + expectedValues = [ID_COL: 5, ID_ROW: 5, ID_GRID: 40, LCZ_PRIMARY: 102, LCZ_PRIMARY_N: 2, LCZ_PRIMARY_NE: 2, LCZ_PRIMARY_E: 2, LCZ_PRIMARY_SE: 104, LCZ_PRIMARY_S: 104, LCZ_PRIMARY_SW: 104, LCZ_PRIMARY_W: 104, LCZ_PRIMARY_NW: 2, LCZ_WARM: 4, ID_ROW_LOD_1: 2, ID_COL_LOD_1: 1, LCZ_WARM_LOD_1: 4, LCZ_COOL_LOD_1: 5, 
LCZ_PRIMARY_LOD_1: 2, LCZ_PRIMARY_N_LOD_1: 104, LCZ_PRIMARY_NE_LOD_1: 2, LCZ_PRIMARY_E_LOD_1: 104, LCZ_PRIMARY_SE_LOD_1: 104, LCZ_PRIMARY_S_LOD_1: 104, LCZ_PRIMARY_SW_LOD_1: 104, LCZ_PRIMARY_W_LOD_1: 104, LCZ_PRIMARY_NW_LOD_1: 104, LCZ_WARM_N_LOD_1: null, LCZ_WARM_NE_LOD_1: 5, LCZ_WARM_E_LOD_1: null, LCZ_WARM_SE_LOD_1: null, LCZ_WARM_S_LOD_1: null, LCZ_WARM_SW_LOD_1: 1, LCZ_WARM_W_LOD_1: null, LCZ_WARM_NW_LOD_1: null, ID_ROW_LOD_2: 1, ID_COL_LOD_2: 1, LCZ_WARM_LOD_2: 10, LCZ_COOL_LOD_2: 71, LCZ_PRIMARY_LOD_2: 104, LCZ_PRIMARY_N_LOD_2: null, LCZ_PRIMARY_NE_LOD_2: null, LCZ_PRIMARY_E_LOD_2: null, LCZ_PRIMARY_SE_LOD_2: null, LCZ_PRIMARY_S_LOD_2: null, LCZ_PRIMARY_SW_LOD_2: null, LCZ_PRIMARY_W_LOD_2: null, LCZ_PRIMARY_NW_LOD_2: null, LCZ_WARM_N_LOD_2: null, LCZ_WARM_NE_LOD_2: null, LCZ_WARM_E_LOD_2: null, LCZ_WARM_SE_LOD_2: null, LCZ_WARM_S_LOD_2: null, LCZ_WARM_SW_LOD_2: null, LCZ_WARM_W_LOD_2: null, LCZ_WARM_NW_LOD_2: null] assertTrue(values == expectedValues) @@ -77,8 +76,8 @@ class GridIndicatorsTests { //Todo a test that shows how to create the a geom layer for each lod void multiscaleLCZGridGeomTest() { String grid_indicators = h2GIS.load("/home/ebocher/Autres/data/geoclimate/uhi_lcz/Dijon/grid_indicators.geojson", true) - int nb_levels= 3 - String grid_scale = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS, grid_indicators,"id_grid", nb_levels) + int nb_levels = 3 + String grid_scale = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS, grid_indicators, "id_grid", nb_levels) for (int i in 1..nb_levels) { def grid_lod = "grid_lod_$i" h2GIS.execute(""" @@ -105,7 +104,7 @@ class GridIndicatorsTests { CREATE TABLE polygons AS SELECT 'POLYGON ((4 4, 6 4, 6 6, 4 6, 4 4))'::GEOMETRY AS THE_GEOM ; """.toString()) - String grid_distances = Geoindicators.GridIndicators.gridDistances(h2GIS, "polygons","grid", "id") + String grid_distances = Geoindicators.GridIndicators.gridDistances(h2GIS, "polygons", "grid", "id") assertEquals(4, h2GIS.firstRow("select count(*) as count from $grid_distances where distance =0.5".toString()).count) } @@ -119,7 +118,7 @@ class GridIndicatorsTests { ST_MakeGrid('POLYGON((0 0, 9 0, 9 9, 0 0))'::GEOMETRY, 1, 1); CREATE TABLE polygons AS SELECT 'POLYGON ((2 2, 6 2, 6 6, 2 6, 2 2), (3 5, 5 5, 5 3, 3 3, 3 5))'::GEOMETRY AS THE_GEOM ; """.toString()) - String grid_distances = Geoindicators.GridIndicators.gridDistances(h2GIS, "polygons","grid", "id") + String grid_distances = Geoindicators.GridIndicators.gridDistances(h2GIS, "polygons", "grid", "id") assertEquals(12, h2GIS.firstRow("select count(*) as count from $grid_distances where distance =0.5".toString()).count) } } diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicatorsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicatorsTests.groovy index 67cd1226cc..d4d3564909 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/NoiseIndicatorsTests.groovy @@ -24,7 +24,6 @@ import org.junit.jupiter.api.Test import org.junit.jupiter.api.io.TempDir import org.orbisgis.data.H2GIS import org.orbisgis.geoclimate.Geoindicators -import org.orbisgis.geoclimate.utils.LoggerUtils import static org.junit.jupiter.api.Assertions.assertNotNull import static org.junit.jupiter.api.Assertions.assertTrue diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy 
b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy index dfc8d1adfc..25b01cb47b 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/RsuIndicatorsTests.groovy @@ -142,7 +142,7 @@ class RsuIndicatorsTests { h2GIS "DROP TABLE IF EXISTS corr_tempo; CREATE TABLE corr_tempo AS SELECT a.*, b.the_geom, b.height_wall " + "FROM rsu_build_corr a, tempo_build b WHERE a.id_build = b.id_build" - def p = Geoindicators.RsuIndicators.groundSkyViewFactor(h2GIS, "rsu_test","id_rsu", "corr_tempo", + def p = Geoindicators.RsuIndicators.groundSkyViewFactor(h2GIS, "rsu_test", "id_rsu", "corr_tempo", 0.008, 100, 60, "test") assertNotNull(p) assertEquals 0.54, h2GIS.firstRow("SELECT * FROM test_rsu_ground_sky_view_factor " + @@ -183,7 +183,7 @@ class RsuIndicatorsTests { def listLayersBottom = [0, 10, 20, 30, 40, 50] def numberOfDirection = 4 def rangeDeg = 360 / numberOfDirection - def p = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(h2GIS, "tempo_build", "rsu_test","id_rsu", listLayersBottom, + def p = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(h2GIS, "tempo_build", "rsu_test", "id_rsu", listLayersBottom, numberOfDirection, "test") assertNotNull(p) def concat = "" @@ -217,7 +217,7 @@ class RsuIndicatorsTests { def listLayersBottom = [0, 10, 20, 30, 40, 50] def numberOfDirection = 4 def rangeDeg = 360 / numberOfDirection - def p = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(h2GIS, "tempo_build", "rsu_test", "id_rsu",listLayersBottom, + def p = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(h2GIS, "tempo_build", "rsu_test", "id_rsu", listLayersBottom, numberOfDirection, "test") assertNotNull(p) def concat = "" @@ -303,7 +303,7 @@ class RsuIndicatorsTests { def listLayersBottom = [0, 10, 20, 30, 40, 50] def numberOfDirection = 4 def pFacadeDistrib = Geoindicators.RsuIndicators.projectedFacadeAreaDistribution(h2GIS, "tempo_build", - "rsu_test","id_rsu", listLayersBottom, + "rsu_test", "id_rsu", listLayersBottom, numberOfDirection, "test") assertNotNull(pFacadeDistrib) def pGeomAvg = Geoindicators.GenericIndicators.unweightedOperationFromLowerScale(h2GIS, "tempo_build", @@ -320,7 +320,7 @@ class RsuIndicatorsTests { h2GIS "CREATE TABLE rsu_table AS SELECT a.*, b.geom_avg_height_roof, b.the_geom " + "FROM test_rsu_projected_facade_area_distribution a, test_unweighted_operation_from_lower_scale b " + "WHERE a.id_rsu = b.id_rsu" - def p = Geoindicators.RsuIndicators.effectiveTerrainRoughnessLength(h2GIS, "rsu_table","id_rsu", + def p = Geoindicators.RsuIndicators.effectiveTerrainRoughnessLength(h2GIS, "rsu_table", "id_rsu", "projected_facade_area_distribution", "geom_avg_height_roof", listLayersBottom, numberOfDirection, "test") assertNotNull(p) @@ -372,7 +372,7 @@ class RsuIndicatorsTests { h2GIS "DROP TABLE IF EXISTS rsu_tempo; CREATE TABLE rsu_tempo AS SELECT *, CASEWHEN(id_rsu = 1, 2.3," + "CASEWHEN(id_rsu = 2, 0.1, null)) AS effective_terrain_roughness_length FROM rsu_test" - def p = Geoindicators.RsuIndicators.effectiveTerrainRoughnessClass(h2GIS, "rsu_tempo","id_rsu", "effective_terrain_roughness_length", + def p = Geoindicators.RsuIndicators.effectiveTerrainRoughnessClass(h2GIS, "rsu_tempo", "id_rsu", "effective_terrain_roughness_length", "test") assertNotNull(p) def concat = "" @@ -413,15 +413,15 @@ class RsuIndicatorsTests { def outputTableGeoms = 
Geoindicators.SpatialUnits.prepareTSUData(h2GIS, 'zone_test', 'road_test', '', - 'veget_test', 'hydro_test', "","", - 10000, 2500,10000, "prepare_rsu") + 'veget_test', 'hydro_test', "", "", + 10000, 2500, 10000, "prepare_rsu") assertNotNull h2GIS.getTable(outputTableGeoms) def outputTable = Geoindicators.SpatialUnits.createTSU(h2GIS, outputTableGeoms, "", "rsu") def outputTableStats = Geoindicators.RsuIndicators.smallestCommunGeometry(h2GIS, - outputTable, "id_rsu", "building_test", "road_test", "hydro_test", "veget_test", "","", + outputTable, "id_rsu", "building_test", "road_test", "hydro_test", "veget_test", "", "", "test") assertNotNull(outputTableStats) @@ -497,7 +497,7 @@ class RsuIndicatorsTests { // Need to create the smallest geometries used as input of the surface fraction process def tempoTable = Geoindicators.RsuIndicators.smallestCommunGeometry(h2GIS, - "rsu_tempo", "id_rsu", "building_test", "", "hydro_test", "veget_test", "","", + "rsu_tempo", "id_rsu", "building_test", "", "hydro_test", "veget_test", "", "", "test") assertNotNull(tempoTable) @@ -567,7 +567,7 @@ class RsuIndicatorsTests { // Need to create the smallest geometries used as input of the surface fraction process def tempoTable = Geoindicators.RsuIndicators.smallestCommunGeometry(h2GIS, - "rsu_tempo", "id_rsu", null, "road_tempo", null, null, null,null, + "rsu_tempo", "id_rsu", null, "road_tempo", null, null, null, null, "test") assertNotNull(tempoTable) @@ -617,7 +617,7 @@ class RsuIndicatorsTests { "FROM rsu_test WHERE id_rsu = 4" // Need to create the smallest geometries used as input of the surface fraction process String tempoTable = Geoindicators.RsuIndicators.smallestCommunGeometry(h2GIS, - "rsu_tempo", "id_rsu", "building_test", null, "hydro_test", "veget_test", null,null, + "rsu_tempo", "id_rsu", "building_test", null, "hydro_test", "veget_test", null, null, "test") assertNotNull(tempoTable) @@ -726,7 +726,7 @@ class RsuIndicatorsTests { "tempo_rsu", "id_rsu", [0, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50], - 12,true, + 12, true, "test") assertNotNull(p) assertEquals 0.00566, h2GIS.firstRow("SELECT * FROM ${p} WHERE id_rsu = 1").FRONTAL_AREA_INDEX_H0_5_D30_60, 0.00001 diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy index dcc68e6045..bf0ba8cee8 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/SpatialUnitsTests.groovy @@ -27,13 +27,11 @@ import org.junit.jupiter.api.condition.EnabledIfSystemProperty import org.junit.jupiter.api.io.TempDir import org.locationtech.jts.geom.Geometry import org.locationtech.jts.io.WKTReader +import org.orbisgis.data.H2GIS import org.orbisgis.data.POSTGIS import org.orbisgis.geoclimate.Geoindicators -import static org.junit.jupiter.api.Assertions.assertEquals -import static org.junit.jupiter.api.Assertions.assertNotNull -import org.orbisgis.data.H2GIS -import static org.junit.jupiter.api.Assertions.assertTrue +import static org.junit.jupiter.api.Assertions.* class SpatialUnitsTests { @@ -82,8 +80,8 @@ class SpatialUnitsTests { def outputTableGeoms = Geoindicators.SpatialUnits.prepareTSUData(h2GIS, 'zone_test', 'road_test', 'rail_test', - 'veget_test', 'hydro_test', "","", - 10000, 2500,10000, "block") + 'veget_test', 'hydro_test', "", "", + 10000, 2500, 10000, "block") assertNotNull(outputTableGeoms) @@ 
-106,7 +104,7 @@ class SpatialUnitsTests { def createRSU = Geoindicators.SpatialUnits.createTSU(h2GIS, "zone_test", 'road_test', 'rail_test', 'veget_test', 'hydro_test', - "","", 10000, 2500,10000, "block") + "", "", 10000, 2500, 10000, "block") assert createRSU assert h2GIS.getSpatialTable(createRSU).save(new File(folder, "rsu.shp").getAbsolutePath(), true) @@ -191,7 +189,7 @@ class SpatialUnitsTests { def outputTableGeoms = Geoindicators.SpatialUnits.prepareTSUData(h2GIS, 'zone_test', 'road_test', 'rail_test', 'veget_test', - 'hydro_test', "", "",10000, 2500, 10000, "block") + 'hydro_test', "", "", 10000, 2500, 10000, "block") assertNotNull(outputTableGeoms) @@ -244,8 +242,7 @@ class SpatialUnitsTests { UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 6 AND id_col = 6; UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 5 AND id_col = 6; """.toString()) - String grid_scales = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS,"grid","id_grid",1) - String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, grid_scales, 0) + String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, "grid", 0) assertEquals(1, h2GIS.firstRow("select count(*) as count from $sprawl_areas".toString()).count) assertEquals(5, h2GIS.firstRow("select st_area(the_geom) as area from $sprawl_areas".toString()).area, 0.0001) } @@ -256,13 +253,13 @@ class SpatialUnitsTests { h2GIS.execute(""" --Grid values DROP TABLE IF EXISTS grid; - CREATE TABLE grid AS SELECT * EXCEPT(ID), id as id_grid, 104 AS LCZ_PRIMARY FROM + CREATE TABLE grid AS SELECT * EXCEPT(ID), id as id_grid, 105 AS LCZ_PRIMARY FROM ST_MakeGrid('POLYGON((0 0, 9 0, 9 9, 0 0))'::GEOMETRY, 1, 1); """.toString()) - String grid_scales = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS,"grid","id_grid",1) - String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, grid_scales, 0) + String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, "grid", 0) + h2GIS.save(sprawl_areas, "/tmp/sprawl.fgb", true) assertEquals(1, h2GIS.firstRow("select count(*) as count from $sprawl_areas".toString()).count) - assertTrue(h2GIS.firstRow("select st_union(st_accum(the_geom)) as the_geom from $sprawl_areas".toString()).the_geom.isEmpty()) + assertEquals(81, h2GIS.firstRow("select st_union(st_accum(the_geom)) as the_geom from $sprawl_areas".toString()).the_geom.getArea()) } @Test @@ -282,8 +279,7 @@ class SpatialUnitsTests { UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 6 AND id_col = 5; UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 6 AND id_col = 6; """.toString()) - String grid_scales = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS,"grid","id_grid",1) - String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, grid_scales, 0) + String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, "grid", 0) assertEquals(1, h2GIS.firstRow("select count(*) as count from $sprawl_areas".toString()).count) assertEquals(9, h2GIS.firstRow("select st_area(the_geom) as area from $sprawl_areas".toString()).area, 0.0001) } @@ -308,10 +304,9 @@ class SpatialUnitsTests { UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 9 AND id_col = 9; UPDATE grid SET LCZ_PRIMARY= 1 WHERE id_row = 1 AND id_col = 1; """.toString()) - String grid_scales = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS,"grid","id_grid",1) - String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, grid_scales, 0) - assertEquals(1, h2GIS.firstRow("select count(*) as count from $sprawl_areas".toString()).count) - 
assertEquals(9, h2GIS.firstRow("select st_area(st_accum(the_geom)) as area from $sprawl_areas".toString()).area, 0.0001) + String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, "grid", 0) + assertEquals(3, h2GIS.firstRow("select count(*) as count from $sprawl_areas".toString()).count) + assertEquals(11, h2GIS.firstRow("select st_area(st_accum(the_geom)) as area from $sprawl_areas".toString()).area, 0.0001) } @Test @@ -329,7 +324,7 @@ class SpatialUnitsTests { @Test void inverseGeometriesTest2() { def wktReader = new WKTReader() - Geometry expectedGeom = wktReader.read("POLYGON ((160 190, 260 190, 260 290, 320 290, 320 150, 240 150, 240 80, 160 80, 160 190))") + Geometry expectedGeom = wktReader.read("POLYGON ((160 190, 260 190, 260 290, 320 290, 320 150, 240 150, 240 80, 160 80, 160 190))") //Data for test h2GIS.execute(""" DROP TABLE IF EXISTS polygons; @@ -344,7 +339,7 @@ class SpatialUnitsTests { @Test void inverseGeometriesTest3() { def wktReader = new WKTReader() - Geometry expectedGeom = wktReader.read("MULTIPOLYGON (((160 190, 260 190, 260 290, 320 290, 320 150, 240 150, 240 80, 160 80, 160 190)), ((230 265, 230 230, 189 230, 189 265, 230 265)))") + Geometry expectedGeom = wktReader.read("MULTIPOLYGON (((160 190, 260 190, 260 290, 320 290, 320 150, 240 150, 240 80, 160 80, 160 190)), ((230 265, 230 230, 189 230, 189 265, 230 265)))") //Data for test h2GIS.execute(""" DROP TABLE IF EXISTS polygons; @@ -361,20 +356,19 @@ class SpatialUnitsTests { void sprawlAreasTestIntegration() { //Data for test String path = "/home/ebocher/Autres/data/geoclimate/uhi_lcz/Angers/" - String data = h2GIS.load("${path}grid_indicators.fgb") - String grid_scales = Geoindicators.GridIndicators.multiscaleLCZGrid(h2GIS,data,"id_grid", 1) + String grid_scales = h2GIS.load("${path}grid_indicators.geojson") String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2GIS, grid_scales, 100) h2GIS.save(sprawl_areas, "/tmp/sprawl_areas_indic.fgb", true) h2GIS.save(grid_scales, "/tmp/grid_indicators.fgb", true) - String distances = Geoindicators.GridIndicators.gridDistances(h2GIS, sprawl_areas, data, "id_grid") + String distances = Geoindicators.GridIndicators.gridDistances(h2GIS, sprawl_areas, grid_scales, "id_grid") h2GIS.save(distances, "/tmp/distances.fgb", true) //Method to compute the cool areas distances - String cool_areas = Geoindicators.SpatialUnits.extractCoolAreas(h2GIS, grid_scales) + String cool_areas = Geoindicators.SpatialUnits.extractCoolAreas(h2GIS, grid_scales, sprawl_areas) h2GIS.save(cool_areas, "/tmp/cool_areas.fgb", true) - String inverse_cool_areas = Geoindicators.SpatialUnits.inversePolygonsLayer(h2GIS,cool_areas) + String inverse_cool_areas = Geoindicators.SpatialUnits.inversePolygonsLayer(h2GIS, sprawl_areas, cool_areas) h2GIS.save(inverse_cool_areas, "/tmp/inverse_cool_areas.fgb", true) - distances = Geoindicators.GridIndicators.gridDistances(h2GIS, inverse_cool_areas, data, "id_grid") + distances = Geoindicators.GridIndicators.gridDistances(h2GIS, inverse_cool_areas, grid_scales, "id_grid") h2GIS.save(distances, "/tmp/cool_inverse_distances.fgb", true) } @@ -385,22 +379,22 @@ class SpatialUnitsTests { @Test void debugTSUTest() { String path = "/tmp/geoclimate" - String zone = h2GIS.load(path+File.separator+"zone.fgb") - String road = h2GIS.load(path+File.separator+"road.fgb") - String rail = h2GIS.load(path+File.separator+"rail.fgb") - String vegetation= h2GIS.load(path+File.separator+"vegetation.fgb") - String water= 
h2GIS.load(path+File.separator+"water.fgb") - String sea_land_mask= h2GIS.load(path+File.separator+"sea_land_mask.fgb") - String urban_areas= h2GIS.load(path+File.separator+"urban_areas.fgb") - double surface_vegetation =10000 - double surface_hydro=2500 - double surface_urban_areas=10000 + String zone = h2GIS.load(path + File.separator + "zone.fgb") + String road = h2GIS.load(path + File.separator + "road.fgb") + String rail = h2GIS.load(path + File.separator + "rail.fgb") + String vegetation = h2GIS.load(path + File.separator + "vegetation.fgb") + String water = h2GIS.load(path + File.separator + "water.fgb") + String sea_land_mask = h2GIS.load(path + File.separator + "sea_land_mask.fgb") + String urban_areas = h2GIS.load(path + File.separator + "urban_areas.fgb") + double surface_vegetation = 10000 + double surface_hydro = 2500 + double surface_urban_areas = 10000 double area = 1 String rsu = Geoindicators.SpatialUnits.createTSU(h2GIS, zone, - area, road, rail, vegetation, - water, sea_land_mask, urban_areas, - surface_vegetation, surface_hydro, surface_urban_areas, "rsu") - if(rsu){ + area, road, rail, vegetation, + water, sea_land_mask, urban_areas, + surface_vegetation, surface_hydro, surface_urban_areas, "rsu") + if (rsu) { h2GIS.save(rsu, "/tmp/rsu.fgb", true) } } diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy index 80b96098b5..04c28292eb 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/TypologyClassificationTests.groovy @@ -38,7 +38,6 @@ import smile.validation.Validation import java.util.zip.GZIPInputStream import static org.junit.jupiter.api.Assertions.* -import static org.orbisgis.data.H2GIS.open class TypologyClassificationTests { diff --git a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy index bce3a96b15..7b42b39edf 100644 --- a/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy +++ b/geoindicators/src/test/groovy/org/orbisgis/geoclimate/geoindicators/WorkflowGeoIndicatorsTest.groovy @@ -108,7 +108,7 @@ class WorkflowGeoIndicatorsTest { List listFacadeDistrib = [] List listRoofDensDistrib = [] int rangeDeg = 360 / parameters.facadeDensNumberOfDirection - for (int i in 0..parameters.facadeDensListLayersBottom.size()-1) { + for (int i in 0..parameters.facadeDensListLayersBottom.size() - 1) { Integer h_bot = parameters.facadeDensListLayersBottom[i] Integer h_up if (h_bot == parameters.facadeDensListLayersBottom[-1]) { @@ -130,7 +130,7 @@ class WorkflowGeoIndicatorsTest { } // Indicator list (at RSU scale) for each building height level List listHeightDistrib = [] - for (int i in 0..parameters.buildHeightListLayersBottom.size()-1) { + for (int i in 0..parameters.buildHeightListLayersBottom.size() - 1) { Integer h_bot = parameters.buildHeightListLayersBottom[i] Integer h_up if (h_bot == parameters.buildHeightListLayersBottom[-1]) { @@ -162,10 +162,10 @@ class WorkflowGeoIndicatorsTest { // Indicators common to each indicator use listColCommon = ["LOW_VEGETATION_FRACTION", "HIGH_VEGETATION_FRACTION", - "BUILDING_FRACTION", "WATER_FRACTION", "ROAD_FRACTION", 
"IMPERVIOUS_FRACTION", - "HIGH_VEGETATION_LOW_VEGETATION_FRACTION", "HIGH_VEGETATION_WATER_FRACTION", - "HIGH_VEGETATION_ROAD_FRACTION", "HIGH_VEGETATION_IMPERVIOUS_FRACTION", - "HIGH_VEGETATION_BUILDING_FRACTION", "UNDEFINED_FRACTION", "BUILDING_FLOOR_AREA_DENSITY"] + "BUILDING_FRACTION", "WATER_FRACTION", "ROAD_FRACTION", "IMPERVIOUS_FRACTION", + "HIGH_VEGETATION_LOW_VEGETATION_FRACTION", "HIGH_VEGETATION_WATER_FRACTION", + "HIGH_VEGETATION_ROAD_FRACTION", "HIGH_VEGETATION_IMPERVIOUS_FRACTION", + "HIGH_VEGETATION_BUILDING_FRACTION", "UNDEFINED_FRACTION", "BUILDING_FLOOR_AREA_DENSITY"] // Column names in the LCZ Table listColLcz = ["LCZ_PRIMARY", "LCZ_SECONDARY", "LCZ_EQUALITY_VALUE", "LCZ_UNIQUENESS_VALUE", "MIN_DISTANCE"] @@ -195,7 +195,7 @@ class WorkflowGeoIndicatorsTest { inputTableNames.hydrographicTable, "", "", "", "", "", ["indicatorUse": indicatorUse, svfSimplified: false], prefixName) - datasource.save(geoIndicatorsCompute_i.rsu_indicators, "/tmp/rsu.geojson" , true) + datasource.save(geoIndicatorsCompute_i.rsu_indicators, "/tmp/rsu.geojson", true) assertNotNull(geoIndicatorsCompute_i) checkRSUIndicators(datasource, geoIndicatorsCompute_i.rsu_indicators) assertEquals(listUrbTyp.Bu.sort(), datasource.getColumnNames(geoIndicatorsCompute_i.building_indicators).sort()) @@ -225,19 +225,19 @@ class WorkflowGeoIndicatorsTest { assertEquals dfBlock.nrows(), dfBlock.omitNullRows().nrows() // Test that the sum of all building fractions is 100% for both LCZ and TEB building types - if (listBuildTypTeb){ + if (listBuildTypTeb) { def sum_afrac_teb = datasource.firstRow("SELECT AVG(${listBuildTypTeb.join("+")}) AS SUM_FRAC FROM ${"$geoIndicatorsCompute_i.rsu_indicators"} WHERE BUILDING_DIRECTION_UNIQUENESS <> -1") assertEquals sum_afrac_teb.SUM_FRAC, 1.0, 0.01 } - if (listBuildTypLcz){ + if (listBuildTypLcz) { def sum_afrac_lcz = datasource.firstRow("SELECT AVG(${listBuildTypLcz.join("+")}) AS SUM_FRAC FROM ${"$geoIndicatorsCompute_i.rsu_indicators"} WHERE BUILDING_DIRECTION_UNIQUENESS <> -1") assertEquals sum_afrac_lcz.SUM_FRAC, 1.0, 0.01 } - if (listFloorBuildTypLcz){ + if (listFloorBuildTypLcz) { def sum_fafrac_lcz = datasource.firstRow("SELECT AVG(${listFloorBuildTypLcz.join("+")}) AS SUM_FRAC FROM ${"$geoIndicatorsCompute_i.rsu_indicators"} WHERE BUILDING_DIRECTION_UNIQUENESS <> -1") assertEquals sum_fafrac_lcz.SUM_FRAC, 1.0, 0.01 } - if (listFloorBuildTypTeb){ + if (listFloorBuildTypTeb) { def sum_fafrac_teb = datasource.firstRow("SELECT AVG(${listFloorBuildTypTeb.join("+")}) AS SUM_FRAC FROM ${"$geoIndicatorsCompute_i.rsu_indicators"} WHERE BUILDING_DIRECTION_UNIQUENESS <> -1") assertEquals sum_fafrac_teb.SUM_FRAC, 1.0, 0.01 } @@ -275,7 +275,7 @@ class WorkflowGeoIndicatorsTest { assertEquals countSumAreaRemove0.NB, countSumAreaEqual1.NB // Check that the sum of proportion (or building floor area) for each RSU is equal to 1 - def colUtrfFloorArea = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_utrf_floor_area) + def colUtrfFloorArea = datasource.getColumnNames(geoIndicatorsCompute_i.rsu_utrf_floor_area) // Test that the TYPO_SECOND is inside the RSU UTRF table assertEquals 1, colUtrfFloorArea.count("TYPO_SECOND") @@ -522,7 +522,7 @@ class WorkflowGeoIndicatorsTest { String grid = Geoindicators.WorkflowGeoIndicators.createGrid(datasource, datasource.getExtent("building"), 10, 10, 0) assertNotNull(grid) String grid_indicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(datasource, grid, [], - null, "building", null, null, null, null, null, + "building", null, null, 
null, null, null, null, null, null) assertNull(grid_indicators) def list_indicators = ["BUILDING_FRACTION", "BUILDING_HEIGHT", "BUILDING_POP", @@ -531,8 +531,8 @@ class WorkflowGeoIndicatorsTest { "BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", "SEA_LAND_FRACTION", "ASPECT_RATIO", "SVF", "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS"] - grid_indicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(datasource, grid, list_indicators, null, - "building", null, null, null, null, null, null, + grid_indicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(datasource, grid, list_indicators, + "building", null, null, null, null, null, null, null, null) assertNotNull(grid_indicators) assertEquals(1, datasource.getRowCount(grid_indicators)) @@ -618,7 +618,7 @@ class WorkflowGeoIndicatorsTest { @Disabled @Test - void test(){ + void test() { datasource.load("/tmp/road_inter.geojson", "road", true) datasource.load("/tmp/rsu_table.geojson", "rsu", true) datasource.createSpatialIndex("road") diff --git a/osm/pom.xml b/osm/pom.xml index 7607db129b..53d7f7e43e 100644 --- a/osm/pom.xml +++ b/osm/pom.xml @@ -1,5 +1,6 @@ - + geoclimate-parent org.orbisgis.geoclimate diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy index 78dff61ec1..92b8b41527 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataFormatting.groovy @@ -46,7 +46,7 @@ import java.util.regex.Pattern * @return outputEstimatedTableName The name of the table containing the state of estimation for each building */ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone = "", - String urban_areas = "", int h_lev_min = 3, String jsonFilename = "") throws Exception{ + String urban_areas = "", int h_lev_min = 3, String jsonFilename = "") throws Exception { if (!h_lev_min) { h_lev_min = 3 } @@ -72,7 +72,7 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone def typeAndLevel = parametersMap.level def queryMapper = "SELECT " def columnToMap = parametersMap.columns - if (datasource.getRowCount(building)> 0) { + if (datasource.getRowCount(building) > 0) { def heightPattern = Pattern.compile("((?:\\d+\\/|(?:\\d+|^|\\s)\\.)?\\d+)\\s*([^\\s\\d+\\-.,:;^\\/]+(?:\\^\\d+(?:\$|(?=[\\s:;\\/])))?(?:\\/[^\\s\\d+\\-.,:;^\\/]+(?:\\^\\d+(?:\$|(?=[\\s:;\\/])))?)*)?", Pattern.CASE_INSENSITIVE) def columnNames = datasource.getColumnNames(building) columnNames.remove("THE_GEOM") @@ -100,12 +100,12 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone String roof_shape = row.'roof:shape' if (formatedHeight.nbLevels > 0 && zIndex >= 0 && type) { Geometry geom = row.the_geom - if(pZone.intersects(geom)){ - def srid = geom.getSRID() - for (int i = 0; i < geom.getNumGeometries(); i++) { - Geometry subGeom = geom.getGeometryN(i) - if (subGeom instanceof Polygon && subGeom.getArea() > 1) { - stmt.addBatch """ + if (pZone.intersects(geom)) { + def srid = geom.getSRID() + for (int i = 0; i < geom.getNumGeometries(); i++) { + Geometry subGeom = geom.getGeometryN(i) + if (subGeom instanceof Polygon && subGeom.getArea() > 1) { + stmt.addBatch """ INSERT INTO ${outputTableName} values( ST_GEOMFROMTEXT('${subGeom}',$srid), $id_build, @@ -119,17 +119,17 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone ${roof_shape ? 
"'" + roof_shape + "'" : null}) """.toString() - if (formatedHeight.estimated) { - stmt.addBatch """ + if (formatedHeight.estimated) { + stmt.addBatch """ INSERT INTO ${outputEstimateTableName} values( $id_build, '${row.id}') """.toString() + } + id_build++ } - id_build++ } } - } } } } @@ -284,7 +284,7 @@ Map formatBuildingLayer(JdbcDataSource datasource, String building, String zone * @return outputTableName The name of the final roads table */ String formatRoadLayer( - JdbcDataSource datasource, String road, String zone = "", String jsonFilename = "") throws Exception{ + JdbcDataSource datasource, String road, String zone = "", String jsonFilename = "") throws Exception { debug('Formating road layer') def outputTableName = postfix "INPUT_ROAD" datasource """ @@ -435,7 +435,7 @@ String formatRoadLayer( * @param jsonFilename name of the json formatted file containing the filtering parameters * @return outputTableName The name of the final rails table */ -String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "", String jsonFilename = "") throws Exception{ +String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "", String jsonFilename = "") throws Exception { debug('Rails transformation starts') def outputTableName = "INPUT_RAILS_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """ drop table if exists $outputTableName; @@ -523,7 +523,7 @@ String formatRailsLayer(JdbcDataSource datasource, String rail, String zone = "" * @param jsonFilename name of the json formatted file containing the filtering parameters * @return outputTableName The name of the final vegetation table */ -String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "", String jsonFilename = "") throws Exception{ +String formatVegetationLayer(JdbcDataSource datasource, String vegetation, String zone = "", String jsonFilename = "") throws Exception { debug('Vegetation transformation starts') def outputTableName = postfix "INPUT_VEGET" datasource """ @@ -598,7 +598,7 @@ String formatVegetationLayer(JdbcDataSource datasource, String vegetation, Strin * @param zone an envelope to reduce the study area * @return outputTableName The name of the final hydro table */ -String formatWaterLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception{ +String formatWaterLayer(JdbcDataSource datasource, String water, String zone = "") throws Exception { debug('Hydro transformation starts') def outputTableName = "INPUT_HYDRO_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """Drop table if exists $outputTableName; @@ -654,7 +654,7 @@ String formatWaterLayer(JdbcDataSource datasource, String water, String zone = " * @param zone an envelope to reduce the study area * @return outputTableName The name of the final impervious table */ -String formatImperviousLayer(JdbcDataSource datasource, String impervious, String zone = "", String jsonFilename = "") throws Exception{ +String formatImperviousLayer(JdbcDataSource datasource, String impervious, String zone = "", String jsonFilename = "") throws Exception { debug('Impervious transformation starts') def outputTableName = "INPUT_IMPERVIOUS_${UUID.randomUUID().toString().replaceAll("-", "_")}" debug(impervious) @@ -1025,7 +1025,7 @@ static Map parametersMapping(def file, def altResourceStream) { * @param zone an envelope to reduce the study area * @return outputTableName The name of the final urban areas table */ -String formatUrbanAreas(JdbcDataSource 
datasource, String urban_areas, String zone = "", String jsonFilename = "") throws Exception{ +String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zone = "", String jsonFilename = "") throws Exception { debug('Urban areas transformation starts') def outputTableName = "INPUT_URBAN_AREAS_${UUID.randomUUID().toString().replaceAll("-", "_")}" datasource.execute """Drop table if exists $outputTableName; @@ -1079,7 +1079,6 @@ String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zo } } //Merging urban_areas - def mergingUrbanAreas = postfix("merging_urban_areas") datasource.execute(""" CREATE TABLE $mergingUrbanAreas as select CAST((row_number() over()) as Integer) as id_urban,the_geom, type from @@ -1103,32 +1102,30 @@ String formatUrbanAreas(JdbcDataSource datasource, String urban_areas, String zo * @param water The name of the input water table to improve sea extraction * @return outputTableName The name of the final buildings table */ -String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zone = "", String water = "") throws Exception{ +String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zone = "", String water = "") throws Exception { String outputTableName = postfix "INPUT_SEA_LAND_MASK_" datasource.execute """Drop table if exists $outputTableName; CREATE TABLE $outputTableName (THE_GEOM GEOMETRY, id serial, type varchar);""".toString() - if(!zone){ + if (!zone) { debug "A zone table must be provided to compute the sea/land mask" - } - else if (coastline) { + } else if (coastline) { if (datasource.hasTable(coastline) && datasource.getRowCount(coastline) > 0) { - debug 'Computing sea/land mask table' - datasource """ DROP TABLE if exists ${outputTableName};""".toString() - datasource.createSpatialIndex(coastline, "the_geom") - def mergingDataTable = "coatline_merged${UUID.randomUUID().toString().replaceAll("-", "_")}" - def coastLinesIntersects = "coatline_intersect_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" - def islands_mark = "islands_mark_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" - def coastLinesIntersectsPoints = "coatline_intersect_points_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" - def coastLinesPoints = "coatline_points_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" - def sea_land_mask = "sea_land_mask${UUID.randomUUID().toString().replaceAll("-", "_")}" - def water_to_be_filtered = "water_to_be_filtered${UUID.randomUUID().toString().replaceAll("-", "_")}" - def water_filtered_exploded = "water_filtered_exploded${UUID.randomUUID().toString().replaceAll("-", "_")}" - def sea_land_triangles = "sea_land_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" - def sea_id_triangles = "sea_id_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" - def water_id_triangles = "water_id_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" - - datasource.createSpatialIndex(coastline, "the_geom") - datasource.execute """DROP TABLE IF EXISTS $coastLinesIntersects, + debug 'Computing sea/land mask table' + datasource.createSpatialIndex(coastline, "the_geom") + def mergingDataTable = "coatline_merged${UUID.randomUUID().toString().replaceAll("-", "_")}" + def coastLinesIntersects = "coatline_intersect_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" + def islands_mark = "islands_mark_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" + def coastLinesIntersectsPoints = 
"coatline_intersect_points_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" + def coastLinesPoints = "coatline_points_zone${UUID.randomUUID().toString().replaceAll("-", "_")}" + def sea_land_mask = "sea_land_mask${UUID.randomUUID().toString().replaceAll("-", "_")}" + def water_to_be_filtered = "water_to_be_filtered${UUID.randomUUID().toString().replaceAll("-", "_")}" + def water_filtered_exploded = "water_filtered_exploded${UUID.randomUUID().toString().replaceAll("-", "_")}" + def sea_land_triangles = "sea_land_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" + def sea_id_triangles = "sea_id_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" + def water_id_triangles = "water_id_triangles${UUID.randomUUID().toString().replaceAll("-", "_")}" + + datasource.createSpatialIndex(coastline, "the_geom") + datasource.execute """DROP TABLE IF EXISTS $coastLinesIntersects, $islands_mark, $mergingDataTable, $coastLinesIntersectsPoints, $coastLinesPoints,$sea_land_mask, $water_filtered_exploded,$water_to_be_filtered, $sea_land_triangles, $sea_id_triangles, $water_id_triangles; CREATE TABLE $coastLinesIntersects AS SELECT ST_intersection(a.the_geom, b.the_geom) as the_geom @@ -1136,16 +1133,16 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon a.the_geom && b.the_geom AND st_intersects(a.the_geom, b.the_geom) and "natural"= 'coastline'; """.toString() - if (water) { - //Sometimes there is no coastlines - if (datasource.getRowCount(coastLinesIntersects) > 0) { - datasource.createSpatialIndex(water, "the_geom") - datasource.execute """ + if (water) { + //Sometimes there is no coastlines + if (datasource.getRowCount(coastLinesIntersects) > 0) { + datasource.createSpatialIndex(water, "the_geom") + datasource.execute """ CREATE TABLE $islands_mark (the_geom GEOMETRY, ID SERIAL) AS SELECT the_geom, EXPLOD_ID FROM st_explode('( SELECT ST_LINEMERGE(st_accum(THE_GEOM)) AS the_geom, NULL FROM $coastLinesIntersects)');""".toString() - datasource.execute """ + datasource.execute """ CREATE TABLE $mergingDataTable AS SELECT THE_GEOM FROM $coastLinesIntersects UNION ALL @@ -1158,7 +1155,7 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon st_explode('(SELECT st_polygonize(st_union(ST_NODE(st_accum(the_geom)))) AS the_geom FROM $mergingDataTable)') as foo where ST_DIMENSION(the_geom) = 2 AND st_area(the_geom) >0; """.toString() - datasource.execute """ + datasource.execute """ CREATE SPATIAL INDEX IF NOT EXISTS ${sea_land_mask}_the_geom_idx ON $sea_land_mask (THE_GEOM); CREATE SPATIAL INDEX IF NOT EXISTS ${islands_mark}_the_geom_idx ON $islands_mark (THE_GEOM); @@ -1171,8 +1168,8 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon CREATE SPATIAL INDEX IF NOT EXISTS ${coastLinesIntersectsPoints}_the_geom_idx ON $coastLinesIntersectsPoints (THE_GEOM);""".toString() - //Perform triangulation to tag the areas as sea or water - datasource.execute """ + //Perform triangulation to tag the areas as sea or water + datasource.execute """ DROP TABLE IF EXISTS $sea_land_triangles; CREATE TABLE $sea_land_triangles AS SELECT * FROM @@ -1187,13 +1184,13 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon st_intersects(a.THE_GEOM, b.THE_GEOM); CREATE INDEX ON $sea_id_triangles (id);""".toString() - //Set the triangles to sea - datasource.execute """ + //Set the triangles to sea + datasource.execute """ UPDATE ${sea_land_triangles} SET TYPE='sea' WHERE ID IN(SELECT ID FROM 
$sea_id_triangles); """.toString() - //Set the triangles to water - datasource.execute """ + //Set the triangles to water + datasource.execute """ DROP TABLE IF EXISTS $water_id_triangles; CREATE TABLE $water_id_triangles AS SELECT a.ID FROM ${sea_land_triangles} a, $water b WHERE a.THE_GEOM && b.THE_GEOM AND @@ -1205,31 +1202,31 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon UPDATE $sea_land_triangles SET TYPE='water' WHERE ID IN(SELECT ID FROM $water_id_triangles); """.toString() - //Unioning all geometries - datasource.execute(""" + //Unioning all geometries + datasource.execute("""DROP TABLE if exists ${outputTableName}; create table $outputTableName as select id , st_union(st_accum(the_geom)) the_geom, type from $sea_land_triangles a group by id, type; """.toString()) - } else { - //We look for the type of water - def waterTypes = datasource.firstRow("SELECT COUNT(*) as count, type from $water group by type".toString()) - //There is only sea geometries then we then decided to put the entire study area in a sea zone - //As a result, the water layer is replaced by the entire - if (!waterTypes.containsValue("water")) { - datasource.execute(""" + } else { + //We look for the type of water + def waterTypes = datasource.firstRow("SELECT COUNT(*) as count, type from $water group by type".toString()) + //There is only sea geometries then we then decided to put the entire study area in a sea zone + //As a result, the water layer is replaced by the entire + if (!waterTypes.containsValue("water")) { + datasource.execute(""" DROP TABLE IF EXISTS $water; CREATE TABLE $water as select CAST(1 AS INTEGER) AS ID_WATER, NULL AS ID_SOURCE , CAST(0 AS INTEGER) AS ZINDEX, the_geom, 'sea' as type from $zone ; """.toString()) - return outputTableName - } + return outputTableName } - } else { - datasource.execute """ + } + } else { + datasource.execute """ CREATE TABLE $islands_mark (the_geom GEOMETRY, ID SERIAL) AS SELECT the_geom, EXPLOD_ID FROM st_explode('( SELECT ST_LINEMERGE(st_accum(THE_GEOM)) AS the_geom, NULL FROM $coastLinesIntersects)');""".toString() - datasource.execute """ + datasource.execute """ CREATE TABLE $mergingDataTable AS SELECT THE_GEOM FROM $coastLinesIntersects UNION ALL @@ -1252,8 +1249,8 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon CREATE SPATIAL INDEX IF NOT EXISTS ${coastLinesIntersectsPoints}_the_geom_idx ON $coastLinesIntersectsPoints (THE_GEOM); """.toString() - //Perform triangulation to tag the areas as sea or water - datasource.execute """ + //Perform triangulation to tag the areas as sea or water + datasource.execute """ DROP TABLE IF EXISTS $sea_land_triangles; CREATE TABLE $sea_land_triangles AS SELECT * FROM @@ -1272,21 +1269,20 @@ String formatSeaLandMask(JdbcDataSource datasource, String coastline, String zon UPDATE ${sea_land_triangles} SET TYPE='sea' WHERE ID IN(SELECT ID FROM $sea_id_triangles); """.toString() - //Unioning all geometries - datasource.execute(""" + //Unioning all geometries + datasource.execute("""DROP TABLE if exists ${outputTableName}; create table $outputTableName as select id, st_union(st_accum(the_geom)) the_geom, type from $sea_land_triangles a group by id, type; """.toString()) - } + } - datasource.execute("""DROP TABLE IF EXISTS $coastLinesIntersects, + datasource.execute("""DROP TABLE IF EXISTS $coastLinesIntersects, $islands_mark, $mergingDataTable, $coastLinesIntersectsPoints, $coastLinesPoints,$sea_land_mask, $water_filtered_exploded,$water_to_be_filtered, 
$sea_land_triangles, $sea_id_triangles, $water_id_triangles """.toString()) - debug 'The sea/land mask has been computed' - return outputTableName - } - else { + debug 'The sea/land mask has been computed' + return outputTableName + } else { //There is no coatline geometries, we check the water table. if (water) { def waterTypes = datasource.firstRow("SELECT COUNT(*) as count, type from $water group by type".toString()) diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy index 387d1bcdd6..5f4cf5ad59 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/InputDataLoading.groovy @@ -45,7 +45,7 @@ import org.orbisgis.geoclimate.osmtools.utils.Utilities * Note that the GIS tables are projected in a local utm projection */ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, float distance = 0, - boolean downloadAllOSMData = true) throws Exception{ + boolean downloadAllOSMData = true) throws Exception { if (datasource == null) { throw new Exception('The datasource cannot be null') } @@ -103,7 +103,7 @@ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, f def extract = OSMTools.Loader.extract(query) if (extract) { - Map results = createGISLayers(datasource, extract,epsg) + Map results = createGISLayers(datasource, extract, epsg) if (results) { return [building : results.building, road : results.road, @@ -136,8 +136,8 @@ Map extractAndCreateGISLayers(JdbcDataSource datasource, Object zoneToExtract, f * @return The name of the resulting GIS tables : buildingTableName, roadTableName, * railTableName, vegetationTableName, hydroTableName, imperviousTableName */ -Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1) throws Exception{ - return createGISLayers( datasource, osmFilePath, null, epsg) +Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1) throws Exception { + return createGISLayers(datasource, osmFilePath, null, epsg) } /** @@ -150,7 +150,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, int epsg = -1 * railTableName, vegetationTableName, hydroTableName, imperviousTableName */ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, - org.locationtech.jts.geom.Geometry geometry, int epsg = -1) throws Exception{ + org.locationtech.jts.geom.Geometry geometry, int epsg = -1) throws Exception { if (epsg <= -1) { throw new Exception("Invalid epsg code $epsg".toString()) } @@ -171,7 +171,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, def parametersMap = readJSONParameters(paramsDefaultFile) def tags = parametersMap.get("tags") def columnsToKeep = parametersMap.get("columns") - def building = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep,geometry, true) + def building = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep, geometry, true) if (building) { outputBuildingTableName = postfix("OSM_BUILDING") datasource.execute("ALTER TABLE ${building} RENAME TO $outputBuildingTableName".toString()) @@ -197,7 +197,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String rail = OSMTools.Transform.extractWaysAsLines(datasource, prefix, epsg, tags, 
columnsToKeep,geometry) + String rail = OSMTools.Transform.extractWaysAsLines(datasource, prefix, epsg, tags, columnsToKeep, geometry) if (rail) { outputRailTableName = postfix("OSM_RAIL") datasource.execute("ALTER TABLE ${rail} RENAME TO $outputRailTableName".toString()) @@ -208,7 +208,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String vegetation = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep,geometry, true) + String vegetation = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep, geometry, true) debug "Create the vegetation layer" if (vegetation) { outputVegetationTableName = postfix("OSM_VEGETATION") @@ -221,7 +221,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String water = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep,geometry, true) + String water = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep, geometry, true) debug "Create the water layer" if (water) { outputHydroTableName = postfix("OSM_WATER") @@ -234,7 +234,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String impervious = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep,geometry, true) + String impervious = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep, geometry, true) debug "Create the impervious layer" if (impervious) { outputImperviousTableName = postfix("OSM_IMPERVIOUS") @@ -246,7 +246,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String urban_areas = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags,columnsToKeep,geometry, true) + String urban_areas = OSMTools.Transform.toPolygons(datasource, prefix, epsg, tags, columnsToKeep, geometry, true) debug "Create the urban areas layer" if (urban_areas) { outputUrbanAreasTableName = postfix("OSM_URBAN_AREAS") @@ -260,7 +260,7 @@ Map createGISLayers(JdbcDataSource datasource, String osmFilePath, parametersMap = readJSONParameters(paramsDefaultFile) tags = parametersMap.get("tags") columnsToKeep = parametersMap.get("columns") - String coastlines = OSMTools.Transform.toLines(datasource, prefix, epsg, tags, columnsToKeep,geometry) + String coastlines = OSMTools.Transform.toLines(datasource, prefix, epsg, tags, columnsToKeep, geometry) if (coastlines) { outputCoastlineTableName = postfix("OSM_COASTLINE") datasource.execute("ALTER TABLE ${coastlines} RENAME TO $outputCoastlineTableName".toString()) diff --git a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy index 66f81cbf02..0563ea9683 100644 --- a/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy +++ b/osm/src/main/groovy/org/orbisgis/geoclimate/osm/WorkflowOSM.groovy @@ -285,7 +285,8 @@ Map workflow(def input) throws Exception { "road_traffic", "population", "ground_acoustic", - "sprawl_areas"] + "urban_sprawl_areas", + 
"urban_cool_areas"] //Get processing parameters def processing_parameters = extractProcessingParameters(parameters.get("parameters")) @@ -444,7 +445,7 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d info "Urban areas formatted" /* - * Do not filter the data when formatting becausethe job is already done when extracting osm data * + * Do not filter the data when formatting because the job is already done when extracting osm data * */ Map formatBuilding = OSM.InputDataFormatting.formatBuildingLayer( h2gis_datasource, gisLayersResults.building, @@ -596,7 +597,7 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d if (grid_indicators_params) { info("Start computing grid_indicators") if (!geomEnv) { - geomEnv = h2gis_datasource.getSpatialTable(utm_zone_table).getExtent() + geomEnv = h2gis_datasource.getExtent(utm_zone_table) } outputGrid = grid_indicators_params.output def x_size = grid_indicators_params.x_size @@ -605,7 +606,6 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d x_size, y_size, srid, grid_indicators_params.rowCol) String rasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(h2gis_datasource, grid, grid_indicators_params.indicators, - grid_indicators_params.lcz_lod, results.building, roadTableName, vegetationTableName, hydrographicTableName, imperviousTableName, results.rsu_lcz, @@ -619,7 +619,10 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d def sprawl_indic = Geoindicators.WorkflowGeoIndicators.sprawlIndicators(h2gis_datasource, rasterizedIndicators, "id_grid", grid_indicators_params.indicators, Math.max(x_size, y_size).floatValue()) if (sprawl_indic) { - results.put("sprawl_areas", sprawl_indic.sprawl_areas) + results.put("urban_sprawl_areas", sprawl_indic.urban_sprawl_areas) + if (sprawl_indic.urban_cool_areas) { + results.put("urban_cool_areas", sprawl_indic.urban_cool_areas) + } results.put("grid_indicators", sprawl_indic.grid_indicators) } info("End computing grid_indicators") @@ -637,9 +640,9 @@ Map osm_processing(JdbcDataSource h2gis_datasource, def processing_parameters, d h2gis_datasource.dropTable(Geoindicators.getCachedTableNames()) } } catch (Exception e) { - saveLogZoneTable(h2gis_datasource,databaseFolder, id_zone in Collection?id_zone.join("_"):id_zone, osm_zone_geometry, e.getLocalizedMessage()) + saveLogZoneTable(h2gis_datasource, databaseFolder, id_zone in Collection ? id_zone.join("_") : id_zone, osm_zone_geometry, e.getLocalizedMessage()) //eat the exception and process other zone - warn("The zone $id_zone has not been processed. Please check the log table to get more informations." ) + warn("The zone $id_zone has not been processed. 
Please check the log table to get more informations.") } } if (outputTableNamesResult) { @@ -673,7 +676,7 @@ void saveLogZoneTable(JdbcDataSource dataSource, String databaseFolder, String i '${Geoindicators.version()}', '${Geoindicators.buildNumber()}')""") } - dataSource.save(logTableZones, databaseFolder+File.separator+"log_zones_"+id_zone+".fgb", true ) + dataSource.save(logTableZones, databaseFolder + File.separator + "log_zones_" + id_zone + ".fgb", true) } /** @@ -689,7 +692,7 @@ void saveLogZoneTable(JdbcDataSource dataSource, String databaseFolder, String i * osm_geometry the geometry that represents the processed zone in lat/lon * utm_srid the UTM srid code */ -def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_size) throws Exception{ +def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_size) throws Exception { def outputZoneTable = "ZONE_${UUID.randomUUID().toString().replaceAll("-", "_")}".toString() def outputZoneEnvelopeTable = "ZONE_ENVELOPE_${UUID.randomUUID().toString().replaceAll("-", "_")}".toString() if (zoneToExtract) { @@ -755,7 +758,7 @@ def extractOSMZone(def datasource, def zoneToExtract, def distance, def bbox_siz * @param processing_parameters the file parameters * @return a filled map of parameters */ -def extractProcessingParameters(def processing_parameters) throws Exception{ +def extractProcessingParameters(def processing_parameters) throws Exception { def defaultParameters = [distance: 0f, prefixName: "", hLevMin : 3] def rsu_indicators_default = [indicatorUse : [], @@ -851,18 +854,18 @@ def extractProcessingParameters(def processing_parameters) throws Exception{ def list_indicators = grid_indicators.indicators if (x_size && y_size) { if (x_size <= 0 || y_size <= 0) { - throw new Exception( "Invalid grid size padding. Must be greater that 0") + throw new Exception("Invalid grid size padding. 
Must be greater that 0") } if (!list_indicators) { - throw new Exception( "The list of indicator names cannot be null or empty") + throw new Exception("The list of indicator names cannot be null or empty") } def allowed_grid_indicators = ["BUILDING_FRACTION", "BUILDING_HEIGHT", "BUILDING_POP", "BUILDING_TYPE_FRACTION", "WATER_FRACTION", "VEGETATION_FRACTION", "ROAD_FRACTION", "IMPERVIOUS_FRACTION", "UTRF_AREA_FRACTION", "UTRF_FLOOR_AREA_FRACTION", "LCZ_FRACTION", "LCZ_PRIMARY", "FREE_EXTERNAL_FACADE_DENSITY", "BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", "BUILDING_HEIGHT_DIST", "FRONTAL_AREA_INDEX", "SEA_LAND_FRACTION", "ASPECT_RATIO", "SVF", - "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "SPRAWL_AREAS", - "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"] + "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", "URBAN_SPRAWL_AREAS", + "URBAN_SPRAWL_DISTANCES", "URBAN_SPRAWL_COOL_DISTANCE"] def allowedOutputIndicators = allowed_grid_indicators.intersect(list_indicators*.toUpperCase()) if (allowedOutputIndicators) { //Update the RSU indicators list according the grid indicators @@ -893,17 +896,17 @@ def extractProcessingParameters(def processing_parameters) throws Exception{ def lcz_lod = grid_indicators.lcz_lod if (lcz_lod && lcz_lod in Integer) { if (lcz_lod < 0 && lcz_lod > 10) { - throw new Exception( "The number of level of details to aggregate the LCZ must be between 0 and 10") + throw new Exception("The number of level of details to aggregate the LCZ must be between 0 and 10") } grid_indicators_tmp.put("lcz_lod", lcz_lod) } - def sprawl_areas = grid_indicators.sprawl_areas + def sprawl_areas = grid_indicators.urban_sprawl_areas if (sprawl_areas && sprawl_areas in Boolean) { - grid_indicators_tmp.put("sprawl_areas", sprawl_areas) + grid_indicators_tmp.put("urban_sprawl_areas", sprawl_areas) } defaultParameters.put("grid_indicators", grid_indicators_tmp) } else { - throw new Exception( "Please set a valid list of indicator names in ${allowed_grid_indicators}") + throw new Exception("Please set a valid list of indicator names in ${allowed_grid_indicators}") } } } @@ -947,7 +950,7 @@ def extractProcessingParameters(def processing_parameters) throws Exception{ * @return */ def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFiles, def ouputFolder, def subFolderName, def outputSRID, - def reproject, def deleteOutputData, def outputGrid) throws Exception{ + def reproject, def deleteOutputData, def outputGrid) throws Exception { //Create a subfolder to store each results def folderName = id_zone in Collection ? 
id_zone.join("_") : id_zone def subFolder = new File(ouputFolder.getAbsolutePath() + File.separator + subFolderName + folderName) @@ -956,7 +959,7 @@ def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFi } else { FileUtilities.deleteFiles(subFolder) } - outputFiles.each { + outputFiles.each { it-> if (it == "grid_indicators") { if (outputGrid == "fgb") { Geoindicators.WorkflowUtilities.saveInFile(results."$it", "${subFolder.getAbsolutePath() + File.separator + it}.fgb", h2gis_datasource, outputSRID, reproject, deleteOutputData) @@ -983,7 +986,7 @@ def saveOutputFiles(def h2gis_datasource, def id_zone, def results, def outputFi * @return */ def saveTablesInDatabase(JdbcDataSource output_datasource, JdbcDataSource h2gis_datasource, def outputTableNames, - def h2gis_tables, def id_zone, def inputSRID, def outputSRID, def reproject) throws Exception{ + def h2gis_tables, def id_zone, def inputSRID, def outputSRID, def reproject) throws Exception { //Export building indicators indicatorTableBatchExportTable(output_datasource, outputTableNames.building_indicators, id_zone, h2gis_datasource, h2gis_tables.building_indicators , "WHERE ID_RSU IS NOT NULL", inputSRID, outputSRID, reproject) @@ -1088,7 +1091,7 @@ def saveTablesInDatabase(JdbcDataSource output_datasource, JdbcDataSource h2gis_ */ def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, - def filter, def inputSRID, def outputSRID, def reproject) throws Exception{ + def filter, def inputSRID, def outputSRID, def reproject) throws Exception { if (output_table) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { if (output_datasource.hasTable(output_table)) { @@ -1209,7 +1212,7 @@ def abstractModelTableBatchExportTable(JdbcDataSource output_datasource, */ def indicatorTableBatchExportTable(JdbcDataSource output_datasource, def output_table, def id_zone, def h2gis_datasource, h2gis_table_to_save, def filter, def inputSRID, def outputSRID, - def reproject) throws Exception{ + def reproject) throws Exception { if (output_table) { if (h2gis_table_to_save) { if (h2gis_datasource.hasTable(h2gis_table_to_save)) { @@ -1334,7 +1337,7 @@ def indicatorTableBatchExportTable(JdbcDataSource output_datasource, def output_ * @return */ def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, H2GIS h2gis_datasource, - def output_table, def outputSRID, def output_datasource) throws Exception{ + def output_table, def outputSRID, def output_datasource) throws Exception { def targetTableSrid = output_datasource.getSpatialTable(output_table).srid if (filter) { if (outputSRID == 0) { @@ -1407,7 +1410,7 @@ def prepareTableOutput(def h2gis_table_to_save, def filter, def inputSRID, H2GIS * @return */ Map buildGeoclimateLayers(JdbcDataSource datasource, Object zoneToExtract, - float distance = 500, int hLevMin = 3) throws Exception{ + float distance = 500, int hLevMin = 3) throws Exception { if (datasource == null) { throw new Exception("Cannot access to the database to store the osm data") } diff --git a/osm/src/main/resources/org/orbisgis/geoclimate/osm/buildingParams.json b/osm/src/main/resources/org/orbisgis/geoclimate/osm/buildingParams.json index 9fb3fab25a..1f926c2dd9 100644 --- a/osm/src/main/resources/org/orbisgis/geoclimate/osm/buildingParams.json +++ b/osm/src/main/resources/org/orbisgis/geoclimate/osm/buildingParams.json @@ -87,7 +87,7 @@ "hospital": 2, "parking": 1, "slight_construction": 0, - "water_tower" : 0, + 
"water_tower": 0, "fortress": 0, "abbey": 0, "cathedral": 0, @@ -98,10 +98,10 @@ "shrine": 0, "place_of_worship": 0, "wayside_shrine": 0, - "swimming_pool" : 0, + "swimming_pool": 0, "fitness_centre": 1, "horse_riding": 0, - "ice_rink" : 0, + "ice_rink": 0, "pitch": 0, "stadium": 0, "track": 0, @@ -120,23 +120,23 @@ "bank": 1, "bureau_de_change": 0, "boat_rental": 0, - "car_rental" : 0, + "car_rental": 0, "internet_cafe": 0, "kiosk": 0, "money_transfer": 0, "market": 0, "marketplace": 0, - "pharmacy" : 0, - "post_office" : 1, + "pharmacy": 0, + "post_office": 1, "retail": 0, - "shop" : 0, + "shop": 0, "store": 0, "supermarket": 0, "warehouse": 0, "factory": 0, - "gas" : 0, + "gas": 0, "heating_station": 0, - "oil_mill" : 0, + "oil_mill": 0, "oil": 0, "wellsite": 0, "well_cluster": 0, @@ -145,7 +145,7 @@ "dormitory": 1, "condominium": 1, "sheltered_housing": 0, - "workers_dormitory" :0, + "workers_dormitory": 0, "terrace": 1, "transportation": 0, "hangar": 0, @@ -1330,10 +1330,14 @@ ] }, "slight_construction": { - "wall": ["no"] + "wall": [ + "no" + ] }, "water_tower:service": { - "man_made": ["water_tower"] + "man_made": [ + "water_tower" + ] }, "building": { "building": [ diff --git a/osm/src/main/resources/org/orbisgis/geoclimate/osm/coastlineParams.json b/osm/src/main/resources/org/orbisgis/geoclimate/osm/coastlineParams.json index e299d77353..327a2394e6 100644 --- a/osm/src/main/resources/org/orbisgis/geoclimate/osm/coastlineParams.json +++ b/osm/src/main/resources/org/orbisgis/geoclimate/osm/coastlineParams.json @@ -3,7 +3,9 @@ "natural": [ "coastline" ], - "place": ["islet"] + "place": [ + "islet" + ] }, "columns": [ "natural", diff --git a/osm/src/main/resources/org/orbisgis/geoclimate/osm/imperviousParams.json b/osm/src/main/resources/org/orbisgis/geoclimate/osm/imperviousParams.json index d218220388..5d477113c1 100644 --- a/osm/src/main/resources/org/orbisgis/geoclimate/osm/imperviousParams.json +++ b/osm/src/main/resources/org/orbisgis/geoclimate/osm/imperviousParams.json @@ -29,8 +29,10 @@ ], "aeroway": [ "apron" - ],"power": [ - "plant", "substation" + ], + "power": [ + "plant", + "substation" ] }, "columns": [ @@ -74,7 +76,8 @@ "platform" ], "power": [ - "plant", "substation" + "plant", + "substation" ] }, "commercial": { diff --git a/osm/src/main/resources/org/orbisgis/geoclimate/osm/urbanAreasParams.json b/osm/src/main/resources/org/orbisgis/geoclimate/osm/urbanAreasParams.json index eae3003534..30de49c9c0 100644 --- a/osm/src/main/resources/org/orbisgis/geoclimate/osm/urbanAreasParams.json +++ b/osm/src/main/resources/org/orbisgis/geoclimate/osm/urbanAreasParams.json @@ -1,16 +1,26 @@ { "tags": { - "landuse": [ "commercial", + "landuse": [ + "commercial", "residential", "retail", "industrial", - "construction", "military", - "railway", "farmyard"], - "construction":[], - "amenity": ["school", + "construction", + "military", + "railway", + "farmyard" + ], + "construction": [], + "amenity": [ + "school", "university", - "research_institute", "community_centre"], - "power": ["plant", "substation"] + "research_institute", + "community_centre" + ], + "power": [ + "plant", + "substation" + ] }, "columns": [ "landuse", @@ -66,7 +76,8 @@ "industrial" ], "power": [ - "plant", "substation" + "plant", + "substation" ] }, "port": { diff --git a/osm/src/main/resources/org/orbisgis/geoclimate/osm/vegetParams.json b/osm/src/main/resources/org/orbisgis/geoclimate/osm/vegetParams.json index 3cfca259e7..138a23cff7 100644 --- 
a/osm/src/main/resources/org/orbisgis/geoclimate/osm/vegetParams.json +++ b/osm/src/main/resources/org/orbisgis/geoclimate/osm/vegetParams.json @@ -47,7 +47,9 @@ "surface": [ "grass" ], - "tourism": ["camp_site"], + "tourism": [ + "camp_site" + ], "sport": [] }, "columns": [ @@ -126,8 +128,13 @@ "surface": [ "grass" ], - "tourism": ["camp_site"], - "sport": ["soccer", "rugby"] + "tourism": [ + "camp_site" + ], + "sport": [ + "soccer", + "rugby" + ] }, "grassland": { "landcover": [ diff --git a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/InputDataFormattingTest.groovy b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/InputDataFormattingTest.groovy index ece5567722..a77854ca4a 100644 --- a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/InputDataFormattingTest.groovy +++ b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/InputDataFormattingTest.groovy @@ -288,15 +288,20 @@ class InputDataFormattingTest { } def h2GIS = H2GIS.open("${file.absolutePath + File.separator}osm_gislayers;AUTO_SERVER=TRUE".toString()) - def zoneToExtract = "Marseille" + def zoneToExtract = "Nimes" - zoneToExtract =[44.795480,12.323227,45.004622,12.627411] + //def nominatim = org.orbisgis.geoclimate.osmtools.OSMTools.Utilities.getNominatimData(zoneToExtract) + // zoneToExtract = nominatim.bbox + + zoneToExtract = [43.824643, 4.383599, 43.827271, 4.388207] + + //zoneToExtract =[44.795480,12.323227,45.004622,12.627411] Map extractData = OSM.InputDataLoading.extractAndCreateGISLayers(h2GIS, zoneToExtract) String formatedPlaceName = zoneToExtract.join("-").trim().split("\\s*(,|\\s)\\s*").join("_"); - if(!formatedPlaceName){ - formatedPlaceName=zoneToExtract + if (!formatedPlaceName) { + formatedPlaceName = zoneToExtract } if (extractData.zone != null) { @@ -306,7 +311,7 @@ class InputDataFormattingTest { //Zone envelope h2GIS.getTable(extractData.zone_envelope).save("${file.absolutePath + File.separator}zone_envelope.fgb", true) - +/* //Urban Areas def inputUrbanAreas = OSM.InputDataFormatting.formatUrbanAreas(h2GIS, extractData.urban_areas,extractData.zone) @@ -338,31 +343,32 @@ class InputDataFormattingTest { h2GIS,extractData.vegetation,extractData.zone_envelope) h2GIS.save(inputVegetationTableName,"${file.absolutePath + File.separator}vegetation.fgb", true) - println("Vegetation formatted") + println("Vegetation formatted")*/ //Hydrography def inputWaterTableName = OSM.InputDataFormatting.formatWaterLayer(h2GIS, extractData.water, extractData.zone_envelope) //Impervious - String imperviousTable = OSM.InputDataFormatting.formatImperviousLayer(h2GIS, extractData.impervious, - extractData.zone_envelope) - h2GIS.save(imperviousTable,"${file.absolutePath + File.separator}impervious.fgb", true) + /* String imperviousTable = OSM.InputDataFormatting.formatImperviousLayer(h2GIS, extractData.impervious, + extractData.zone_envelope) + h2GIS.save(imperviousTable,"${file.absolutePath + File.separator}impervious.fgb", true) - println("Impervious formatted") + println("Impervious formatted")*/ //Save coastlines to debug - h2GIS.save(extractData.coastline,"${file.absolutePath + File.separator}coastlines.fgb", true) + h2GIS.save(extractData.coastline, "${file.absolutePath + File.separator}coastlines.fgb", true) //Sea/Land mask def inputSeaLandTableName = OSM.InputDataFormatting.formatSeaLandMask(h2GIS, extractData.coastline, extractData.zone_envelope, inputWaterTableName) - h2GIS.save(inputSeaLandTableName,"${file.absolutePath + File.separator}sea_land_mask.fgb", true) + println(inputSeaLandTableName.isEmpty()) + 
h2GIS.save(inputSeaLandTableName, "${file.absolutePath + File.separator}sea_land_mask.fgb", true) println("Sea land mask formatted") //Save it after sea/land mask because the water table can be modified - h2GIS.save(inputWaterTableName,"${file.absolutePath + File.separator}water.fgb", true) + h2GIS.save(inputWaterTableName, "${file.absolutePath + File.separator}water.fgb", true) } else { assertTrue(false) @@ -374,7 +380,6 @@ class InputDataFormattingTest { @Disabled void createGISFormatLayersTestIntegration() { Map gISLayers = OSM.InputDataLoading.createGISLayers(h2GIS, "/tmp/map.osm", 2154) - //Format Roads def road = OSM.InputDataFormatting.formatRoadLayer(h2GIS, gISLayers.road) h2GIS.getTable(road).save("/tmp/formated_osm_road.shp", true) diff --git a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy index a8e51356ed..fcb92a7dc9 100644 --- a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy +++ b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorflowOSMTest.groovy @@ -100,7 +100,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { datasource.load(urlZone, zone, true) //Run tests geoIndicatorsCalc(dirFile.absolutePath, datasource, zone, buildingTableName, roadTableName, - railTableName, vegetationTableName, hydrographicTableName, "", "","", + railTableName, vegetationTableName, hydrographicTableName, "", "", "", saveResults, svfSimplified, indicatorUse, prefixName, false) } @@ -195,7 +195,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { //Run tests geoIndicatorsCalc(dirFile.absolutePath, datasource, zone, buildingTableName, roadTableName, - railTableName, vegetationTableName, hydrographicTableName, imperviousTableName, sealandTableName,"", + railTableName, vegetationTableName, hydrographicTableName, imperviousTableName, sealandTableName, "", saveResults, svfSimplified, indicatorUse, prefixName, true) } @@ -464,7 +464,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { "terrain_roughness_class" : 1]] ] ] - assertThrows(Exception.class, ()->OSM.workflow(osm_parmeters)) + assertThrows(Exception.class, () -> OSM.workflow(osm_parmeters)) } @Test @@ -570,7 +570,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { "svfSimplified": true] ] ] - assertThrows(Exception.class, ()->OSM.workflow(osm_parmeters)) + assertThrows(Exception.class, () -> OSM.workflow(osm_parmeters)) } @@ -649,11 +649,11 @@ class WorflowOSMTest extends WorkflowAbstractTest { File dirFile = new File(directory) dirFile.delete() dirFile.mkdir() - def location = "Redon" + def location = "Mâcon" //def nominatim = org.orbisgis.geoclimate.osmtools.OSMTools.Utilities.getNominatimData(location) def grid_size = 100 - // location = nominatim.bbox - // location=[44.795480,12.323227,45.004622,12.627411] + //location = nominatim.bbox + // location=[44.795480,12.323227,45.004622,12.627411] def osm_parmeters = [ "description" : "Example of configuration file to run the OSM workflow and store the result in a folder", "geoclimatedb": [ @@ -663,7 +663,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { ], "input" : [ "locations": [location],//["Pont-de-Veyle"],//[nominatim["bbox"]],//["Lorient"], - "area": 2800, + "area" : 2800, //"date":"2017-12-31T19:20:00Z", /*"timeout":182, "maxsize": 536870918, @@ -672,29 +672,29 @@ class WorflowOSMTest extends WorkflowAbstractTest { "folder": directory] , "parameters" : - ["distance" : 0, - "rsu_indicators" : [ - + ["distance" : 0, + "rsu_indicators" : [ 
"indicatorUse": ["LCZ"] //, "UTRF", "TEB"] - ],"grid_indicators": [ - "x_size": grid_size, - "y_size": grid_size, + ], "grid_indicators" : [ + "x_size" : grid_size, + "y_size" : grid_size, //"rowCol": true, - "indicators": [//"BUILDING_FRACTION","BUILDING_HEIGHT", "BUILDING_POP", - //"BUILDING_TYPE_FRACTION", - //"WATER_FRACTION","VEGETATION_FRACTION", - //"ROAD_FRACTION", "IMPERVIOUS_FRACTION", - "LCZ_PRIMARY", - //"BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", - //"SEA_LAND_FRACTION", - //"ASPECT_RATIO", - //"SVF", - // "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", - "SPRAWL_AREAS", - "SPRAWL_DISTANCES", "SPRAWL_COOL_DISTANCE"], - //"lcz_lod":2 - ], "worldpop_indicators": true/* + "indicators": [//"BUILDING_FRACTION","BUILDING_HEIGHT", "BUILDING_POP", + //"BUILDING_TYPE_FRACTION", + //"WATER_FRACTION","VEGETATION_FRACTION", + //"ROAD_FRACTION", "IMPERVIOUS_FRACTION", + "LCZ_PRIMARY", + //"BUILDING_HEIGHT_WEIGHTED", "BUILDING_SURFACE_DENSITY", + //"SEA_LAND_FRACTION", + //"ASPECT_RATIO", + //"SVF", + // "HEIGHT_OF_ROUGHNESS_ELEMENTS", "TERRAIN_ROUGHNESS_CLASS", + "URBAN_SPRAWL_AREAS", + "URBAN_SPRAWL_DISTANCES", + "URBAN_SPRAWL_COOL_DISTANCE"], + "lcz_lod":1 + ], "worldpop_indicators": true/* "road_traffic" : true, "noise_indicators" : [ @@ -702,18 +702,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { ]*/ ] ] - Map results = OSM.workflow(osm_parmeters) - /*if(results) { - H2GIS h2gis = H2GIS.open("${directory + File.separator}geoclimate_test_integration;AUTO_SERVER=TRUE") - def tableNames = results.values() - def gridTable = tableNames.grid_indicators[0] - String sprawl_areas = Geoindicators.SpatialUnits.computeSprawlAreas(h2gis, gridTable,grid_size) - def folder_save =location in Collection ? location.join("_") : location - def path = directory + File.separator + "osm_$folder_save" + File.separator - path = "/tmp/" - h2gis.save(sprawl_areas, path + "sprawl_areas.fgb", true) - h2gis.save(tableNames.rsu_lcz[0], path + "rsu_lcz.fgb", true) - }*/ + OSM.workflow(osm_parmeters) } @Disabled @@ -762,7 +751,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { def rsuLczUpdated = "UPDATED_HEIGHTTEST_RSU_LCZ" println(""" Grid indicators are calculated """) def gridProcess = Geoindicators.WorkflowGeoIndicators.createGrid(datasource, geomEnv, - x_size, y_size,srid_calc, false) + x_size, y_size, srid_calc, false) if (gridProcess) { def computeRasterizedIndicators = Geoindicators.WorkflowGeoIndicators.rasterizeIndicators(datasource, gridProcess, @@ -786,9 +775,8 @@ class WorflowOSMTest extends WorkflowAbstractTest { def outputFolder = new File('/home/decide/Data/WRF/Data/output/updated') def subFolder = new File(outputFolder.getAbsolutePath() + File.separator + "osm_" + id_zone) Geoindicators.WorkflowUtilities.saveToAscGrid(computeRasterizedIndicators, subFolder, "grid_indicators", datasource, 3007, reproject, deleteOutputData) - } - } + } @Test //Integration tests @@ -813,13 +801,13 @@ class WorflowOSMTest extends WorkflowAbstractTest { "delete": false ], "input" : [ - "locations": [[43.726898,7.298452,43.727677,7.299632]]], + "locations": [[43.726898, 7.298452, 43.727677, 7.299632]]], "output" : [ "folder": ["path" : directory, "tables": ["building", "zone"]]], "parameters" : - ["distance" : 0, - rsu_indicators: ["indicatorUse" : ["LCZ"]] + ["distance" : 0, + rsu_indicators: ["indicatorUse": ["LCZ"]] ] ] Map process = OSM.WorkflowOSM.workflow(osm_parmeters) @@ -842,13 +830,13 @@ class WorflowOSMTest extends WorkflowAbstractTest { "delete": false ], "input" : [ - 
"locations": [[43.726898,7.298452,100]]], + "locations": [[43.726898, 7.298452, 100]]], "output" : [ "folder": ["path" : directory, "tables": ["building", "zone"]]], "parameters" : [ - rsu_indicators: ["indicatorUse" : ["LCZ"]] + rsu_indicators: ["indicatorUse": ["LCZ"]] ] ] Map process = OSM.WorkflowOSM.workflow(osm_parmeters) @@ -872,7 +860,7 @@ class WorflowOSMTest extends WorkflowAbstractTest { "delete": false ], "input" : [ - "locations": [[43.726898,7.298452,100]]], + "locations": [[43.726898, 7.298452, 100]]], "output" : [ "folder": ["path" : directory, "tables": ["building", "zone"]]] @@ -900,19 +888,20 @@ class WorflowOSMTest extends WorkflowAbstractTest { "delete": false ], "input" : [ - "locations": [[43.726898,7.298452,100]]] + "locations": [[43.726898, 7.298452, 100]]] ] Map process = OSM.WorkflowOSM.workflow(osm_parmeters) def tableNames = process.values()[0] H2GIS h2gis = H2GIS.open("${directory + File.separator}geoclimate_chain_db") //All tables must exist in the database - tableNames.each {it-> + tableNames.each { it -> assertTrue(h2gis.hasTable(it.value)) } } - @Disabled //Because it takes some time to build the OSM query + @Disabled + //Because it takes some time to build the OSM query @Test void testEstimateBuildingWithAllInputHeightDate() { String directory = folder.absolutePath + File.separator + "test_building_height" @@ -926,14 +915,14 @@ class WorflowOSMTest extends WorkflowAbstractTest { "delete": false ], "input" : [ - "locations": [[43.726898,7.298452,43.727677,7.299632]], - "date":"2015-12-31T19:20:00Z"], + "locations": [[43.726898, 7.298452, 43.727677, 7.299632]], + "date" : "2015-12-31T19:20:00Z"], "output" : [ "folder": ["path" : directory, "tables": ["building", "zone"]]], "parameters" : - ["distance" : 0, - rsu_indicators: ["indicatorUse" : ["LCZ"]] + ["distance" : 0, + rsu_indicators: ["indicatorUse": ["LCZ"]] ] ] Map process = OSM.WorkflowOSM.workflow(osm_parmeters) @@ -962,8 +951,8 @@ class WorflowOSMTest extends WorkflowAbstractTest { "input" : [ "locations": [[47.4, -4.8, 47.6, -4.6]]], "parameters" : - ["distance" : 0, - rsu_indicators: ["indicatorUse" : ["LCZ"]] + ["distance" : 0, + rsu_indicators: ["indicatorUse": ["LCZ"]] ] ] Map process = OSM.WorkflowOSM.workflow(osm_parmeters) @@ -971,10 +960,10 @@ class WorflowOSMTest extends WorkflowAbstractTest { def lcz = tableNames.rsu_lcz[0] H2GIS h2gis = H2GIS.open("${directory + File.separator}sea_lcz_db;AUTO_SERVER=TRUE") h2gis.save(lcz, "/tmp/sea.geojson", true) - def lcz_group= h2gis.firstRow("select lcz_primary, count(*) as count from $lcz group by lcz_primary".toString()) - assertTrue(lcz_group.size()==2) - assertTrue(lcz_group.lcz_primary==107) - assertTrue(lcz_group.count==1) + def lcz_group = h2gis.firstRow("select lcz_primary, count(*) as count from $lcz group by lcz_primary".toString()) + assertTrue(lcz_group.size() == 2) + assertTrue(lcz_group.lcz_primary == 107) + assertTrue(lcz_group.count == 1) } /** diff --git a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorkflowAbstractTest.groovy b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorkflowAbstractTest.groovy index 2aa847762b..47b49815cf 100644 --- a/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorkflowAbstractTest.groovy +++ b/osm/src/test/groovy/org/orbisgis/geoclimate/osm/WorkflowAbstractTest.groovy @@ -44,8 +44,8 @@ class WorkflowAbstractTest { */ void geoIndicatorsCalc(String directory, def datasource, String zone, String buildingTableName, String roadTableName, String railTableName, String vegetationTableName, - String 
hydrographicTableName, String imperviousTableName , String sealandmaskTableName , - String urban_areas , + String hydrographicTableName, String imperviousTableName, String sealandmaskTableName, + String urban_areas, boolean saveResults, boolean svfSimplified = false, def indicatorUse, String prefixName = "", boolean onlySea = false) { //Create spatial units and relations : building, block, rsu @@ -66,7 +66,7 @@ class WorkflowAbstractTest { def maxBlocks = datasource.firstRow("select max(id_block) as max from ${relationBuildings}".toString()) def countBlocks = datasource.firstRow("select count(*) as count from ${relationBlocks}".toString()) - if (!onlySea){ + if (!onlySea) { assertEquals(countBlocks.count, maxBlocks.max) } @@ -79,7 +79,7 @@ class WorkflowAbstractTest { roadTableName, indicatorUse, prefixName) assert buildingIndicators - if (!onlySea){ + if (!onlySea) { assertTrue(datasource.getSpatialTable(buildingIndicators).srid > 0) } if (saveResults) { @@ -107,7 +107,7 @@ class WorkflowAbstractTest { def countRelationBlocks = datasource.firstRow("select count(*) as count from ${relationBlocks}".toString()) def countBlocksIndicators = datasource.firstRow("select count(*) as count from ${blockIndicators}".toString()) assertEquals(countRelationBlocks.count, countBlocksIndicators.count) - if (!onlySea){ + if (!onlySea) { assertTrue(datasource.getSpatialTable(blockIndicators).srid > 0) } } diff --git a/osmtools/pom.xml b/osmtools/pom.xml index 12417323f2..cbb30f7fee 100644 --- a/osmtools/pom.xml +++ b/osmtools/pom.xml @@ -1,5 +1,6 @@ - + geoclimate-parent org.orbisgis.geoclimate diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy index 4b035a5239..f6579e63eb 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Loader.groovy @@ -49,7 +49,7 @@ import static org.orbisgis.geoclimate.osmtools.utils.OSMElement.* * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) throws Exception{ +Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) throws Exception { if (!datasource) { throw new Exception("No datasource provided.") } @@ -64,11 +64,9 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) t geom = new GeometryFactory().toGeometry(filterArea) } else if (filterArea instanceof Polygon) { geom = filterArea - } - else if (filterArea in Collection && filterArea.size()==4){ + } else if (filterArea in Collection && filterArea.size() == 4) { geom = Utilities.geometryFromValues(filterArea) - } - else { + } else { throw new Exception("The filter area must be an Envelope or a Polygon") } @@ -94,10 +92,10 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float distance = 0) t info "Downloading OSM data from the area $filterArea" if (load(datasource, osmTablesPrefix, extract)) { info "Loading OSM data from the area $filterArea" - return [zone : outputZoneTable, - envelope : outputZoneEnvelopeTable, - prefix : osmTablesPrefix, - epsg : epsg] + return [zone : outputZoneTable, + envelope: outputZoneEnvelopeTable, + prefix : osmTablesPrefix, + epsg : epsg] } else { throw new Exception("Cannot load the OSM data from the area $filterArea".toString()) } @@ -120,7 +118,7 @@ Map fromArea(JdbcDataSource datasource, Object filterArea, float 
distance = 0) t * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) throws Exception{ +Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) throws Exception { if (!placeName) { throw new Exception("Cannot find an area from a void place name.") } @@ -135,7 +133,7 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) t Map nominatimRes = OSMTools.Utilities.getNominatimData(placeName); - if(!nominatimRes){ + if (!nominatimRes) { throw new Exception("Cannot find an area from the place name $placeName".toString()) } def geom = nominatimRes["geom"] @@ -166,9 +164,9 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) t info "Downloading OSM data from the place $placeName" if (load(datasource, osmTablesPrefix, extract)) { info "Loading OSM data from the place $placeName" - return [zone : outputZoneTable, + return [zone : outputZoneTable, envelope: outputZoneEnvelopeTable, - prefix : osmTablesPrefix] + prefix : osmTablesPrefix] } else { throw new Exception("Cannot load the OSM data from the place $placeName".toString()) } @@ -186,7 +184,7 @@ Map fromPlace(JdbcDataSource datasource, String placeName, float distance = 0) t * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -String extract(String overpassQuery) throws Exception{ +String extract(String overpassQuery) throws Exception { info "Extract the OSM data" if (!overpassQuery) { throw new Exception("The query should not be null or empty.") @@ -235,7 +233,7 @@ String extract(String overpassQuery) throws Exception{ * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePath) throws Exception{ +boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePath) throws Exception { if (!datasource) { throw new Exception("Please set a valid database connection.") } @@ -257,7 +255,7 @@ boolean load(JdbcDataSource datasource, String osmTablesPrefix, String osmFilePa if (datasource.load(osmFile, osmTablesPrefix, true)) { info "The input OSM file has been loaded in the database." 
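// The load call above imports the raw .osm file into prefixed tables (${osmTablesPrefix}_node, _way,
// _relation and their *_tag companions). The row-count guard just below rejects an extract whose node
// table is empty and names the downloaded file in the exception, so a bad Overpass result is reported
// immediately instead of surfacing later during layer extraction.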
//We must check if there is some data at least one tag - if (datasource.getRowCount("${osmTablesPrefix}_node".toString())==0) { + if (datasource.getRowCount("${osmTablesPrefix}_node".toString()) == 0) { throw new Exception("The downloaded OSM file doesn't contain any data.\n Please check the file ${osmFile} to see what happens.".toString()) } return true diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy index 1a35e830bc..a3663d7402 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/Transform.groovy @@ -183,7 +183,7 @@ String toPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCod * @return outputTableName a name for the table that contains all polygons * @author Erwan Bocher CNRS LAB-STICC */ -String toPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], boolean valid_geom) { +String toPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], boolean valid_geom) { return OSMTools.TransformUtils.toPolygonOrLine(POLYGONS, datasource, osmTablesPrefix, epsgCode, tags, columnsToKeep, null, valid_geom) } @@ -202,8 +202,8 @@ String toPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCod * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Le Saux (UBS LAB-STICC) */ -String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], boolean valid_geom = false) { - return extractWaysAsPolygons( datasource, osmTablesPrefix, epsgCode , tags , columnsToKeep , null, valid_geom) +String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], boolean valid_geom = false) { + return extractWaysAsPolygons(datasource, osmTablesPrefix, epsgCode, tags, columnsToKeep, null, valid_geom) } /** @@ -333,7 +333,7 @@ String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, query += " FROM $waysPolygonTmp AS a, $osmTableTag b WHERE a.id_way=b.id_way and st_isempty(a.the_geom)=false " - if(columnsToKeep) { + if (columnsToKeep) { query += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } @@ -342,7 +342,7 @@ String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, query += " GROUP BY a.id_way;" datasource.execute(query) if (geometry) { - def query_out ="""DROP TABLE IF EXISTS $outputTableName; + def query_out = """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName AS SELECT * FROM $allPolygonsTables as a where """ int geom_srid = geometry.getSRID() if (geom_srid == -1) { @@ -357,8 +357,7 @@ String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, $query_out; DROP TABLE IF EXISTS $waysPolygonTmp, $idWaysPolygons, $allPolygonsTables;""".toString() return outputTableName - } - else{ + } else { datasource """ ALTER TABLE $allPolygonsTables RENAME TO $outputTableName; DROP TABLE IF EXISTS $waysPolygonTmp, $idWaysPolygons; @@ -383,7 +382,7 @@ String extractWaysAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, * @author Elisabeth Le Saux (UBS LAB-STICC) */ def extractRelationsAsPolygons(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode = 4326, def tags = [], def columnsToKeep = [], 
boolean valid_geom = false) { - return extractRelationsAsPolygons(datasource, osmTablesPrefix, epsgCode, tags , columnsToKeep, null, valid_geom) + return extractRelationsAsPolygons(datasource, osmTablesPrefix, epsgCode, tags, columnsToKeep, null, valid_geom) } /** @@ -459,7 +458,7 @@ def extractRelationsAsPolygons(JdbcDataSource datasource, String osmTablesPrefix """.toString() if (columnsToKeep) { - if (datasource.getRowCount(relationFilteredKeys)< 1) { + if (datasource.getRowCount(relationFilteredKeys) < 1) { debug "Any columns to keep. Cannot create any geometry polygons. An empty table will be returned." datasource """ DROP TABLE IF EXISTS $outputTableName; @@ -584,14 +583,14 @@ def extractRelationsAsPolygons(JdbcDataSource datasource, String osmTablesPrefix WHERE a.id_relation=b.id_relation and st_isempty(a.the_geom)=false """ } - if(columnsToKeep) { + if (columnsToKeep) { query += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } query += " GROUP BY a.the_geom, a.id_relation;" datasource.execute(query.toString()) if (geometry) { - def query_out ="""DROP TABLE IF EXISTS $outputTableName; + def query_out = """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName as SELECT * FROM $allRelationPolygons as a where""" int geom_srid = geometry.getSRID() if (geom_srid == -1) { @@ -604,12 +603,12 @@ def extractRelationsAsPolygons(JdbcDataSource datasource, String osmTablesPrefix datasource.createSpatialIndex(allRelationPolygons, "the_geom") datasource.execute(query_out.toString()) datasource.dropTable(relationsPolygonsOuter, relationsPolygonsInner, relationsPolygonsOuterExploded, - relationsPolygonsInnerExploded,relationsMpHoles,relationFilteredKeys, allRelationPolygons) + relationsPolygonsInnerExploded, relationsMpHoles, relationFilteredKeys, allRelationPolygons) return outputTableName - }else{ + } else { datasource.execute("""ALTER TABLE $allRelationPolygons RENAME TO $outputTableName""".toString()) datasource.dropTable(relationsPolygonsOuter, relationsPolygonsInner, relationsPolygonsOuterExploded, - relationsPolygonsInnerExploded,relationsMpHoles,relationFilteredKeys) + relationsPolygonsInnerExploded, relationsMpHoles, relationFilteredKeys) return outputTableName } @@ -747,18 +746,18 @@ String extractWaysAsLines(JdbcDataSource datasource, String osmTablesPrefix, int def query = """ DROP TABLE IF EXISTS $allLinesTables; CREATE TABLE $allLinesTables AS - SELECT 'w'||a.id_way AS id, a.the_geom ${OSMTools.TransformUtils.createTagList(datasource, columnsSelector,columnsToKeep)} + SELECT 'w'||a.id_way AS id, a.the_geom ${OSMTools.TransformUtils.createTagList(datasource, columnsSelector, columnsToKeep)} FROM $waysLinesTmp AS a, ${osmTablesPrefix}_way_tag b WHERE a.id_way=b.id_way and st_isempty(a.the_geom)=false """ - if(columnsToKeep) { + if (columnsToKeep) { query += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } query += " GROUP BY a.id_way;" datasource.execute(query.toString()) if (geometry) { - def query_out = """DROP TABLE IF EXISTS $outputTableName; + def query_out = """DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName as select * from $allLinesTables as a where """ int geom_srid = geometry.getSRID() if (geom_srid == -1) { @@ -774,7 +773,7 @@ String extractWaysAsLines(JdbcDataSource datasource, String osmTablesPrefix, int DROP TABLE IF EXISTS $waysLinesTmp, $idWaysTable, $allLinesTables; """.toString() return outputTableName - }else{ + } else { datasource """ ALTER TABLE $allLinesTables RENAME TO $outputTableName; DROP TABLE IF EXISTS 
$waysLinesTmp, $idWaysTable; @@ -919,20 +918,20 @@ String extractRelationsAsLines(JdbcDataSource datasource, String osmTablesPrefix def columnsSelector = OSMTools.TransformUtils.getColumnSelector(osmTableTag, tags, columnsToKeep) - def allRelationLines = postfix("all_relation_lines") + def allRelationLines = postfix("all_relation_lines") def query = """ DROP TABLE IF EXISTS $allRelationLines; CREATE TABLE $allRelationLines AS SELECT 'r'||a.id_relation AS id, a.the_geom ${OSMTools.TransformUtils.createTagList(datasource, columnsSelector, columnsToKeep)} FROM $relationsLinesTmp AS a, ${osmTablesPrefix}_relation_tag b WHERE a.id_relation=b.id_relation and st_isempty(a.the_geom)=false """ - if(columnsToKeep) { + if (columnsToKeep) { query += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } query += " GROUP BY a.id_relation;" datasource.execute(query.toString()) if (geometry) { - def query_out =""" DROP TABLE IF EXISTS $outputTableName; + def query_out = """ DROP TABLE IF EXISTS $outputTableName; CREATE TABLE $outputTableName as SELECT * FROM $allRelationLines as a where """ int geom_srid = geometry.getSRID() if (geom_srid == -1) { @@ -947,7 +946,7 @@ String extractRelationsAsLines(JdbcDataSource datasource, String osmTablesPrefix DROP TABLE IF EXISTS $relationsLinesTmp, $relationsFilteredKeys, $allRelationLines; """.toString()) return outputTableName - }else{ + } else { datasource.execute(""" ALTER TABLE $allRelationLines RENAME TO $outputTableName; DROP TABLE IF EXISTS $relationsLinesTmp, $relationsFilteredKeys; """.toString()) diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy index 399f9f0c25..c2c7c520d7 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtils.groovy @@ -248,7 +248,7 @@ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, * @author Elisabeth Lesaux (UBS LAB-STICC) */ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, int epsgCode, - String outputNodesPoints, def tags, def columnsToKeep, Geometry geometry) throws Exception{ + String outputNodesPoints, def tags, def columnsToKeep, Geometry geometry) throws Exception { if (!datasource) { throw new Exception("The datasource should not be null") } @@ -334,15 +334,15 @@ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, } else { filterTableNode = tableNode } - def lastQuery = """DROP TABLE IF EXISTS $outputNodesPoints; + def lastQuery = """DROP TABLE IF EXISTS $outputNodesPoints; CREATE TABLE $outputNodesPoints AS SELECT a.id_node,ST_TRANSFORM(ST_SETSRID(a.THE_GEOM, 4326), $epsgCode) AS the_geom $tagList FROM $filterTableNode AS a, $tableNodeTag b WHERE a.id_node = b.id_node """ - if(columnsToKeep) { + if (columnsToKeep) { lastQuery += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } - lastQuery+= " GROUP BY a.id_node" + lastQuery += " GROUP BY a.id_node" datasource.execute(lastQuery.toString()) } else { @@ -386,10 +386,10 @@ boolean extractNodesAsPoints(JdbcDataSource datasource, String osmTablesPrefix, FROM $filterTableNode AS a, $tableNodeTag b, $filteredNodes c WHERE a.id_node=b.id_node AND a.id_node=c.id_node """ - if(columnsToKeep){ + if (columnsToKeep) { lastQuery += " AND b.TAG_KEY IN ('${columnsToKeep.join("','")}') " } - lastQuery+=" GROUP BY a.id_node" + lastQuery 
+= " GROUP BY a.id_node" datasource.execute(lastQuery.toString()) } datasource.dropTable(tablesToDrop) @@ -474,13 +474,13 @@ def createTagList(JdbcDataSource datasource, def selectTableQuery, List columnsT } def rowskeys = datasource.rows(selectTableQuery.toString()) def list = [] - if(!columnsToKeep){ + if (!columnsToKeep) { rowskeys.tag_key.each { it -> - if (it != null ) { + if (it != null) { list << "MAX(CASE WHEN b.tag_key = '$it' THEN b.tag_value END) AS \"${it}\"" } } - }else { + } else { def nullColumns = columnsToKeep.collect() rowskeys.tag_key.each { it -> if (it != null && columnsToKeep.contains(it)) { @@ -511,7 +511,7 @@ def createTagList(JdbcDataSource datasource, def selectTableQuery, List columnsT * @author Erwan Bocher (CNRS LAB-STICC) * @author Elisabeth Lesaux (UBS LAB-STICC) */ -def buildIndexes(JdbcDataSource datasource, String osmTablesPrefix) throws Exception{ +def buildIndexes(JdbcDataSource datasource, String osmTablesPrefix) throws Exception { if (!datasource) { throw new Exception("The datasource should not be null.") } diff --git a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy index 27904f7a73..d572ca9509 100644 --- a/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy +++ b/osmtools/src/main/groovy/org/orbisgis/geoclimate/osmtools/utils/Utilities.groovy @@ -47,7 +47,7 @@ import static java.nio.charset.StandardCharsets.UTF_8 * * @return a New geometry. */ -Map getNominatimData(def placeName) throws Exception{ +Map getNominatimData(def placeName) throws Exception { if (!placeName) { throw new Exception("The place name should not be null or empty.") } @@ -73,10 +73,10 @@ Map getNominatimData(def placeName) throws Exception{ GeometryFactory geometryFactory = new GeometryFactory() - def geometry_admin= 0 + def geometry_admin = 0 def data = [:] jsonRoot.features.find() { feature -> - if (feature.geometry != null ) { + if (feature.geometry != null) { if (feature.geometry.type.equalsIgnoreCase("polygon")) { def area = parsePolygon(feature.geometry.coordinates, geometryFactory) area.setSRID(4326) @@ -85,7 +85,7 @@ Map getNominatimData(def placeName) throws Exception{ data.putAll(feature.properties) def bbox = feature.bbox data.put("bbox", [bbox[1], bbox[0], bbox[3], bbox[2]]) - if(feature.properties.type=="administrative" && feature.properties.category=='boundary'){ + if (feature.properties.type == "administrative" && feature.properties.category == 'boundary') { return true } } else if (feature.geometry.type.equalsIgnoreCase("multipolygon")) { @@ -99,7 +99,7 @@ Map getNominatimData(def placeName) throws Exception{ data.putAll(feature.properties) def bbox = feature.bbox data.put("bbox", [bbox[1], bbox[0], bbox[3], bbox[2]]) - if(feature.properties.type=="administrative"&& feature.properties.category=='boundary'){ + if (feature.properties.type == "administrative" && feature.properties.category == 'boundary') { return true } } else { @@ -179,7 +179,7 @@ static Geometry getArea(def location) { * * @return A polygon. 
*/ -Polygon parsePolygon(def coordinates, GeometryFactory geometryFactory) throws Exception{ +Polygon parsePolygon(def coordinates, GeometryFactory geometryFactory) throws Exception { if (!coordinates in Collection || !coordinates || !coordinates[0] in Collection || !coordinates[0] || !coordinates[0][0] in Collection || !coordinates[0][0]) { @@ -233,7 +233,7 @@ static Coordinate[] arrayToCoordinate(def coordinates) { * @return True if the file has been downloaded, false otherwise. * */ -boolean executeNominatimQuery(def query, def outputOSMFile) throws Exception{ +boolean executeNominatimQuery(def query, def outputOSMFile) throws Exception { if (!query) { throw new Exception("The Nominatim query should not be null.") } @@ -288,7 +288,7 @@ boolean executeNominatimQuery(def query, def outputOSMFile) throws Exception{ * * @return OSM bbox. */ -String toBBox(Geometry geometry) throws Exception{ +String toBBox(Geometry geometry) throws Exception { if (!geometry) { throw new Exception("Cannot convert to an overpass bounding box.") } @@ -307,7 +307,7 @@ String toBBox(Geometry geometry) throws Exception{ * * @return The OSM polygon. */ -String toPoly(Geometry geometry) throws Exception{ +String toPoly(Geometry geometry) throws Exception { if (!geometry) { throw new Exception("Cannot convert to an overpass poly filter.") } @@ -339,7 +339,7 @@ String toPoly(Geometry geometry) throws Exception{ * * @return A string representation of the OSM query. */ -String buildOSMQuery(Envelope envelope, def keys=null) throws Exception{ +String buildOSMQuery(Envelope envelope, def keys = null) throws Exception { return buildOSMQuery(envelope, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -356,7 +356,7 @@ String buildOSMQuery(Envelope envelope, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ - String buildOSMQuery(Envelope envelope, def keys, OSMElement... osmElement) throws Exception{ +String buildOSMQuery(Envelope envelope, def keys, OSMElement... osmElement) throws Exception { if (!envelope) { throw new Exception("Cannot create the overpass query from the bbox $envelope.".toString()) } @@ -384,7 +384,7 @@ String buildOSMQuery(Envelope envelope, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ -String buildOSMQuery(List latLonCoordinates, def keys=null) throws Exception{ +String buildOSMQuery(List latLonCoordinates, def keys = null) throws Exception { return buildOSMQuery(latLonCoordinates, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -399,12 +399,12 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ - String buildOSMQuery(List latLonCoordinates, def keys, OSMElement... osmElement) throws Exception{ +String buildOSMQuery(List latLonCoordinates, def keys, OSMElement... osmElement) throws Exception { if (!latLonCoordinates) { throw new Exception("Cannot create the overpass query from the bbox $latLonCoordinates.".toString()) } Geometry geom = OSMTools.Utilities.geometryFromValues(latLonCoordinates) - if(geom==null) { + if (geom == null) { throw new Exception("Invalid BBOX") } return buildOSMQuery(geom.getEnvelopeInternal(), keys, osmElement) @@ -423,7 +423,7 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ - String buildOSMQueryWithAllData(Envelope envelope, def keys, OSMElement... 
osmElement) throws Exception{ +String buildOSMQueryWithAllData(Envelope envelope, def keys, OSMElement... osmElement) throws Exception { if (!envelope) { throw new Exception("Cannot create the overpass query from the bbox $envelope.".toString()) } @@ -452,7 +452,7 @@ String buildOSMQuery(List latLonCoordinates, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ -String buildOSMQuery(Polygon polygon, def keys=null) throws Exception{ +String buildOSMQuery(Polygon polygon, def keys = null) throws Exception { return buildOSMQuery(polygon, keys, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION) } @@ -468,7 +468,7 @@ String buildOSMQuery(Polygon polygon, def keys=null) throws Exception{ * * @return A string representation of the OSM query. */ -String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) throws Exception{ +String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) throws Exception { if (polygon == null) { throw new Exception("Cannot create the overpass query from a null polygon.") } @@ -509,7 +509,7 @@ String buildOSMQuery(Polygon polygon, def keys, OSMElement... osmElement) throws * * @return A Map of parameters. */ -Map readJSONParameters(def jsonFile) throws Exception{ +Map readJSONParameters(def jsonFile) throws Exception { if (!jsonFile) { throw new Exception("The given file should not be null") } @@ -631,9 +631,8 @@ Geometry geometryFromValues(def bbox) { } if (bbox.size() == 4) { return buildGeometry([bbox[1], bbox[0], bbox[3], bbox[2]]); - } - else if (bbox.size()==3){ - if(bbox[2]<100){ + } else if (bbox.size() == 3) { + if (bbox[2] < 100) { error("The distance to create a bbox from a point must be greater than 100 meters") return null } diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy index 169a8064de..59f71dfe76 100644 --- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy +++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/LoaderTest.groovy @@ -21,7 +21,6 @@ package org.orbisgis.geoclimate.osmtools import org.junit.jupiter.api.* import org.junit.jupiter.api.io.TempDir -import org.locationtech.jts.geom.Coordinate import org.locationtech.jts.geom.GeometryFactory import org.orbisgis.data.H2GIS import org.orbisgis.geoclimate.osmtools.utils.OSMElement @@ -83,8 +82,8 @@ class LoaderTest extends AbstractOSMToolsTest { */ @Test void badFromAreaTest() { - assertThrows(Exception.class, ()-> OSMTools.Loader.fromArea(ds, null)) - assertThrows(Exception.class, ()-> OSMTools.Loader.fromArea(ds, "A string")) + assertThrows(Exception.class, () -> OSMTools.Loader.fromArea(ds, null)) + assertThrows(Exception.class, () -> OSMTools.Loader.fromArea(ds, "A string")) } /** @@ -93,7 +92,7 @@ class LoaderTest extends AbstractOSMToolsTest { @Test void fromAreaNoDistTest() { //With polygon - Map r = OSMTools.Loader.fromArea(ds, [48.733493,-3.076869,48.733995,-3.075829]) + Map r = OSMTools.Loader.fromArea(ds, [48.733493, -3.076869, 48.733995, -3.075829]) assertFalse r.isEmpty() assertTrue r.containsKey("zone") @@ -119,7 +118,7 @@ class LoaderTest extends AbstractOSMToolsTest { zoneEnv.next() assertEquals "POLYGON ((-3.076869 48.733493, -3.076869 48.733995, -3.075829 48.733995, -3.075829 48.733493, -3.076869 48.733493))", zoneEnv.getGeometry(1).toText() - def env = OSMTools.Utilities.geometryFromValues([48.733493,-3.076869,48.733995,-3.075829]).getEnvelopeInternal() + def 
         env = OSMTools.Utilities.geometryFromValues([48.733493, -3.076869, 48.733995, -3.075829]).getEnvelopeInternal()
         //With Envelope
         r = OSMTools.Loader.fromArea(ds, env)
@@ -157,7 +156,7 @@ class LoaderTest extends AbstractOSMToolsTest {
         def geomFacto = new GeometryFactory()
         def dist = 1000
-        def polygon = OSMTools.Utilities.geometryFromValues([ 48.790598,-3.084508,48.791800,-3.082228])
+        def polygon = OSMTools.Utilities.geometryFromValues([48.790598, -3.084508, 48.791800, -3.082228])
         def env = polygon.getEnvelopeInternal()
@@ -218,7 +217,7 @@ class LoaderTest extends AbstractOSMToolsTest {
      */
     @Test
     void fromPlaceNoDistTest() {
-        if(OSMTools.Utilities.isNominatimReady()) {
+        if (OSMTools.Utilities.isNominatimReady()) {
             def placeName = "Lezoen, Plourivo"
             def formattedPlaceName = "Lezoen_Plourivo_"
             Map r = OSMTools.Loader.fromPlace(ds, placeName)
@@ -257,7 +256,7 @@ class LoaderTest extends AbstractOSMToolsTest {
         def placeName = " The place Name -toFind "
         def dist = 5
         def formattedPlaceName = "The_place_Name_toFind_"
-        assertThrows(Exception.class, ()->OSMTools.Loader.fromPlace(ds, placeName, dist))
+        assertThrows(Exception.class, () -> OSMTools.Loader.fromPlace(ds, placeName, dist))
         def r = OSMTools.Loader.fromPlace(ds, "Lezoen, Plourivo", dist)
@@ -287,10 +286,10 @@ class LoaderTest extends AbstractOSMToolsTest {
     void badFromPlaceTest() {
         def placeName = " The place Name -toFind "
         def dist = -5
-        assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, placeName, dist))
-        assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, placeName, -1))
-        assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(ds, null))
-        assertThrows(Exception.class, ()-> OSMTools.Loader.fromPlace(null, placeName))
+        assertThrows(Exception.class, () -> OSMTools.Loader.fromPlace(ds, placeName, dist))
+        assertThrows(Exception.class, () -> OSMTools.Loader.fromPlace(ds, placeName, -1))
+        assertThrows(Exception.class, () -> OSMTools.Loader.fromPlace(ds, null))
+        assertThrows(Exception.class, () -> OSMTools.Loader.fromPlace(null, placeName))
     }

     /**
@@ -298,7 +297,7 @@ class LoaderTest extends AbstractOSMToolsTest {
      */
     @Test
     void extractTest() {
-        def env = OSMTools.Utilities.geometryFromValues([48.733493,-3.076869,48.733995,-3.075829]).getEnvelopeInternal()
+        def env = OSMTools.Utilities.geometryFromValues([48.733493, -3.076869, 48.733995, -3.075829]).getEnvelopeInternal()
         def query = "[maxsize:1073741824]" + OSMTools.Utilities.buildOSMQuery(env, null, OSMElement.NODE, OSMElement.WAY, OSMElement.RELATION)
         def extract = OSMTools.Loader.extract(query)
         assertNotNull extract
@@ -311,9 +310,9 @@ class LoaderTest extends AbstractOSMToolsTest {
      */
     @Test
     void badExtractTest() {
-        assertThrows(Exception.class, ()-> OSMTools.Loader.extract(null))
+        assertThrows(Exception.class, () -> OSMTools.Loader.extract(null))
         badOverpassQueryOverride()
-        assertThrows(Exception.class, ()-> OSMTools.Loader.extract("toto"))
+        assertThrows(Exception.class, () -> OSMTools.Loader.extract("toto"))
     }

     /**
@@ -329,17 +328,17 @@ class LoaderTest extends AbstractOSMToolsTest {
         def prefix = uuid().toUpperCase()
         //Null dataSource
-        assertThrows(Exception.class, ()-> OSMTools.Loader.load(null, prefix, osmFile.absolutePath))
+        assertThrows(Exception.class, () -> OSMTools.Loader.load(null, prefix, osmFile.absolutePath))
         //Null prefix
-        assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, null, osmFile.absolutePath))
+        assertThrows(Exception.class, () -> OSMTools.Loader.load(ds, null, osmFile.absolutePath))
         //Bad prefix
-        assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, "(╯°□°)╯︵ ┻━┻", osmFile.absolutePath))
+        assertThrows(Exception.class, () -> OSMTools.Loader.load(ds, "(╯°□°)╯︵ ┻━┻", osmFile.absolutePath))
         //Null path
-        assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, prefix, null))
+        assertThrows(Exception.class, () -> OSMTools.Loader.load(ds, prefix, null))
         //Unexisting path
-        assertThrows(Exception.class, ()-> OSMTools.Loader.load(ds, prefix, "ᕕ(ᐛ)ᕗ"))
+        assertThrows(Exception.class, () -> OSMTools.Loader.load(ds, prefix, "ᕕ(ᐛ)ᕗ"))
     }

     /**
diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy
index ff6a397a8c..9b230b37ea 100644
--- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy
+++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/TransformTest.groovy
@@ -20,15 +20,14 @@
 package org.orbisgis.geoclimate.osmtools

 import org.h2gis.utilities.GeographyUtilities
-import org.junit.jupiter.api.*
+import org.junit.jupiter.api.BeforeAll
+import org.junit.jupiter.api.Disabled
+import org.junit.jupiter.api.Test
 import org.junit.jupiter.api.io.TempDir
 import org.locationtech.jts.geom.*
 import org.orbisgis.data.H2GIS
 import org.orbisgis.geoclimate.osmtools.utils.OSMElement
 import org.orbisgis.geoclimate.osmtools.utils.Utilities
-import org.orbisgis.geoclimate.utils.LoggerUtils
-import org.slf4j.Logger
-import org.slf4j.LoggerFactory

 import static org.junit.jupiter.api.Assertions.*
diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy
index 9d4d81ec9f..b440b5d339 100644
--- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy
+++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/TransformUtilsTest.groovy
@@ -264,9 +264,9 @@ class TransformUtilsTest extends AbstractOSMToolsTest {
     @Test
     void badBuildIndexesTest() {
         def osmTable = "toto"
-        assertThrows(Exception.class,()->OSMTools.TransformUtils.buildIndexes(h2gis, null))
-        assertThrows(Exception.class, ()->OSMTools.TransformUtils.buildIndexes(null, null))
-        assertThrows(Exception.class, ()->OSMTools.TransformUtils.buildIndexes(null, osmTable))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.buildIndexes(h2gis, null))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.buildIndexes(null, null))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.buildIndexes(null, osmTable))
     }

     /**
@@ -295,37 +295,37 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
         OSMTools.TransformUtils.buildIndexes(h2gis, osmTablesPrefix)

         assertTrue h2gis.hasTable("${osmTablesPrefix}_node")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_node","id_node")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_node", "id_node")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_way_node")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node","id_node")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node","node_order")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node", "id_node")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_node", "node_order")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_way")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way","id_way")
-        assertFalse h2gis.isIndexed("${osmTablesPrefix}_way","not_taken_into_account")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way", "id_way")
+        assertFalse h2gis.isIndexed("${osmTablesPrefix}_way", "not_taken_into_account")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_way_tag")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","tag_key")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","id_way")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag","tag_value")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag", "tag_key")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag", "id_way")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_tag", "tag_value")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_relation_tag")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","tag_key")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","id_relation")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag","tag_value")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag", "tag_key")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag", "id_relation")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation_tag", "tag_value")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_relation")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation","id_relation")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_relation", "id_relation")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_way_member")
-        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_member","id_relation")
+        assertTrue h2gis.isIndexed("${osmTablesPrefix}_way_member", "id_relation")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_way_not_taken_into_account")
-        assertFalse h2gis.isIndexed("${osmTablesPrefix}_way_not_taken_into_account","id_way")
+        assertFalse h2gis.isIndexed("${osmTablesPrefix}_way_not_taken_into_account", "id_way")

         assertTrue h2gis.hasTable("${osmTablesPrefix}_relation_not_taken_into_account")
-        assertFalse h2gis.isIndexed("${osmTablesPrefix}_relation_not_taken_into_account","id_relation")
+        assertFalse h2gis.isIndexed("${osmTablesPrefix}_relation_not_taken_into_account", "id_relation")
     }

     /**
@@ -387,10 +387,10 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
         loadDataForNodeExtraction(h2gis, prefix)

-        assertThrows(Exception.class, ()->OSMTools.TransformUtils.extractNodesAsPoints(null, prefix, epsgCode, outTable, tags, columnsToKeep))
-        assertThrows(Exception.class, ()-> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, null, epsgCode, outTable, tags, columnsToKeep))
-        assertThrows(Exception.class, ()->OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, -1, outTable, tags, columnsToKeep))
-        assertThrows(Exception.class, ()->OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, null, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(null, prefix, epsgCode, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, null, epsgCode, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, -1, outTable, tags, columnsToKeep))
+        assertThrows(Exception.class, () -> OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, null, tags, columnsToKeep))

         assertFalse OSMTools.TransformUtils.extractNodesAsPoints(h2gis, prefix, epsgCode, outTable, [house: "false", path: 'false'], null)
     }
@@ -571,7 +571,7 @@ ${osmTablesPrefix}_way_member, ${osmTablesPrefix}_way_not_taken_into_account, ${
         def prefix = "OSM_" + uuid()
         def epsgCode = 2145
         def tags = ["building": ["house"]]
-        def columnsToKeep = ["building","water"]
+        def columnsToKeep = ["building", "water"]

         //Load data
         createData(h2gis, prefix)
diff --git a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
index 06a53455cc..1aeeeea62a 100644
--- a/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
+++ b/osmtools/src/test/groovy/org/orbisgis/geoclimate/osmtools/utils/UtilitiesTest.groovy
@@ -230,7 +230,7 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badToBBoxTest() {
-        assertThrows(Exception.class, ()-> OSMTools.Utilities.toBBox(null))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.toBBox(null))
     }

     /**
@@ -399,8 +399,8 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badBuildOSMQueryFromPolygonTest() {
-        assertThrows(Exception.class, ()-> OSMTools.Utilities.buildOSMQuery((Polygon) null, ["building"], OSMElement.NODE))
-        assertThrows(Exception.class, ()-> OSMTools.Utilities.buildOSMQuery(new GeometryFactory().createPolygon(), ["building"], OSMElement.NODE))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.buildOSMQuery((Polygon) null, ["building"], OSMElement.NODE))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.buildOSMQuery(new GeometryFactory().createPolygon(), ["building"], OSMElement.NODE))
     }

     /**
@@ -427,8 +427,8 @@ class UtilitiesTest extends AbstractOSMToolsTest {
      */
     @Test
     void badReadJSONParametersTest() {
-        assertThrows(Exception.class, ()-> OSMTools.Utilities.readJSONParameters(null))
-        assertThrows(Exception.class, ()-> OSMTools.Utilities.readJSONParameters(""))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.readJSONParameters(null))
+        assertThrows(Exception.class, () -> OSMTools.Utilities.readJSONParameters(""))
         assertNull OSMTools.Utilities.readJSONParameters("toto")
         assertNull OSMTools.Utilities.readJSONParameters("target")
         assertNull OSMTools.Utilities.readJSONParameters(new File(UtilitiesTest.getResource("bad_json_params.json").toURI()).absolutePath)
diff --git a/pom.xml b/pom.xml
index 4cd5ba532a..9d2481669b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,4 +1,5 @@
-
+
 4.0.0
 org.orbisgis.geoclimate
 geoclimate-parent
diff --git a/worldpoptools/pom.xml b/worldpoptools/pom.xml
index bd759dd494..5de9d02b06 100644
--- a/worldpoptools/pom.xml
+++ b/worldpoptools/pom.xml
@@ -1,5 +1,6 @@
-
+
 geoclimate-parent
 org.orbisgis.geoclimate
diff --git a/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy b/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
index f53c8ca7dc..8e5d563bd1 100644
--- a/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
+++ b/worldpoptools/src/main/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtract.groovy
@@ -85,7 +85,7 @@ String extractWorldPopLayer(String coverageId, List bbox) {
  * @return the name of the imported table
  */
 String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int epsg = 4326, String tableName = "world_pop")
-        throws Exception{
+        throws Exception {
     info "Import the the world pop asc file"
     // The name of the outputTableName is constructed
     def outputTableWorldPopName = postfix tableName
@@ -118,7 +118,7 @@ String importAscGrid(JdbcDataSource datasource, String worldPopFilePath, int eps
         } catch (Exception ex) {
             datasource.execute("""drop table if exists $outputTableWorldPopName;
                 create table $outputTableWorldPopName (the_geom GEOMETRY(POLYGON, $epsg), ID_POP INTEGER, POP FLOAT);""".toString())
-            throw new Exception( "Cannot find any worldpop data on the requested area")
+            throw new Exception("Cannot find any worldpop data on the requested area")
         }
     }
     return outputTableWorldPopName
@@ -233,7 +233,7 @@ boolean grid(String wcsRequest, File outputGridFile) {
  * @param coverageId
  * @return
  */
-boolean isCoverageAvailable(String coverageId){
+boolean isCoverageAvailable(String coverageId) {
     String describeRequest = """https://ogc.worldpop.org/geoserver/ows?service=WCS&version=2.0.1&request=DescribeCoverage&coverageId=$coverageId""".toString()
     def queryUrl = new URL(describeRequest)
     final String proxyHost = System.getProperty("http.proxyHost");
@@ -251,11 +251,11 @@ boolean isCoverageAvailable(String coverageId){
     info "Executing query... $queryUrl"
     //Save the result in a file
     if (connection.responseCode == 200) {
-        XmlSlurper xmlParser = new XmlSlurper()
+        XmlSlurper xmlParser = new XmlSlurper()
         GPathResult nodes = xmlParser.parse(connection.inputStream)
-        if(nodes.Exception){
+        if (nodes.Exception) {
             return true
-        }else {
+        } else {
             error "The service is not available for the coverageId : $coverageId"
             return false
         }
diff --git a/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy b/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
index 33e7735013..60baed26ee 100644
--- a/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
+++ b/worldpoptools/src/test/groovy/org/orbisgis/geoclimate/worldpoptools/WorldPopExtractTest.groovy
@@ -78,7 +78,7 @@ class WorldPopExtractTest {
      */
     @Test
     void extractGridProcess() {
-        if(WorldPopExtract.Extract.isCoverageAvailable("wpGlobal:ppp_2018")) {
+        if (WorldPopExtract.Extract.isCoverageAvailable("wpGlobal:ppp_2018")) {
             String outputFilePath = WorldPopTools.Extract.extractWorldPopLayer("wpGlobal:ppp_2018", [47.63324, -2.78087, 47.65749, -2.75979])
             assertNotNull(outputFilePath)
             assertTrue new File(outputFilePath).exists()
@@ -90,7 +90,7 @@ class WorldPopExtractTest {
      */
     @Test
     void extractLoadGridProcess() {
-        if(WorldPopExtract.Extract.isCoverageAvailable("wpGlobal:ppp_2018")) {
+        if (WorldPopExtract.Extract.isCoverageAvailable("wpGlobal:ppp_2018")) {
             String outputFilePath = WorldPopTools.Extract.extractWorldPopLayer("wpGlobal:ppp_2018", [47.63324, -2.78087, 47.65749, -2.75979])
             if (outputFilePath) {
                 assertTrue new File(outputFilePath).exists()