Skip to content

Commit

Permalink
Merge branch 'develop' into 252-include-retrofit-information-on-the-m…
Browse files Browse the repository at this point in the history
…apping-class
  • Loading branch information
longshuicy committed Jan 30, 2024
2 parents 1be78bf + 5c87be5 commit 6f4d20a
Show file tree
Hide file tree
Showing 23 changed files with 1,035 additions and 305 deletions.
15 changes: 11 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,17 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

## [Unchanged]

## [Unreleased]

### Added
- Add `hazardDatasets` field to TornadoDataset, TornadoModel and EarthquakeModel class [#213](https://github.com/IN-CORE/incore-services/issues/213)
- Add retrofit information to the mapping set class [#252](https://github.com/IN-CORE/incore-services/issues/252)

### Changed
- Use Java models to represent semantics [#239](https://github.com/IN-CORE/incore-services/issues/239)
- Sort Semantic Definition Alphabetically [#238](https://github.com/IN-CORE/incore-services/issues/238)

## [1.23.0] - 2023-12-13

### Changed
Expand All @@ -23,7 +29,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
## [1.21.0] - 2023-10-11

### Added
- Add template dataset generation (CSV and Shapefile) API to semantics-service [#214](https://github.com/IN-CORE/incore-services/issues/214)
- Geopakage handling in services [#205](https://github.com/IN-CORE/incore-services/issues/205)
- Template dataset generation (CSV and Shapefile) API to semantics-service [#214](https://github.com/IN-CORE/incore-services/issues/214)
- Owner item added to the dataset, hazard, and dfr3 object [#92](https://github.com/IN-CORE/incore-services/issues/92)
- Attenuation model Sadigh et al. 1997 [#208](https://github.com/IN-CORE/incore-services/issues/208)
- Include incore lab quota to the allocation endpoints [#217](https://github.com/IN-CORE/incore-services/issues/217)
Expand All @@ -43,8 +50,8 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
- Refactor POST /types API [#159](https://github.com/IN-CORE/incore-services/issues/159)
- Refactor GET /types endpoint [#156](https://github.com/IN-CORE/incore-services/issues/156)
- Refactor DELETE /types/{id} endpoint [#160](https://github.com/IN-CORE/incore-services/issues/160)
- Added filtering by space in GET /types endpoint [#191](https://github.com/IN-CORE/incore-services/issues/191)
- Added limiting and offset for GET /types/search endpoint and to GET /types endpoint [#195](https://github.com/IN-CORE/incore-services/issues/195)
- Filtering by space in GET /types endpoint [#191](https://github.com/IN-CORE/incore-services/issues/191)
- Limiting and offset for GET /types/search endpoint and to GET /types endpoint [#195](https://github.com/IN-CORE/incore-services/issues/195)

### Changed
- Refactor AllocationsController and UsageController to use updated authorizer [#143](https://github.com/IN-CORE/incore-services/issues/143)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import io.swagger.v3.oas.annotations.tags.Tag;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.Logger;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.FormDataParam;

Expand Down Expand Up @@ -525,7 +526,7 @@ public Dataset deleteDataset(@Parameter(name = "Dataset Id from data service", r
public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", required = true) @PathParam("id") String datasetId,
@Parameter(name = "Form inputs representing the file(s). The id/key of each input file has to be 'file'",
required = true)
FormDataMultiPart inputs) {
FormDataMultiPart inputs) throws IOException {
if (!authorizer.canUserWriteMember(this.username, datasetId, spaceRepository.getAllSpaces(),this.groups)) {
throw new IncoreHTTPException(Response.Status.FORBIDDEN,
this.username + " has no permission to modify the dataset " + datasetId);
Expand Down Expand Up @@ -616,6 +617,7 @@ public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", req
boolean isZip = false;
boolean isJoin = false;
boolean isPrj = false;
boolean isGpkg = false;

int fileCounter = 0;
int linkCounter = 0;
Expand Down Expand Up @@ -644,6 +646,8 @@ public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", req
isZip = true;
} else if (fileExt.equalsIgnoreCase("prj")) {
isPrj = true;
} else if (fileExt.equalsIgnoreCase("gpkg")) {
isGpkg = true;
}

// process zip file
Expand All @@ -664,6 +668,24 @@ public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", req
}
}

// process geopackage file
if (isGpkg) {
// if the attached file is a geopackage but the dataset's format is not geopackage, return an error
if (!format.equalsIgnoreCase(FileUtils.FORMAT_GEOPACKAGE)) {
logger.error("The attached file is geopackage while dataset's format is no geopackage.");
throw new IncoreHTTPException(Response.Status.NOT_ACCEPTABLE,
"The attached file is geopackage but dataset's format is not geopackage.");
}

// check how many files are uploaded, if it is more than one file, then raise an error
if (bodyPartSize > 1) {
logger.error("There should be only one file uploaded when it comes with geopackage file ");
throw new IncoreHTTPException(Response.Status.NOT_ACCEPTABLE,
"There are more than one file uploaded with geopackage dataset. " +
"Please upload only single geopackage file.");
}
}

InputStream is = null;
if (paramName.equalsIgnoreCase(POST_PARAMETER_FILE)) {
is = inputs.getFields(paramName).get(fileCounter).getValueAs(InputStream.class);
Expand Down Expand Up @@ -862,7 +884,31 @@ public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", req
}
}

repository.addDataset(dataset);
SimpleFeatureCollection sfc = null; // this will be used for uploading geopackage to geoserver
if (format.equalsIgnoreCase(FileUtils.FORMAT_GEOPACKAGE)) {
if (!isGpkg) {
FileUtils.removeFilesFromFileDescriptor(dataset.getFileDescriptors());
logger.debug("The given file is not a geopackage file.");
throw new IncoreHTTPException(Response.Status.NOT_ACCEPTABLE, "Give file is not a geopackage file.");
}
File tmpFile = new File(FilenameUtils.concat(DATA_REPO_FOLDER, dataFDs.get(0).getDataURL()));

// check if geopackage only has a single layer and the layer name is the same as file name
if (!GeotoolsUtils.isGpkgSingleLayer(tmpFile)) {
FileUtils.removeFilesFromFileDescriptor(dataset.getFileDescriptors());
logger.debug("The geopackage has to have a single layer, and layer name should be the same as file name.");
throw new IncoreHTTPException(Response.Status.NOT_ACCEPTABLE,
"Geopackage is not a single layer or layer name is not the same as file name.");
}

// check if geopackage has guid
sfc = GeotoolsUtils.getSimpleFeatureCollectionFromGeopackage(tmpFile);
if (!GeotoolsUtils.isGUIDinGeopackage(sfc)) {
FileUtils.removeFilesFromFileDescriptor(dataset.getFileDescriptors());
logger.debug("The geopackage does not have guid field.");
throw new IncoreHTTPException(Response.Status.NOT_ACCEPTABLE, "No GUID field.");
}
}

// TODO: This a patch/hotfix so space is not saved when updating the dataset.
// May be this endpoint should not try to addDataset, rather it should just try to update the files section of the existing dataset
Expand Down Expand Up @@ -903,6 +949,16 @@ public Dataset uploadFiles(@Parameter(name = "Dataset Id from data service", req
} else if (format.equalsIgnoreCase("raster") || format.equalsIgnoreCase("geotiff") ||
format.equalsIgnoreCase("tif") || format.equalsIgnoreCase("tiff")) {
GeoserverUtils.datasetUploadToGeoserver(dataset, repository, isShp, isTif, isAsc);
} else if (format.equalsIgnoreCase(FileUtils.FORMAT_GEOPACKAGE)) {
double[] bbox = GeotoolsUtils.getBboxFromGeopackage(sfc);
dataset.setBoundingBox(bbox);
repository.addDataset(dataset);
// uploading geoserver must involve the process of renaming the database in geopackage
File gpkgFile = new File(FilenameUtils.concat(DATA_REPO_FOLDER, dataFDs.get(0).getDataURL()));
if (!GeoserverUtils.uploadGpkgToGeoserver(dataset.getId(), gpkgFile)) {
logger.error("Fail to upload geopackage file");
throw new IncoreHTTPException(Response.Status.INTERNAL_SERVER_ERROR, "Fail to upload geopakcage file.");
}
} else {
if (isShp && isPrj) {
GeoserverUtils.datasetUploadToGeoserver(dataset, repository, isShp, isTif, isAsc);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,13 @@
import edu.illinois.ncsa.incore.service.data.models.MvzLoader;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.Logger;
import org.geotools.data.DataStore;
import org.geotools.data.DataStoreFinder;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureStore;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.geopkg.GeoPkgDataStoreFactory;
import org.opengis.feature.simple.SimpleFeatureType;
import org.jsoup.Jsoup;
import org.jsoup.select.Elements;

Expand All @@ -36,6 +43,7 @@
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;


/**
* Created by ywkim on 6/8/2017.
*/
Expand All @@ -52,6 +60,7 @@ public class FileUtils {
public static final String EXTENSION_CSV = "csv";
public static final String EXTENSION_ZIP = "zip";
public static final String EXTENSION_PRJ = "prj";
public static final String EXTENSION_GPKG = "gpkg";
public static final String EXTENSION_GEOPACKAGE = "gpkg"; // file extension of geopackage
public static final int INDENT_SPACE = 4;
public static final int TYPE_NUMBER_SHP = 1;
Expand All @@ -65,6 +74,7 @@ public class FileUtils {
public static final String DATASET_DESCRIPTION = "description";
public static final String DATASET_FILE_NAME = "fileName";
public static final String FORMAT_SHAPEFILE = "shapefile";
public static final String FORMAT_GEOPACKAGE = "geopackage";
public static final String FORMAT_NETWORK = "shp-network";
public static final String NETWORK_COMPONENT = "networkDataset";
public static final String NETWORK_LINK = "link";
Expand Down Expand Up @@ -856,7 +866,8 @@ public static boolean fileUseGeoserver(String examinedFile, boolean geoserverEna
if (geoserverEnabled) {
String fileExt = FilenameUtils.getExtension(examinedFile);
if (fileExt.equalsIgnoreCase("shp") || fileExt.equalsIgnoreCase("asc")
|| fileExt.equalsIgnoreCase("tif") || fileExt.equalsIgnoreCase("zip")) {
|| fileExt.equalsIgnoreCase("tif") || fileExt.equalsIgnoreCase("zip")
|| fileExt.equalsIgnoreCase("gpkg")) {
useGeoserver = true;
}
}
Expand All @@ -876,4 +887,70 @@ public static void removeFilesFromFileDescriptor(List fdList) {
deleteFiles(delParent);
}
}

/**
 * Copies the single layer of the given GeoPackage into a new temporary GeoPackage file whose
 * database/layer name is the given store name, so the file can be published to GeoServer
 * (GeoServer derives the layer name from the GeoPackage database name).
 *
 * NOTE(review): this also removes the original layer from the input file via
 * {@code removeSchema} — confirm that mutating the input GeoPackage is intended.
 *
 * @param inFile input GeoPackage file; must contain exactly one layer
 * @param store  new layer/database name, used as the output file's base name
 * @return the renamed GeoPackage file, created in a fresh temp directory
 * @throws IOException if the file cannot be opened, contains more than one layer,
 *                     or copying the layer fails
 */
public static File generateRenameGpkgDbName(File inFile, String store) throws IOException {
    HashMap<String, Object> map = new HashMap<>();
    map.put(GeoPkgDataStoreFactory.DBTYPE.key, "geopkg");
    map.put(GeoPkgDataStoreFactory.DATABASE.key, inFile.getAbsoluteFile());
    DataStore dataStore = DataStoreFinder.getDataStore(map);
    if (dataStore == null) {
        throw new IOException("Unable to open geopackage file " + inFile.getName());
    }

    DataStore newDataStore = null;
    try {
        // get all layer names in input geopackage file
        String[] layerNames = dataStore.getTypeNames();

        // input geopackage should only have a single layer
        if (layerNames.length != 1) {
            throw new IOException("There are multiple layers in the GeoPackage");
        }
        String oldLayerName = layerNames[0];

        // create temp directory to hold the renamed copy
        String tempDir = Files.createTempDirectory(FileUtils.DATA_TEMP_DIR_PREFIX).toString();
        File renamedGpkgFile = new File(tempDir + File.separator + store + ".gpkg");

        // get input geopackage layer
        SimpleFeatureStore oldFeatureStore = (SimpleFeatureStore) dataStore.getFeatureSource(oldLayerName);

        // create a new geopackage file in the temp directory
        map.put(GeoPkgDataStoreFactory.DATABASE.key, renamedGpkgFile);
        newDataStore = DataStoreFinder.getDataStore(map);
        if (newDataStore == null) {
            throw new IOException("Unable to create geopackage file " + renamedGpkgFile.getName());
        }

        // create a schema for the new geopackage by copying the input one, renamed to the store name
        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.init(oldFeatureStore.getSchema());
        builder.setName(store);
        SimpleFeatureType newSchema = builder.buildFeatureType();
        newDataStore.createSchema(newSchema);

        SimpleFeatureStore newFeatureStore = (SimpleFeatureStore) newDataStore.getFeatureSource(store);

        // copy the old layer's features to the new layer
        SimpleFeatureCollection oldFeatures = oldFeatureStore.getFeatures();
        newFeatureStore.addFeatures(oldFeatures);

        // drop the original layer from the input file (see NOTE in the Javadoc)
        dataStore.removeSchema(oldLayerName);

        return renamedGpkgFile;
    } catch (IOException e) {
        // chain the original exception so the real failure (e.g. the multi-layer error
        // above) is not masked by a generic "unable to open" message
        throw new IOException("Failed to rename geopackage database for " + inFile.getName(), e);
    } finally {
        // always release the datastore connections, even when an error occurs
        dataStore.dispose();
        if (newDataStore != null) {
            newDataStore.dispose();
        }
    }
}
}
Loading

0 comments on commit 6f4d20a

Please sign in to comment.