Skip to content

Commit

Permalink
Merge pull request #424 from ibi-group/dev
Browse files Browse the repository at this point in the history
Feature Release: September 23, 2021
  • Loading branch information
miles-grant-ibigroup authored Oct 4, 2021
2 parents 982c64c + adfad10 commit 553859c
Show file tree
Hide file tree
Showing 55 changed files with 640 additions and 4,793 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/maven.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,11 @@ jobs:
uses: actions/setup-java@v1
with:
java-version: 1.8
# Install node 12 for running e2e tests (and for maven-semantic-release).
- name: Use Node.js 12.x
# Install node 14 for running e2e tests (and for maven-semantic-release).
- name: Use Node.js 14.x
uses: actions/setup-node@v1
with:
node-version: 12.x
node-version: 14.x
- name: Start MongoDB
uses: supercharge/[email protected]
with:
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,7 @@
<dependency>
<groupId>com.github.conveyal</groupId>
<artifactId>gtfs-lib</artifactId>
<version>6.2.4</version>
<version>7.0.2</version>
<!-- Exclusions added in order to silence SLF4J warnings about multiple bindings:
http://www.slf4j.org/codes.html#multiple_bindings
-->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ public enum JobType {
VALIDATE_ALL_FEEDS,
MONITOR_SERVER_STATUS,
MERGE_FEED_VERSIONS,
RECREATE_BUILD_IMAGE
RECREATE_BUILD_IMAGE,
UPDATE_PELIAS
}

public MonitorableJob(Auth0UserProfile owner, String name, JobType type) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,27 +1,36 @@
package com.conveyal.datatools.common.utils;

import com.amazonaws.AmazonServiceException;
import com.conveyal.datatools.common.utils.aws.CheckedAWSException;
import com.conveyal.datatools.common.utils.aws.S3Utils;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.utils.ErrorUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.io.ByteStreams;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.eclipse.jetty.http.HttpStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.HaltException;
import spark.Request;
import spark.Response;

import javax.servlet.MultipartConfigElement;
import javax.servlet.ServletException;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.ServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.Part;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

import static com.conveyal.datatools.manager.DataManager.getConfigPropertyAsText;
Expand Down Expand Up @@ -75,7 +84,10 @@ public static String formatJSON (String key, String value) {
* supplied details about the exception encountered.
*/
public static ObjectNode getObjectNode(String message, int code, Exception e) {
String detail = e != null ? e.getMessage() : null;
String detail = null;
if (e != null) {
detail = e.getMessage() != null ? e.getMessage() : ExceptionUtils.getStackTrace(e);
}
return mapper.createObjectNode()
.put("result", code >= 400 ? "ERR" : "OK")
.put("message", message)
Expand Down Expand Up @@ -265,6 +277,50 @@ public static void copyRequestStreamIntoFile(Request req, File file) {
}
}

/**
* Copies a multi-part file upload to disk, attempts to upload it to S3, then deletes the local file.
* @param req Request object containing file to upload
* @param uploadType A string to include in the uploaded filename. Will also be added to the temporary file
* which makes debugging easier should the upload fail.
* @param key The S3 key to upload the file to
* @return An HTTP S3 url containing the uploaded file
*/
public static String uploadMultipartRequestBodyToS3(Request req, String uploadType, String key) {
// Get file from request
if (req.raw().getAttribute("org.eclipse.jetty.multipartConfig") == null) {
MultipartConfigElement multipartConfigElement = new MultipartConfigElement(System.getProperty("java.io.tmpdir"));
req.raw().setAttribute("org.eclipse.jetty.multipartConfig", multipartConfigElement);
}
String extension = null;
File tempFile = null;
String uploadedFileName = null;
try {
Part part = req.raw().getPart("file");
uploadedFileName = part.getSubmittedFileName();

extension = "." + part.getContentType().split("/", 0)[1];
tempFile = File.createTempFile(part.getName() + "_" + uploadType, extension);
InputStream inputStream;
inputStream = part.getInputStream();
FileOutputStream out = new FileOutputStream(tempFile);
IOUtils.copy(inputStream, out);
} catch (IOException | ServletException e) {
e.printStackTrace();
logMessageAndHalt(req, 400, "Unable to read uploaded file");
}
try {
return S3Utils.uploadObject(uploadType + "/" + key + "_" + uploadedFileName, tempFile);
} catch (AmazonServiceException | CheckedAWSException e) {
logMessageAndHalt(req, 500, "Error uploading file to S3", e);
return null;
} finally {
boolean deleted = tempFile.delete();
if (!deleted) {
LOG.error("Could not delete s3 temporary upload file");
}
}
}

private static String trimLines(String str) {
if (str == null) return "";
String[] lines = str.split("\n");
Expand Down
18 changes: 18 additions & 0 deletions src/main/java/com/conveyal/datatools/common/utils/aws/S3Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.GeneratePresignedUrlRequest;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.conveyal.datatools.common.utils.SparkUtils;
import com.conveyal.datatools.manager.DataManager;
import com.conveyal.datatools.manager.models.OtpServer;
Expand Down Expand Up @@ -192,6 +194,22 @@ public static String downloadObject(
}
}

    /**
     * Uploads a file to the default S3 bucket under the given key and grants public-read access to it.
     * @param keyName The S3 key to upload the file to
     * @param fileToUpload The file to upload to S3
     * @return A URL where the file is publicly accessible (derived from the default bucket and key)
     * @throws AmazonServiceException if S3 rejects the put request
     * @throws CheckedAWSException if an S3 client cannot be obtained
     */
    public static String uploadObject(String keyName, File fileToUpload) throws AmazonServiceException, CheckedAWSException {
        String url = S3Utils.getDefaultBucketUrlForKey(keyName);
        // FIXME: This may need to change during feed store refactor
        getDefaultS3Client().putObject(new PutObjectRequest(
            S3Utils.DEFAULT_BUCKET, keyName, fileToUpload)
            // grant public read
            .withCannedAcl(CannedAccessControlList.PublicRead));
        return url;
    }

public static AmazonS3 getDefaultS3Client() throws CheckedAWSException {
return getS3Client (null, null);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,6 @@
package com.conveyal.datatools.editor.controllers.api;

import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.conveyal.datatools.common.utils.SparkUtils;
import com.conveyal.datatools.common.utils.aws.CheckedAWSException;
import com.conveyal.datatools.common.utils.aws.S3Utils;
import com.conveyal.datatools.editor.controllers.EditorLockController;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.models.FeedSource;
Expand All @@ -23,22 +18,15 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.dbutils.DbUtils;
import org.apache.commons.io.IOUtils;
import org.eclipse.jetty.http.HttpStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.HaltException;
import spark.Request;
import spark.Response;

import javax.servlet.MultipartConfigElement;
import javax.servlet.ServletException;
import javax.servlet.http.Part;
import javax.sql.DataSource;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
Expand Down Expand Up @@ -108,49 +96,6 @@ public abstract class EditorController<T extends Entity> {
registerRoutes();
}

public static String uploadBranding(Request req, String key) {
String url;

// Get file from request
if (req.raw().getAttribute("org.eclipse.jetty.multipartConfig") == null) {
MultipartConfigElement multipartConfigElement = new MultipartConfigElement(System.getProperty("java.io.tmpdir"));
req.raw().setAttribute("org.eclipse.jetty.multipartConfig", multipartConfigElement);
}
String extension = null;
File tempFile = null;
try {
Part part = req.raw().getPart("file");
extension = "." + part.getContentType().split("/", 0)[1];
tempFile = File.createTempFile(key + "_branding", extension);
InputStream inputStream;
inputStream = part.getInputStream();
FileOutputStream out = new FileOutputStream(tempFile);
IOUtils.copy(inputStream, out);
} catch (IOException | ServletException e) {
e.printStackTrace();
logMessageAndHalt(req, 400, "Unable to read uploaded file");
}

try {
String keyName = "branding/" + key + extension;
url = S3Utils.getDefaultBucketUrlForKey(keyName);
// FIXME: This may need to change during feed store refactor
S3Utils.getDefaultS3Client().putObject(new PutObjectRequest(
S3Utils.DEFAULT_BUCKET, keyName, tempFile)
// grant public read
.withCannedAcl(CannedAccessControlList.PublicRead));
return url;
} catch (AmazonServiceException | CheckedAWSException e) {
logMessageAndHalt(req, 500, "Error uploading file to S3", e);
return null;
} finally {
boolean deleted = tempFile.delete();
if (!deleted) {
LOG.error("Could not delete s3 upload file.");
}
}
}

/**
* Add static HTTP endpoints to Spark static instance.
*/
Expand Down Expand Up @@ -399,7 +344,7 @@ private String uploadEntityBranding (Request req, Response res) {
int id = getIdFromRequest(req);
String url;
try {
url = uploadBranding(req, String.format("%s_%d", classToLowercase, id));
url = SparkUtils.uploadMultipartRequestBodyToS3(req, "branding", String.format("%s_%d", classToLowercase, id));
} catch (HaltException e) {
// Do not re-catch halts thrown for exceptions that have already been caught.
throw e;
Expand Down
135 changes: 0 additions & 135 deletions src/main/java/com/conveyal/datatools/editor/datastore/DatabaseTx.java

This file was deleted.

Loading

0 comments on commit 553859c

Please sign in to comment.