
Commit

Merge pull request #452 from ibi-group/dev
Feature Release February 8, 2022
binh-dam-ibigroup authored Feb 8, 2022
2 parents 08b728e + c13cec4 commit f1056fe
Showing 146 changed files with 4,960 additions and 1,523 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
# Transit Data Manager

The core application for Conveyal's transit data tools suite.
The core application for IBI Group's transit data tools suite.

## Documentation

2 changes: 1 addition & 1 deletion configurations/default/env.yml.tmp
@@ -15,5 +15,5 @@ SPARKPOST_EMAIL: [email protected]
GTFS_DATABASE_URL: jdbc:postgresql://localhost/catalogue
# GTFS_DATABASE_USER:
# GTFS_DATABASE_PASSWORD:
#MONGO_URI: mongodb://mongo-host:27017
#MONGO_HOST: mongo-host:27017
MONGO_DB_NAME: catalogue
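
The template now supplies a host:port pair (MONGO_HOST) instead of a full connection URI, with the database name still given separately via MONGO_DB_NAME. A rough sketch, assuming the application assembles its connection from these two settings (the variable names and environment-variable lookup below are illustrative, not the server's actual wiring), using the standard MongoDB Java driver:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoDatabase;

public class MongoConfigSketch {
    public static void main(String[] args) {
        // Illustrative defaults mirroring the template above: MONGO_HOST=mongo-host:27017, MONGO_DB_NAME=catalogue.
        String mongoHost = System.getenv().getOrDefault("MONGO_HOST", "localhost:27017");
        String mongoDbName = System.getenv().getOrDefault("MONGO_DB_NAME", "catalogue");
        // Build the connection string from host:port instead of a full MONGO_URI.
        MongoClient client = MongoClients.create("mongodb://" + mongoHost);
        MongoDatabase db = client.getDatabase(mongoDbName);
        System.out.println("Connected to Mongo database: " + db.getName());
    }
}
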
2 changes: 1 addition & 1 deletion pom.xml
@@ -270,7 +270,7 @@
<dependency>
<groupId>com.github.conveyal</groupId>
<artifactId>gtfs-lib</artifactId>
<version>7.0.2</version>
<version>7.0.4</version>
<!-- Exclusions added in order to silence SLF4J warnings about multiple bindings:
http://www.slf4j.org/codes.html#multiple_bindings
-->
MonitorableJob.java
@@ -79,7 +79,8 @@ public enum JobType {
MONITOR_SERVER_STATUS,
MERGE_FEED_VERSIONS,
RECREATE_BUILD_IMAGE,
UPDATE_PELIAS
UPDATE_PELIAS,
AUTO_PUBLISH_FEED_VERSION
}

public MonitorableJob(Auth0UserProfile owner, String name, JobType type) {
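
The new AUTO_PUBLISH_FEED_VERSION constant implies a job that publishes a feed version without a manual request. A hypothetical sketch of how such a job could declare itself with the new type through the constructor shown above (the class name, field, and jobLogic override are assumptions, not code from this pull request):

// Hypothetical job class, not part of this diff.
public class AutoPublishFeedVersionJob extends MonitorableJob {
    private final FeedVersion feedVersion;

    public AutoPublishFeedVersionJob(Auth0UserProfile owner, FeedVersion feedVersion) {
        // Tag the job with the new type so status reporting can identify it.
        super(owner, "Auto-publishing feed version", JobType.AUTO_PUBLISH_FEED_VERSION);
        this.feedVersion = feedVersion;
    }

    @Override
    public void jobLogic() {
        // Assumed override point for MonitorableJob's work; the publishing itself could
        // reuse FeedVersionController.publishToExternalResource (see that file below).
    }
}
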
SparkUtils.java
@@ -137,9 +137,8 @@ public static void logMessageAndHalt(

if (statusCode >= 500) {
LOG.error(message);

// create report to notify bugsnag if configured
ErrorUtils.reportToBugsnag(e, request.attribute("user"));
Auth0UserProfile userProfile = request != null ? request.attribute("user") : null;
ErrorUtils.reportToBugsnag(e, userProfile);
}

JsonNode json = getObjectNode(message, statusCode, e);
DeploymentController.java
@@ -10,6 +10,7 @@
import com.conveyal.datatools.common.utils.aws.S3Utils;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.jobs.DeployJob;
import com.conveyal.datatools.manager.jobs.PeliasUpdateJob;
import com.conveyal.datatools.manager.models.Deployment;
import com.conveyal.datatools.manager.models.EC2InstanceSummary;
import com.conveyal.datatools.manager.models.FeedSource;
@@ -485,6 +486,23 @@ private static String deploy (Request req, Response res) {
return SparkUtils.formatJobMessage(job.jobId, "Deployment initiating.");
}

/**
* Create a Pelias update job based on an existing, live deployment
*/
private static String peliasUpdate (Request req, Response res) {
Auth0UserProfile userProfile = req.attribute("user");
Deployment deployment = getDeploymentWithPermissions(req, res);
Project project = Persistence.projects.getById(deployment.projectId);
if (project == null) {
logMessageAndHalt(req, 400, "Internal reference error. Deployment's project ID is invalid");
}

// Execute the pelias update job and keep track of it
PeliasUpdateJob peliasUpdateJob = new PeliasUpdateJob(userProfile, "Updating Local Places Index", deployment);
JobUtils.heavyExecutor.execute(peliasUpdateJob);
return SparkUtils.formatJobMessage(peliasUpdateJob.jobId, "Pelias update initiating.");
}

/**
* Uploads a file from the Spark request object to the S3 bucket of the deployment the Pelias Update Job is associated with.
* Follows https://github.com/ibi-group/datatools-server/blob/dev/src/main/java/com/conveyal/datatools/editor/controllers/api/EditorController.java#L111
@@ -537,6 +555,7 @@ public static void register (String apiPrefix) {
fullJson.addMixin(Deployment.class, Deployment.DeploymentWithEc2InstancesMixin.class);

post(apiPrefix + "secure/deployments/:id/deploy/:target", DeploymentController::deploy, slimJson::write);
post(apiPrefix + "secure/deployments/:id/updatepelias", DeploymentController::peliasUpdate, slimJson::write);
post(apiPrefix + "secure/deployments/:id/deploy/", ((request, response) -> {
logMessageAndHalt(request, 400, "Must provide valid deployment target name");
return null;
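
The route registered above exposes the Pelias update as a bare POST. A sketch of calling it from a Java client (the host, API prefix, deployment id, and bearer token below are placeholders; the response body is the job message produced by SparkUtils.formatJobMessage with the PeliasUpdateJob's jobId):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UpdatePeliasClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder(
                // Placeholder URL; substitute the real server, API prefix, and deployment id.
                URI.create("https://datatools.example.com/api/manager/secure/deployments/my-deployment-id/updatepelias"))
            .header("Authorization", "Bearer placeholder-token")
            .POST(HttpRequest.BodyPublishers.noBody())
            .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        // Expected to contain the jobId of the queued PeliasUpdateJob.
        System.out.println(response.body());
    }
}
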
FeedSourceController.java
@@ -252,13 +252,14 @@ private static FeedSource updateExternalFeedResource(Request req, Response res)
}
// Hold previous value for use when updating third-party resource
String previousValue = prop.value;
// Update the property in our database.
ExternalFeedSourceProperty updatedProp = Persistence.externalFeedSourceProperties.updateField(
propertyId, "value", entry.getValue().asText());

// Trigger an event on the external resource
// Update the property with the value to be submitted.
prop.value = entry.getValue().asText();

// Trigger an event on the external resource.
// After updating the external resource, we will update Mongo with values sent by the external resource.
try {
externalFeedResource.propertyUpdated(updatedProp, previousValue, req.headers("Authorization"));
externalFeedResource.propertyUpdated(prop, previousValue, req.headers("Authorization"));
} catch (IOException e) {
logMessageAndHalt(req, 500, "Could not update external feed source", e);
}
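
The rewritten flow sends the in-memory property (already carrying the new value) to the external resource first, and Mongo is only updated afterwards with whatever the external system accepted. A hypothetical sketch of an extension honoring that contract (the class, helper method, and the id field access are assumptions based only on the call site above):

// Hypothetical extension, not part of this diff.
class ExampleExternalFeedResource {
    void propertyUpdated(ExternalFeedSourceProperty property, String previousValue, String authHeader) throws IOException {
        // 1. Push the new value to the third-party registry first.
        boolean accepted = pushToExternalApi(property, authHeader);
        // 2. Persist to Mongo only after the external call, using the value the external system accepted.
        String valueToPersist = accepted ? property.value : previousValue;
        Persistence.externalFeedSourceProperties.updateField(property.id, "value", valueToPersist);
    }

    private boolean pushToExternalApi(ExternalFeedSourceProperty property, String authHeader) throws IOException {
        // Placeholder for an HTTP call to the external service.
        return true;
    }
}
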
FeedVersionController.java
@@ -1,14 +1,15 @@
package com.conveyal.datatools.manager.controllers.api;

import com.conveyal.datatools.common.utils.SparkUtils;
import com.conveyal.datatools.common.utils.aws.CheckedAWSException;
import com.conveyal.datatools.common.utils.aws.S3Utils;
import com.conveyal.datatools.manager.DataManager;
import com.conveyal.datatools.manager.auth.Auth0UserProfile;
import com.conveyal.datatools.manager.auth.Actions;
import com.conveyal.datatools.manager.jobs.CreateFeedVersionFromSnapshotJob;
import com.conveyal.datatools.manager.jobs.GisExportJob;
import com.conveyal.datatools.manager.jobs.MergeFeedsJob;
import com.conveyal.datatools.manager.jobs.MergeFeedsType;
import com.conveyal.datatools.manager.jobs.feedmerge.MergeFeedsType;
import com.conveyal.datatools.manager.jobs.ProcessSingleFeedJob;
import com.conveyal.datatools.manager.models.FeedDownloadToken;
import com.conveyal.datatools.manager.models.FeedRetrievalMethod;
@@ -44,7 +45,7 @@
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;
import static com.conveyal.datatools.manager.controllers.api.FeedSourceController.checkFeedSourcePermissions;
import static com.mongodb.client.model.Filters.eq;
import static com.conveyal.datatools.manager.jobs.MergeFeedsType.REGIONAL;
import static com.conveyal.datatools.manager.jobs.feedmerge.MergeFeedsType.REGIONAL;
import static com.mongodb.client.model.Filters.in;
import static spark.Spark.delete;
import static spark.Spark.get;
@@ -119,8 +120,8 @@ private static String createFeedVersionViaUpload(Request req, Response res) {
LOG.info("Last modified: {}", new Date(newGtfsFile.lastModified()));

// Check that the hashes of the feeds don't match, i.e. that the feed has changed since the last version.
// (as long as there is a latest version, i.e. the feed source is not completely new)
if (latestVersion != null && latestVersion.hash.equals(newFeedVersion.hash)) {
// (as long as there is a latest version, the feed source is not completely new)
if (newFeedVersion.isSameAs(latestVersion)) {
// Uploaded feed matches latest. Delete GTFS file because it is a duplicate.
LOG.error("Upload version {} matches latest version {}.", newFeedVersion.id, latestVersion.id);
newGtfsFile.delete();
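
The duplicate-upload check above now delegates to FeedVersion.isSameAs, which also covers a brand-new feed source with no prior version. The method body is not shown in this diff; a minimal sketch of what it could look like, mirroring the old inline null-and-hash check:

// Hypothetical sketch; the actual FeedVersion.isSameAs implementation is not shown here.
public boolean isSameAs(FeedVersion otherVersion) {
    return otherVersion != null
        && this.hash != null
        && this.hash.equals(otherVersion.hash);
}
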
@@ -260,30 +261,33 @@ private static FeedVersion publishToExternalResource (Request req, Response res)

// notify any extensions of the change
try {
for (String resourceType : DataManager.feedResources.keySet()) {
DataManager.feedResources.get(resourceType).feedVersionCreated(version, null);
}
if (!DataManager.isExtensionEnabled("mtc")) {
// update published version ID on feed source
Persistence.feedSources.updateField(version.feedSourceId, "publishedVersionId", version.namespace);
return version;
} else {
// NOTE: If the MTC extension is enabled, the parent feed source's publishedVersionId will not be updated to the
// version's namespace until the FeedUpdater has successfully downloaded the feed from the shared S3 bucket.
Date publishedDate = new Date();
// Set "sent" timestamp to now and reset "processed" timestamp (in the case that it had previously been
// published as the active version).
version.sentToExternalPublisher = publishedDate;
version.processedByExternalPublisher = null;
Persistence.feedVersions.replace(version.id, version);
return version;
}
publishToExternalResource(version);
return version;
} catch (Exception e) {
logMessageAndHalt(req, 500, "Could not publish feed.", e);
return null;
}
}

public static void publishToExternalResource(FeedVersion version) throws CheckedAWSException {
for (String resourceType : DataManager.feedResources.keySet()) {
DataManager.feedResources.get(resourceType).feedVersionCreated(version, null);
}
if (!DataManager.isExtensionEnabled("mtc")) {
// update published version ID on feed source
Persistence.feedSources.updateField(version.feedSourceId, "publishedVersionId", version.namespace);
} else {
// NOTE: If the MTC extension is enabled, the parent feed source's publishedVersionId will not be updated to the
// version's namespace until the FeedUpdater has successfully downloaded the feed from the shared S3 bucket.
Date publishedDate = new Date();
// Set "sent" timestamp to now and reset "processed" timestamp (in the case that it had previously been
// published as the active version).
version.sentToExternalPublisher = publishedDate;
version.processedByExternalPublisher = null;
Persistence.feedVersions.replace(version.id, version);
}
}
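
Making this method public and static lets callers outside the HTTP handler, such as a background job for the new AUTO_PUBLISH_FEED_VERSION type, reuse the same publishing steps. A hypothetical call site (the feedVersion variable and logger are assumptions):

try {
    // Reuse the extracted controller logic instead of duplicating the publish steps.
    FeedVersionController.publishToExternalResource(feedVersion);
} catch (CheckedAWSException e) {
    // Non-HTTP callers must handle (or rethrow) the checked AWS exception themselves.
    LOG.error("Could not publish feed version {}", feedVersion.id, e);
}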

/**
* HTTP endpoint to initiate an export of a shapefile containing the stops or routes of one or
* more feed versions. NOTE: the job ID returned must be used by the requester to download the
ProjectController.java
@@ -25,7 +25,6 @@
import spark.Request;
import spark.Response;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
@@ -35,7 +34,7 @@
import static com.conveyal.datatools.common.utils.SparkUtils.formatJobMessage;
import static com.conveyal.datatools.common.utils.SparkUtils.logMessageAndHalt;
import static com.conveyal.datatools.manager.DataManager.publicPath;
import static com.conveyal.datatools.manager.jobs.MergeFeedsType.REGIONAL;
import static com.conveyal.datatools.manager.jobs.feedmerge.MergeFeedsType.REGIONAL;
import static spark.Spark.delete;
import static spark.Spark.get;
import static spark.Spark.post;