Merge pull request #172 from battlecode/webinfra-submit-better
Submission process bugfixes, compile server integration
n8kim1 authored Dec 29, 2020
2 parents d315ae1 + 1b2a08b commit e8d6430
Showing 7 changed files with 109 additions and 36 deletions.
49 changes: 39 additions & 10 deletions backend/api/views.py
@@ -140,7 +140,11 @@ def signed_upload_url(file_path, bucket):
        """

        blob = GCloudUploadDownload.get_blob(file_path, bucket)
-       return blob.create_resumable_upload_session()
+       # Origin is necessary to prevent CORS errors later:
+       # https://stackoverflow.com/questions/25688608/xmlhttprequest-cors-to-google-cloud-storage-only-working-in-preflight-request
+       # https://stackoverflow.com/questions/46971451/cors-request-made-despite-error-in-console
+       # https://googleapis.dev/python/storage/latest/blobs.html
+       return blob.create_resumable_upload_session(origin=settings.THIS_URL)

    @staticmethod
    def signed_download_url(file_path, bucket):
@@ -659,6 +663,7 @@ def create(self, request, team, league_id):
        if not serializer.is_valid():
            return Response(serializer.errors, status.HTTP_400_BAD_REQUEST)

+       # Note that IDs are needed to generate the link.
        serializer.save()  # save it once; link will be undefined, since we don't have any way to know the id yet
        serializer.save()  # save again; link automatically set
@@ -677,15 +682,6 @@ def create(self, request, team, league_id):

        upload_url = GCloudUploadDownload.signed_upload_url(SUBMISSION_FILENAME(serializer.data['id']), GCLOUD_SUB_BUCKET)

-       # The submission process is problematic: if the IDs are recorded before the code is actually uploaded, then code that fails to upload will have dead IDs associated with it, and the team will be sad
-       # Also, if the user navigates away before the upload_url is returned,
-       # then no code makes it into the bucket
-       # This is fixed(?) by uploading in the backend,
-       # or by uploading the file and then pressing another button to officially submit
-       # The best way for now would be to have the upload, when done,
-       # call a function in the backend that adjusts sub IDs
-       # TODO somehow fix this problem
-
        return Response({'upload_url': upload_url, 'submission_id': submission.id}, status.HTTP_201_CREATED)


@@ -708,6 +704,38 @@ def retrieve_file(self, request, team, league_id, pk=None):
        return Response({'download_url': download_url}, status.HTTP_200_OK)

+   @action(methods=['patch', 'post'], detail=True)
+   def compilation_pubsub_call(self, request, team, league_id, pk=None):
+       # It is better if the compile server gets requests only for submissions that are actually in buckets.
+       # So, only after an upload is done does the frontend call this endpoint to send the compile server a request.
+       submission = self.get_queryset().get(pk=pk)
+       if team != submission.team:
+           return Response({'message': 'Not authenticated on the right team'}, status.HTTP_401_UNAUTHORIZED)
+
+       # If a compilation has already succeeded, keep it that way; no need to re-do.
+       # (Might make sense to re-do for other submissions, however.)
+       if submission.compilation_status == settings.COMPILE_STATUS.SUCCESS:
+           return Response({'message': 'Success response already received for this submission'}, status.HTTP_400_BAD_REQUEST)
+
+       # Indicate that the submission is in a bucket
+       submission.compilation_status = settings.COMPILE_STATUS.UPLOADED
+       submission.save()
+
+       id = submission.id
+       # Call to the compile server
+       print('attempting call to compile server')
+       print('id:', id)
+       data = str(id)
+       data_bytestring = data.encode('utf-8')
+       print(type(data_bytestring))
+       pub(GCLOUD_PROJECT, GCLOUD_SUB_COMPILE_NAME, data_bytestring)
+
+       # Indicate that the submission is queued
+       submission.compilation_status = settings.COMPILE_STATUS.QUEUED
+       submission.save()
+
+       return Response({'message': 'Status updated'}, status.HTTP_200_OK)
+
    @action(methods=['patch', 'post'], detail=True)
    def compilation_update(self, request, team, league_id, pk=None):
        is_admin = User.objects.all().get(username=request.user).is_superuser
@@ -820,6 +848,7 @@ def team_compilation_id(self, request, team, league_id, pk=None):
            return Response({'compilation_id': comp_id}, status.HTTP_200_OK)
        else:
            # this is bad, replace with something that's actually None
+           # ^ TODO should address this
            return Response({'compilation_id': -1}, status.HTTP_200_OK)


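The `pub()` helper called in `compilation_pubsub_call` above isn't part of this diff; `GCLOUD_PROJECT` and `GCLOUD_SUB_COMPILE_NAME` are the project and topic constants passed to it. A minimal sketch of what such a helper looks like, assuming the standard `google-cloud-pubsub` client (the repo's actual helper may differ):

```python
# Hedged sketch of the pub() helper -- not the repo's actual implementation.
from google.cloud import pubsub_v1

def pub(project_id, topic_name, data_bytestring):
    """Publish a submission id (as bytes) to the compile-queue topic."""
    publisher = pubsub_v1.PublisherClient()
    topic_path = publisher.topic_path(project_id, topic_name)
    future = publisher.publish(topic_path, data_bytestring)
    # Block until Pub/Sub acknowledges the message, surfacing any publish error.
    future.result()
```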
1 change: 1 addition & 0 deletions backend/dev_settings.py
@@ -40,6 +40,7 @@

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
+THIS_URL = 'http://localhost:3000'

# Application definition
INSTALLED_APPS += ['debug_toolbar']
24 changes: 20 additions & 4 deletions backend/docs/SETUP.md
@@ -52,7 +52,7 @@ Once the database is finished being created, connect to it with your Postgres ed
Delete the contents of the following tables. (_Don't delete the tables themselves!_ To easily delete info, you can run a query, such as `DELETE FROM [table_name]`.) The tables are: `api_scrimmage`, `api_scrimmage_hidden`, `api_submission`, `api_team`, `api_team_users`, `api_teamsubmission`, `api_tournament`, `api_tournamentscrimmage`, `api_update`, `api_user`, `django_admin_log`.
(You may have to delete them in a particular order: if you get an error pertaining to a "foreign key constraint", you'll have to delete the contents of the table which references it first. Clearing those tables too is probably okay.)

-Updating `api_league` is slightly different. Don't delete the entry; just edit it instead. Change `name` to something more suitable (eg `bh20`), and change the `start_date` and `end_date` (they don't have to be exact, so feel free to use a longer range than the actual tournament). Set `active` to true. **Set `submissions_enabled` to false and `game_released` to false.** Finally, `engine_version` needs to be changed as well; ask the infrastructure team what to change it to.
+Updating `api_league` is slightly different. Don't delete the entry; just edit it instead. Change `name` to something more suitable (eg `bh20`), and change the `start_date` and `end_date` (they don't have to be exact, so feel free to use a longer range than the actual tournament). **Set `active` to true. Set `submissions_enabled` to true. Set `game_released` to false.** Finally, `engine_version` needs to be changed as well; ask the infrastructure team what to change it to.
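For instance, the edit could be scripted rather than done by hand (a hedged sketch using psycopg2; the connection details and date range below are placeholders, not this year's real values):

```python
# Hedged sketch: edit the single api_league row in place.
import psycopg2

# Placeholder connection details -- use the real database's host and credentials.
conn = psycopg2.connect(host='<db-host>', dbname='<db-name>',
                        user='<db-user>', password='<db-pass>')
with conn, conn.cursor() as cur:  # commits on clean exit
    cur.execute("""
        UPDATE api_league
        SET name = 'bh20',
            start_date = '2020-12-01',  -- placeholder range, wider than the tournament
            end_date = '2021-02-01',
            active = true,
            submissions_enabled = true,
            game_released = false
    """)
conn.close()
```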

Next, we need to register a superuser account (for use by the infra). Run the battlecode website, and simply follow the normal account registration process. Take note of the password!
Also, have this superuser create and join a team (this is necessary for some permissions).
@@ -62,7 +62,18 @@ Then stop the old database (on its main page, press "stop").

## Deployment Setup

-Deployment is done through the Google Cloud Platform. You'll need access to the Google Cloud project. (If you don't have access already, ask a dev to add you.) With that, you can start here:
+Deployment is done through the Google Cloud Platform. You'll need access to the Google Cloud project. (If you don't have access already, ask a dev to add you.) It's also helpful to install gsutil, a command-line application for managing GCP. Link here: https://cloud.google.com/storage/docs/gsutil.
+
+With that, you can start here --

+### Configuring Settings
+
+After registering a domain name for the competition, set `THIS_URL` (in `settings.py`) to that domain.
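For example (the domain below is illustrative, taken from this year's `cors.json`):

```python
# Hedged example for settings.py; substitute the competition's real domain.
THIS_URL = 'https://2021.battlecode.org'
```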

+### Storage Buckets
+Go to "Storage" on GCP console. A bucket for submissions should have been created (if not, instructions are in the infrastructure readme).
+Set up the CORS policy, which allows uploads to the bucket from external websites. Find `docs/cors.json`; in there, update the domain URLs listed. Then, run `gsutil cors set path/to/cors.json gs://bc21-submissions` (updating the bucket name to whatever it is this year).
+More info is here: https://cloud.google.com/storage/docs/configuring-cors#gsutil
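The same policy can also be applied from Python with the google-cloud-storage client, if that's more convenient (a sketch; the bucket name and origins must match this year's values):

```python
from google.cloud import storage

# Hedged sketch: apply the CORS policy from docs/cors.json programmatically.
client = storage.Client()
bucket = client.get_bucket('bc21-submissions')  # update to this year's bucket
bucket.cors = [{
    "origin": ["http://localhost:3000", "https://2021.battlecode.org"],
    "method": ["GET", "PUT"],
    "responseHeader": ["authorization", "content-type"],
}]
bucket.patch()  # persists the new CORS policy to the bucket
```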

### Cloud Build Triggers
Go to "Cloud Build" triggers on GCP console, here: https://console.cloud.google.com/cloud-build/triggers?project=battlecode18
@@ -72,11 +83,15 @@ Change Dockerfile directory to `/backend`, and image name to `gcr.io/battlecode1

With this step done: on pushes to master, Google Cloud will create a Docker container with our latest code. Push a commit to master, to test that the trigger works! Under "Cloud Builds" -> "History" you can see the build in progress.

+### Google Application Credentials
+Infrastructure should have made a service account. Get the service account JSON file from an infra dev. (If they haven't done so yet, you can come back to this section later. Make sure to!)
+Set the contents of this file in `dev_settings_sensitive.py`, as `GOOGLE_APPLICATION_CREDENTIALS`. Formatting is a little weird here -- you'll have to wrap the contents of the JSON file in `r'''` at the beginning and `'''` at the end. See another version of the file for an example.
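A sketch of the expected format (the JSON keys shown are the standard service-account fields, with values deliberately elided; paste the real file's contents):

```python
# dev_settings_sensitive.py -- hedged sketch, not real credentials.
GOOGLE_APPLICATION_CREDENTIALS = r'''{
  "type": "service_account",
  "project_id": "battlecode18",
  "private_key_id": "...",
  "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
  "client_email": "...@battlecode18.iam.gserviceaccount.com"
}'''
```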

### Instance Template
From Google Cloud console, "Compute Engine" -> "Instance Templates". Click on an old backend template, and then click on "Create similar". Change the name to something descriptive and conventional ("bc21-backend-template", for example, works well; I've also found that including the current date and time in the name helps keep things straight). For machine type, we've found `n1-standard-1` to be cheap and to work well, especially in providing enough memory.

Check the checkbox of "Deploy a container image to this VM instance", and change the container image to the image name you've just written in the cloud build trigger.
-Then, click "Advanced container options" to see a place to set environment variables. In the repo's `backend/settings.py`, you can look at the `os.getenv` calls to see which environment variables are needed. Set these here, to the same values that have been used in local testing / in `dev_settings_sensitive.py`. (Other than `DB_HOST`, these probably don't need changing.) Note that these are un-editable; if you ever change environment variables, you'll have to make a new instance template.
+Then, click "Advanced container options" to see a place to set environment variables. Find the variables set in `dev_settings_sensitive.py`, and set all of those keys/values here, too. (Here, the values should not be enclosed in quotes.) Note that these are un-editable; if you ever change environment variables, you'll have to make a new instance template. ("Create Similar" on the instance template's page is helpful here.)

(For now, keep the boot disk the same; it may be good to change it to a later version down the road. Be sure to test that the VMs still work, though.)

Expand Down Expand Up @@ -107,6 +122,7 @@ Finally, click update!
(Note: sometimes, after you try to update changes, they may not go through. This may be due to creating too many backend instances/buckets; we can only have so many up at any given time. You'll see notifications and any errors in the top right corner of the Google Console; you can check if this is the problem. If so, deleting old backend services/buckets is surprisingly hard. You need to first delete any uses of them in the host and path rules, then delete their uses in the "backend services" / "backend buckets" lists on the edit page's backend configuration section; don't forget to save. Then you need to _actually_ delete them, by using the gcloud command line interface. Instructions [here](https://cloud.google.com/sdk/gcloud/reference/compute/backend-services/delete) and [here](https://cloud.google.com/sdk/gcloud/reference/compute/backend-buckets/delete).)

### Some last steps
+Make sure the CORS policy and Google Application credentials are all set up, as described earlier. In particular, make sure that the Google Application credentials have been set up as an environment variable in the instance template, or create a new instance template with this set.
Delete old instance groups: go to "Compute Engine" -> "Instance groups", check any old instance groups that are no longer in use, and click "delete".
Delete old instance template: go to "Compute Engine" -> "Instance templates", check any old templates that are no longer in use, and click "delete".
Delete old, unused backend services and buckets if you're up to it (instructions in the previous section). But this can be a pain and is certainly not necessary.
17 changes: 17 additions & 0 deletions backend/docs/cors.json
@@ -0,0 +1,17 @@
+[
+    {
+        "method": [
+            "GET",
+            "PUT"
+        ],
+        "origin": [
+            "http://localhost:3000",
+            "http://2021.battlecode.org",
+            "https://2021.battlecode.org"
+        ],
+        "responseHeader": [
+            "authorization",
+            "content-type"
+        ]
+    }
+]
4 changes: 3 additions & 1 deletion backend/settings.py
@@ -104,6 +104,8 @@ class COMPILE_STATUS:
    SUCCESS = 1
    FAIL = 2
    ERROR = 3
+   UPLOADED = 4
+   QUEUED = 5


# Application definition
@@ -272,4 +274,4 @@ class COMPILE_STATUS:


# google cloud
-GOOGLE_APPLICATION_CREDENTIALS = os.getenv('GOOGLE_APPLICATION_CREDENTIALS_JSON')
+GOOGLE_APPLICATION_CREDENTIALS = os.getenv('GOOGLE_APPLICATION_CREDENTIALS')
41 changes: 20 additions & 21 deletions frontend/src/api.js
@@ -18,13 +18,19 @@ class Api {

    //----SUBMISSIONS----

+   // TODO clean up a lot of old comments, print statements
+   // TODO provide more explanatory comments
+   // TODO there's a better way to work with 'submitting' in cookies
+   // TODO 'submitting' could probably use a better name
+   // TODO review code in the submissions js
+   // TODO errors in these callbacks should also display messages in the frontend
+
    //uploads a new submission to the google cloud bucket
    static newSubmission(submissionfile, callback){
        // submissionfile.append('_method', 'PUT');
        // get the url from the real api
-       $.post(`${URL}/api/${LEAGUE}/submission/`, {
-           team: Cookies.get('team_id')
-       }).done((data, status) => {
+       $.post(`${URL}/api/${LEAGUE}/submission/`)
+       .done((data, status) => {
            console.log("got URL")
            Cookies.set('submission_id', data['submission_id']);
            $.ajax({
@@ -36,29 +42,22 @@ class Api {
            })
            .done((data, status) => {
                console.log(data, status)
-           })
-           // Even when upload succeeds, an error is thrown...
-           // We make the dangerous assumption that the upload succeeded,
-           // ie that the submission exists in a bucket
-           // TODO this is a dangerous assumption, find a better solution
-           // (maybe revolving around the upload working error-free,
-           // and hooking callbacks to done rather than fail)
-           // TODO it's possible that the fail callback occurs
-           // before the upload finishes
-           .fail((xhr, status, error) => {
-               // console.log(data);
-               $.post(`${URL}/api/${LEAGUE}/submission/` + Cookies.get('submission_id') + `/compilation_update/`, {
-                   team: Cookies.get('team_id')
-               }).done((data, status) => {
+               $.post(`${URL}/api/${LEAGUE}/submission/` + Cookies.get('submission_id') + `/compilation_pubsub_call/`)
+               .done((data, status) => {
                    console.log("Definitely done!")
                    // console.log(data, status)
                    Cookies.set('submitting', 0)
                    // TODO make this display done on screen
                })
                .fail((xhr, status, error) => {
                    console.log("Error in compilation update callback: ", xhr, status, error)
                })
-           }).fail((xhr, status, error) => {
-               console.log("Error in post:", error)
+           })
+           .fail((xhr, status, error) => {
+               console.log("Error in put request of file to bucket: ", xhr, status, error)
            })
        })
+       .fail((xhr, status, error) => {
+           console.log("Error in post request for upload: ", xhr, status, error)
+       });

    }
9 changes: 9 additions & 0 deletions frontend/src/views/submissions.js
@@ -46,6 +46,15 @@


    // makes an api call to upload the selected file
+   // TODO clean this method up
+   // TODO add explanation
+   // TODO what exactly should the submission table show?
+   //      Latest submission in progress, and last 3 good submissions? (and then make this clear in frontend) -- think I'm leaning towards this one
+   //      Last 3 submissions, period? (this might need revisions in backend)
+   // TODO update how we display the most recent submission (including its status).
+   //      Also, now that we have new statuses, we need to figure out what we should display in the frontend for each of them.
+   //      (eg if user navigates away before the upload link is returned / before the upload finishes, or if submission fails to get queued/compiled,
+   //      what should the user do? what should we tell them?)
    uploadData = () => {
        // let status_str = "Submitting..."
        Cookies.set('submitting', 1)
Expand Down
