diff --git a/.gitignore b/.gitignore
index d500923c..b75783ba 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,5 @@
.project
*.iml
initial-configuration/mysql-docker/.env
+pass.tmp
+initial-configuration/pass.tmp
\ No newline at end of file
diff --git a/README.md b/README.md
index 1c137153..3ef6aad6 100644
--- a/README.md
+++ b/README.md
@@ -131,6 +131,7 @@ Once you have logged into Jenkins and have set up your admin account, you need t
system variables:
- `DOCKER_CONFIG_DIR`: `/path/to/config/dir` This is the path you passed to `install-dependencies-docker`
+- `MYSQL_CONFIG_DIR`: `/path/to/mysql/cnf/dir` This is the MySQL config path you passed to `install-dependencies-docker` (its second argument)
- `MYSQL_NETWORK`: `picsure` If you plan to switch to a remote database, this needs to be changed back to `host`
6. Run the Initial Configuration Pipeline job.
@@ -149,9 +150,7 @@ system variables:
- `EMAIL`: This is the Google account that will be the initial admin user.
- - `PROJECT_SPECIFIC_OVERRIDE_REPOSITORY`: This is the repo that contains the project specific overrides for your
- project. If you just want the default PIC-SURE behavior use this
- repo : https://github.com/hms-dbmi/baseline-pic-sure
+ - `MIGRATION_NAME`: This is the name of the migration that will be run. If you just want the default PIC-SURE behavior, use `Baseline` from the repo https://github.com/hms-dbmi/pic-sure-migrations, or fork that repo and add your own migration. If you are a GIC Institution, use `GIC-Institution`.
- `RELEASE_CONTROL_REPOSITORY`: This is the repo that contains the build-spec.json file for your project. This file
controls what code is built and deployed. If you just want the default PIC-SURE behavior use this
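For context, the two directory variables correspond to the installer's positional arguments (`set_docker_config_dir "$1"` / `set_mysql_config_dir "$2"` later in this diff). A minimal sketch of an invocation that matches the Jenkins defaults; the paths, `sudo`, and running from the repo root are assumptions, not requirements:

```bash
# Illustrative only: pass the Docker config dir ($1) and MySQL config dir ($2)
# that DOCKER_CONFIG_DIR and MYSQL_CONFIG_DIR should later point at.
cd initial-configuration
sudo ./install-dependencies-docker.sh \
  /usr/local/docker-config \
  /usr/local/docker-config/picsure-db

# The script appends the exports to your shell rc file; verify with:
grep -E 'DOCKER_CONFIG_DIR|MYSQL_CONFIG_DIR' "$HOME/.bashrc"
```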
diff --git a/initial-configuration/config/hpds/hpds.env b/initial-configuration/config/hpds/hpds.env
new file mode 100644
index 00000000..6871e644
--- /dev/null
+++ b/initial-configuration/config/hpds/hpds.env
@@ -0,0 +1 @@
+CATALINA_OPTS=-XX:+UseParallelGC -XX:SurvivorRatio=250 -Xms1g -Xmx16g -DCACHE_SIZE=1500 -DSMALL_TASK_THREADS=1 -DLARGE_TASK_THREADS=1 -DSMALL_JOB_LIMIT=100 -DID_BATCH_SIZE=2000 -DALL_IDS_CONCEPT=NONE -DID_CUBE_NAME=NONE -Denable_file_sharing=true
diff --git a/initial-configuration/config/httpd/httpd-vhosts-ssloffload.conf b/initial-configuration/config/httpd/httpd-vhosts-ssloffload.conf
index 5f76eefd..76b8c35d 100644
--- a/initial-configuration/config/httpd/httpd-vhosts-ssloffload.conf
+++ b/initial-configuration/config/httpd/httpd-vhosts-ssloffload.conf
@@ -34,7 +34,7 @@ Listen 0.0.0.0:80
RewriteRule ^/picsure/(.*)$ "http://wildfly:8080/pic-sure-api-2/PICSURE/$1" [P]
- RewriteRule ^/psama/(.*)$ "http://wildfly:8080/pic-sure-auth-services/auth/$1" [P]
+ RewriteRule ^/psama/(.*)$ "http://psama:8090/auth/$1" [P]
RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-f
RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-d
diff --git a/initial-configuration/config/httpd/httpd-vhosts.conf b/initial-configuration/config/httpd/httpd-vhosts.conf
index 9145565d..73b8e47e 100644
--- a/initial-configuration/config/httpd/httpd-vhosts.conf
+++ b/initial-configuration/config/httpd/httpd-vhosts.conf
@@ -17,13 +17,12 @@ AddType application/x-pkcs7-crl .crl
SSLCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES128-SHA256:AES256-GCM-SHA384:AES256-SHA256
SSLProxyCipherSuite ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:AES128-GCM-SHA256:AES128-SHA256:AES256-GCM-SHA384:AES256-SHA256
-
-SSLHonorCipherOrder on
-
-SSLProtocol all -TLSv1.2
-SSLProxyProtocol all -TLSv1.2
+SSLProtocol all -SSLv3 -TLSv1 -TLSv1.1 -TLSv1.2
+SSLHonorCipherOrder off
+SSLSessionTickets off
SSLPassPhraseDialog builtin
+SSLUseStapling On
SSLSessionCache "shmcb:${HTTPD_PREFIX}/logs/ssl_scache(512000)"
SSLSessionCacheTimeout 300
@@ -35,19 +34,30 @@ ServerTokens Prod
ServerName localhost
RewriteEngine On
ProxyPreserveHost On
+    # Don't allow HTTP TRACE/TRACK debug methods
+    RewriteCond %{REQUEST_METHOD} ^TRACK
+    RewriteRule .* - [F]
+    RewriteCond %{REQUEST_METHOD} ^TRACE
+    RewriteRule .* - [F]
+
RewriteCond %{HTTPS} off [OR]
RewriteCond %{HTTP_HOST} ^(?:)?(.+)$ [NC]
- RewriteRule ^ https://%{SERVER_NAME}/picsureui/ [L,NE,R=301]
+ RewriteRule ^ https://%{SERVER_NAME}/$1 [L,NE,R=301]
+ ServerName %{SERVER_NAME}
ProxyTimeout 300
+ SSLEngine on
SSLProxyEngine on
SSLProxyVerify none
SSLProxyCheckPeerCN off
SSLProxyCheckPeerName off
+ # enable HTTP/2, if available
+ Protocols h2 http/1.1
+
SSLCertificateFile "${HTTPD_PREFIX}/cert/server.crt"
SSLCertificateKeyFile "${HTTPD_PREFIX}/cert/server.key"
SSLCertificateChainFile "${HTTPD_PREFIX}/cert/server.chain"
@@ -77,42 +87,38 @@ ServerTokens Prod
RewriteEngine On
ProxyPreserveHost On
-
- ProxyPass http://jupyterhub:8000/jupyterhub
- ProxyPassReverse http://jupyterhub:8000/jupyterhub
-
+    # Don't allow HTTP TRACE/TRACK debug methods
+    RewriteCond %{REQUEST_METHOD} ^TRACK
+    RewriteRule .* - [F]
+    RewriteCond %{REQUEST_METHOD} ^TRACE
+    RewriteRule .* - [F]
-
- ProxyPassMatch ws://jupyterhub:8000/jupyterhub/$1/$2$3
- ProxyPassReverse ws://jupyterhub:8000/jupyterhub/$1/$2$3
-
+    # Return a 200 OK for /picsure/health so AWS ELB health checks succeed
+ RewriteRule ^/picsure/health$ - [R=200,L]
RewriteRule ^/picsure/(.*)$ "http://wildfly:8080/pic-sure-api-2/PICSURE/$1" [P]
- RewriteRule ^/psama/(.*)$ "http://wildfly:8080/pic-sure-auth-services/auth/$1" [P]
-
- RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-f
- RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-d
-
- RewriteRule /psamaui/(.*) /psamaui/index_03272020.html
+ RewriteRule ^/psama/(.*)$ "http://psama:8090/auth/$1" [P]
RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-f
RewriteCond %{DOCUMENT_ROOT}/%{REQUEST_FILENAME} !-d
- RewriteRule /picsureui/(.*) /picsureui/index_03272020.html
RewriteRule ^/static/(.*)$ /static/$1 [L]
- RedirectMatch ^/$ /picsureui/
- ErrorDocument 404 /index.html
-
DocumentRoot "${HTTPD_PREFIX}/htdocs"
- ErrorLog "${HTTPD_PREFIX}/logs/error_log"
- TransferLog "${HTTPD_PREFIX}/logs/access_log"
- CustomLog "${HTTPD_PREFIX}/logs/ssl_request_log" \
- "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b"
+    LogFormat "%{X-Forwarded-For}i %t %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" proxy-ssl
+    LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+    LogFormat "%{X-Forwarded-For}i %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" proxy
+    SetEnvIf X-Forwarded-For "^.*\..*\..*\..*" forwarded
+    CustomLog "${HTTPD_PREFIX}/logs/access_log" combined env=!forwarded
+    CustomLog "${HTTPD_PREFIX}/logs/access_log" proxy env=forwarded
+    CustomLog "${HTTPD_PREFIX}/logs/ssl_request_log" proxy-ssl env=forwarded
+    CustomLog "${HTTPD_PREFIX}/logs/ssl_request_log" \
+        "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" env=!forwarded
+    ErrorLog "${HTTPD_PREFIX}/logs/error_log"
+    TransferLog "${HTTPD_PREFIX}/logs/access_log"
BrowserMatch "MSIE [2-5]" \
nokeepalive ssl-unclean-shutdown \
downgrade-1.0 force-response-1.0
-
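A quick way to sanity-check the vhost rules added above once httpd is reloaded is a few `curl` probes: the TRACE/TRACK blocks should answer 403 and the new ELB health path 200. This is a hedged sketch; the hostname, the self-signed cert (`-k`), and the expected codes are assumptions about a local stack:

```bash
#!/usr/bin/env bash
# Hypothetical smoke test for the rewrite rules above (hostname is an assumption).
HOST="https://localhost"

curl -ks -o /dev/null -w 'TRACE  %{http_code}\n' -X TRACE "$HOST/"                # expect 403
curl -ks -o /dev/null -w 'TRACK  %{http_code}\n' -X TRACK "$HOST/"                # expect 403
curl -ks -o /dev/null -w 'health %{http_code}\n'          "$HOST/picsure/health"  # expect 200
curl -ks -o /dev/null -w 'psama  %{http_code}\n'          "$HOST/psama/"          # proxied to psama:8090
```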
diff --git a/initial-configuration/config/httpd/httpd.env b/initial-configuration/config/httpd/httpd.env
new file mode 100644
index 00000000..e69de29b
diff --git a/initial-configuration/config/httpd/picsureui_settings.json b/initial-configuration/config/httpd/picsureui_settings.json
index bc6655ef..6506b0a1 100644
--- a/initial-configuration/config/httpd/picsureui_settings.json
+++ b/initial-configuration/config/httpd/picsureui_settings.json
@@ -26,7 +26,7 @@
"customizeAuth0Login": true,
"queryButtonLabel": "Export for analysis",
"maxVariantCount": 10000,
- "auth0domain":"__AUTH0_DOMAIN__",
+ "auth0domain":"__AUTH0_TENANT__",
"client_id":"__PIC_SURE_CLIENT_ID__",
"analyticsId": "__ANALYTICS_ID__",
"tagManagerId": "__TAG_MANAGER_ID__"
diff --git a/initial-configuration/config/httpd/psamaui_settings.json b/initial-configuration/config/httpd/psamaui_settings.json
index 7d1e0a9c..2b96b8ec 100644
--- a/initial-configuration/config/httpd/psamaui_settings.json
+++ b/initial-configuration/config/httpd/psamaui_settings.json
@@ -6,7 +6,7 @@
"basePath" : "/psama",
"uiPath": "",
"customizeAuth0Login": true,
- "auth0domain":"__AUTH0_DOMAIN__",
+ "auth0domain":"__AUTH0_TENANT__",
"client_id":"__PIC_SURE_CLIENT_ID__"
}
diff --git a/initial-configuration/config/psama/.env b/initial-configuration/config/psama/.env
new file mode 100644
index 00000000..d6f5bb4f
--- /dev/null
+++ b/initial-configuration/config/psama/.env
@@ -0,0 +1,30 @@
+# Database Configuration
+DATASOURCE_URL=jdbc:mysql://picsure-db:3306/auth?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&autoReconnectForPools=true&serverTimezone=UTC
+DATASOURCE_USERNAME=auth
+DATASOURCE_PASSWORD=__AUTH_MYSQL_PASSWORD__
+
+# Mail Configuration
+EMAIL_ADDRESS=__EMAIL_FROM_ADDR__
+EMAIL_PASSWORD=__EMAIL_PASSWORD__
+
+# Application Properties
+APPLICATION_CLIENT_SECRET=__PIC_SURE_CLIENT_SECRET__
+APPLICATION_CLIENT_SECRET_IS_BASE_64=false
+TOS_ENABLED=false
+SYSTEM_NAME=PIC-SURE All-in-one
+GRANT_EMAIL_SUBJECT=__ACCESS_GRANTED_EMAIL_SUBJECT__
+USER_ACTIVATION_REPLY_TO=__USER_ACTIVATION_REPLY_TO__
+ADMIN_USERS=__ADMIN_USERS__
+DENIED_EMAIL_ENABLED=false
+STACK_SPECIFIC_APPLICATION_ID=__STACK_SPECIFIC_APPLICATION_ID__
+
+# IDP Provider Configuration
+AUTH0_IDP_PROVIDER_IS_ENABLED=true
+IDP_PROVIDER_URI=https://__AUTH0_TENANT__.auth0.com/
+AUTH0_HOST=https://__AUTH0_TENANT__.auth0.com/
+
+# Token Expiration Times
+TOKEN_EXPIRATION_TIME=3600000
+LONG_TERM_TOKEN_EXPIRATION_TIME=2592000000
+
+JAVA_OPTS="-Xms2g -Xmx4g -XX:MetaspaceSize=96M -XX:MaxMetaspaceSize=256m -Djava.net.preferIPv4Stack=true -Djavax.net.ssl.trustStore=/usr/local/tomcat/conf/application.truststore -Djavax.net.ssl.trustStorePassword=password"
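The `__…__` placeholders above are filled in by the install scripts and Jenkins jobs later in this diff; at runtime the file is consumed as a plain Docker env-file. A minimal sketch of how the container could pick it up; the image name, tag, and network are assumptions, not defined in this patch:

```bash
# Illustrative only: run PSAMA against the generated env file and the
# truststore created by the "Create Root Certs in TrustStore" job.
docker run -d --name psama \
  --network picsure \
  --env-file /usr/local/docker-config/psama/.env \
  -v /usr/local/docker-config/psama/application.truststore:/usr/local/tomcat/conf/application.truststore \
  example/psama:latest   # assumption: the real image is built and deployed by the Jenkins jobs
```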
diff --git a/initial-configuration/config/wildfly/standalone.xml b/initial-configuration/config/wildfly/standalone.xml
index 9c32b3d9..fcba7164 100644
--- a/initial-configuration/config/wildfly/standalone.xml
+++ b/initial-configuration/config/wildfly/standalone.xml
@@ -158,26 +158,6 @@
sa
-
- jdbc:mysql://picsure-db:3306/auth?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&autoReconnectForPools=true&serverTimezone=UTC
- mysql
-
- 2
- 10
- true
-
-
- auth
- __AUTH_MYSQL_PASSWORD__
-
-
-
- SELECT 1
- true
- false
-
-
-
jdbc:mysql://picsure-db:3306/picsure?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&autoReconnectForPools=true&serverTimezone=UTC
mysql
@@ -461,7 +441,11 @@
-
+
+
+
+
+
@@ -475,7 +459,7 @@
-
+
diff --git a/initial-configuration/config/wildfly/wildfly.env b/initial-configuration/config/wildfly/wildfly.env
new file mode 100644
index 00000000..94815e41
--- /dev/null
+++ b/initial-configuration/config/wildfly/wildfly.env
@@ -0,0 +1 @@
+JAVA_OPTS=-Xms2g -Xmx4g -XX:MetaspaceSize=96M -XX:MaxMetaspaceSize=256m -Djava.net.preferIPv4Stack=true -Djavax.net.ssl.trustStore=/opt/jboss/wildfly/standalone/configuration/application.truststore -Djavax.net.ssl.trustStorePassword=password
\ No newline at end of file
diff --git a/initial-configuration/install-dependencies-docker.sh b/initial-configuration/install-dependencies-docker.sh
index bf8b20a6..82b0d84e 100755
--- a/initial-configuration/install-dependencies-docker.sh
+++ b/initial-configuration/install-dependencies-docker.sh
@@ -1,4 +1,3 @@
-
#!/usr/bin/env bash
sed_inplace() {
@@ -8,7 +7,6 @@ sed_inplace() {
sed -i "$@"
fi
}
-export -f sed_inplace
CWD=$(pwd)
# this makes tr work on OSX
@@ -18,12 +16,12 @@ export LC_ALL=C
# $1 is the path to the docker-config dir $2 is the path to the rc rc_file
function set_docker_config_dir {
local docker_config_dir=$1
- local rc_file=$2
+ export rc_file=$2
if [ -z "$docker_config_dir" ]; then
docker_config_dir="/var/local/docker-config"
fi
if [ -z "$rc_file" ]; then
- rc_file="$HOME/.bashrc"
+ export rc_file="$HOME/.bashrc"
fi
#Check if docker_config_dir is a dir and exists
if [ ! -d "$docker_config_dir" ]; then
@@ -45,7 +43,28 @@ function set_docker_config_dir {
echo 'alias picsure-db="docker exec -ti picsure-db bash -c '\''mysql -uroot -p\$MYSQL_ROOT_PASSWORD'\''"' >> "$rc_file"
}
+function set_mysql_config_dir() {
+ local mysql_config_dir=$1
+ if [ -z "$mysql_config_dir" ]; then
+ mysql_config_dir="$DOCKER_CONFIG_DIR/picsure-db/"
+ fi
+ #Check if mysql_config_dir is a dir and exists
+ if [ ! -d "$mysql_config_dir" ]; then
+ echo "Creating dir $mysql_config_dir and setting MYSQL_CONFIG_DIR in $rc_file"
+ mkdir -p $mysql_config_dir
+ export MYSQL_CONFIG_DIR=$mysql_config_dir
+ echo "export MYSQL_CONFIG_DIR=$mysql_config_dir" >> "$rc_file"
+ else
+ echo "dir $mysql_config_dir exists, just setting MYSQL_CONFIG_DIR in $rc_file"
+ # If the config dir exists, we still want to clean up old settings for it
+    export MYSQL_CONFIG_DIR=$mysql_config_dir
+ grep 'MYSQL_CONFIG_DIR' "$rc_file" && sed_inplace '/MYSQL_CONFIG_DIR/d' "$rc_file"
+ echo "export MYSQL_CONFIG_DIR=$mysql_config_dir" >> "$rc_file"
+ fi
+}
+
set_docker_config_dir "$1"
+set_mysql_config_dir "$2"
#-------------------------------------------------------------------------------------------------#
# Docker Install #
@@ -56,7 +75,7 @@ echo "Starting update"
echo "Installing docker"
if [ -n "$(command -v yum)" ] && [ -z "$(command -v docker)" ]; then
echo "Yum detected. Assuming RHEL. Install commands will use yum"
- set_docker_config_dir $1 "$HOME/.zshrc"
+ set_docker_config_dir $1 "$HOME/.zshrc"
yum -y update
# This repo can be removed after we move away from centos 7 I think
yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo
@@ -90,7 +109,7 @@ if [ -n "$(command -v apt-get)" ] && [ -z "$(command -v docker)" ]; then
fi
if [[ "$OSTYPE" =~ ^darwin ]]; then
- echo "Darwin detected. Assuming macOS. Install commands will use brew."
+ echo "Darwin detected. Assuming macOS. Install commands will use brew."
#check for brew
if [ -z "$(command -v brew)" ]; then
echo "Brew not detected. Please install brew and rerun this script."
@@ -112,7 +131,7 @@ fi
if [ -n "$(command -v apk)" ]; then
echo "apk detected. Assuming alpine. Install commands will use apk"
apk update && apk add --no-cache wget
-fi
+fi
if [ -z "$(command -v docker)" ]; then
echo "You dont have docker installed and we cant detect a supported package manager."
@@ -172,11 +191,13 @@ export APP_ID=`uuidgen | tr '[:upper:]' '[:lower:]'`
export APP_ID_HEX=`echo $APP_ID | awk '{ print toupper($0) }'|sed 's/-//g'`
sed_inplace "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" $DOCKER_CONFIG_DIR/httpd/picsureui_settings.json
sed_inplace "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" $DOCKER_CONFIG_DIR/wildfly/standalone.xml
+sed_inplace "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" $DOCKER_CONFIG_DIR/psama/.env
export RESOURCE_ID=`uuidgen | tr '[:upper:]' '[:lower:]'`
export RESOURCE_ID_HEX=`echo $RESOURCE_ID | awk '{ print toupper($0) }'|sed 's/-//g'`
sed_inplace "s/__STACK_SPECIFIC_RESOURCE_UUID__/$RESOURCE_ID/g" $DOCKER_CONFIG_DIR/httpd/picsureui_settings.json
+
echo $APP_ID > $DOCKER_CONFIG_DIR/APP_ID_RAW
echo $APP_ID_HEX > $DOCKER_CONFIG_DIR/APP_ID_HEX
echo $RESOURCE_ID > $DOCKER_CONFIG_DIR/RESOURCE_ID_RAW
@@ -197,6 +218,9 @@ if [ -n "$2" ]; then
./convert-cert.sh $2 $3 $password
fi
+echo Deleting pass.tmp
+rm pass.tmp
+
echo "Installation script complete. Starting Jenkins."
cd ..
./start-jenkins.sh
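The repeated `sed_inplace` calls above rely on the small portability wrapper near the top of this script; only its GNU branch is visible in these hunks. The usual shape of that pattern, reconstructed here as an assumption from the visible `$OSTYPE` check, is:

```bash
# Assumed sketch: BSD sed on macOS needs an explicit empty backup suffix for
# in-place edits, GNU sed does not — hence the wrapper used throughout the script.
sed_inplace() {
  if [[ "$OSTYPE" =~ ^darwin ]]; then
    sed -i '' "$@"
  else
    sed -i "$@"
  fi
}

# Example, mirroring the placeholder substitutions added in this PR:
sed_inplace "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" "$DOCKER_CONFIG_DIR/psama/.env"
```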
diff --git a/initial-configuration/install-dependencies.sh b/initial-configuration/install-dependencies.sh
index 7c3f3390..d6743a0f 100755
--- a/initial-configuration/install-dependencies.sh
+++ b/initial-configuration/install-dependencies.sh
@@ -87,7 +87,7 @@ rm -f picsure.tmp
echo "` < /dev/urandom tr -dc @^=+$*%_A-Z-a-z-0-9 | head -c${1:-24}`%4cA" > auth.tmp
mysql -u root -e "grant all privileges on auth.* to 'auth'@'%' identified by '`cat auth.tmp`';flush privileges;";
-sed -i s/__AUTH_MYSQL_PASSWORD__/`cat auth.tmp`/g /usr/local/docker-config/wildfly/standalone.xml
+sed -i s/__AUTH_MYSQL_PASSWORD__/`cat auth.tmp`/g /usr/local/docker-config/psama/.env
rm -f auth.tmp
echo "Building and installing Jenkins"
@@ -109,6 +109,7 @@ export APP_ID=`uuidgen -r`
export APP_ID_HEX=`echo $APP_ID | awk '{ print toupper($0) }'|sed 's/-//g'`
sed -i "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" /usr/local/docker-config/httpd/picsureui_settings.json
sed -i "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" /usr/local/docker-config/wildfly/standalone.xml
+sed -i "s/__STACK_SPECIFIC_APPLICATION_ID__/$APP_ID/g" /usr/local/docker-config/psama/.env
export RESOURCE_ID=`uuidgen -r`
export RESOURCE_ID_HEX=`echo $RESOURCE_ID | awk '{ print toupper($0) }'|sed 's/-//g'`
diff --git a/initial-configuration/jenkins/jenkins-docker/Dockerfile b/initial-configuration/jenkins/jenkins-docker/Dockerfile
index a8ade3e6..12c9d0fd 100644
--- a/initial-configuration/jenkins/jenkins-docker/Dockerfile
+++ b/initial-configuration/jenkins/jenkins-docker/Dockerfile
@@ -1,4 +1,4 @@
-FROM jenkins/jenkins:2.440-jdk11
+FROM jenkins/jenkins:2.442-jdk11
COPY plugins.yml /usr/share/jenkins/ref/plugins.yml
diff --git a/initial-configuration/jenkins/jenkins-docker/config.xml b/initial-configuration/jenkins/jenkins-docker/config.xml
index 5345d196..60bd2377 100644
--- a/initial-configuration/jenkins/jenkins-docker/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/config.xml
@@ -139,6 +139,7 @@
Backup Jenkins Home
Create PIC-SURE PassThrough Resource
Create PIC-SURE Aggregate Resource
+ Create PIC-SURE Visualization Build
Retrieve Build Spec
@@ -153,6 +154,55 @@
false
+
+
+ Dictionary
+ false
+ false
+
+
+
+ Build Dictionary API
+ Deploy Dictionary API
+ Upload Dictionary .env File
+ Edit Dictionary .env
+ Weight Dictionary Search Fields
+
+
+
+
+
+
+
+
+
+
+
+ false
+
+
+
+ Uploader
+ false
+ false
+
+
+
+ Build and Deploy Uploader
+ Upload Uploader .env File
+
+
+
+
+
+
+
+
+
+
+
+ false
+
Deployment
50000
@@ -169,9 +219,7 @@
- 6
- project_specific_override_repo
- __PROJECT_SPECIFIC_OVERRIDE_REPO__
+ 8
release_control_branch
*/master
release_control_repo
@@ -182,6 +230,12 @@
/usr/local/docker-config/
MYSQL_NETWORK
host
+ MYSQL_CONFIG_DIR
+ /usr/local/docker-config/picsure-db/
+ MIGRATION_REPO
+ https://github.com/hms-dbmi/PIC-SURE-Migrations
+ MIGRATION_NAME
+ __PROJECT_SPECIFIC_MIGRATION_NAME__
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Add or Edit a Setting/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Add or Edit a Setting/config.xml
deleted file mode 100644
index 81dbd5a1..00000000
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Add or Edit a Setting/config.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-
-
-
- Edit the values in picsureui_settings.json file. You can find the current file in either your Network tab of your browser's developer tools by searching for "settings.json" or in this location: $DOCKER_CONFIG_DIR/httpd/picsureui_settings.json
- false
-
-
-
-
- SETTING_FIELD_NAME
- The field name of the setting you would like to edit. Example: helpLink
-
- false
-
-
- SETTING_VALUE
- The value of the setting. This can be a string or json object. NOTE: If it is a string is should be in quotes. For more complex values you may need run it through a json linter. Example: "mailto:example@example.com" OR {"hello": "world", "arrayName": ["array", "test"]}
-
- false
-
-
-
-
-
- true
- false
- false
- false
-
- false
-
-
-
- #!/bin/bash
- # Check for the presence
- current_field_name=$(jq -r --arg field_name "$SETTING_FIELD_NAME" 'select(.[$field_name] != null) | .[$field_name]' /usr/local/docker-config/httpd/picsureui_settings.json)
- echo $current_field_name
-
- # Check if the key is missing
- if [[ -z $current_field_name ]]; then
- # Add the key with the value
- jq --arg field_name "$SETTING_FIELD_NAME" --argjson value "$SETTING_VALUE" '. + {($field_name): $value}' /usr/local/docker-config/httpd/picsureui_settings.json > /tmp/temp.json && mv /tmp/temp.json /usr/local/docker-config/httpd/picsureui_settings.json
- else
- # Replace the old value
- jq --arg field_name "$SETTING_FIELD_NAME" --argjson value "$SETTING_VALUE" '.[$field_name] = $value' /usr/local/docker-config/httpd/picsureui_settings.json > /tmp/temp.json && mv /tmp/temp.json /usr/local/docker-config/httpd/picsureui_settings.json
- fi
-
-
-
-
-
-
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Build Dictionary API/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Build Dictionary API/config.xml
new file mode 100644
index 00000000..174a2963
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Build Dictionary API/config.xml
@@ -0,0 +1,56 @@
+
+
+
+ Build the Dictionary API Container
+ false
+
+
+
+
+ pipeline_build_id
+ MANUAL_RUN
+ false
+
+
+ git_hash
+ origin/main
+ true
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/picsure-dictionary.git
+
+
+
+
+ ${git_hash}
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ current_git_tag=$(git tag --points-at HEAD)
+if [ -z "${current_git_tag}" ]; then
+ current_git_tag=$(git log --pretty=format:'%h' -n 1)
+fi
+docker build . -t "avillach/dictionary-api:$current_git_tag"
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Microservice/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Microservice/config.xml
deleted file mode 100755
index 5bf5ebc8..00000000
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Microservice/config.xml
+++ /dev/null
@@ -1,106 +0,0 @@
-
-
-
- Build and deploy a microservice. The microservice must live in a directory in
- https://github.com/hms-dbmi/pic-sure-services/
-
- false
-
-
-
-
- git_hash
- */main
- false
-
-
- service_name
- The name of the directory for your service in the pic-sure-services repo
- false
-
-
- service_description
- Describe it
- false
-
-
- .env
- The .env file needed to run this service. The UUID will be added to the end of this env
-
-
-
-
-
- 2
-
-
- https://github.com/hms-dbmi/pic-sure-services.git
-
-
-
-
- ${git_hash}
-
-
- false
-
-
-
- true
- false
- false
- false
-
- false
-
-
- # Get the resource from the db if it exists
-export SQL="SELECT LOWER(CONCAT(SUBSTR(HEX(uuid), 1, 8), '-', SUBSTR(HEX(uuid), 9, 4), '-', SUBSTR(HEX(uuid), 13, 4), '-', SUBSTR(HEX(uuid), 17, 4), '-', SUBSTR(HEX(uuid), 21))) from picsure.resource where name = '$service_name'";
-export resource_uuid=$(docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -se "$SQL" picsure);
-
-# Add the resource to the database if it doesn't already exist
-if [ -z "$resource_uuid" ]; then
- echo 'This is the first time building this resource. Adding to db'
- echo ''
- export SQL="INSERT IGNORE INTO picsure.resource (uuid, name, resourceRSPath, description) \
- VALUES (UUID(), '$service_name', 'http://$service_name/', '$service_description')";
- docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
-fi
-
-# Get the resource from the db
-export SQL="SELECT LOWER(CONCAT(SUBSTR(HEX(uuid), 1, 8), '-', SUBSTR(HEX(uuid), 9, 4), '-', SUBSTR(HEX(uuid), 13, 4), '-', SUBSTR(HEX(uuid), 17, 4), '-', SUBSTR(HEX(uuid), 21))) from picsure.resource where name = '$service_name'";
-export resource_uuid=$(docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -se "$SQL" picsure);
-echo ''
-echo "Done adding to db. Using $resource_uuid as uuid";
-
-# Add the resource to our env file
-if grep -q 'RESOURCE_UUID' .env; then
- echo ''
- echo 'RESOURCE_UUID exists in .env. Replacing with our value'
- sed -i~ '/^RESOURCE_UUID=/s/=.*/="$resource_uuid"/' .env
-else
- echo ''
- echo 'RESOURCE_UUID does NOT exist in .env. Appending our to eof'
- echo RESOURCE_UUID=$resource_uuid >> .env
-fi
-
-
-# Copy over all the things
-echo ''
-echo 'Copying source code, partial compose file and .env'
-cp -r $service_name /pic-sure-services/
-mv .env /pic-sure-services/$service_name/
-cp docker-compose.yml /pic-sure-services/
-cd /pic-sure-services/
-
-# This find command just gets all the docker-compose files in this dir and the child dirs
-# All those files get merged together into one compose file
-echo ''
-echo 'Building and starting container. Good luck!'
-docker compose -f docker-compose.yml $(find ./* -maxdepth 2 -name '*docker-compose.yml' | sed -e 's/^/-f /' | xargs) up --build -d
-
-
-
-
-
-
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Uploader/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Uploader/config.xml
new file mode 100644
index 00000000..2ea13db4
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Build and Deploy Uploader/config.xml
@@ -0,0 +1,57 @@
+
+
+
+
+ false
+
+
+
+
+ git_hash
+ */main
+ false
+
+
+ pipeline_build_id
+ MANUAL_BUILD
+ false
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/pic-sure-services.git
+
+
+
+
+ ${git_hash}
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ mv /usr/local/docker-config/uploader/.env .env
+cp -r uploader /usr/local/docker-config/
+mv .env /usr/local/docker-config/uploader/.env
+cd /usr/local/docker-config/uploader/
+
+docker compose up -d --build uploader
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Auth0 Integration/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Auth0 Integration/config.xml
index e74b4840..3b087172 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Auth0 Integration/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Auth0 Integration/config.xml
@@ -46,6 +46,9 @@ sed -i "s/$old_client_id/$AUTH0_CLIENT_ID/g" /usr/local/docker-config/
sed -i "s/$old_tenant/$AUTH0_TENANT/g" /usr/local/docker-config/httpd/picsureui_settings.json
sed -i "s/$old_tenant/$AUTH0_TENANT/g" /usr/local/docker-config/wildfly/standalone.xml
+sed -i "s/$old_client_secret/$AUTH0_CLIENT_SECRET/g" /usr/local/docker-config/psama/.env
+sed -i "s/$old_client_id/$AUTH0_CLIENT_ID/g" /usr/local/docker-config/psama/.env
+sed -i "s/$old_tenant/$AUTH0_TENANT/g" /usr/local/docker-config/psama/.env
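The same three substitutions are now applied to the PSAMA env file as well; a quick, hedged check that they landed (the path assumes the default config dir):

```bash
# Illustrative check after running "Configure Auth0 Integration".
grep -E '^(AUTH0_HOST|IDP_PROVIDER_URI|APPLICATION_CLIENT_SECRET)=' /usr/local/docker-config/psama/.env
```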
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Institution Node/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Institution Node/config.xml
index 3b59dff2..5ea1d25b 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Institution Node/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Institution Node/config.xml
@@ -39,10 +39,10 @@
false
- PROJECT_SPECIFIC_OVERRIDE_REPOSITORY
- This must be the project specific git repository that contains the project specific database migrations and UI overrides in the standard PIC-SURE Override Repository Structure.
-
- false
+ PROJECT_MIGRATION_NAME
+ Name of the migration to run, see https://github.com/hms-dbmi/PIC-SURE-Migrations or fork it and add your migration
+ GIC-Institution
+ true
RELEASE_CONTROL_REPOSITORY
@@ -68,9 +68,114 @@
0
false
-
-
-
+
+ OPEN
+      PIC-SURE can be used without forcing the user to log in. Unauthorized users will have a limited feature set. See more information on Gitbook: TODO_GITBOOK
+ true
+
+
+ DISCOVER
+ Enable the Discover page which allows users to search across all data in the project.
+ true
+
+
+ DASHBOARD
+ Enable the Dashboard page which allows users to search across all data in the project.
+ true
+
+
+ DIST_EXPLORER
+ Enable the distribution explorer page which creates visualizations of the data.
+ true
+
+
+ ENABLE_GENE_QUERY
+      Enable the genomic filtering UI which guides users in creating genomic queries. Enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ ENABLE_SNP_QUERY
+      Enable the genomic filtering UI which guides users in creating genomic SNP queries. Enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ VITE_ALLOW_EXPORT_ENABLED
+ Enables the export button on the search results page.
+ true
+
+
+ API
+ Shows the user information on how to use the API and allows them to access their LONG_TERM_TOKEN.
+ true
+
+
+ ALLOW_EXPORT
+ This enables the prepare for analysis button which guides the user through your export process. There are a few different settings below to control how the user may receive their data in the end.
+ true
+
+
+ DOWNLOAD_AS_CSV
+ Allows the user to download their data at the end of the export process as a CSV file.
+ true
+
+
+ SHOW_VARIABLE_EXPORT
+ Allow adding export variables via the search results UI.
+ true
+
+
+ SHOW_VARIABLE_HIERARCHY
+ Shows the hierarchy button for each search result and enables the hierarchy screen in the export process.
+ true
+
+
+ DOWNLOAD_AS_PFB
+ Allows the user to download their data at the end of the export process as a PFB file.
+ false
+
+
+ DATA_REQUESTS
+ Enables data request functionality.
+ false
+
+
+ VARIANT_EXPLORER
+ Enable the variant explorer. Only enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ VARIANT_EXPLORER_TYPE
+ Aggregate is recommended.
+
+
+ aggregate
+ full
+
+
+
+
+ EXPLORER_MAX_COUNT
+      Maximum number of data points for the variant explorer.
+ 10000
+ true
+
+
+ VARIANT_EXPLORER_EXCLUDE_COLUMNS
+ Advanced: An array of columns to exclude in the variant explorer.
+The value must be in an array format, for example: ["AN"]
+ true
+
+
+ TOUR
+ Enable the tour feature which guides the user through the application.
+ true
+
+
+ TOUR_SEARCH_TERM
+      The tour performs a search. You can specify the search term here.
+ age
+ true
+
@@ -93,7 +198,6 @@ pipeline {
steps {
script {
sh """
- sed -i "s|__PROJECT_SPECIFIC_OVERRIDE_REPO__|$env.PROJECT_SPECIFIC_OVERRIDE_REPOSITORY|g" /var/jenkins_home/config.xml
sed -i "s|__RELEASE_CONTROL_REPO__|$env.RELEASE_CONTROL_REPOSITORY|g" /var/jenkins_home/config.xml
"""
Jenkins.instance.doReload()
@@ -123,6 +227,54 @@ pipeline {
[$class: 'StringParameterValue', name: 'OUTBOUND_EMAIL_USER_PASSWORD', value: env.OUTBOUND_EMAIL_USER_PASSWORD]]
}
+ },
+ createFrontendEnvFile: {
+ script {
+ sh "[ ! -f /usr/local/docker-config/httpd/.env ] || cp -rf /usr/local/docker-config/httpd/.env /usr/local/docker-config/httpd/.env.bak"
+ sh "touch .env"
+ sh "chmod +wr .env"
+ sh "echo 'VITE_PROJECT_HOSTNAME=' >> .env"
+ sh "echo 'VITE_ORIGIN=' >> .env"
+ sh "echo 'VITE_LOGO=' >> .env"
+ sh "echo 'VITE_AUTH0_TENANT=$env.AUTH0_TENANT' >> .env"
+ sh "echo 'VITE_OPEN=$env.OPEN' >> .env"
+ sh "echo 'VITE_DISCOVER=$env.DISCOVER' >> .env"
+ sh "echo 'VITE_DASHBOARD=$env.DASHBOARD' >> .env"
+ sh "echo 'VITE_DIST_EXPLORER=$env.DIST_EXPLORER' >> .env"
+ sh "echo 'VITE_API=$env.API' >> .env"
+ sh "echo 'VITE_ALLOW_EXPORT=$env.ALLOW_EXPORT' >> .env"
+          sh "echo 'VITE_ALLOW_EXPORT_ENABLED=$env.VITE_ALLOW_EXPORT_ENABLED' >> .env"
+ sh "echo 'VITE_DOWNLOAD_AS_CSV=$env.DOWNLOAD_AS_CSV' >> .env"
+ sh "echo 'VITE_ENABLE_GENE_QUERY=$env.ENABLE_GENE_QUERY' >> .env"
+ sh "echo 'VITE_ENABLE_SNP_QUERY=$env.ENABLE_SNP_QUERY' >> .env"
+ sh "echo 'VITE_SHOW_VARIABLE_EXPORT=$env.SHOW_VARIABLE_EXPORT' >> .env"
+ sh "echo 'VITE_SHOW_VARIABLE_HIERARCHY=$env.SHOW_VARIABLE_HIERARCHY' >> .env"
+ sh "echo 'VITE_DOWNLOAD_AS_PFB=$env.DOWNLOAD_AS_PFB' >> .env"
+ sh "echo 'VITE_DATA_REQUESTS=$env.DATA_REQUESTS' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER=$env.VARIANT_EXPLORER' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER_TYPE=$env.VARIANT_EXPLORER_TYPE' >> .env"
+ sh "echo 'VITE_EXPLORER_MAX_COUNT=$env.EXPLORER_MAX_COUNT' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER_EXCLUDE_COLUMNS=$env.VARIANT_EXPLORER_EXCLUDE_COLUMNS' >> .env"
+ sh "echo 'VITE_TOUR=$env.TOUR' >> .env"
+ sh "echo 'VITE_TOUR_SEARCH_TERM=$env.TOUR_SEARCH_TERM' >> .env"
+ sh "echo 'VITE_REQUIRE_CONSENTS=false' >> .env"
+ sh "echo 'VITE_USE_QUERY_TEMPLATE=false' >> .env"
+ sh "echo '#Login Provider (You may add as many as you would like):' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE=true' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_TYPE=AUTH0' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_CLIENTID=$env.AUTH0_CLIENT_ID' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_CONNECTION=google-oauth2' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_DESCRIPTION=Login' >> .env"
+ sh "echo '#Resource UUIDs:' >> .env"
+ sh "echo 'VITE_RESOURCE_HPDS=' >> .env"
+ sh "echo 'VITE_RESOURCE_OPEN_HPDS=' >> .env"
+ sh "echo 'VITE_RESOURCE_BASE_QUERY=' >> .env"
+ sh "echo 'VITE_RESOURCE_VIZ=' >> .env"
+ sh "echo '#Google Analytics settings (leave blank to disable):' >> .env"
+ sh "echo 'VITE_GOOGLE_ANALYTICS_ID=' >> .env"
+ sh "echo 'VITE_GOOGLE_TAG_MANAGER_ID=' >> .env"
+ sh "mv .env '/usr/local/docker-config/httpd/.env'"
+ }
}
)
}
@@ -150,19 +302,24 @@ pipeline {
micro_app_ref = build.git_hash
echo 'micro_app_ref ' + micro_app_ref
}
- if(build.project_job_git_key.equalsIgnoreCase("PSU")) {
- psu_ref = build.git_hash
- echo 'psu_ref ' + psu_ref
+ if (build.project_job_git_key.equalsIgnoreCase("PSF")) {
+ psf_ref = build.git_hash
+ echo 'psf_ref ' + psf_ref
}
+ if (build.project_job_git_key.equalsIgnoreCase("PSM")) {
+ psm_ref = build.git_hash
+ echo 'psm_ref ' + psm_ref
+ }
if(build.project_job_git_key.equalsIgnoreCase("PSA")) {
pic_sure_ref = build.git_hash
echo 'pic_sure_ref ' + pic_sure_ref
}
}
- if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psu_ref.isEmpty()) {
+ if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psf_ref.isEmpty() || psm_ref.isEmpty()) {
currentBuild.result = 'ABORTED'
echo 'micro_app_ref = ' + micro_app_ref
- echo 'psu_ref = ' + psu_ref
+ echo 'psf_ref = ' + psf_ref
+ echo 'psm_ref = ' + psm_ref
echo 'pic_sure_ref = ' + pic_sure_ref
error('Build Spec Not configured correctly!')
}
@@ -176,6 +333,8 @@ pipeline {
script {
checkout([$class: 'GitSCM', branches: [[name: micro_app_ref ]],
userRemoteConfigs: [[url: 'https://github.com/hms-dbmi/pic-sure-auth-microapp']]])
+ def image = docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0')
+ image.pull()
//git url: 'https://github.com/hms-dbmi/pic-sure-auth-microapp'
docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
sh "rm -rf /opt/flyway-migrations/auth/sql"
@@ -222,25 +381,24 @@ pipeline {
steps {
node("master") {
script {
- checkout([$class: 'GitSCM', branches: [[name: psu_ref ]],
- userRemoteConfigs: [[url: env.project_specific_override_repo]]])
- //git url: env.project_specific_override_repo
- sh """
- sed -i "s/__APPLICATION_UUID__/`cat /usr/local/docker-config/APP_ID_HEX`/g" ./custom-migrations/auth/*.sql
- sed -i "s/__RESOURCE_UUID__/`cat /usr/local/docker-config/RESOURCE_ID_HEX`/g" ./custom-migrations/picsure/*.sql
- """
- docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/picsure:/opt/flyway-migrations/picsure "--entrypoint=" ') {
- sh "rm -rf /opt/flyway-migrations/picsure/sql"
- sh "cp -R ./custom-migrations/picsure /opt/flyway-migrations/picsure/sql"
- sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/picsure/flyway-picsure.conf -table=flyway_custom_schema_history migrate"
- sleep(time:10,unit:"SECONDS")
- }
- docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
- sh "rm -rf /opt/flyway-migrations/auth/sql"
- sh "cp -R ./custom-migrations/auth /opt/flyway-migrations/auth/sql"
- sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/auth/flyway-auth.conf -table=flyway_custom_schema_history migrate"
- sleep(time:10,unit:"SECONDS")
- }
+ checkout([$class: 'GitSCM', branches: [[name: psm_ref ]],
+ userRemoteConfigs: [[url: MIGRATION_REPO]]])
+ sh """
+ sed -i "s/__APPLICATION_UUID__/`cat /usr/local/docker-config/APP_ID_HEX`/g" ./${MIGRATION_NAME}/auth/*.sql
+ sed -i "s/__RESOURCE_UUID__/`cat /usr/local/docker-config/RESOURCE_ID_HEX`/g" ./${MIGRATION_NAME}/picsure/*.sql
+ """
+ docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/picsure:/opt/flyway-migrations/picsure "--entrypoint=" ') {
+ sh "rm -rf /opt/flyway-migrations/picsure/sql"
+ sh "cp -R ./${MIGRATION_NAME}/picsure /opt/flyway-migrations/picsure/sql"
+ sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/picsure/flyway-picsure.conf -table=flyway_custom_schema_history migrate"
+ sleep(time:10,unit:"SECONDS")
+ }
+ docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
+ sh "rm -rf /opt/flyway-migrations/auth/sql"
+ sh "cp -R ./${MIGRATION_NAME}/auth /opt/flyway-migrations/auth/sql"
+ sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/auth/flyway-auth.conf -table=flyway_custom_schema_history migrate"
+ sleep(time:10,unit:"SECONDS")
+ }
}
}
}
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure JupyterHub Token Introspection Token/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure JupyterHub Token Introspection Token/config.xml
index 43a64d10..c4f6a21d 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure JupyterHub Token Introspection Token/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure JupyterHub Token Introspection Token/config.xml
@@ -50,7 +50,7 @@ export old_token_introspection_token=`cat /usr/local/docker-config/jupyterhub_co
sed -i "s/$old_token_introspection_token/$new_token_introspection_token/g" /usr/local/docker-config/jupyterhub_config.py
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"update application set token='$new_token_introspection_token' where uuid=$application_id;" auth
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Outbound Email Settings/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Outbound Email Settings/config.xml
index ef75cad2..dca80b51 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Outbound Email Settings/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Outbound Email Settings/config.xml
@@ -48,6 +48,16 @@ fi
if [ -n "$EMAIL_FROM" ] && [ -n "$oldfrom" ] ; then
sed -i "s/$oldfrom/$EMAIL_FROM/g" /usr/local/docker-config/wildfly/standalone.xml
fi
+
+if [ -n "$OUTBOUND_EMAIL_USER_PASSWORD" ] && [ -n "$oldpassword" ] ; then
+ sed -i "s/$oldpassword/$OUTBOUND_EMAIL_USER_PASSWORD/g" /usr/local/docker-config/psama/.env
+fi
+if [ -n "$OUTBOUND_EMAIL_USER" ] && [ -n "$olduser" ] ; then
+ sed -i "s/$olduser/$OUTBOUND_EMAIL_USER/g" /usr/local/docker-config/psama/.env
+fi
+if [ -n "$EMAIL_FROM" ] && [ -n "$oldfrom" ] ; then
+ sed -i "s/$oldfrom/$EMAIL_FROM/g" /usr/local/docker-config/psama/.env
+fi
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure PIC-SURE Token Introspection Token/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure PIC-SURE Token Introspection Token/config.xml
index f7f391f7..897e57a0 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure PIC-SURE Token Introspection Token/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure PIC-SURE Token Introspection Token/config.xml
@@ -46,10 +46,11 @@ export appplication_uuid=`cat /usr/local/docker-config/httpd/picsureui_settings.
export new_token_introspection_token=`java -jar generateJwt.jar secret.txt sub "PSAMA_APPLICATION|${appplication_uuid}" 365 day | grep -v "Generating"`
export old_token_introspection_token=`cat /usr/local/docker-config/wildfly/standalone.xml | grep token_introspection_token | cut -d '=' -f 3 | sed 's/[\"/\>]//'g`
-
sed -i "s/$old_token_introspection_token/$new_token_introspection_token/g" /usr/local/docker-config/wildfly/standalone.xml
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+sed -i "s/$old_token_introspection_token/$new_token_introspection_token/g" /usr/local/docker-config/psama/.env
+
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"update application set token='$new_token_introspection_token';" auth
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Remote MySQL Instance/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Remote MySQL Instance/config.xml
index b78202ac..88a45ed1 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Configure Remote MySQL Instance/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Configure Remote MySQL Instance/config.xml
@@ -65,7 +65,6 @@ echo ""
flyway_auth_url=jdbc:mysql://$MYSQL_HOST_NAME:$MYSQL_PORT/auth?serverTimezone=UTC
flyway_picsure_url=jdbc:mysql://$MYSQL_HOST_NAME:$MYSQL_PORT/picsure?serverTimezone=UTC
-
cd /usr/local/docker-config/flyway/auth
sed -i '/flyway.url/d' ./flyway-auth.conf
sed -i "1iflyway.url=$flyway_auth_url" ./flyway-auth.conf
@@ -82,9 +81,9 @@ sed -i "1ihost=$MYSQL_HOST_NAME" ./sql.properties
sed -i '/port/d' ./sql.properties
sed -i "2iport=$MYSQL_PORT" ./sql.properties
+sed -i 's/jdbc:mysql*.*auth/jdbc:mysql:\/\/'$MYSQL_HOST_NAME':'$MYSQL_PORT'\/auth/g' /usr/local/docker-config/psama/.env
cd /usr/local/docker-config/wildfly
-sed -i 's/jdbc:mysql*.*auth/jdbc:mysql:\/\/'$MYSQL_HOST_NAME':'$MYSQL_PORT'\/auth/g' /usr/local/docker-config/wildfly/standalone.xml
sed -i 's/jdbc:mysql*.*picsure/jdbc:mysql:\/\/'$MYSQL_HOST_NAME':'$MYSQL_PORT'\/picsure/g' /usr/local/docker-config/wildfly/standalone.xml
echo `grep "password" /usr/local/docker-config/flyway/auth/sql.properties | cut -d "=" -f2-` > airflow.tmp
@@ -95,19 +94,18 @@ AIRFLOW_PASSWORD=`grep "password" /usr/local/docker-config/flyway/auth
PICSURE_PASSWORD=`sed -n 's/<password>\(.*\)<\/password>/\1/p' picsure.tmp | xargs`
AUTH_PASSWORD=`sed -n 's/<password>\(.*\)<\/password>/\1/p' auth.tmp | xargs`
-
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on auth.* to 'airflow'@'%' identified by '$AIRFLOW_PASSWORD';flush privileges;" mysql
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on picsure.* to 'airflow'@'%' identified by '$AIRFLOW_PASSWORD';flush privileges;" mysql
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on picsure.* to 'picsure'@'%' identified by '$PICSURE_PASSWORD';flush privileges;" mysql
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on auth.* to 'auth'@'%' identified by '$AUTH_PASSWORD';flush privileges;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on auth.* to 'airflow'@'%' identified by '$AIRFLOW_PASSWORD';flush privileges;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on picsure.* to 'airflow'@'%' identified by '$AIRFLOW_PASSWORD';flush privileges;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on picsure.* to 'picsure'@'%' identified by '$PICSURE_PASSWORD';flush privileges;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "grant all privileges on auth.* to 'auth'@'%' identified by '$AUTH_PASSWORD';flush privileges;" mysql
if [ "$DROP_EXISTING_TABLES" = "TRUE"]; then
- docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "drop database IF EXISTS auth;" mysql
- docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "drop database IF EXISTS picsure;" mysql
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "drop database IF EXISTS auth;" mysql
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "drop database IF EXISTS picsure;" mysql
fi
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "create database auth;" mysql
-docker run -i -v /root/.my.cnf:/root/.my.cnf mysql mysql -e "create database picsure;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "create database auth;" mysql
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf mysql mysql -e "create database picsure;" mysql
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create Admin User/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create Admin User/config.xml
old mode 100644
new mode 100755
index c49a9b59..4314b49d
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create Admin User/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create Admin User/config.xml
@@ -8,8 +8,6 @@
EMAIL
-
-
false
@@ -32,15 +30,16 @@
export USER_ID=`uuidgen -r`
export USER_ID_HEX=`echo $USER_ID | awk '{ print toupper($0) }'|sed 's/-//g'`
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
-"INSERT INTO auth.user VALUES (unhex('$USER_ID_HEX'), null, '{\"email\":\"$EMAIL\"}', null, (select uuid from connection where label='$CONNECTION_LABEL'),'$EMAIL',0,null,1,null);" auth
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
-"INSERT INTO auth.user_role VALUES (unhex('$USER_ID_HEX'), unhex('002DC366B0D8420F998F885D0ED797FD'));" auth
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
-"INSERT INTO auth.user_role VALUES (unhex('$USER_ID_HEX'), unhex('797FD002DC366B0D8420F998F885D0ED'));" auth
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+"INSERT INTO auth.user (uuid, auth0_metadata, general_metadata, acceptedTOS, connectionId, email, matched, subject, is_active, long_term_token) VALUES (unhex('$USER_ID_HEX'), null, '{\"email\":\"$EMAIL\"}', null, (select uuid from connection where label='$CONNECTION_LABEL'),'$EMAIL',0,null,1,null);" auth
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+"INSERT INTO auth.user_role (user_id, role_id) VALUES (unhex('$USER_ID_HEX'), unhex('002DC366B0D8420F998F885D0ED797FD'));" auth
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+"INSERT INTO auth.user_role (user_id, role_id) VALUES (unhex('$USER_ID_HEX'), unhex('797FD002DC366B0D8420F998F885D0ED'));" auth
+
-
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create Custom Login (IDP) Connection/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create Custom Login (IDP) Connection/config.xml
index 819bc4cd..f2194667 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create Custom Login (IDP) Connection/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create Custom Login (IDP) Connection/config.xml
@@ -41,11 +41,11 @@
CONNECTION_ID=`uuidgen -r`
CONNECTION_ID_HEX=`echo $CONNECTION_ID | awk '{ print toupper($0) }'|sed 's/-//g';`
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"INSERT INTO auth.connection (uuid, label, id, subprefix, requiredFields) VALUES (unhex('$CONNECTION_ID_HEX'), '$CONNECTION_LABEL', '$ID', '$SUBPREFIX', '$requiredFields');" auth
USER_MAPPING_ID=`uuidgen -r`
USER_MAPPING_ID_HEX=`echo $USER_MAPPING_ID | awk '{ print toupper($0) }'|sed 's/-//g';`
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"INSERT INTO auth.userMetadataMapping (uuid, auth0MetadataJsonPath, connectionId, generalMetadataJsonPath) VALUES (unhex('$USER_MAPPING_ID_HEX'), '\$.email', unhex('$CONNECTION_ID_HEX'), '\$.email');" auth
echo "Please now edit the connections.json file in the httpd container and add your IDP button there. For more instructions see: https://pic-sure.gitbook.io/pic-sure/"
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Aggregate Resource/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Aggregate Resource/config.xml
index c830ed19..96a9041f 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Aggregate Resource/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Aggregate Resource/config.xml
@@ -93,7 +93,7 @@ RESOURCE_ID_HEX=`echo $RESOURCE_ID | awk '{ print toupper($0) }'|sed &
export SQL="INSERT INTO resource (uuid, targetURL, resourceRSPath, description, name, token) \
VALUES (unhex('$RESOURCE_ID_HEX'), NULL, 'http://wildfly:8080/$RESOURCE_PATH/pic-sure/aggregate-data-sharing/', '$RESOURCE_DESC', '$RESOURCE_NAME', NULL);"
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE PassThrough Resource/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE PassThrough Resource/config.xml
index 843bb22f..a0f7c80e 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE PassThrough Resource/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE PassThrough Resource/config.xml
@@ -102,7 +102,7 @@ RESOURCE_ID_HEX=`echo $RESOURCE_ID | awk '{ print toupper($0) }'|sed &
export SQL="INSERT INTO resource (uuid, targetURL, resourceRSPath, description, name, token) \
VALUES (unhex('$RESOURCE_ID_HEX'), NULL, 'http://wildfly:8080/$RESOURCE_PATH/pic-sure/passthru/', '$RESOURCE_DESC', '$RESOURCE_NAME', NULL);"
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Visualization Build/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Visualization Build/config.xml
new file mode 100644
index 00000000..a2a4b982
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create PIC-SURE Visualization Build/config.xml
@@ -0,0 +1,84 @@
+
+
+
+
+ false
+
+
+
+
+ RESOURCE_NAME
+ Resource Name
+ PIC-SURE Visualization Resource
+ false
+
+
+ RESOURCE_PATH
+ The path to be used in wildfly deployments
+ pic-sure-visualization-resource
+ false
+
+
+ git_hash
+ */master
+ false
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/pic-sure.git
+
+
+
+
+ ${git_hash}
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ # Copy WAR file from PIC-SURE-API Build resources
+cd pic-sure-resources/pic-sure-visualization-resource
+mkdir -p /usr/local/docker-config/wildfly/deployments
+cp target/pic-sure-visualization-resource.war /usr/local/docker-config/wildfly/deployments/$RESOURCE_PATH.war
+
+# Make properties config file
+export SQL="SELECT LOWER(CONCAT(SUBSTR(HEX(uuid), 1, 8), '-', SUBSTR(HEX(uuid), 9, 4), '-', SUBSTR(HEX(uuid), 13, 4), '-', SUBSTR(HEX(uuid), 17, 4), '-', SUBSTR(HEX(uuid), 21))) from picsure.resource where name = 'hpds'";
+HPDS_ID=$(docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -se "$SQL" picsure);
+RESOURCE_ID=`uuidgen -r`
+
+mkdir -p /usr/local/docker-config/wildfly/visualization/$RESOURCE_PATH
+PROP_FILE="/usr/local/docker-config/wildfly/visualization/$RESOURCE_PATH/resource.properties"
+cat > $PROP_FILE <<-END
+target.origin.id=http://localhost:8080/pic-sure-api-2/PICSURE/
+visualization.resource.id=$RESOURCE_ID
+auth.hpds.resource.id=$HPDS_ID
+open.hpds.resource.id=$HPDS_ID
+END
+
+# Insert into DB
+RESOURCE_ID_HEX=`echo $RESOURCE_ID | awk '{ print toupper($0) }'|sed 's/-//g';`
+export SQL="INSERT INTO resource (uuid, targetURL, resourceRSPath, description, name, token) \
+ VALUES (unhex('$RESOURCE_ID_HEX'), NULL, 'http://wildfly:8080/$RESOURCE_PATH/pic-sure/visualization/', '$RESOURCE_DESC', '$RESOURCE_NAME', NULL);"
+
+# Run with config
+docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e "$SQL" picsure
+
+
+
+
+
+
\ No newline at end of file
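After this job runs, the new resource row can be verified with the same mysql-in-docker pattern the other jobs use. A hedged sketch: the resource name below is the job's default parameter, and the `.my.cnf` mount assumes the new `$MYSQL_CONFIG_DIR` convention rather than the old `/root` path:

```bash
# Illustrative check that the visualization resource was inserted into picsure.resource.
export SQL="SELECT LOWER(CONCAT(SUBSTR(HEX(uuid),1,8),'-',SUBSTR(HEX(uuid),9,4),'-',SUBSTR(HEX(uuid),13,4),'-',SUBSTR(HEX(uuid),17,4),'-',SUBSTR(HEX(uuid),21))) AS uuid, name \
  FROM picsure.resource WHERE name = 'PIC-SURE Visualization Resource'"
docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} \
  mysql mysql -e "$SQL" picsure
```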
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create Root Certs in TrustStore/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create Root Certs in TrustStore/config.xml
index 0b69fc06..9b00ee85 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create Root Certs in TrustStore/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create Root Certs in TrustStore/config.xml
@@ -13,6 +13,9 @@
+
+ rm -f /usr/local/docker-config/wildfly/application.truststore
+
curl https://letsencrypt.org/certs/isrgrootx1.der -o isrgrootx1.der
curl https://letsencrypt.org/certs/lets-encrypt-r3.der -o lets-encrypt-r3.der
@@ -20,6 +23,15 @@
keytool -import -keystore /usr/local/docker-config/wildfly/application.truststore -storepass password -noprompt -trustcacerts -alias letsencryptauthority1 -file isrgrootx1.der -storetype JKS
keytool -import -keystore /usr/local/docker-config/wildfly/application.truststore -storepass password -noprompt -trustcacerts -alias letsencryptauthority2 -file lets-encrypt-r3.der -storetype JKS
+
+
+ rm -rf /usr/local/docker-config/psama/application.truststore
+
+ curl https://letsencrypt.org/certs/isrgrootx1.der -o isrgrootx1.der
+ curl https://letsencrypt.org/certs/lets-encrypt-r3.der -o lets-encrypt-r3.der
+
+ keytool -import -keystore /usr/local/docker-config/psama/application.truststore -storepass password -noprompt -trustcacerts -alias letsencryptauthority1 -file isrgrootx1.der -storetype JKS
+ keytool -import -keystore /usr/local/docker-config/psama/application.truststore -storepass password -noprompt -trustcacerts -alias letsencryptauthority2 -file lets-encrypt-r3.der -storetype JKS
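Both truststores are created with the same storepass; a hedged one-liner to confirm the Let's Encrypt roots actually made it into the new PSAMA truststore (assumes a local JDK `keytool` is available):

```bash
# Illustrative verification of the PSAMA truststore built above.
keytool -list -keystore /usr/local/docker-config/psama/application.truststore \
  -storepass password -storetype JKS | grep -i letsencrypt
```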
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Create Test Users/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Create Test Users/config.xml
index cc0e703f..9cf753e5 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Create Test Users/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Create Test Users/config.xml
@@ -61,7 +61,7 @@ function run_sql_procedure() {
local connection_id="$2"
local role_name="$3"
- docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"CALL CreateUserWithRole('$user_email', '$connection_id', '$role_name', '{\"email\": \"$user_email\"}');" auth
}
@@ -74,14 +74,14 @@ function update_user_token() {
grep client_secret /usr/local/docker-config/wildfly/standalone.xml | cut -d '=' -f 3 | sed 's/[\"/\>]//g' > secret.txt
# Get the user subject by email
- USER_SUBJECT=$(docker run -i -v /root/.my.cnf:/root/.my.cnf --network=host mysql mysql -N -e \
+ USER_SUBJECT=$(docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=host mysql mysql -N -e \
"SELECT subject FROM auth.user where email='$USERNAME';")
# Create a new user token by subject and expiry
user_token=$(java -jar target/generateJwt.jar secret.txt sub "${USER_SUBJECT}" ${DAYSUNTILEXPIRATION} day | grep -v "Generating")
# Update user with the new token
- docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"update auth.user set long_term_token='$user_token' where email='$USERNAME';"
# Return the newly generated user token
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Deploy Dictionary API/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Deploy Dictionary API/config.xml
new file mode 100644
index 00000000..bbe9d588
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Deploy Dictionary API/config.xml
@@ -0,0 +1,63 @@
+
+
+
+ Deploy Dictionary API Container
+ false
+
+
+
+
+ dictionary.env
+
+
+ pipeline_build_id
+ MANUAL_RUN
+ false
+
+
+ git_hash
+ origin/main
+ true
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/picsure-dictionary.git
+
+
+
+
+ ${git_hash}
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ mkdir -p $DOCKER_CONFIG_DIR/dictionary
+cp docker-compose.yml $DOCKER_CONFIG_DIR/dictionary/
+
+if [ -f dictionary.env ]; then
+ cp dictionary.env $DOCKER_CONFIG_DIR/dictionary/.env
+fi
+
+cd $DOCKER_CONFIG_DIR/dictionary/
+docker compose up -d
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Download PSAMA Configuration/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Download PSAMA Configuration/config.xml
new file mode 100644
index 00000000..ec1aa894
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Download PSAMA Configuration/config.xml
@@ -0,0 +1,38 @@
+
+
+
+
+ false
+
+
+ false
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ # Just get the file so a user can download it.
+cp /usr/local/docker-config/psama/.env psama.env
+
+
+
+
+
+ psama.env
+ false
+ false
+ false
+ true
+ true
+ false
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Edit Dictionary .env/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Edit Dictionary .env/config.xml
new file mode 100644
index 00000000..e67dde95
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Edit Dictionary .env/config.xml
@@ -0,0 +1,50 @@
+
+
+
+
+ false
+
+
+
+
+ ENV_CHANGES
+ A space delimited list of key value pairs. Ex:
+key1 value1 key2 value2
+ false
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/properties-editor.git
+
+
+
+
+ */master
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ docker build . -t avillachlab/properties-editor:LATEST
+docker run --rm -v $DOCKER_CONFIG_DIR/dictionary/.env:/.env avillachlab/properties-editor:LATEST .env $ENV_CHANGES
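+
+# Illustrative example (hypothetical keys): running this job with ENV_CHANGES set to
+#   "SPRING_PROFILES_ACTIVE prod LOGGING_LEVEL_ROOT debug"
+# should update those two entries in $DOCKER_CONFIG_DIR/dictionary/.env in place.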
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Export builds/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Export builds/config.xml
new file mode 100644
index 00000000..688ff220
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Export builds/config.xml
@@ -0,0 +1,48 @@
+
+
+
+
+ false
+
+
+ false
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ #!/bin/bash
+
+# Jenkins jobs directory
+jenkinsJobsDir="/var/jenkins_home/jobs"
+
+# Output tar file name
+outputFileName="jenkins_jobs_backup.tar.gz"
+
+# Find all config.xml files and pass them to tar for archiving
+find $jenkinsJobsDir -type f -name "config.xml" -print0 | tar -czvf $outputFileName --null -T -
+
+echo "Backup completed: $outputFileName"
+
+
+
+
+
+ jenkins_jobs_backup.tar.gz
+ false
+ false
+ false
+ true
+ true
+ false
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Initial Configuration Pipeline/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Initial Configuration Pipeline/config.xml
index 0d7dde28..241bd113 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Initial Configuration Pipeline/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Initial Configuration Pipeline/config.xml
@@ -18,49 +18,157 @@
AUTH0_CLIENT_ID
__PIC_SURE_CLIENT_ID__
- false
+ true
AUTH0_CLIENT_SECRET
__PIC_SURE_CLIENT_SECRET__
- false
+ true
AUTH0_TENANT
avillachlab
- false
+ true
ADMIN_USER_EMAIL
The email address for the initial admin user. This should be a Google account.
- false
+ true
- PROJECT_SPECIFIC_OVERRIDE_REPOSITORY
- This must be the project specific git repository that contains the project specific database migrations and UI overrides in the standard PIC-SURE Override Repository Structure.
-
- false
+ PROJECT_MIGRATION_NAME
+ Name of the migration to run; see https://github.com/hms-dbmi/PIC-SURE-Migrations, or fork that repo and add your own migration
+ Baseline
+ true
RELEASE_CONTROL_REPOSITORY
This must be the project specific release control git repository that contains the git commit hashes that will be built and deployed.
- false
+ true
OUTBOUND_EMAIL_USER
The email account name that should be used to send outbound email. The default server is Google's SMTP, so this should be a GMail account. This field is optional
__user@email.com__
- false
+ true
OUTBOUND_EMAIL_USER_PASSWORD
The password for the account used to send outbound. This field is optional
__YOUR_EMAIL_PASSWORD__
- false
+ true
+
+
+ OPEN
+ PIC-SURE can be used without forcing the user to log in. Unauthorized users will have a limited feature set. See more information on Gitbook: TODO_GITBOOK
+ true
+
+
+ DISCOVER
+ Enable the Discover page which allows users to search across all data in the project.
+ true
+
+
+ DASHBOARD
+ Enable the Dashboard page.
+ true
+
+
+ DIST_EXPLORER
+ Enable the distribution explorer page which creates visualizations of the data.
+ true
+
+
+ ENABLE_GENE_QUERY
+ Enable the genomic filtering UI, which guides users in building genomic queries. Enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ ENABLE_SNP_QUERY
+ Enable the genomic filtering UI, which guides users in building genomic SNP queries. Enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ VITE_ALLOW_EXPORT_ENABLED
+ Enables the export button on the search results page.
+ true
+
+
+ API
+ Shows the user information on how to use the API and allows them to access their LONG_TERM_TOKEN.
+ true
+
+
+ ALLOW_EXPORT
+ Enables the prepare-for-analysis button, which guides the user through your export process. The settings below control how the user receives their data at the end.
+ true
+
+
+ DOWNLOAD_AS_CSV
+ Allows the user to download their data at the end of the export process as a CSV file.
+ true
+
+
+ SHOW_VARIABLE_EXPORT
+ Allows adding export variables via the search results UI.
+ true
+
+
+ SHOW_VARIABLE_HIERARCHY
+ Shows the hierarchy button for each search result and enables the hierarchy screen in the export process.
+ true
+
+
+ DOWNLOAD_AS_PFB
+ Allows the user to download their data at the end of the export process as a PFB file.
+ false
+
+
+ DATA_REQUESTS
+ Enables data request functionality.
+ false
+
+
+ VARIANT_EXPLORER
+ Enable the variant explorer. Only enable if HPDS has been loaded with Genomic data.
+ true
+
+
+ VARIANT_EXPLORER_TYPE
+ Aggregate is recommended.
+
+
+ aggregate
+ full
+
+
+
+
+ EXPLORER_MAX_COUNT
+ Maximum number of data points for the variant explorer.
+ 10000
+ true
+
+
+ VARIANT_EXPLORER_EXCLUDE_COLUMNS
+ Advanced: an array of columns to exclude from the variant explorer.
+The value must be in an array format, for example: ["AN"]
+ true
+
+
+ TOUR
+ Enable the tour feature which guides the user through the application.
+ true
+
+
+ TOUR_SEARCH_TERM
+ The tour performs a search. You can specify the search term here.
+ age
+ true
@@ -70,7 +178,10 @@
def retrieveBuildSpecId;
def pipelineBuildId;
-def build_hashes = {};
+def build_hashes = [
+ DICTIONARY: false,
+ UPLOADER: false
+];
pipeline {
agent any
stages {
@@ -78,102 +189,152 @@ pipeline {
steps {
script {
sh """
- sed -i "s|__PROJECT_SPECIFIC_OVERRIDE_REPO__|$env.PROJECT_SPECIFIC_OVERRIDE_REPOSITORY|g" /var/jenkins_home/config.xml
sed -i "s|__RELEASE_CONTROL_REPO__|$env.RELEASE_CONTROL_REPOSITORY|g" /var/jenkins_home/config.xml
+ sed -i "s|__PROJECT_SPECIFIC_MIGRATION_NAME__|$env.PROJECT_MIGRATION_NAME|g" /var/jenkins_home/config.xml
"""
Jenkins.instance.doReload()
}
}
}
- stage('Initial Config and Build') {
- steps {
- parallel (
- dbMigrate: {
- script {
- def result = build job: 'PIC-SURE Database Migrations'
- }
- },
- picsureBuild: {
- script {
- def result = build job: 'PIC-SURE Pipeline'
- }
- },
- auth0Config: {
- script {
- def result = build job: 'Configure Auth0 Integration', parameters: [
- [$class: 'StringParameterValue', name: 'AUTH0_CLIENT_ID', value:env.AUTH0_CLIENT_ID],
- [$class: 'StringParameterValue', name: 'AUTH0_CLIENT_SECRET', value: env.AUTH0_CLIENT_SECRET]]
- }
- },
- addRootCerts: {
- script {
- def result = build job: 'Create Root Certs in TrustStore'
- }
- },
- emailConfig: {
- script {
- def result = build job: 'Configure Outbound Email Settings', parameters: [
- [$class: 'StringParameterValue', name: 'OUTBOUND_EMAIL_USER', value:env.OUTBOUND_EMAIL_USER],
- [$class: 'StringParameterValue', name: 'EMAIL_FROM', value: env.OUTBOUND_EMAIL_USER],
- [$class: 'StringParameterValue', name: 'OUTBOUND_EMAIL_USER_PASSWORD', value: env.OUTBOUND_EMAIL_USER_PASSWORD]]
+ stage('Initial Config and Build') {
+ steps {
+ parallel (
+ dbMigrate: {
+ script {
+ def result = build job: 'PIC-SURE Database Migrations'
+ }
+ },
+ picsureBuild: {
+ script {
+ def result = build job: 'PIC-SURE Pipeline'
+ }
+ },
+ auth0Config: {
+ script {
+ def result = build job: 'Configure Auth0 Integration', parameters: [
+ [$class: 'StringParameterValue', name: 'AUTH0_CLIENT_ID', value:env.AUTH0_CLIENT_ID],
+ [$class: 'StringParameterValue', name: 'AUTH0_CLIENT_SECRET', value: env.AUTH0_CLIENT_SECRET]]
+ }
+ },
+ addRootCerts: {
+ script {
+ catchError(buildResult: 'SUCCESS', stageResult: 'UNSTABLE') {
+ def result = build job: 'Create Root Certs in TrustStore'
+ }
+ }
+ },
+ createFrontendEnvFile: {
+ script {
+ sh "[ ! -f /usr/local/docker-config/httpd/.env ] || cp -rf /usr/local/docker-config/httpd/.env /usr/local/docker-config/httpd/.env.bak"
+ sh "touch .env"
+ sh "chmod +wr .env"
+ sh "echo 'VITE_PROJECT_HOSTNAME=' >> .env"
+ sh "echo 'VITE_ORIGIN=' >> .env"
+ sh "echo 'VITE_LOGO=' >> .env"
+ sh "echo 'VITE_AUTH0_TENANT=$env.AUTH0_TENANT' >> .env"
+ sh "echo 'VITE_OPEN=$env.OPEN' >> .env"
+ sh "echo 'VITE_DISCOVER=$env.DISCOVER' >> .env"
+ sh "echo 'VITE_DASHBOARD=$env.DASHBOARD' >> .env"
+ sh "echo 'VITE_DIST_EXPLORER=$env.DIST_EXPLORER' >> .env"
+ sh "echo 'VITE_API=$env.API' >> .env"
+ sh "echo 'VITE_ALLOW_EXPORT=$env.ALLOW_EXPORT' >> .env"
+ sh "echo 'VITE_ALLOW_EXPORT_ENABLED=$env.ALLOW_EXPORT_ENABLED' >> .env"
+ sh "echo 'VITE_DOWNLOAD_AS_CSV=$env.DOWNLOAD_AS_CSV' >> .env"
+ sh "echo 'VITE_ENABLE_GENE_QUERY=$env.ENABLE_GENE_QUERY' >> .env"
+ sh "echo 'VITE_ENABLE_SNP_QUERY=$env.ENABLE_SNP_QUERY' >> .env"
+ sh "echo 'VITE_SHOW_VARIABLE_EXPORT=$env.SHOW_VARIABLE_EXPORT' >> .env"
+ sh "echo 'VITE_SHOW_VARIABLE_HIERARCHY=$env.SHOW_VARIABLE_HIERARCHY' >> .env"
+ sh "echo 'VITE_DOWNLOAD_AS_PFB=$env.DOWNLOAD_AS_PFB' >> .env"
+ sh "echo 'VITE_DATA_REQUESTS=$env.DATA_REQUESTS' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER=$env.VARIANT_EXPLORER' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER_TYPE=$env.VARIANT_EXPLORER_TYPE' >> .env"
+ sh "echo 'VITE_EXPLORER_MAX_COUNT=$env.EXPLORER_MAX_COUNT' >> .env"
+ sh "echo 'VITE_VARIANT_EXPLORER_EXCLUDE_COLUMNS=$env.VARIANT_EXPLORER_EXCLUDE_COLUMNS' >> .env"
+ sh "echo 'VITE_TOUR=$env.TOUR' >> .env"
+ sh "echo 'VITE_TOUR_SEARCH_TERM=$env.TOUR_SEARCH_TERM' >> .env"
+ sh "echo 'VITE_REQUIRE_CONSENTS=false' >> .env"
+ sh "echo 'VITE_USE_QUERY_TEMPLATE=false' >> .env"
+ sh "echo '#Login Provider (You may add as many as you would like):' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE=true' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_TYPE=AUTH0' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_CLIENTID=$env.AUTH0_CLIENT_ID' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_CONNECTION=google-oauth2' >> .env"
+ sh "echo 'VITE_AUTH_PROVIDER_MODULE_GOOGLE_DESCRIPTION=Login' >> .env"
+ sh "echo '#Resource UUIDs:' >> .env"
+ sh "echo 'VITE_RESOURCE_HPDS=' >> .env"
+ sh "echo 'VITE_RESOURCE_OPEN_HPDS=' >> .env"
+ sh "echo 'VITE_RESOURCE_BASE_QUERY=' >> .env"
+ sh "echo 'VITE_RESOURCE_VIZ=' >> .env"
+ sh "echo '#Google Analytics settings (leave blank to disable):' >> .env"
+ sh "echo 'VITE_GOOGLE_ANALYTICS_ID=' >> .env"
+ sh "echo 'VITE_GOOGLE_TAG_MANAGER_ID=' >> .env"
+ sh "mv .env '/usr/local/docker-config/httpd/.env'"
+ }
+ },
+ emailConfig: {
+ script {
+ def result = build job: 'Configure Outbound Email Settings', parameters: [
+ [$class: 'StringParameterValue', name: 'OUTBOUND_EMAIL_USER', value:env.OUTBOUND_EMAIL_USER],
+ [$class: 'StringParameterValue', name: 'EMAIL_FROM', value: env.OUTBOUND_EMAIL_USER],
+ [$class: 'StringParameterValue', name: 'OUTBOUND_EMAIL_USER_PASSWORD', value: env.OUTBOUND_EMAIL_USER_PASSWORD]]
+ }
}
- }
- )
+ )
+ }
}
- }
- stage('Post Migration Configs') {
- steps {
- parallel (
- adminConfig: {
- script {
- def result = build job: 'Create Admin User', parameters: [
- [$class: 'StringParameterValue', name: 'EMAIL', value:env.ADMIN_USER_EMAIL],
- [$class: 'StringParameterValue', name: 'CONNECTION_LABEL', value: "Google"]]
- }
- },
- tokenIntroToken: {
- script {
- def result = build job: 'Configure PIC-SURE Token Introspection Token'
+ stage('Post Migration Configs') {
+ steps {
+ parallel (
+ adminConfig: {
+ script {
+ def result = build job: 'Create Admin User', parameters: [
+ [$class: 'StringParameterValue', name: 'EMAIL', value:env.ADMIN_USER_EMAIL],
+ [$class: 'StringParameterValue', name: 'CONNECTION_LABEL', value: "Google"]]
+ }
+ },
+ tokenIntroToken: {
+ script {
+ def result = build job: 'Configure PIC-SURE Token Introspection Token'
+ }
+ },
+ encryptionKey: {
+ script {
+ def result = build job: 'Create HPDS Encryption Key'
+ }
}
- },
- encryptionKey: {
- script {
- def result = build job: 'Create HPDS Encryption Key'
+ )
+ }
+ }
+ stage('Load Demo Data and Deploy') {
+ steps {
+ script {
+ def result = build job: 'Retrieve Build Spec'
+ retrieveBuildSpecId = result.number
+ }
+ script {
+ copyArtifacts filter: '*', projectName: 'Retrieve Build Spec', selector: specific(""+retrieveBuildSpecId)
+ sh 'cat build-spec.json'
+ sh 'cat pipeline_git_commit.txt'
+ sh 'pwd'
+ def buildSpec = new JsonSlurper().parse(new File('/var/jenkins_home/workspace/PIC-SURE Pipeline/build-spec.json'))
+ pipelineBuildId = new File('/var/jenkins_home/workspace/PIC-SURE Pipeline/pipeline_git_commit.txt').text.trim()
+ for(def build : buildSpec.application){
+ build_hashes[build.project_job_git_key] = build.git_hash
}
}
- )
- }
- }
- stage('Load Demo Data and Deploy') {
- steps {
- script {
- def result = build job: 'Retrieve Build Spec'
- retrieveBuildSpecId = result.number
- }
- script {
- copyArtifacts filter: '*', projectName: 'Retrieve Build Spec', selector: specific(""+retrieveBuildSpecId)
- sh 'cat build-spec.json'
- sh 'cat pipeline_git_commit.txt'
- sh 'pwd'
- def buildSpec = new JsonSlurper().parse(new File('/var/jenkins_home/workspace/PIC-SURE Pipeline/build-spec.json'))
- pipelineBuildId = new File('/var/jenkins_home/workspace/PIC-SURE Pipeline/pipeline_git_commit.txt').text.trim()
- for(def build : buildSpec.application){
- build_hashes[build.project_job_git_key] = build.git_hash
+ script {
+ def result = build job: 'Load HPDS Data From CSV'
+ }
+ script {
+ def result = build job: 'Start PIC-SURE', parameters: [[$class: 'StringParameterValue', name: 'pipeline_build_id', value: pipelineBuildId]]
}
- }
- script {
- def result = build job: 'Load HPDS Data From CSV'
- }
- script {
- def result = build job: 'Start PIC-SURE', parameters: [[$class: 'StringParameterValue', name: 'pipeline_build_id', value: pipelineBuildId],[$class: 'StringParameterValue', name: 'git_hash', value: build_hashes['PSU']]]
}
}
}
- }
-}
+ }
true
false
-
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build - Jenkinsfile/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build - Jenkinsfile/config.xml
new file mode 100644
index 00000000..a37b2a8e
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build - Jenkinsfile/config.xml
@@ -0,0 +1,71 @@
+
+
+
+
+
+
+
+
+ REPOSITORY_NAME
+ DOCKER_REGISTRY
+
+
+
+
+ This Jenkins job will build and deploy the pic-sure-auth-micro-app.
+ false
+
+
+ false
+ false
+
+
+
+
+ DOCKER_REGISTRY
+ Docker registry URL (e.g., ECR URL)
+ hms-dbmi
+ false
+
+
+ REPOSITORY_NAME
+ Docker repository name
+ psama
+ false
+
+
+ pipeline_build_id
+ MANUAL_RUN
+ false
+
+
+ git_hash
+ */ALS-6103-Architectural-Changes-To-Support-Multiple-Auth-providers
+ false
+
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/pic-sure-auth-microapp.git
+
+
+
+
+ ${git_hash}
+
+
+ false
+
+
+
+ jenkinsfile
+ false
+
+
+ false
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build/config.xml
index ad717a3f..bab68826 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Auth Micro-App Build/config.xml
@@ -44,30 +44,22 @@
false
-
- clean install -DskipTests
- Maven Home
- false
-
-
- false
-
if [ -f "/usr/local/docker-config/setProxy.sh" ]; then
. /usr/local/docker-config/setProxy.sh
fi
-GIT_BRANCH_SHORT=`echo ${GIT_BRANCH} | cut -d "/" -f 2`
+# Copy global maven settings to be used in container build
+mkdir -p .m2 && cp /usr/local/docker-config/.m2/*.xml .m2/ 2>/dev/null
+
+GIT_BRANCH_SHORT=`echo ${GIT_BRANCH} | cut -d "/" -f 2`
GIT_COMMIT_SHORT=`echo ${GIT_COMMIT} | cut -c1-7`
-cd pic-sure-auth-services
-docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$http_proxy --build-arg no_proxy="$no_proxy" \
+docker build -f ./pic-sure-auth-services/Dockerfile --build-arg http_proxy=$http_proxy --build-arg https_proxy=$http_proxy --build-arg no_proxy="$no_proxy" \
--build-arg HTTP_PROXY=$http_proxy --build-arg HTTPS_PROXY=$http_proxy --build-arg NO_PROXY="$no_proxy" \
- -t hms-dbmi/pic-sure-auth-microapp:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} .
-docker tag hms-dbmi/pic-sure-auth-microapp:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} hms-dbmi/pic-sure-auth-microapp:LATEST
-mkdir -p /usr/local/docker-config/wildfly/deployments/
-cp target/pic-sure-auth-services.war /usr/local/docker-config/wildfly/deployments/
+ -t hms-dbmi/psama:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} .
+docker tag hms-dbmi/psama:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} hms-dbmi/psama:LATEST
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Migrations/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Migrations/config.xml
index 2d9013e4..bdd535d8 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Migrations/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Migrations/config.xml
@@ -18,6 +18,8 @@
def micro_app_ref = ''
def pic_sure_ref = ''
def psu_ref = ''
+def psm_ref = ''
+def psf_ref = ''
node {
stage('Retrieve Build Spec') {
@@ -41,19 +43,24 @@ node {
micro_app_ref = build.git_hash
echo 'micro_app_ref ' + micro_app_ref
}
- if(build.project_job_git_key.equalsIgnoreCase("PSU")) {
- psu_ref = build.git_hash
- echo 'psu_ref ' + psu_ref
- }
if(build.project_job_git_key.equalsIgnoreCase("PSA")) {
pic_sure_ref = build.git_hash
echo 'pic_sure_ref ' + pic_sure_ref
}
+ if(build.project_job_git_key.equalsIgnoreCase("PSF")) {
+ psf_ref = build.git_hash
+ echo 'psf_ref ' + psf_ref
+ }
+ if(build.project_job_git_key.equalsIgnoreCase("PSM")) {
+ psm_ref = build.git_hash
+ echo 'psm_ref ' + psm_ref
+ }
}
- if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psu_ref.isEmpty()) {
+ if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psf_ref.isEmpty() || psm_ref.isEmpty()) {
currentBuild.result = 'ABORTED'
echo 'micro_app_ref = ' + micro_app_ref
- echo 'psu_ref = ' + psu_ref
+ echo 'psf_ref = ' + psf_ref
+ echo 'psm_ref = ' + psm_ref
echo 'pic_sure_ref = ' + pic_sure_ref
error('Build Spec Not configured correctly!')
}
@@ -62,7 +69,8 @@ node {
stage('Auth Schema Migration') {
checkout([$class: 'GitSCM', branches: [[name: micro_app_ref ]],
userRemoteConfigs: [[url: 'https://github.com/hms-dbmi/pic-sure-auth-microapp']]])
- //git url: 'https://github.com/hms-dbmi/pic-sure-auth-microapp'
+ def image = docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0')
+ image.pull()
docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
sh "rm -rf /opt/flyway-migrations/auth/sql"
sh "cp -R ./pic-sure-auth-db/db/sql /opt/flyway-migrations/auth/sql"
@@ -82,22 +90,21 @@ node {
}
}
stage('Project Specific Migrations') {
- checkout([$class: 'GitSCM', branches: [[name: psu_ref ]],
- userRemoteConfigs: [[url: env.project_specific_override_repo]]])
- //git url: env.project_specific_override_repo
+ checkout([$class: 'GitSCM', branches: [[name: psm_ref ]],
+ userRemoteConfigs: [[url: MIGRATION_REPO]]])
sh """
- sed -i "s/__APPLICATION_UUID__/`cat /usr/local/docker-config/APP_ID_HEX`/g" ./custom-migrations/auth/*.sql
- sed -i "s/__RESOURCE_UUID__/`cat /usr/local/docker-config/RESOURCE_ID_HEX`/g" ./custom-migrations/picsure/*.sql
+ sed -i "s/__APPLICATION_UUID__/`cat /usr/local/docker-config/APP_ID_HEX`/g" ./${MIGRATION_NAME}/auth/*.sql
+ sed -i "s/__RESOURCE_UUID__/`cat /usr/local/docker-config/RESOURCE_ID_HEX`/g" ./${MIGRATION_NAME}/picsure/*.sql
"""
docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/picsure:/opt/flyway-migrations/picsure "--entrypoint=" ') {
sh "rm -rf /opt/flyway-migrations/picsure/sql"
- sh "cp -R ./custom-migrations/picsure /opt/flyway-migrations/picsure/sql"
+ sh "cp -R ./${MIGRATION_NAME}/picsure /opt/flyway-migrations/picsure/sql"
sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/picsure/flyway-picsure.conf -table=flyway_custom_schema_history migrate"
sleep(time:10,unit:"SECONDS")
}
- docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
+ docker.image('dbmi/pic-sure-db-migrations:pic-sure-db-migration_v1.0').inside('--network=picsure -v $DOCKER_CONFIG_DIR/flyway/auth:/opt/flyway-migrations/auth "--entrypoint=" ') {
sh "rm -rf /opt/flyway-migrations/auth/sql"
- sh "cp -R ./custom-migrations/auth /opt/flyway-migrations/auth/sql"
+ sh "cp -R ./${MIGRATION_NAME}/auth /opt/flyway-migrations/auth/sql"
sh "/opt/flyway/flyway -X -baselineOnMigrate=true -configFiles=/opt/flyway-migrations/auth/flyway-auth.conf -table=flyway_custom_schema_history migrate"
sleep(time:10,unit:"SECONDS")
}
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Repair/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Repair/config.xml
index c11b240f..2384a02a 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Repair/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Database Repair/config.xml
@@ -50,10 +50,10 @@ node {
echo 'pic_sure_ref ' + pic_sure_ref
}
}
- if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psu_ref.isEmpty()) {
+ if(micro_app_ref.isEmpty() || pic_sure_ref.isEmpty() || psm_ref.isEmpty()) {
currentBuild.result = 'ABORTED'
echo 'micro_app_ref = ' + micro_app_ref
- echo 'psu_ref = ' + psu_ref
+ echo 'psm_ref = ' + psm_ref
echo 'pic_sure_ref = ' + pic_sure_ref
error('Build Spec Not configured correctly!')
}
@@ -83,8 +83,7 @@ node {
}
stage('Project Specific Migrations') {
checkout([$class: 'GitSCM', branches: [[name: psu_ref ]],
- userRemoteConfigs: [[url: env.project_specific_override_repo]]])
- //git url: env.project_specific_override_repo
+ userRemoteConfigs: [[url: MIGRATION_REPO]]])
sh """
sed -i "s/__APPLICATION_UUID__/`cat /usr/local/docker-config/APP_ID_HEX`/g" ./custom-migrations/auth/*.sql
sed -i "s/__RESOURCE_UUID__/`cat /usr/local/docker-config/RESOURCE_ID_HEX`/g" ./custom-migrations/picsure/*.sql
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Project Specific PIC-SURE Overrides/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Frontend Build/config.xml
similarity index 68%
rename from initial-configuration/jenkins/jenkins-docker/jobs/Project Specific PIC-SURE Overrides/config.xml
rename to initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Frontend Build/config.xml
index 44aaff5b..6a21c34c 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Project Specific PIC-SURE Overrides/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Frontend Build/config.xml
@@ -8,24 +8,22 @@
pipeline_build_id
-
MANUAL_RUN
- false
+ true
git_hash
-
- */main
+ main
false
-
+
2
- ${project_specific_override_repo}
+ https://github.com/hms-dbmi/pic-sure-frontend.git
@@ -34,7 +32,7 @@
false
-
+
true
@@ -50,28 +48,25 @@ if [ -f "/usr/local/docker-config/setProxy.sh" ]; then
. /usr/local/docker-config/setProxy.sh
fi
-sed -i s/TARGET_BUILD_VERSION/LATEST/g ui/Dockerfile
-GIT_BRANCH_SHORT=`echo ${GIT_BRANCH} | cut -d "/" -f 2`
+GIT_BRANCH_SHORT=`echo ${GIT_BRANCH} | cut -d "/" -f 2`
GIT_COMMIT_SHORT=`echo ${GIT_COMMIT} | cut -c1-7`
-if [ -f "./setEnv.sh" ]; then
- . ./setEnv.sh
-fi
+cp /usr/local/docker-config/httpd/.env .
-if [ -f "./custom_httpd_volumes" ]; then
- cp ./custom_httpd_volumes /usr/local/docker-config/httpd/
+if [ ! -f .env ]; then
+ echo ".env file not found"
+ exit 1
fi
-
-cd ui
docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$http_proxy --build-arg no_proxy="$no_proxy" \
---build-arg HTTP_PROXY=$http_proxy --build-arg HTTPS_PROXY=$http_proxy --build-arg NO_PROXY="$no_proxy" \
--t hms-dbmi/pic-sure-ui-overrides:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} .
-docker tag hms-dbmi/pic-sure-ui-overrides:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} hms-dbmi/pic-sure-ui-overrides:LATEST
+ --build-arg HTTP_PROXY=$http_proxy --build-arg HTTPS_PROXY=$http_proxy --build-arg NO_PROXY="$no_proxy" \
+ -f Dockerfile -t hms-dbmi/pic-sure-frontend:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} .
+docker tag hms-dbmi/pic-sure-frontend:${GIT_BRANCH_SHORT}_${GIT_COMMIT_SHORT} hms-dbmi/pic-sure-frontend:LATEST
+
-
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Pipeline/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Pipeline/config.xml
index 30bbbd26..2ad4e76e 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Pipeline/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Pipeline/config.xml
@@ -1,8 +1,8 @@
-
+
-
-
+
+
@@ -12,16 +12,19 @@
false
-
+
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Wildfly Image Build/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Wildfly Image Build/config.xml
index e5bde8a7..53b1b148 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Wildfly Image Build/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/PIC-SURE Wildfly Image Build/config.xml
@@ -46,7 +46,7 @@ fi
docker build --build-arg http_proxy=$http_proxy --build-arg https_proxy=$http_proxy --build-arg no_proxy="$no_proxy" \
--build-arg HTTP_PROXY=$http_proxy --build-arg HTTPS_PROXY=$http_proxy --build-arg NO_PROXY="$no_proxy" \
---build-arg PIC_SURE_API_VERSION=LATEST --build-arg PIC_SURE_AUTH_VERSION=LATEST --build-arg PIC_SURE_PASSTHRU_RESOURCE_VERSION=LATEST \
+--build-arg PIC_SURE_API_VERSION=LATEST --build-arg PIC_SURE_PASSTHRU_RESOURCE_VERSION=LATEST \
-t hms-dbmi/pic-sure-wildfly:${pipeline_build_id} .
docker tag hms-dbmi/pic-sure-wildfly:${pipeline_build_id} hms-dbmi/pic-sure-wildfly:LATEST
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Passthrough Resource Pipeline/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Passthrough Resource Pipeline/config.xml
index 364bb5b1..91e0842b 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Passthrough Resource Pipeline/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Passthrough Resource Pipeline/config.xml
@@ -58,8 +58,10 @@
def retrieveBuildSpecId;
def pipelineBuildId;
-def build_hashes = {};
-
+def build_hashes = [
+ DICTIONARY: false,
+ UPLOADER: false
+];
def resourceURL = "https://${SERVERNAME}/picsure/"
pipeline {
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Remove Test Users/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Remove Test Users/config.xml
index 3e02aaff..24478710 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Remove Test Users/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Remove Test Users/config.xml
@@ -29,14 +29,14 @@ function delete_user_by_email() {
# SQL command to remove user from assoc
local remove_user_role="DELETE FROM auth.user_role WHERE user_id in (SELECT uuid FROM auth.user where email = '$user_email');"
- docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"$remove_user_role" auth
# SQL command to delete a user based on their email
local remove_user="DELETE FROM auth.user WHERE email = '$user_email';"
# Run the command using Docker and MySQL client
- docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+ docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"$remove_user" auth
}
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Update User Token/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Update User Token/config.xml
index 4a0a9b74..9539fc0d 100644
--- a/initial-configuration/jenkins/jenkins-docker/jobs/Update User Token/config.xml
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Update User Token/config.xml
@@ -58,12 +58,12 @@
cd target
grep client_secret /usr/local/docker-config/wildfly/standalone.xml | cut -d '=' -f 3 | sed 's/[\"/\>]//g' > secret.txt
-export USER_SUBJECT=`docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -N -e \
+export USER_SUBJECT=`docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -N -e \
"SELECT subject FROM auth.user where email='$USERNAME'; "`
-export user_token=`java -jar generateJwt.jar secret.txt sub "${USER_SUBJECT}" ${DAYSUNTILEXPIRATION} day | grep -v "Generating"`
+export user_token=`java -jar generateJwt.jar secret.txt sub "LONG_TERM_TOKEN|${USER_SUBJECT}" ${DAYSUNTILEXPIRATION} day | grep -v "Generating"`
-docker run -i -v /root/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
+docker run -i -v $MYSQL_CONFIG_DIR/.my.cnf:/root/.my.cnf --network=${MYSQL_NETWORK:-host} mysql mysql -e \
"update auth.user set long_term_token='$user_token' where email='$USERNAME';"
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Upload Dictionary .env File/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Upload Dictionary .env File/config.xml
new file mode 100644
index 00000000..edc1b056
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Upload Dictionary .env File/config.xml
@@ -0,0 +1,30 @@
+
+
+
+ false
+
+
+
+
+ dictionary.env
+
+
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ mkdir -p /usr/local/docker-config/dictionary/
+cp dictionary.env /usr/local/docker-config/dictionary/.env
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Upload PSAMA Configuration/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Upload PSAMA Configuration/config.xml
new file mode 100644
index 00000000..c664e415
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Upload PSAMA Configuration/config.xml
@@ -0,0 +1,46 @@
+
+
+
+ This job is used to upload an updated pic-sure-auth-micro-app (psama) configuration file. If you would like to download the current configuration file you can use the "Download PSAMA Configuration" Jenkins Job.
+ false
+
+
+ false
+ false
+
+
+
+
+ psama.env
+ The updated pic-sure-auth-micro-app configuration file. You can download the current configuration file using the "Download PSAMA Configuration" job.
+
+
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ # Replace psama's configuration file with the provided one.
+cp psama.env /usr/local/docker-config/psama/.env
+
+
+
+
+
+ psama.env
+ false
+ false
+ false
+ true
+ true
+ false
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Upload Uploader .env File/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Upload Uploader .env File/config.xml
new file mode 100644
index 00000000..0a038351
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Upload Uploader .env File/config.xml
@@ -0,0 +1,31 @@
+
+
+
+
+ false
+
+
+
+
+ uploader.env
+
+
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ mkdir -p /usr/local/docker-config/uploader/
+cp uploader.env /usr/local/docker-config/uploader/.env
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/jenkins/jenkins-docker/jobs/Weigh Dictionary Search Fields/config.xml b/initial-configuration/jenkins/jenkins-docker/jobs/Weigh Dictionary Search Fields/config.xml
new file mode 100644
index 00000000..08a89c04
--- /dev/null
+++ b/initial-configuration/jenkins/jenkins-docker/jobs/Weigh Dictionary Search Fields/config.xml
@@ -0,0 +1,59 @@
+
+
+
+
+ false
+
+
+
+
+ weights.csv
+ EXAMPLE:
+concept_node.DISPLAY,3
+concept_node.CONCEPT_PATH,2
+dataset.FULL_NAME,1
+dataset.DESCRIPTION,1
+concept_node_meta_str,1
+
+
+
+
+
+ 2
+
+
+ https://github.com/hms-dbmi/picsure-dictionary/
+
+
+
+
+ */weights
+
+
+ false
+
+
+
+ true
+ false
+ false
+ false
+
+ false
+
+
+ # These are bash commands, so use the internal location of the config volume
+mkdir -p /usr/local/docker-config/dictionary-weights/
+cp /usr/local/docker-config/dictionary/.env /usr/local/docker-config/dictionary-weights/
+cp weights.csv /usr/local/docker-config/dictionary-weights/weights.csv
+
+cd dictionaryweights
+# These are docker commands, so now we use the external location of the config volume
+docker build . -t dictionary-weights
+docker run --rm --env-file=/usr/local/docker-config/dictionary-weights/.env --network dictionary_dictionary -v $DOCKER_CONFIG_DIR/dictionary-weights/weights.csv:/weights.csv dictionary-weights
+
+
+
+
+
+
\ No newline at end of file
diff --git a/initial-configuration/migrate-env.sh b/initial-configuration/migrate-env.sh
new file mode 100755
index 00000000..9bcfe78e
--- /dev/null
+++ b/initial-configuration/migrate-env.sh
@@ -0,0 +1,28 @@
+# This script is used to migrate variables from the old start-picsure.sh,
+# which housed many configurable environment variables.
+# Those variables are now stored in their respective .env files in $DOCKER_CONFIG_DIR
+
+# BEFORE running this script, run source start-picsure.sh
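+#
+# Example invocation (a sketch; assumes the OLD start script still exports HPDS_OPTS, PSAMA_OPTS,
+# WILDFLY_JAVA_OPTS and TRUSTSTORE_JAVA_OPTS, and that DOCKER_CONFIG_DIR is set):
+#   source ./start-picsure.sh
+#   ./initial-configuration/migrate-env.sh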
+
+
+echo "Making config dirs for hpds, psama, httpd, and wildfly in $DOCKER_CONFIG_DIR"
+
+mkdir -p $DOCKER_CONFIG_DIR/hpds
+mkdir -p $DOCKER_CONFIG_DIR/psama
+mkdir -p $DOCKER_CONFIG_DIR/httpd
+mkdir -p $DOCKER_CONFIG_DIR/wildfly
+
+echo "Populating config files with env vars from old start script"
+
+echo "" >> $DOCKER_CONFIG_DIR/hpds/hpds.env
+echo "CATALINA_OPTS= $HPDS_OPTS" >> $DOCKER_CONFIG_DIR/hpds/hpds.env
+
+echo "" >> $DOCKER_CONFIG_DIR/psama/.env
+echo "JAVA_OPTS=$PSAMA_OPTS" >> $DOCKER_CONFIG_DIR/psama/.env
+
+echo "" >> $DOCKER_CONFIG_DIR/httpd/httpd.env
+
+echo "" >> $DOCKER_CONFIG_DIR/wildfly/wildfly.env
+echo "JAVA_OPTS=$WILDFLY_JAVA_OPTS $TRUSTSTORE_JAVA_OPTS" >> $DOCKER_CONFIG_DIR/wildfly/wildfly.env
+
+echo "Done."
diff --git a/initial-configuration/mysql-docker/setup.sh b/initial-configuration/mysql-docker/setup.sh
index e06f169e..65fd37f5 100755
--- a/initial-configuration/mysql-docker/setup.sh
+++ b/initial-configuration/mysql-docker/setup.sh
@@ -1,24 +1,36 @@
+sed_inplace() {
+ if [ "$(uname)" = "Darwin" ]; then
+ sed -i '' "$@"
+ else
+ sed -i "$@"
+ fi
+}
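+# Usage (same arguments as "sed -i", minus the -i flag), e.g.: sed_inplace 's/old/new/g' some-file.conf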
if [ -z "$(docker ps --format '{{.Names}}' | grep picsure-db)" ]; then
echo "Cleaning up old configs"
rm -r "${DOCKER_CONFIG_DIR:?}"/*
cp -r config/* "$DOCKER_CONFIG_DIR"/
+ rm -f "$MYSQL_CONFIG_DIR"/.my.cnf
echo "Starting mysql server"
echo "$( < /dev/urandom tr -dc @^=+$*%_A-Z-a-z-0-9 | head -c${1:-24})" > pass.tmp
rm -f mysql-docker/.env
+
# shellcheck disable=SC2129
echo "PICSURE_DB_ROOT_PASS=`cat pass.tmp`" >> mysql-docker/.env
echo "PICSURE_DB_PASS=`cat pass.tmp`" >> mysql-docker/.env
echo "PICSURE_DB_DATABASE=ignore" >> mysql-docker/.env
echo "PICSURE_DB_USER=ignore" >> mysql-docker/.env
+ echo "DOCKER_CONFIG_DIR=$DOCKER_CONFIG_DIR" >> mysql-docker/.env
echo "Configuring .my.cnf"
# shellcheck disable=SC2129
- echo "[mysql]" >> "$HOME"/.my.cnf
- echo "user=root" >> "$HOME"/.my.cnf
- echo "password=\"$(cat pass.tmp)\"" >> "$HOME"/.my.cnf
- echo "host=picsure-db" >> "$HOME"/.my.cnf
- echo "port=3306" >> "$HOME"/.my.cnf
+ mkdir -p "$MYSQL_CONFIG_DIR"
+ touch "$MYSQL_CONFIG_DIR"/.my.cnf
+ echo "[mysql]" >> "$MYSQL_CONFIG_DIR"/.my.cnf
+ echo "user=root" >> "$MYSQL_CONFIG_DIR"/.my.cnf
+ echo "password=\"$(cat pass.tmp)\"" >> "$MYSQL_CONFIG_DIR"/.my.cnf
+ echo "host=picsure-db" >> "$MYSQL_CONFIG_DIR"/.my.cnf
+ echo "port=3306" >> "$MYSQL_CONFIG_DIR"/.my.cnf
cd mysql-docker
docker compose up -d
@@ -65,7 +77,7 @@ if [ -z "$(docker ps --format '{{.Names}}' | grep picsure-db)" ]; then
echo "` < /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-24}`" > auth.tmp
docker exec -t picsure-db mysql -u root -p`cat ../pass.tmp` -e "CREATE USER 'auth'@'%' IDENTIFIED BY '`cat auth.tmp`';";
docker exec -t picsure-db mysql -u root -p`cat ../pass.tmp` -e "GRANT ALL PRIVILEGES ON auth.* to 'auth'@'%';FLUSH PRIVILEGES;";
- sed_inplace s/__AUTH_MYSQL_PASSWORD__/`cat auth.tmp`/g $DOCKER_CONFIG_DIR/wildfly/standalone.xml
+ sed_inplace s/__AUTH_MYSQL_PASSWORD__/`cat auth.tmp`/g $DOCKER_CONFIG_DIR/psama/.env
rm -f auth.tmp
cd $CWD
diff --git a/reset_development_environment.sh b/reset_development_environment.sh
new file mode 100755
index 00000000..e0b17cb5
--- /dev/null
+++ b/reset_development_environment.sh
@@ -0,0 +1,73 @@
+#!/bin/bash
+
+# Check if DOCKER_CONFIG_DIR is set, if not, use default
+if [ -z "$DOCKER_CONFIG_DIR" ]; then
+ echo "DOCKER_CONFIG_DIR is not set. Defaulting to /var/local/docker-config."
+ DOCKER_CONFIG_DIR="/var/local/docker-config"
+else
+ echo "DOCKER_CONFIG_DIR is set to $DOCKER_CONFIG_DIR"
+fi
+
+# Ensure DOCKER_CONFIG_DIR is not set to root "/"
+if [ "$DOCKER_CONFIG_DIR" = "/" ]; then
+ echo "Error: DOCKER_CONFIG_DIR is set to root '/'. Aborting to prevent system damage."
+ exit 1
+fi
+
+# Check if MYSQL_CONFIG_DIR is set, if not, default to DOCKER_CONFIG_DIR
+if [ -z "$MYSQL_CONFIG_DIR" ]; then
+ echo "MYSQL_CONFIG_DIR is not set. Defaulting to $DOCKER_CONFIG_DIR."
+ MYSQL_CONFIG_DIR="$DOCKER_CONFIG_DIR"
+else
+ echo "MYSQL_CONFIG_DIR is set to $MYSQL_CONFIG_DIR"
+fi
+
+# Ensure MYSQL_CONFIG_DIR is not set to root "/"
+if [ "$MYSQL_CONFIG_DIR" = "/" ]; then
+ echo "Error: MYSQL_CONFIG_DIR is set to root '/'. Aborting to prevent system damage."
+ exit 1
+fi
+
+# Step 1: Run stop-picsure.sh
+echo "Stopping PIC-SURE..."
+./stop-picsure.sh
+
+# Step 2: Run stop-jenkins.sh
+echo "Stopping Jenkins..."
+./stop-jenkins.sh
+
+# Step 3: Stop and remove the picsure-db container
+echo "Stopping and removing PIC-SURE database container..."
+docker stop picsure-db
+docker rm picsure-db
+
+# Step 4: Run docker system prune -a
+echo "Pruning Docker system and removing all images..."
+docker system prune -a -f
+
+# Step 5: Clear the MYSQL_CONFIG_DIR
+echo "Clearing the MySQL configuration directory..."
+rm -rf "$MYSQL_CONFIG_DIR/*"
+
+# Step 6: Clear the DOCKER_CONFIG_DIR
+echo "Clearing the Docker configuration directory..."
+rm -rf "$DOCKER_CONFIG_DIR/*"
+
+# Step 7: Remove the jenkins_home directory and recreate necessary directories
+echo "Removing and recreating Jenkins and log directories..."
+sudo rm -rf /var/jenkins_home
+sudo rm -rf /var/log/jenkins-docker-logs
+sudo rm -rf /var/jenkins_home_bak
+
+sudo mkdir -p /var/log/jenkins-docker-logs
+sudo mkdir -p /var/jenkins_home
+sudo mkdir -p /var/jenkins_home_bak
+sudo mkdir -p /var/log/httpd-docker-logs/ssl_mutex
+
+# Step 8: Set permissions for the directories
+echo "Setting permissions for Jenkins and log directories..."
+sudo chmod -R 777 /var/jenkins_home
+sudo chmod -R 777 /var/jenkins_home_bak
+sudo chmod -R 777 /var/log/httpd-docker-logs
+
+echo "All steps completed successfully."
\ No newline at end of file
diff --git a/start-jenkins.sh b/start-jenkins.sh
index fafd5b4d..b7cb8018 100755
--- a/start-jenkins.sh
+++ b/start-jenkins.sh
@@ -1,14 +1,11 @@
#!/usr/bin/env bash
DOCKER_CONFIG_DIR="${DOCKER_CONFIG_DIR:-/usr/local/docker-config}"
+MYSQL_CONFIG_DIR="${MYSQL_CONFIG_DIR:-/root}"
if [ -f $DOCKER_CONFIG_DIR/setProxy.sh ]; then
. $DOCKER_CONFIG_DIR/setProxy.sh
fi
-if ! docker network inspect selenium > /dev/null 2>&1; then
- docker network create selenium
-fi
-
docker run -d \
-e http_proxy="$http_proxy" \
-e https_proxy="$https_proxy" \
@@ -19,12 +16,10 @@ docker run -d \
-v /var/jenkins_home:/var/jenkins_home \
-v "$DOCKER_CONFIG_DIR":/usr/local/docker-config \
-v /var/run/docker.sock:/var/run/docker.sock \
- -v "$HOME"/.my.cnf:/root/.my.cnf \
+ -v "$MYSQL_CONFIG_DIR"/.my.cnf:/root/.my.cnf \
-v "$HOME"/.m2:/root/.m2 \
-v /etc/hosts:/etc/hosts \
-v /usr/local/pic-sure-services:/pic-sure-services \
- --env-file initial-configuration/mysql-docker/.env \
- --network selenium \
-p 8080:8080 --name jenkins pic-sure-jenkins:LATEST
# These would normally be volume mounts, but mounting volumes in volumes is bad vibes
diff --git a/start-picsure.sh b/start-picsure.sh
index 6d4c4d91..00a7b806 100755
--- a/start-picsure.sh
+++ b/start-picsure.sh
@@ -1,38 +1,36 @@
#!/usr/bin/env bash
+# A note to developers: if you use /usr/local/docker-config to refer to a place on the host file system
+# 99 times out of 100 you are WRONG and you have just made a bug. Please:
+# - Consider using $DOCKER_CONFIG_DIR instead
+# - Challenge your own understanding of where files are located in docker and on the host file system and
+# how that does or doesn't change the commands you run when inside Jenkins
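+# For example (illustrative): the Jenkins container is started with a bind mount along the lines of
+#   docker run -v "$DOCKER_CONFIG_DIR":/usr/local/docker-config ...
+# so $DOCKER_CONFIG_DIR is the host-side path, while /usr/local/docker-config only exists inside containers.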
+
+DOCKER_CONFIG_DIR="${DOCKER_CONFIG_DIR:-/usr/local/docker-config}"
+
if [ -f "$DOCKER_CONFIG_DIR/setProxy.sh" ]; then
. $DOCKER_CONFIG_DIR/setProxy.sh
fi
-if ! docker network inspect selenium > /dev/null 2>&1; then
- docker network create selenium
-fi
-
-
-if [ -z "$(grep queryExportType $DOCKER_CONFIG_DIR/httpd/picsureui_settings.json | grep DISABLED)" ]; then
- export EXPORT_SIZE="2000";
-else
- export EXPORT_SIZE="0";
+if [ -z "$(grep "VITE_ALLOW_EXPORT" $DOCKER_CONFIG_DIR/httpd/.env | grep 'false')" ]; then
+ export EXPORT_SIZE="2000";
+ else
+ export EXPORT_SIZE="0";
+ fi
fi
-export WILDFLY_JAVA_OPTS="-Xms2g -Xmx4g -XX:MetaspaceSize=96M -XX:MaxMetaspaceSize=256m -Djava.net.preferIPv4Stack=true $PROXY_OPTS"
-export HPDS_OPTS="-XX:+UseParallelGC -XX:SurvivorRatio=250 -Xms1g -Xmx16g -DCACHE_SIZE=1500 -DSMALL_TASK_THREADS=1 -DLARGE_TASK_THREADS=1 -DSMALL_JOB_LIMIT=100 -DID_BATCH_SIZE=$EXPORT_SIZE -DALL_IDS_CONCEPT=NONE -DID_CUBE_NAME=NONE -Denable_file_sharing=true "
+# Docker Volumes
export PICSURE_SETTINGS_VOLUME="-v $DOCKER_CONFIG_DIR/httpd/picsureui_settings.json:/usr/local/apache2/htdocs/picsureui/settings/settings.json"
-export PICSURE_BANNER_VOLUME="-v /usr/local/docker-config/httpd/banner_config.json:/usr/local/apache2/htdocs/picsureui/settings/banner_config.json"
+export PICSURE_BANNER_VOLUME="-v $DOCKER_CONFIG_DIR/httpd/banner_config.json:/usr/local/apache2/htdocs/picsureui/settings/banner_config.json"
export PSAMA_SETTINGS_VOLUME="-v $DOCKER_CONFIG_DIR/httpd/psamaui_settings.json:/usr/local/apache2/htdocs/picsureui/psamaui/settings/settings.json"
-export EMAIL_TEMPLATE_VOUME="-v $DOCKER_CONFIG_DIR/wildfly/emailTemplates:/opt/jboss/wildfly/standalone/configuration/emailTemplates "
-
-# these debug options can be added to wildfly or hpds container startup to enable remote debugging or profiling.
-# Don't forget to add a port mapping too!
-export DEBUG_OPTS="-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=0.0.0.0:8000"
-export PROFILING_OPTS=" -Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=9000 -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.rmi.port=9000 "
-
-if [ -f $DOCKER_CONFIG_DIR/wildfly/application.truststore ]; then
- export TRUSTSTORE_VOLUME="-v $DOCKER_CONFIG_DIR/wildfly/application.truststore:/opt/jboss/wildfly/standalone/configuration/application.truststore"
- export TRUSTSTORE_JAVA_OPTS="-Djavax.net.ssl.trustStore=/opt/jboss/wildfly/standalone/configuration/application.truststore -Djavax.net.ssl.trustStorePassword=password"
+export EMAIL_TEMPLATE_VOLUME="-v $DOCKER_CONFIG_DIR/wildfly/emailTemplates:/opt/jboss/wildfly/standalone/configuration/emailTemplates "
+export TRUSTSTORE_VOLUME="-v $DOCKER_CONFIG_DIR/wildfly/application.truststore:/opt/jboss/wildfly/standalone/configuration/application.truststore"
+export PSAMA_TRUSTSTORE_VOLUME="-v $DOCKER_CONFIG_DIR/psama/application.truststore:/usr/local/tomcat/conf/application.truststore"
+if [ -f $DOCKER_CONFIG_DIR/httpd/custom_httpd_volumes ]; then
+ export CUSTOM_HTTPD_VOLUMES=`cat $DOCKER_CONFIG_DIR/httpd/custom_httpd_volumes`
fi
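+# custom_httpd_volumes, when present, is expanded verbatim into the httpd docker run below, so it should
+# contain extra flags, e.g. (hypothetical mount): -v $DOCKER_CONFIG_DIR/httpd/branding:/app/branding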
-
+# Start Commands
docker stop hpds && docker rm hpds
docker run --name=hpds --restart always --network=picsure \
-v $DOCKER_CONFIG_DIR/hpds:/opt/local/hpds \
@@ -40,29 +38,31 @@ docker run --name=hpds --restart always --network=picsure \
-v /var/log/hpds-logs/:/var/log/ \
-v $DOCKER_CONFIG_DIR/hpds_csv/:/usr/local/docker-config/hpds_csv/ \
-v $DOCKER_CONFIG_DIR/aws_uploads/:/gic_query_results/ \
- -e CATALINA_OPTS=" $HPDS_OPTS " \
- -p 5007:5007 \
+ --env-file $DOCKER_CONFIG_DIR/hpds/hpds.env \
-d hms-dbmi/pic-sure-hpds:LATEST
-if [ -f $DOCKER_CONFIG_DIR/httpd/custom_httpd_volumes ]; then
- export CUSTOM_HTTPD_VOLUMES=`cat $DOCKER_CONFIG_DIR/httpd/custom_httpd_volumes`
-fi
-
docker stop httpd && docker rm httpd
+
docker run --name=httpd --restart always --network=picsure \
- -v /var/log/httpd-docker-logs/:/usr/local/apache2/logs/ \
- $PICSURE_SETTINGS_VOLUME \
- $PICSURE_BANNER_VOLUME \
- $PSAMA_SETTINGS_VOLUME \
- -v $DOCKER_CONFIG_DIR/httpd/cert:/usr/local/apache2/cert/ \
- $CUSTOM_HTTPD_VOLUMES \
- -p 80:80 \
- -p 443:443 \
- -d hms-dbmi/pic-sure-ui-overrides:LATEST
-docker network connect selenium httpd
+ -v /var/log/httpd-docker-logs/:/app/logs/ \
+ -v $DOCKER_CONFIG_DIR/httpd/cert:/usr/local/apache2/cert/ \
+ -v $DOCKER_CONFIG_DIR/httpd/httpd-vhosts.conf:/usr/local/apache2/conf/extra/httpd-vhosts.conf \
+ $CUSTOM_HTTPD_VOLUMES \
+ --env-file $DOCKER_CONFIG_DIR/httpd/httpd.env \
+ -p 80:80 \
+ -p 443:443 \
+ -d hms-dbmi/pic-sure-frontend:LATEST
docker exec httpd sed -i '/^#LoadModule proxy_wstunnel_module/s/^#//' conf/httpd.conf
docker restart httpd
+docker stop psama && docker rm psama
+docker run --name=psama --restart always \
+ --network=picsure \
+ --env-file $DOCKER_CONFIG_DIR/psama/.env \
+ $EMAIL_TEMPLATE_VOLUME \
+ $PSAMA_TRUSTSTORE_VOLUME \
+ -d hms-dbmi/psama:LATEST
+
docker stop wildfly && docker rm wildfly
docker run --name=wildfly --restart always --network=picsure -u root \
-v /var/log/wildfly-docker-logs/:/opt/jboss/wildfly/standalone/log/ \
@@ -70,11 +70,16 @@ docker run --name=wildfly --restart always --network=picsure -u root \
-v /var/log/wildfly-docker-os-logs/:/var/log/ \
-v $DOCKER_CONFIG_DIR/wildfly/passthru/:/opt/jboss/wildfly/standalone/configuration/passthru/ \
-v $DOCKER_CONFIG_DIR/wildfly/aggregate-data-sharing/:/opt/jboss/wildfly/standalone/configuration/aggregate-data-sharing/ \
+ -v $DOCKER_CONFIG_DIR/wildfly/visualization/:/opt/jboss/wildfly/standalone/configuration/visualization/ \
-v $DOCKER_CONFIG_DIR/wildfly/deployments/:/opt/jboss/wildfly/standalone/deployments/ \
-v $DOCKER_CONFIG_DIR/wildfly/standalone.xml:/opt/jboss/wildfly/standalone/configuration/standalone.xml \
$TRUSTSTORE_VOLUME \
- $EMAIL_TEMPLATE_VOUME \
+ $EMAIL_TEMPLATE_VOLUME \
-v $DOCKER_CONFIG_DIR/wildfly/wildfly_mysql_module.xml:/opt/jboss/wildfly/modules/system/layers/base/com/sql/mysql/main/module.xml \
-v $DOCKER_CONFIG_DIR/wildfly/mysql-connector-java-5.1.49.jar:/opt/jboss/wildfly/modules/system/layers/base/com/sql/mysql/main/mysql-connector-java-5.1.49.jar \
- -e JAVA_OPTS="$WILDFLY_JAVA_OPTS $TRUSTSTORE_JAVA_OPTS" \
+ --env-file $DOCKER_CONFIG_DIR/wildfly/wildfly.env \
-d hms-dbmi/pic-sure-wildfly:LATEST
+
+if [ -d $DOCKER_CONFIG_DIR/dictionary ]; then
+ docker compose -f $DOCKER_CONFIG_DIR/dictionary/docker-compose.yml --env-file $DOCKER_CONFIG_DIR/dictionary/.env up -d
+fi
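+
+# Quick post-start check (a sketch): confirm the core containers are up.
+# for c in hpds httpd psama wildfly; do docker ps --filter "name=^${c}$" --format '{{.Names}}: {{.Status}}'; done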
diff --git a/stop-picsure.sh b/stop-picsure.sh
index b6b00cae..f448f321 100755
--- a/stop-picsure.sh
+++ b/stop-picsure.sh
@@ -2,4 +2,8 @@
docker stop hpds && docker rm hpds
docker stop httpd && docker rm httpd
docker stop wildfly && docker rm wildfly
+docker stop psama && docker rm psama
+if [ -d $DOCKER_CONFIG_DIR/dictionary ]; then
+ docker compose -f $DOCKER_CONFIG_DIR/dictionary/docker-compose.yml --env-file $DOCKER_CONFIG_DIR/dictionary/.env down
+fi
\ No newline at end of file
diff --git a/update-jenkins.sh b/update-jenkins.sh
index ed70f575..28d6b32e 100755
--- a/update-jenkins.sh
+++ b/update-jenkins.sh
@@ -5,6 +5,8 @@ git pull
echo "Sometimes we have to update not just the Jenkins jobs, but also the docker image itself."
echo "If you want to update that image. Rerun this command with the --rebuild flag added."
+DOCKER_CONFIG_DIR="${DOCKER_CONFIG_DIR:-/usr/local/docker-config}"
+
if [ "$1" = "--rebuild" ]; then
# Rebuild the docker image. This matches the initial dep script. The proxy args are generally empty, but you might
# run into bugs if you have an http proxy, but don't set it somewhere clever like your bash profile
@@ -33,6 +35,8 @@ fi
# Pull through previous PICSURE configurations
sed -i "s|__PROJECT_SPECIFIC_OVERRIDE_REPO__|`cat /var/jenkins_home_bak/config.xml | grep -A1 project_specific_override_repo | tail -1 | sed 's/<\/*string>//g' | sed 's/ //g' `|g" /var/jenkins_home/config.xml
sed -i "s|__RELEASE_CONTROL_REPO__|`cat /var/jenkins_home_bak/config.xml | grep -A1 release_control_repo | tail -1 | sed 's/<\/*string>//g' | sed 's/ //g' `|g" /var/jenkins_home/config.xml
+sed -i "s|/usr/local/docker-config/|`cat /var/jenkins_home_bak/config.xml | grep -A1 DOCKER_CONFIG_DIR | tail -1 | sed 's/<\/*string>//g' | sed 's/ //g' `|g" /var/jenkins_home/config.xml
+sed -i "s|host|`cat /var/jenkins_home_bak/config.xml | grep -A1 MYSQL_NETWORK | tail -1 | sed 's/<\/*string>//g' | sed 's/ //g' `|g" /var/jenkins_home/config.xml
sed -i "s|*/master|`cat /var/jenkins_home_bak/config.xml | grep -A1 release_control_branch | tail -1 | sed 's/<\/*string>//g' | sed 's/ //g' `|g" /var/jenkins_home/config.xml