diff --git a/.secrets.baseline b/.secrets.baseline
index e7164afad..2e574927a 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -7,6 +7,9 @@
     {
      "name": "AWSKeyDetector"
    },
+    {
+      "name": "AzureStorageKeyDetector"
+    },
     {
       "name": "Base64HighEntropyString",
       "limit": 4.5
@@ -17,9 +20,15 @@
     {
       "name": "CloudantDetector"
     },
+    {
+      "name": "DiscordBotTokenDetector"
+    },
+    {
+      "name": "GitHubTokenDetector"
+    },
     {
       "name": "HexHighEntropyString",
-      "limit": 3
+      "limit": 3.0
     },
     {
       "name": "IbmCloudIamDetector"
@@ -37,15 +46,24 @@
     {
       "name": "MailchimpDetector"
     },
+    {
+      "name": "NpmDetector"
+    },
     {
       "name": "PrivateKeyDetector"
     },
+    {
+      "name": "SendGridDetector"
+    },
     {
       "name": "SlackDetector"
     },
     {
       "name": "SoftlayerDetector"
     },
+    {
+      "name": "SquareOAuthDetector"
+    },
     {
       "name": "StripeDetector"
     },
@@ -57,10 +75,6 @@
     {
       "path": "detect_secrets.filters.allowlist.is_line_allowlisted"
     },
-    {
-      "path": "detect_secrets.filters.common.is_baseline_file",
-      "filename": ".secrets.baseline"
-    },
     {
       "path": "detect_secrets.filters.common.is_ignored_due_to_verification_policies",
       "min_level": 2
@@ -91,12 +105,6 @@
     },
     {
       "path": "detect_secrets.filters.heuristic.is_templated_secret"
-    },
-    {
-      "path": "detect_secrets.filters.regex.should_exclude_file",
-      "pattern": [
-        "^.secrets.baseline$"
-      ]
     }
   ],
   "results": {
@@ -106,8 +114,7 @@
         "filename": "Chef/repo/data_bags/README.md",
         "hashed_secret": "6367c48dd193d56ea7b0baad25b19455e529f5ee",
         "is_verified": false,
-        "line_number": 38,
-        "is_secret": false
+        "line_number": 38
       }
     ],
     "Docker/sidecar/service.key": [
@@ -116,8 +123,7 @@
         "filename": "Docker/sidecar/service.key",
         "hashed_secret": "1348b145fa1a555461c1b790a2f66614781091e9",
         "is_verified": false,
-        "line_number": 1,
-        "is_secret": false
+        "line_number": 1
       }
     ],
     "Jenkins/Stacks/Jenkins/jenkins.env.sample": [
@@ -151,8 +157,7 @@
         "filename": "ansible/roles/slurm/README.md",
         "hashed_secret": "4acfde1ff9c353ba2ef0dbe0df73bda2743cba42",
         "is_verified": false,
-        "line_number": 86,
-        "is_secret": false
+        "line_number": 86
       }
     ],
     "apis_configs/fence_settings.py": [
@@ -161,8 +166,7 @@
         "filename": "apis_configs/fence_settings.py",
         "hashed_secret": "347cd9c53ff77d41a7b22aa56c7b4efaf54658e3",
         "is_verified": false,
-        "line_number": 80,
-        "is_secret": false
+        "line_number": 80
       }
     ],
     "apis_configs/peregrine_settings.py": [
@@ -171,8 +175,7 @@
         "filename": "apis_configs/peregrine_settings.py",
         "hashed_secret": "347cd9c53ff77d41a7b22aa56c7b4efaf54658e3",
         "is_verified": false,
-        "line_number": 46,
-        "is_secret": false
+        "line_number": 46
       }
     ],
     "apis_configs/sheepdog_settings.py": [
@@ -181,8 +184,7 @@
         "filename": "apis_configs/sheepdog_settings.py",
         "hashed_secret": "347cd9c53ff77d41a7b22aa56c7b4efaf54658e3",
         "is_verified": false,
-        "line_number": 46,
-        "is_secret": false
+        "line_number": 46
       }
     ],
     "aws-inspec/kubernetes/chef_inspec-cron.yaml": [
@@ -200,8 +202,7 @@
         "filename": "doc/api.md",
         "hashed_secret": "625de83a7517422051911680cc803921ff99db90",
         "is_verified": false,
-        "line_number": 47,
-        "is_secret": false
+        "line_number": 47
       }
     ],
     "doc/gen3OnK8s.md": [
@@ -292,8 +293,7 @@
         "filename": "gen3/lib/bootstrap/templates/Gen3Secrets/apis_configs/fence-config.yaml",
         "hashed_secret": "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3",
         "is_verified": false,
-        "line_number": 33,
-        "is_secret": false
+        "line_number": 33
       }
     ],
     "gen3/lib/bootstrap/templates/cdis-manifest/manifests/sower/sower.json": [
@@ -325,16 +325,14 @@
         "filename": "gen3/lib/onprem.sh",
         "hashed_secret": "29e52a9bac8f274fa41c51fce9c98eba0dd99cb3",
         "is_verified": false,
-        "line_number": 68,
- "is_secret": false + "line_number": 68 }, { "type": "Secret Keyword", "filename": "gen3/lib/onprem.sh", "hashed_secret": "50f013532a9770a2c2cfdc38b7581dd01df69b70", "is_verified": false, - "line_number": 84, - "is_secret": false + "line_number": 84 } ], "gen3/lib/testData/default/expectedFenceResult.yaml": [ @@ -438,56 +436,49 @@ "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "fe54e5e937d642307ec155b47ac8a214cb40d474", "is_verified": false, - "line_number": 10, - "is_secret": false + "line_number": 10 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "cea0e701e53c42bede2212b22f58f9ff8324da55", "is_verified": false, - "line_number": 13, - "is_secret": false + "line_number": 13 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "d98d72830f08c9a8b96ed11d3d96ae9e71b72a26", "is_verified": false, - "line_number": 16, - "is_secret": false + "line_number": 16 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "667fd45d415f73f4132cf0ed11452beb51117b12", "is_verified": false, - "line_number": 18, - "is_secret": false + "line_number": 18 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "c2599d515ba3be74ed58821485ba769fc565e424", "is_verified": false, - "line_number": 33, - "is_secret": false + "line_number": 33 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "6ec5eb29e2884f0c9731493b38902e37c2d672ba", "is_verified": false, - "line_number": 35, - "is_secret": false + "line_number": 35 }, { "type": "Base64 High Entropy String", "filename": "gen3/lib/testData/etlconvert/expected2.yaml", "hashed_secret": "99126b74731670a59b663d5320712564ec7b5f22", "is_verified": false, - "line_number": 36, - "is_secret": false + "line_number": 36 } ], "gen3/lib/testData/etlconvert/users2.yaml": [ @@ -642,8 +633,7 @@ "filename": "gen3/test/secretsTest.sh", "hashed_secret": "c2c715092ef59cba22520f109f041efca84b8938", "is_verified": false, - "line_number": 25, - "is_secret": false + "line_number": 25 } ], "gen3/test/terraformTest.sh": [ @@ -666,8 +656,7 @@ "filename": "gen3/test/terraformTest.sh", "hashed_secret": "1cc07dccfdf640eb0e403e490a873a5536759009", "is_verified": false, - "line_number": 172, - "is_secret": false + "line_number": 172 }, { "type": "Secret Keyword", @@ -681,8 +670,7 @@ "filename": "gen3/test/terraformTest.sh", "hashed_secret": "185a71a740ef6b9b21c84e6eaa47b89c7de181ef", "is_verified": false, - "line_number": 175, - "is_secret": false + "line_number": 175 }, { "type": "Secret Keyword", @@ -927,8 +915,7 @@ "filename": "kube/services/datadog/values.yaml", "hashed_secret": "4a8ce7ae6a8a7f2624e232b61b18c2ac9789c44b", "is_verified": false, - "line_number": 23, - "is_secret": false + "line_number": 23 } ], "kube/services/datasim/datasim-deploy.yaml": [ @@ -1506,14 +1493,14 @@ "filename": "kube/services/jenkins/jenkins-deploy.yaml", "hashed_secret": "c937b6fbb346a51ef679dd02ac5c4863e02bfdbf", "is_verified": false, - "line_number": 153 + "line_number": 157 }, { "type": "Secret Keyword", "filename": "kube/services/jenkins/jenkins-deploy.yaml", "hashed_secret": "9ce05cf6168d15dfe02aac9ca9e0712c19c9436d", "is_verified": false, - "line_number": 156 + "line_number": 160 } ], "kube/services/jenkins2-ci-worker/jenkins2-ci-worker-deploy.yaml": [ @@ 
@@ -2909,8 +2896,7 @@
         "filename": "kube/services/monitoring/grafana-values.yaml",
         "hashed_secret": "2ae868079d293e0a185c671c7bcdac51df36e385",
         "is_verified": false,
-        "line_number": 162,
-        "is_secret": false
+        "line_number": 162
       },
       {
         "type": "Secret Keyword",
@@ -3199,8 +3185,7 @@
         "filename": "kube/services/revproxy/helpers.js",
         "hashed_secret": "1d278d3c888d1a2fa7eed622bfc02927ce4049af",
         "is_verified": false,
-        "line_number": 10,
-        "is_secret": false
+        "line_number": 10
       }
     ],
     "kube/services/revproxy/helpersTest.js": [
@@ -3216,8 +3201,7 @@
         "filename": "kube/services/revproxy/helpersTest.js",
         "hashed_secret": "e029d4904cc728879d70030572bf37d4510367cb",
         "is_verified": false,
-        "line_number": 22,
-        "is_secret": false
+        "line_number": 22
       }
     ],
     "kube/services/revproxy/revproxy-deploy.yaml": [
@@ -3352,8 +3336,7 @@
         "filename": "kube/services/superset/superset-redis.yaml",
         "hashed_secret": "9fe1c31809da38c55b2b64bfab47b92bc5f6b7b9",
         "is_verified": false,
-        "line_number": 265,
-        "is_secret": false
+        "line_number": 265
       }
     ],
     "kube/services/superset/values.yaml": [
@@ -3383,8 +3366,7 @@
         "filename": "kube/services/superset/values.yaml",
         "hashed_secret": "98a84a63e5633d17e3b27b69695f87aa7189e9dc",
         "is_verified": false,
-        "line_number": 459,
-        "is_secret": false
+        "line_number": 459
       }
     ],
     "kube/services/thor/thor-deploy.yaml": [
@@ -3470,8 +3452,7 @@
         "filename": "tf_files/aws/eks/sample.tfvars",
         "hashed_secret": "83c1003f406f34fba4d6279a948fee3abc802884",
         "is_verified": false,
-        "line_number": 107,
-        "is_secret": false
+        "line_number": 107
       }
     ],
     "tf_files/aws/eks/variables.tf": [
@@ -3480,8 +3461,7 @@
         "filename": "tf_files/aws/eks/variables.tf",
         "hashed_secret": "83c1003f406f34fba4d6279a948fee3abc802884",
         "is_verified": false,
-        "line_number": 133,
-        "is_secret": false
+        "line_number": 133
       }
     ],
     "tf_files/aws/modules/common-logging/README.md": [
@@ -3490,8 +3470,7 @@
         "filename": "tf_files/aws/modules/common-logging/README.md",
         "hashed_secret": "83442aa5a16cb1992731c32367ef464564388017",
         "is_verified": false,
-        "line_number": 57,
-        "is_secret": false
+        "line_number": 57
       }
     ],
     "tf_files/aws/modules/common-logging/lambda_function.py": [
@@ -3500,16 +3479,14 @@
         "filename": "tf_files/aws/modules/common-logging/lambda_function.py",
         "hashed_secret": "061765d6854d72f03a6527610d5b6822c9d516de",
         "is_verified": false,
-        "line_number": 18,
-        "is_secret": false
+        "line_number": 18
       },
       {
         "type": "Base64 High Entropy String",
         "filename": "tf_files/aws/modules/common-logging/lambda_function.py",
         "hashed_secret": "61df81a188bb4dba6ae6128ff7e2c9c6a6f736ef",
         "is_verified": false,
-        "line_number": 18,
-        "is_secret": false
+        "line_number": 18
       },
       {
         "type": "Base64 High Entropy String",
@@ -3530,8 +3507,7 @@
         "filename": "tf_files/aws/modules/common-logging/lambda_function.py",
         "hashed_secret": "4f9fd96d3926f2c53ab0261d33f1d1a85a6a77ff",
         "is_verified": false,
-        "line_number": 30,
-        "is_secret": false
+        "line_number": 30
       }
     ],
     "tf_files/aws/modules/common-logging/testLambda.py": [
@@ -3540,16 +3516,14 @@
         "filename": "tf_files/aws/modules/common-logging/testLambda.py",
         "hashed_secret": "061765d6854d72f03a6527610d5b6822c9d516de",
         "is_verified": false,
-        "line_number": 5,
-        "is_secret": false
+        "line_number": 5
       },
       {
         "type": "Base64 High Entropy String",
         "filename": "tf_files/aws/modules/common-logging/testLambda.py",
         "hashed_secret": "61df81a188bb4dba6ae6128ff7e2c9c6a6f736ef",
         "is_verified": false,
-        "line_number": 5,
-        "is_secret": false
+        "line_number": 5
       },
       {
         "type": "Base64 High Entropy String",
@@ -3579,8 +3553,7 @@
"tf_files/aws/modules/eks/variables.tf", "hashed_secret": "83c1003f406f34fba4d6279a948fee3abc802884", "is_verified": false, - "line_number": 113, - "is_secret": false + "line_number": 113 } ], "tf_files/aws/modules/management-logs/README.md": [ @@ -3589,8 +3562,7 @@ "filename": "tf_files/aws/modules/management-logs/README.md", "hashed_secret": "83442aa5a16cb1992731c32367ef464564388017", "is_verified": false, - "line_number": 54, - "is_secret": false + "line_number": 54 } ], "tf_files/aws/modules/management-logs/lambda_function.py": [ @@ -3599,16 +3571,14 @@ "filename": "tf_files/aws/modules/management-logs/lambda_function.py", "hashed_secret": "061765d6854d72f03a6527610d5b6822c9d516de", "is_verified": false, - "line_number": 18, - "is_secret": false + "line_number": 18 }, { "type": "Base64 High Entropy String", "filename": "tf_files/aws/modules/management-logs/lambda_function.py", "hashed_secret": "61df81a188bb4dba6ae6128ff7e2c9c6a6f736ef", "is_verified": false, - "line_number": 18, - "is_secret": false + "line_number": 18 }, { "type": "Base64 High Entropy String", @@ -3629,8 +3599,7 @@ "filename": "tf_files/aws/modules/management-logs/lambda_function.py", "hashed_secret": "4f9fd96d3926f2c53ab0261d33f1d1a85a6a77ff", "is_verified": false, - "line_number": 30, - "is_secret": false + "line_number": 30 } ], "tf_files/aws/modules/management-logs/testLambda.py": [ @@ -3639,16 +3608,14 @@ "filename": "tf_files/aws/modules/management-logs/testLambda.py", "hashed_secret": "061765d6854d72f03a6527610d5b6822c9d516de", "is_verified": false, - "line_number": 5, - "is_secret": false + "line_number": 5 }, { "type": "Base64 High Entropy String", "filename": "tf_files/aws/modules/management-logs/testLambda.py", "hashed_secret": "61df81a188bb4dba6ae6128ff7e2c9c6a6f736ef", "is_verified": false, - "line_number": 5, - "is_secret": false + "line_number": 5 }, { "type": "Base64 High Entropy String", @@ -3669,16 +3636,14 @@ "filename": "tf_files/aws/modules/management-logs/testLambda.py", "hashed_secret": "3cf8eb4e9254e1d6cc523da01f8b798b9a83101a", "is_verified": false, - "line_number": 6, - "is_secret": false + "line_number": 6 }, { "type": "Hex High Entropy String", "filename": "tf_files/aws/modules/management-logs/testLambda.py", "hashed_secret": "60a6dfc8d43cd2f5c6292899fc2f94f2d4fc32c4", "is_verified": false, - "line_number": 6, - "is_secret": false + "line_number": 6 }, { "type": "Base64 High Entropy String", @@ -3712,8 +3677,7 @@ "filename": "tf_files/gcp-bwg/roots/commons_setup/variables/answerfile-commons_setup-001.template.tfvars", "hashed_secret": "f865b53623b121fd34ee5426c792e5c33af8c227", "is_verified": false, - "line_number": 231, - "is_secret": false + "line_number": 231 } ], "tf_files/gcp-bwg/roots/templates/answerfile-commons_setup-001.template.tfvars": [ @@ -3722,8 +3686,7 @@ "filename": "tf_files/gcp-bwg/roots/templates/answerfile-commons_setup-001.template.tfvars", "hashed_secret": "f865b53623b121fd34ee5426c792e5c33af8c227", "is_verified": false, - "line_number": 231, - "is_secret": false + "line_number": 231 } ], "tf_files/gcp-bwg/roots/templates/answerfile-env-tenant.user.tfvars_NO_APP_SETUP": [ @@ -3732,8 +3695,7 @@ "filename": "tf_files/gcp-bwg/roots/templates/answerfile-env-tenant.user.tfvars_NO_APP_SETUP", "hashed_secret": "f865b53623b121fd34ee5426c792e5c33af8c227", "is_verified": false, - "line_number": 262, - "is_secret": false + "line_number": 262 } ], "tf_files/gcp/commons/root.tf": [ @@ -3762,5 +3724,5 @@ } ] }, - "generated_at": "2024-02-27T20:53:30Z" + "generated_at": 
"2024-03-01T21:46:43Z" } diff --git a/files/scripts/ci-env-pool-reset.sh b/files/scripts/ci-env-pool-reset.sh index c0c1f67c6..362cfbfd5 100644 --- a/files/scripts/ci-env-pool-reset.sh +++ b/files/scripts/ci-env-pool-reset.sh @@ -29,6 +29,7 @@ source "${GEN3_HOME}/gen3/gen3setup.sh" cat - > jenkins-envs-services.txt </", + "NIDA Data Share": "https://datashare.nida.nih.gov/study/", + "NICHD DASH": "https://dash.nichd.nih.gov/study/", + "ICPSR": "https://www.icpsr.umich.edu/web/ICPSR/studies/", + "BioSystics-AP": "https://biosystics-ap.com/assays/assaystudy//", +} + + # Defines field that we don't want to include in the filters OMITTED_VALUES_MAPPING = { "study_metadata.human_subject_applicability.gender_applicability": "Not applicable" @@ -114,6 +124,31 @@ def get_client_token(client_id: str, client_secret: str): return token +def get_related_studies(serial_num, hostname): + related_study_result = [] + + if serial_num: + mds = requests.get(f"http://revproxy-service/mds/metadata?nih_reporter.project_num_split.serial_num={serial_num}&data=true&limit=2000") + if mds.status_code == 200: + related_study_metadata = mds.json() + + for ( + related_study_metadata_key, + related_study_metadata_value, + ) in related_study_metadata.items(): + title = ( + related_study_metadata_value.get( + "gen3_discovery", {} + ) + .get("study_metadata", {}) + .get("minimal_info", {}) + .get("study_name", "") + ) + link = f"https://{hostname}/portal/discovery/{related_study_metadata_key}/" + related_study_result.append({"title": title, "link": link}) + return related_study_result + + parser = argparse.ArgumentParser() parser.add_argument("--directory", help="CEDAR Directory ID for registering ") @@ -214,6 +249,67 @@ def get_client_token(client_id: str, client_secret: str): mds_res["gen3_discovery"]["study_metadata"].update(cedar_record) mds_res["gen3_discovery"]["study_metadata"]["metadata_location"]["other_study_websites"] = cedar_record_other_study_websites + # setup citations + doi_citation = mds_res["gen3_discovery"]["study_metadata"].get("doi_citation", "") + mds_res["gen3_discovery"]["study_metadata"]["citation"]["heal_platform_citation"] = doi_citation + + + # setup repository_study_link + data_repositories = ( + mds_res.get("study_metadata", {}) + .get("metadata_location", {}) + .get("data_repositories", []) + ) + repository_citation = "Users must also include a citation to the data as specified by the local repository." + repository_citation_additional_text = ' The link to the study page at the local repository can be found in the "Data" tab.' 
+        for repository in data_repositories:
+            if (
+                repository["repository_name"]
+                and repository["repository_name"]
+                in REPOSITORY_STUDY_ID_LINK_TEMPLATE
+                and repository["repository_study_ID"]
+            ):
+                repository_study_link = REPOSITORY_STUDY_ID_LINK_TEMPLATE[
+                    repository["repository_name"]
+                ].replace("", repository["repository_study_ID"])
+                repository.update({"repository_study_link": repository_study_link})
+            if repository_citation_additional_text not in repository_citation:
+                repository_citation += repository_citation_additional_text
+        if len(data_repositories):
+            data_repositories[0] = {
+                **data_repositories[0],
+                "repository_citation": repository_citation,
+            }
+        mds_res["gen3_discovery"]["study_metadata"][
+            "metadata_location"
+        ]["data_repositories"] = data_repositories
+
+
+
+        # set up related studies
+        serial_num = None
+        try:
+            serial_num = (
+                mds_res
+                .get("nih_reporter", {})
+                .get("project_num_split", {})
+                .get("serial_num", None)
+            )
+        except Exception:
+            print(f"Unable to get serial number for study")
+
+        if serial_num == None:
+            print(f"Unable to get serial number for study")
+
+        related_study_result = get_related_studies(serial_num, hostname)
+        existing_related_study_result = mds_res.get("related_studies", [])
+        for related_study in related_study_result:
+            if related_study not in existing_related_study_result:
+                existing_related_study_result.append(copy.deepcopy(related_study))
+        mds_res["gen3_discovery"][
+            "related_studies"
+        ] = copy.deepcopy(existing_related_study_result)
+
         # merge data from cedar that is not study level metadata into a level higher
         deleted_keys = []
         for key, value in mds_res["gen3_discovery"]["study_metadata"].items():
diff --git a/gen3/bin/kube-setup-argo.sh b/gen3/bin/kube-setup-argo.sh
index ff2438833..20676145b 100644
--- a/gen3/bin/kube-setup-argo.sh
+++ b/gen3/bin/kube-setup-argo.sh
@@ -5,10 +5,25 @@ source "${GEN3_HOME}/gen3/lib/utils.sh"
 gen3_load "gen3/gen3setup"
 gen3_load "gen3/lib/kube-setup-init"
 
+override_namespace=false
+force=false
+
+for arg in "${@}"; do
+  if [ "$arg" == "--override-namespace" ]; then
+    override_namespace=true
+  elif [ "$arg" == "--force" ]; then
+    force=true
+  else
+    #Print usage info and exit
+    gen3_log_info "Usage: gen3 kube-setup-argo [--override-namespace] [--force]"
+    exit 1
+  fi
+done
 
 ctx="$(g3kubectl config current-context)"
 ctxNamespace="$(g3kubectl config view -ojson | jq -r ".contexts | map(select(.name==\"$ctx\")) | .[0] | .context.namespace")"
+argo_namespace=$(g3k_config_lookup '.argo_namespace' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json)
 
 function setup_argo_buckets {
   local accountNumber
@@ -32,13 +47,13 @@ function setup_argo_buckets {
   roleName="gen3-argo-${environment//_/-}-role"
   bucketPolicy="argo-bucket-policy-${nameSpace}"
   internalBucketPolicy="argo-internal-bucket-policy-${nameSpace}"
-  if [[ ! -z $(g3k_config_lookup '."s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) || ! -z $(g3k_config_lookup '.argo."s3-bucket"') ]]; then
-    if [[ ! -z $(g3k_config_lookup '."s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) ]]; then
+  if [[ ! -z $(g3k_config_lookup '."downloadable-s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) || ! -z $(g3k_config_lookup '.argo."downloadable-s3-bucket"') ]]; then
+    if [[ ! -z $(g3k_config_lookup '."downloadable-s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) ]]; then
       gen3_log_info "Using S3 bucket found in manifest: ${bucketName}"
-      bucketName=$(g3k_config_lookup '."s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json)
+      bucketName=$(g3k_config_lookup '."downloadable-s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json)
     else
       gen3_log_info "Using S3 bucket found in manifest: ${bucketName}"
-      bucketName=$(g3k_config_lookup '.argo."s3-bucket"')
+      bucketName=$(g3k_config_lookup '.argo."downloadable-s3-bucket"')
     fi
   fi
   if [[ ! -z $(g3k_config_lookup '."internal-s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) || ! -z $(g3k_config_lookup '.argo."internal-s3-bucket"') ]]; then
@@ -131,19 +146,19 @@ EOF
   g3kubectl create namespace argo || true
   g3kubectl label namespace argo app=argo || true
   # Grant admin access within the argo namespace to the default SA in the argo namespace
-  g3kubectl create rolebinding argo-admin --clusterrole=admin --serviceaccount=argo:default -n argo || true
+  g3kubectl create rolebinding argo-admin --clusterrole=admin --serviceaccount=argo:default -n $argo_namespace || true
   fi
   gen3_log_info "Creating IAM role ${roleName}"
   if aws iam get-role --role-name "${roleName}" > /dev/null 2>&1; then
     gen3_log_info "IAM role ${roleName} already exists.."
     roleArn=$(aws iam get-role --role-name "${roleName}" --query 'Role.Arn' --output text)
     gen3_log_info "Role annotate"
-    g3kubectl annotate serviceaccount default eks.amazonaws.com/role-arn=${roleArn} -n argo
-    g3kubectl annotate serviceaccount argo eks.amazonaws.com/role-arn=${roleArn} -n $nameSpace
+    g3kubectl annotate serviceaccount default eks.amazonaws.com/role-arn=${roleArn} --overwrite -n $argo_namespace
+    g3kubectl annotate serviceaccount argo eks.amazonaws.com/role-arn=${roleArn} --overwrite -n $nameSpace
   else
     gen3 awsrole create $roleName argo $nameSpace -f all_namespaces
     roleArn=$(aws iam get-role --role-name "${roleName}" --query 'Role.Arn' --output text)
-    g3kubectl annotate serviceaccount default eks.amazonaws.com/role-arn=${roleArn} -n argo
+    g3kubectl annotate serviceaccount default eks.amazonaws.com/role-arn=${roleArn} -n $argo_namespace
   fi
 
   # Grant admin access within the current namespace to the argo SA in the current namespace
@@ -177,34 +192,47 @@ EOF
   for serviceName in indexd; do
     secretName="${serviceName}-creds"
     # Only delete if secret is found to prevent early exits
-    if [[ ! -z $(g3kubectl get secrets -n argo | grep $secretName) ]]; then
-      g3kubectl delete secret "$secretName" -n argo > /dev/null 2>&1
+    if [[ ! -z $(g3kubectl get secrets -n $argo_namespace | grep $secretName) ]]; then
+      g3kubectl delete secret "$secretName" -n $argo_namespace > /dev/null 2>&1
     fi
   done
   sleep 1 # I think delete is async - give backend a second to finish
   indexdFencePassword=$(cat $(gen3_secrets_folder)/creds.json | jq -r .indexd.user_db.$indexd_admin_user)
-  g3kubectl create secret generic "indexd-creds" --from-literal=user=$indexd_admin_user --from-literal=password=$indexdFencePassword -n argo
+  g3kubectl create secret generic "indexd-creds" --from-literal=user=$indexd_admin_user --from-literal=password=$indexdFencePassword -n $argo_namespace
   fi
 }
 
 function setup_argo_db() {
secret="$(g3kubectl get secret argo-db-creds -n $argo_namespace 2> /dev/null)"; then gen3_log_info "Setting up argo db persistence" gen3 db setup argo || true dbCreds=$(gen3 secrets decode argo-g3auto dbcreds.json) - g3kubectl create secret -n argo generic argo-db-creds --from-literal=db_host=$(echo $dbCreds | jq -r .db_host) --from-literal=db_username=$(echo $dbCreds | jq -r .db_username) --from-literal=db_password=$(echo $dbCreds | jq -r .db_password) --from-literal=db_database=$(echo $dbCreds | jq -r .db_database) + g3kubectl create secret -n $argo_namespace generic argo-db-creds --from-literal=db_host=$(echo $dbCreds | jq -r .db_host) --from-literal=db_username=$(echo $dbCreds | jq -r .db_username) --from-literal=db_password=$(echo $dbCreds | jq -r .db_password) --from-literal=db_database=$(echo $dbCreds | jq -r .db_database) else gen3_log_info "Argo DB setup already completed" fi } - setup_argo_buckets +function setup_argo_template_secret() { + gen3_log_info "Started the template secret process" + downloadable_bucket_name=$(g3k_config_lookup '."downloadable-s3-bucket"' $(g3k_manifest_init)/$(g3k_hostname)/manifests/argo/argo.json) + # Check if the secret already exists + if [[ ! -z $(g3kubectl get secret argo-template-values-secret -n $argo_namespace) ]]; then + gen3_log_info "Argo template values secret already exists, assuming it's stale and deleting" + g3kubectl delete secret argo-template-values-secret -n $argo_namespace + fi + gen3_log_info "Creating argo template values secret" + g3kubectl create secret generic argo-template-values-secret --from-literal=DOWNLOADABLE_BUCKET=$downloadable_bucket_name -n $argo_namespace +} + +setup_argo_buckets # only do this if we are running in the default namespace -if [[ "$ctxNamespace" == "default" || "$ctxNamespace" == "null" ]]; then +if [[ "$ctxNamespace" == "default" || "$ctxNamespace" == "null" || "$override_namespace" == true ]]; then setup_argo_db - if (! helm status argo -n argo > /dev/null 2>&1 ) || [[ "$1" == "--force" ]]; then - DBHOST=$(kubectl get secrets -n argo argo-db-creds -o json | jq -r .data.db_host | base64 -d) - DBNAME=$(kubectl get secrets -n argo argo-db-creds -o json | jq -r .data.db_database | base64 -d) + setup_argo_template_secret + if (! 
+  if (! helm status argo -n $argo_namespace > /dev/null 2>&1 ) || [[ "$force" == true ]]; then
+    DBHOST=$(kubectl get secrets -n $argo_namespace argo-db-creds -o json | jq -r .data.db_host | base64 -d)
+    DBNAME=$(kubectl get secrets -n $argo_namespace argo-db-creds -o json | jq -r .data.db_database | base64 -d)
     if [[ -z $internalBucketName ]]; then
       BUCKET=$bucketName
     else
@@ -218,7 +246,7 @@ if [[ "$ctxNamespace" == "default" || "$ctxNamespace" == "null" ]]; then
     helm repo add argo https://argoproj.github.io/argo-helm --force-update 2> >(grep -v 'This is insecure' >&2)
     helm repo update 2> >(grep -v 'This is insecure' >&2)
-    helm upgrade --install argo argo/argo-workflows -n argo -f ${valuesFile} --version 0.29.1
+    helm upgrade --install argo argo/argo-workflows -n $argo_namespace -f ${valuesFile} --version 0.29.1
   else
     gen3_log_info "kube-setup-argo exiting - argo already deployed, use --force to redeploy"
   fi
diff --git a/gen3/bin/kube-setup-hatchery.sh b/gen3/bin/kube-setup-hatchery.sh
index 691fb354a..5454d1e24 100644
--- a/gen3/bin/kube-setup-hatchery.sh
+++ b/gen3/bin/kube-setup-hatchery.sh
@@ -20,11 +20,60 @@ gen3 jupyter j-namespace setup
 # (g3k_kv_filter ${GEN3_HOME}/kube/services/hatchery/serviceaccount.yaml BINDING_ONE "name: hatchery-binding1-$namespace" BINDING_TWO "name: hatchery-binding2-$namespace" CURRENT_NAMESPACE "namespace: $namespace" | g3kubectl apply -f -) || true
 
+function exists_or_create_gen3_license_table() {
+  # Create dynamodb table for gen3-license if it does not exist.
+  TARGET_TABLE="$1"
+  echo "Checking for dynamoDB table: ${TARGET_TABLE}"
-# cron job to distribute licenses if using Stata workspaces
-if [ "$(g3kubectl get configmaps/manifest-hatchery -o yaml | grep "\"image\": .*stata.*")" ];
-then
-  gen3 job cron distribute-licenses '* * * * *'
+  FOUND_TABLE=`aws dynamodb list-tables | jq -r .TableNames | jq -c -r '.[]' | grep $TARGET_TABLE`
+  if [ -n "$FOUND_TABLE" ]; then
+    echo "Target table already exists in dynamoDB: $FOUND_TABLE"
+  else
+    echo "Creating table ${TARGET_TABLE}"
+    GSI=`g3kubectl get configmaps/manifest-hatchery -o json | jq -r '.data."license-user-maps-global-secondary-index"'`
+    if [[ -z "$GSI" || "$GSI" == "null" ]]; then
+      echo "Error: No global-secondary-index in configuration"
+      return 0
+    fi
+    aws dynamodb create-table \
+      --no-cli-pager \
+      --table-name "$TARGET_TABLE" \
+      --attribute-definitions AttributeName=itemId,AttributeType=S \
+        AttributeName=environment,AttributeType=S \
+        AttributeName=isActive,AttributeType=S \
+      --key-schema AttributeName=itemId,KeyType=HASH \
+        AttributeName=environment,KeyType=RANGE \
+      --provisioned-throughput ReadCapacityUnits=5,WriteCapacityUnits=5 \
+      --global-secondary-indexes \
+        "[
+          {
+            \"IndexName\": \"$GSI\",
+            \"KeySchema\": [{\"AttributeName\":\"environment\",\"KeyType\":\"HASH\"},
+              {\"AttributeName\":\"isActive\",\"KeyType\":\"RANGE\"}],
+            \"Projection\":{
+              \"ProjectionType\":\"INCLUDE\",
+              \"NonKeyAttributes\":[\"itemId\",\"userId\",\"licenseId\",\"licenseType\"]
+            },
+            \"ProvisionedThroughput\": {
+              \"ReadCapacityUnits\": 5,
+              \"WriteCapacityUnits\": 3
+            }
+          }
+        ]"
+  fi
+}
+
+TARGET_TABLE=`g3kubectl get configmaps/manifest-hatchery -o json | jq -r '.data."license-user-maps-dynamodb-table"'`
+if [[ -z "$TARGET_TABLE" || "$TARGET_TABLE" == "null" ]]; then
+  echo "No gen3-license table in configuration"
+  # cron job to distribute licenses if using Stata workspaces but not using dynamoDB
+  if [ "$(g3kubectl get configmaps/manifest-hatchery -o yaml | grep "\"image\": .*stata.*")" ];
+  then
+    gen3 job cron distribute-licenses '* * * * *'
+  fi
+else
+  echo "Found gen3-license table in configuration: $TARGET_TABLE"
+  exists_or_create_gen3_license_table "$TARGET_TABLE"
 fi
 
 policy=$( cat <