
feat(RHIDP-4812): Instead of uploading the entities via the RHDH API, add them directly via app-config.yaml (#118)

Signed-off-by: Pavel Macík <[email protected]>
pmacik authored Nov 7, 2024
1 parent 03e47d7 commit 5fa1a44
Showing 6 changed files with 80 additions and 83 deletions.
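In short: instead of POSTing each generated entity file to the RHDH catalog API at populate time, the generated URLs are now collected into a locations.yaml and merged under catalog.locations in app-config.yaml before RHDH is installed. A rough before/after sketch assembled from the changes below (upload_url, TMP_DIR and the helper functions come from the scripts in this repository):

# before (removed from create_resource.sh): register each generated file via the catalog API
ACCESS_TOKEN=$(get_token "rhdh")
curl -k "$(backstage_url)/api/catalog/locations" -X POST \
  -H 'Authorization: Bearer '"$ACCESS_TOKEN" \
  -H 'Content-Type: application/json' \
  --data-raw '{"type":"url","target":"'"${upload_url}"'"}'

# after (deploy.sh): collect the same URLs into locations.yaml and ship them with the app config
yq -i '.catalog.locations |= . + load("'"$TMP_DIR/locations.yaml"'").locations' "$TMP_DIR/app-config.yaml"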
11 changes: 0 additions & 11 deletions Makefile
@@ -97,17 +97,6 @@ deploy-rhdh-helm: $(TMP_DIR)
cd ./ci-scripts/rhdh-setup/; ./deploy.sh -i "$(AUTH_PROVIDER)"
date --utc -Ins>$(TMP_DIR)/deploy-after

## Create users, groups and objects such as components and APIs in RHDH
.PHONY: populate-rhdh
populate-rhdh: $(TMP_DIR)
date --utc -Ins>$(TMP_DIR)/populate-before
ifeq ($(RHDH_INSTALL_METHOD),helm)
cd ./ci-scripts/rhdh-setup/; ./deploy.sh -c "$(AUTH_PROVIDER)"
else ifeq ($(RHDH_INSTALL_METHOD),olm)
cd ./ci-scripts/rhdh-setup/; ./deploy.sh -o -c "$(AUTH_PROVIDER)"
endif
date --utc -Ins>$(TMP_DIR)/populate-after

## Undeploy RHDH with Helm
.PHONY: undeploy-rhdh-helm
undeploy-rhdh-helm:
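The populate-rhdh target is removed rather than moved: ci-scripts/setup.sh no longer calls it (see below), and the populate-before/populate-after timings that collect-results.sh reads are now produced during the deploy flow itself (see the backstage_install changes in deploy.sh). A sketch of the timing contract that survives the change, using the same file names as the scripts:

# written during deploy (previously by `make populate-rhdh`)
date --utc -Ins >"${TMP_DIR}/populate-before"
# ... create users/groups, generate entities, wait for the catalog to ingest them ...
date --utc -Ins >"${TMP_DIR}/populate-after"
# collect-results.sh reads these (plus the new populate-users-groups-* and
# populate-catalog-* stamps) from ${ARTIFACT_DIR}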
17 changes: 13 additions & 4 deletions ci-scripts/collect-results.sh
@@ -112,29 +112,38 @@ if [ "$PRE_LOAD_DB" == "true" ]; then
mstart=$(date --utc --date "$(cat "${ARTIFACT_DIR}/populate-before")" --iso-8601=seconds)
mend=$(date --utc --date "$(cat "${ARTIFACT_DIR}/populate-after")" --iso-8601=seconds)
mhost=$(kubectl -n openshift-monitoring get route -l app.kubernetes.io/name=thanos-query -o json | jq --raw-output '.items[0].spec.host')

deploy_started=$(cat "${ARTIFACT_DIR}/deploy-before")
deploy_ended=$(cat "${ARTIFACT_DIR}/deploy-after")
deploy_duration="$(timestamp_diff "$deploy_started" "$deploy_ended")"

populate_started=$(cat "${ARTIFACT_DIR}/populate-before")
populate_ended=$(cat "${ARTIFACT_DIR}/populate-after")
populate_duration="$(timestamp_diff "$populate_started" "$populate_ended")"

populate_users_groups_started=$(cat "${ARTIFACT_DIR}/populate-users-groups-before")
populate_users_groups_ended=$(cat "${ARTIFACT_DIR}/populate-users-groups-after")
populate_users_groups_duration="$(timestamp_diff "$populate_users_groups_started" "$populate_users_groups_ended")"

populate_catalog_started=$(cat "${ARTIFACT_DIR}/populate-catalog-before")
populate_catalog_ended=$(cat "${ARTIFACT_DIR}/populate-catalog-after")
populate_catalog_duration="$(timestamp_diff "$populate_catalog_started" "$populate_catalog_ended")"

status_data.py \
--status-data-file "$monitoring_collection_data" \
--set \
measurements.timings.deploy.started="$deploy_started" \
measurements.timings.deploy.ended="$deploy_ended" \
measurements.timings.deploy.duration="$(timestamp_diff "$deploy_started" "$deploy_ended")" \
measurements.timings.deploy.duration="$deploy_duration" \
measurements.timings.populate.started="$populate_started" \
measurements.timings.populate.ended="$populate_ended" \
measurements.timings.populate.duration="$(timestamp_diff "$populate_started" "$populate_ended")" \
measurements.timings.populate.duration="$populate_duration" \
measurements.timings.populate_users_groups.started="$populate_users_groups_started" \
measurements.timings.populate_users_groups.ended="$populate_users_groups_ended" \
measurements.timings.populate_users_groups.duration="$(timestamp_diff "$populate_users_groups_started" "$populate_users_groups_ended")" \
measurements.timings.populate_users_groups.duration="$populate_users_groups_duration" \
measurements.timings.populate_catalog.started="$populate_catalog_started" \
measurements.timings.populate_catalog.ended="$populate_catalog_ended" \
measurements.timings.populate_catalog.duration="$(timestamp_diff "$populate_catalog_started" "$populate_catalog_ended")" \
measurements.timings.populate_catalog.duration="$populate_catalog_duration" \
-d &>"$monitoring_collection_log"
status_data.py \
--status-data-file "$monitoring_collection_data" \
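timestamp_diff is defined elsewhere in these scripts and is not part of this diff; a minimal whole-second equivalent, assuming GNU date and timestamps it can parse (such as the `date --utc -Ins` stamps above), would be:

timestamp_diff() {
  # print the number of seconds between two timestamps
  local start="$1" end="$2"
  echo $(( $(date --utc --date "$end" +%s) - $(date --utc --date "$start" +%s) ))
}

# usage, mirroring the calls above
deploy_duration="$(timestamp_diff "$deploy_started" "$deploy_ended")"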
59 changes: 8 additions & 51 deletions ci-scripts/rhdh-setup/create_resource.sh
@@ -96,7 +96,7 @@ clone_and_upload() {
git_repo=${GITHUB_REPO//github.com/${git_str}}
[[ -d "${git_dir}" ]] && rm -rf "${git_dir}"
git clone "$git_repo" "$git_dir"
cd "$git_dir" || return
pushd "$git_dir" || return
git config user.name "rhdh-performance-bot"
git config user.email [email protected]
tmp_branch=$(mktemp -u XXXXXXXXXX)
@@ -110,61 +110,18 @@ clone_and_upload() {
git push -f --set-upstream origin "$tmp_branch"
cd ..
sleep 5
output="${TMP_DIR}/locations.yaml"
if [ ! -f "$output" ]; then
echo "locations: []" > "$output"
fi
for filename in "${files[@]}"; do
e_count=$(yq eval '.metadata.name | capture(".*-(?P<value>[0-9]+)").value' "$filename" | tail -n 1)
upload_url="${GITHUB_REPO%.*}/blob/${tmp_branch}/$(basename "$filename")"
max_attempts=5
attempt=1
while ((attempt <= max_attempts)); do
log_info "Uploading entities from $upload_url"
ACCESS_TOKEN=$(get_token "rhdh")
response="$(curl -k "$(backstage_url)/api/catalog/locations" --cookie "$COOKIE" --cookie-jar "$COOKIE" \
-X POST \
-H 'Accept-Encoding: gzip, deflate, br' \
-H 'Authorization: Bearer '"$ACCESS_TOKEN" \
-H 'Content-Type: application/json' --data-raw '{"type":"url","target":"'"${upload_url}"'"}')"
if [ "${PIPESTATUS[0]}" -eq 0 ]; then
log_info "Entities from $upload_url uploaded"
break
else
log_warn "Unable to upload entities from $upload_url: [$response]. Trying again up to $max_attempts times."
((attempt++))
fi
done
if [[ $attempt -gt $max_attempts ]]; then
log_error "Unable to upload entities from $upload_url $max_attempts attempts, giving up!"
return 1
fi

timeout=300
timeout_timestamp=$(date -d "$timeout seconds" "+%s")
last_count=-1
while true; do
if [ "$(date "+%s")" -gt "$timeout_timestamp" ]; then
log_error "Timeout waiting on entity count"
exit 1
else
ACCESS_TOKEN=$(get_token "rhdh")
if [[ 'component-*.yaml' == "${1}" ]]; then b_count=$(curl -s -k "$(backstage_url)/api/catalog/entity-facets?facet=kind" --cookie "$COOKIE" --cookie-jar "$COOKIE" -H 'Content-Type: application/json' -H 'Authorization: Bearer '"$ACCESS_TOKEN" | tee -a "$TMP_DIR/get_component_count.log" | jq -r '.facets.kind[] | select(.value == "Component")| .count'); fi
if [[ 'api-*.yaml' == "${1}" ]]; then b_count=$(curl -s -k "$(backstage_url)/api/catalog/entity-facets?facet=kind" --cookie "$COOKIE" --cookie-jar "$COOKIE" -H 'Content-Type: application/json' -H 'Authorization: Bearer '"$ACCESS_TOKEN" | tee -a "$TMP_DIR/get_api_count.log" | jq -r '.facets.kind[] | select(.value == "API")| .count'); fi
if [[ -z "$b_count" ]]; then log_warn "Failed to get current count, maybe RHDH is down?"; b_count=0; fi
if [[ "$last_count" != "$b_count" ]]; then # reset the timeout if current count changes
log_info "The current count changed, resetting entity waiting timeout to $timeout seconds"
timeout_timestamp=$(date -d "$timeout seconds" "+%s")
last_count=$b_count
fi
if [[ $b_count -ge $e_count ]]; then
log_info "The entity count reached expected value ($b_count)"
break
fi
fi
log_info "Waiting for the entity count to be ${e_count} (current: ${b_count})"
sleep 10s
done
yq -i '.locations |= . + {"target": "'"$upload_url"'", "type": "url"}' "$output"
done
for filename in "${files[@]}"; do
rm -vf "$filename"
done
popd || return
}
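clone_and_upload now only records each raw-file URL; the repeated yq call above accumulates them in ${TMP_DIR}/locations.yaml, which deploy.sh later merges under catalog.locations in app-config.yaml. Roughly what the generated file ends up containing, with placeholder organization, repository and branch names:

# equivalent of what the loop above builds up, one entry per generated file
cat >"$TMP_DIR/locations.yaml" <<'EOF'
locations:
  - target: https://github.com/<org>/<repo>/blob/<tmp_branch>/component-1.yaml
    type: url
  - target: https://github.com/<org>/<repo>/blob/<tmp_branch>/api-1.yaml
    type: url
EOF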

# shellcheck disable=SC2016
@@ -228,7 +185,6 @@ create_rbac_policy() {
exit 1
;;
esac

}

create_groups() {
@@ -354,6 +310,7 @@ rhdh_token() {
echo "$ACCESS_TOKEN"
}

# shellcheck disable=SC2120
get_token() {
service=$1
if [[ ${service} == 'rhdh' ]]; then
70 changes: 58 additions & 12 deletions ci-scripts/rhdh-setup/deploy.sh
@@ -147,7 +147,7 @@ install() {
appurl=$(oc whoami --show-console)
export OPENSHIFT_APP_DOMAIN=${appurl#*.}
$cli create namespace "${RHDH_NAMESPACE}" --dry-run=client -o yaml | $cli apply -f -
keycloak_install
keycloak_install |& tee "${TMP_DIR}/keycloak_install.log"

if $PRE_LOAD_DB; then
log_info "Creating users and groups in Keycloak in background"
@@ -199,10 +199,6 @@ create_users_groups() {
}

create_objs() {
if ! $PRE_LOAD_DB; then
create_users_groups
fi

if [[ ${GITHUB_USER} ]] && [[ ${GITHUB_REPO} ]]; then
date --utc -Ins >"${TMP_DIR}/populate-catalog-before"
create_per_grp create_cmp COMPONENT_COUNT
@@ -233,9 +229,14 @@ backstage_install() {
yq -i '.backend.cors.origin="'"$base_url"'"' "$TMP_DIR/app-config.yaml"
until envsubst <template/backstage/secret-rhdh-pull-secret.yaml | $clin apply -f -; do $clin delete secret rhdh-pull-secret --ignore-not-found=true; done
if ${ENABLE_RBAC}; then yq -i '. |= . + load("template/backstage/'$INSTALL_METHOD'/app-rbac-patch.yaml")' "$TMP_DIR/app-config.yaml"; fi
if ${PRE_LOAD_DB}; then
echo "locations: []" >"$TMP_DIR/locations.yaml"
create_objs
yq -i '.catalog.locations |= . + load("'"$TMP_DIR/locations.yaml"'").locations' "$TMP_DIR/app-config.yaml"
fi
until $clin create configmap app-config-rhdh --from-file "app-config.rhdh.yaml=$TMP_DIR/app-config.yaml"; do $clin delete configmap app-config-rhdh --ignore-not-found=true; done
if ${ENABLE_RBAC}; then
cp template/backstage/rbac-config.yaml "${TMP_DIR}"
cp template/backstage/rbac-config.yaml "${TMP_DIR}/rbac-config.yaml"
create_rbac_policy "$RBAC_POLICY"
cat "$TMP_DIR/group-rbac.yaml" >>"$TMP_DIR/rbac-config.yaml"
until $clin create -f "$TMP_DIR/rbac-config.yaml"; do $clin delete configmap rbac-policy --ignore-not-found=true; done
@@ -249,8 +250,52 @@ backstage_install() {
log_error "Invalid install method: $INSTALL_METHOD, currently allowed methods are helm or olm"
return 1
fi
if [ "${AUTH_PROVIDER}" == "keycloak" ] && ${RHDH_METRIC}; then $clin create -f template/backstage/rhdh-metrics-service.yaml; fi
if ${RHDH_METRIC}; then envsubst <template/backstage/rhdh-servicemonitor.yaml | $clin create -f -; fi
date --utc -Ins >"${TMP_DIR}/populate-before"
if ${RHDH_METRIC}; then
log_info "Setting up RHDH metrics"
if [ "${AUTH_PROVIDER}" == "keycloak" ]; then
$clin create -f template/backstage/rhdh-metrics-service.yaml
fi
envsubst <template/backstage/rhdh-servicemonitor.yaml | $clin create -f -
fi
log_info "RHDH Installed, waiting for the catalog to be populated"
timeout=300
timeout_timestamp=$(date -d "$timeout seconds" "+%s")
last_count=-1
for entity_type in Component Api; do
while true; do
if [ "$(date "+%s")" -gt "$timeout_timestamp" ]; then
log_error "Timeout waiting on '$entity_type' count"
exit 1
else
ACCESS_TOKEN=$(get_token "rhdh")
if [[ 'Component' == "$entity_type" ]]; then
e_count=$COMPONENT_COUNT
b_count=$(curl -s -k "$(backstage_url)/api/catalog/entity-facets?facet=kind" --cookie "$COOKIE" --cookie-jar "$COOKIE" -H 'Content-Type: application/json' -H 'Authorization: Bearer '"$ACCESS_TOKEN" | tee -a "$TMP_DIR/get_component_count.log" | jq -r '.facets.kind[] | select(.value == "Component")| .count')
fi
if [[ 'Api' == "$entity_type" ]]; then
e_count=$API_COUNT
b_count=$(curl -s -k "$(backstage_url)/api/catalog/entity-facets?facet=kind" --cookie "$COOKIE" --cookie-jar "$COOKIE" -H 'Content-Type: application/json' -H 'Authorization: Bearer '"$ACCESS_TOKEN" | tee -a "$TMP_DIR/get_api_count.log" | jq -r '.facets.kind[] | select(.value == "API")| .count')
fi
if [[ -z "$b_count" ]]; then
log_warn "Failed to get current '$entity_type' count, maybe RHDH is down?"
b_count=0
fi
if [[ "$last_count" != "$b_count" ]]; then # reset the timeout if current count changes
log_info "The current '$entity_type' count changed, resetting waiting timeout to $timeout seconds"
timeout_timestamp=$(date -d "$timeout seconds" "+%s")
last_count=$b_count
fi
if [[ $b_count -ge $e_count ]]; then
log_info "The '$entity_type' count reached expected value ($b_count)"
break
fi
fi
log_info "Waiting for the '$entity_type' count to be ${e_count} (current: ${b_count})"
sleep 10s
done
done
date --utc -Ins >"${TMP_DIR}/populate-after"
}
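The polling loop above boils down to one entity-facets query per kind; a standalone sketch of the check (backstage_url, get_token and COOKIE come from the surrounding scripts):

# count how many Component entities the catalog has ingested so far
ACCESS_TOKEN=$(get_token "rhdh")
b_count=$(curl -s -k "$(backstage_url)/api/catalog/entity-facets?facet=kind" \
  --cookie "$COOKIE" --cookie-jar "$COOKIE" \
  -H 'Content-Type: application/json' \
  -H 'Authorization: Bearer '"$ACCESS_TOKEN" \
  | jq -r '.facets.kind[] | select(.value == "Component") | .count')
echo "Components ingested so far: ${b_count:-0}"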

# shellcheck disable=SC2016,SC1004
@@ -364,11 +409,13 @@ psql_debug() {
rhdh_deployment=backstage-developer-hub
fi
if ${PSQL_LOG}; then
log_info "Setting ups PostgreSQL logging"
$clin exec "${psql_db}" -- sh -c "sed -i "s/^\s*#log_min_duration_statement.*/log_min_duration_statement=${LOG_MIN_DURATION_STATEMENT}/" /var/lib/pgsql/data/userdata/postgresql.conf "
$clin exec "${psql_db}" -- sh -c "sed -i "s/^\s*#log_min_duration_sample.*/log_min_duration_sample=${LOG_MIN_DURATION_SAMPLE}/" /var/lib/pgsql/data/userdata/postgresql.conf "
$clin exec "${psql_db}" -- sh -c "sed -i "s/^\s*#log_statement_sample_rate.*/log_statement_sample_rate=${LOG_STATEMENT_SAMPLE_RATE}/" /var/lib/pgsql/data/userdata/postgresql.conf "
fi
if ${PSQL_EXPORT}; then
log_info "Setting up PostgreSQL tracking"
$clin exec "${psql_db}" -- sh -c 'sed -i "s/^\s*#track_io_timing.*/track_io_timing = on/" /var/lib/pgsql/data/userdata/postgresql.conf'
$clin exec "${psql_db}" -- sh -c 'sed -i "s/^\s*#track_wal_io_timing.*/track_wal_io_timing = on/" /var/lib/pgsql/data/userdata/postgresql.conf'
$clin exec "${psql_db}" -- sh -c 'sed -i "s/^\s*#track_functions.*/track_functions = all/" /var/lib/pgsql/data/userdata/postgresql.conf'
@@ -383,6 +430,7 @@ psql_debug() {
fi

if ${PSQL_EXPORT}; then
log_info "Setting up PostgreSQL metrics exporter"
$clin exec "${psql_db}" -- sh -c 'psql -c "CREATE EXTENSION pg_stat_statements;"'
uid=$(oc get namespace "${RHDH_NAMESPACE}" -o go-template='{{ index .metadata.annotations "openshift.io/sa.scc.supplemental-groups" }}' | cut -d '/' -f 1)
pg_pass=$(${clin} get secret rhdh-postgresql -o jsonpath='{.data.postgres-password}' | base64 -d)
@@ -409,6 +457,7 @@ psql_debug() {
fi

if ${PSQL_EXPORT}; then
log_info "Setting up PostgreSQL monitoring"
plugins=("pg-exporter" "backstage-plugin-permission" "backstage-plugin-auth" "backstage-plugin-catalog" "backstage-plugin-scaffolder" "backstage-plugin-search" "backstage-plugin-app")
for plugin in "${plugins[@]}"; do
cp template/postgres-exporter/service-monitor-template.yaml "${TMP_DIR}/${plugin}-monitor.yaml"
@@ -513,14 +562,11 @@ delete_rhdh_with_olm() {
$cli delete namespace "$RHDH_OPERATOR_NAMESPACE" --ignore-not-found=true --wait
}

while getopts "oi:crd" flag; do
while getopts "oi:rd" flag; do
case "${flag}" in
o)
export INSTALL_METHOD=olm
;;
c)
create_objs
;;
r)
delete
install
2 changes: 1 addition & 1 deletion ci-scripts/runs-to-csv.sh
@@ -78,8 +78,8 @@ find "${1:-.}" -name benchmark.json -print0 | while IFS= read -r -d '' filename;
.measurements.timings.deploy.started,
.measurements.timings.deploy.ended,
.measurements.timings.deploy.duration,
.measurements.timings.populate_users_groups.ended,
.measurements.timings.populate_users_groups.started,
.measurements.timings.populate_users_groups.ended,
.measurements.timings.populate_users_groups.duration,
.measurements.timings.populate.started,
.measurements.timings.populate.ended,
4 changes: 0 additions & 4 deletions ci-scripts/setup.sh
@@ -62,7 +62,3 @@ trap kill_rate_limits EXIT

echo "$(date --utc -Ins) Running deployment script"
make ci-deploy

if [ "$PRE_LOAD_DB" == "true" ]; then
make populate-rhdh
fi
