diff --git a/ci-scripts/rhdh-setup/create_resource.sh b/ci-scripts/rhdh-setup/create_resource.sh
index e4b94bc..b540cac 100755
--- a/ci-scripts/rhdh-setup/create_resource.sh
+++ b/ci-scripts/rhdh-setup/create_resource.sh
@@ -186,11 +186,33 @@ create_group() {
   fi
 }
 
+export RBAC_POLICY_ALL_GROUPS_ADMIN="all_groups_admin" #default
+export RBAC_POLICY_STATIC="static"
+
+create_rbac_policy() {
+  policy="${1:-$RBAC_POLICY_ALL_GROUPS_ADMIN}"
+  log_info "Generating RBAC policy: $policy"
+  case $policy in
+  "$RBAC_POLICY_ALL_GROUPS_ADMIN")
+    for i in $(seq 1 "$GROUP_COUNT"); do
+      echo " g, group:default/g${i}, role:default/a" >>"$TMP_DIR/group-rbac.yaml"
+    done
+    ;;
+  "$RBAC_POLICY_STATIC")
+    for i in $(seq 1 "${RBAC_POLICY_SIZE:-$GROUP_COUNT}"); do
+      echo " g, group:default/g${i}, role:default/a" >>"$TMP_DIR/group-rbac.yaml"
+    done
+    ;;
+  *)
+    log_error "Invalid RBAC policy: ${policy}"
+    exit 1
+    ;;
+  esac
+
+}
+
 create_groups() {
   log_info "Creating Groups in Keycloak"
-  for i in $(seq 1 "$GROUP_COUNT"); do
-    echo " g, group:default/g${i}, role:default/a" >>"$TMP_DIR/group-rbac.yaml"
-  done
   sleep 5
   seq 1 "${GROUP_COUNT}" | xargs -n1 -P"${POPULATION_CONCURRENCY}" bash -c 'create_group'
 }
@@ -356,5 +378,5 @@ get_token() {
   rm -rf "$token_lockfile"
 }
 
-export -f keycloak_url backstage_url get_token keycloak_token rhdh_token create_group create_user log log_info log_warn log_error log_token log_token_info log_token_err
+export -f keycloak_url backstage_url get_token keycloak_token rhdh_token create_rbac_policy create_group create_user log log_info log_warn log_error log_token log_token_info log_token_err
 export kc_lockfile bs_lockfile token_lockfile
diff --git a/ci-scripts/rhdh-setup/deploy.sh b/ci-scripts/rhdh-setup/deploy.sh
index 03c403a..892c458 100755
--- a/ci-scripts/rhdh-setup/deploy.sh
+++ b/ci-scripts/rhdh-setup/deploy.sh
@@ -55,6 +55,7 @@ export KEYCLOAK_USER_PASS=${KEYCLOAK_USER_PASS:-$(mktemp -u XXXXXXXXXX)}
 export AUTH_PROVIDER="${AUTH_PROVIDER:-''}"
 export ENABLE_RBAC="${ENABLE_RBAC:-false}"
 export ENABLE_PROFILING="${ENABLE_PROFILING:-false}"
+export RBAC_POLICY="${RBAC_POLICY:-all_groups_admin}"
 export PSQL_LOG="${PSQL_LOG:-true}"
 export RHDH_METRIC="${RHDH_METRIC:-true}"
@@ -236,6 +237,7 @@ backstage_install() {
   until $clin create configmap app-config-rhdh --from-file "app-config.rhdh.yaml=$TMP_DIR/app-config.yaml"; do $clin delete configmap app-config-rhdh --ignore-not-found=true; done
   if ${ENABLE_RBAC}; then
     cp template/backstage/rbac-config.yaml "${TMP_DIR}"
+    create_rbac_policy "$RBAC_POLICY"
     cat "$TMP_DIR/group-rbac.yaml" >>"$TMP_DIR/rbac-config.yaml"
     until $clin create -f "$TMP_DIR/rbac-config.yaml"; do $clin delete configmap rbac-policy --ignore-not-found=true; done
   fi
diff --git a/ci-scripts/runs-to-csv.sh b/ci-scripts/runs-to-csv.sh
index a4d64b5..1a1c284 100755
--- a/ci-scripts/runs-to-csv.sh
+++ b/ci-scripts/runs-to-csv.sh
@@ -29,6 +29,8 @@ API_COUNT,\
 COMPONENT_COUNT,\
 BACKSTAGE_USER_COUNT,\
 GROUP_COUNT,\
+RBAC_POLICY,\
+RBAC_POLICY_SIZE,\
 RHDH_DEPLOYMENT_REPLICAS,\
 RHDH_RESOURCES_CPU_LIMITS,\
 RHDH_RESOURCES_MEMORY_LIMITS,\
@@ -39,6 +41,8 @@ RHDH_CPU_Avg,\
 RHDH_CPU_Max,\
 RHDH_Memory_Avg,\
 RHDH_Memory_Max,\
+RHDH_Heap_Avg,\
+RHDH_Heap_Max,\
 RHDH_DB_Pods,\
 RHDH_DB_CPU_Avg,\
 RHDH_DB_CPU_Max,\
@@ -94,6 +98,8 @@ find "${1:-.}" -name benchmark.json -print0 | while IFS= read -r -d '' filename;
     .metadata.env.COMPONENT_COUNT,
     .metadata.env.BACKSTAGE_USER_COUNT,
     .metadata.env.GROUP_COUNT,
+    .metadata.env.RBAC_POLICY,
+    .metadata.env.RBAC_POLICY_SIZE,
     .metadata.env.RHDH_DEPLOYMENT_REPLICAS,
     .metadata.env.RHDH_RESOURCES_CPU_LIMITS,
     .metadata.env.RHDH_RESOURCES_MEMORY_LIMITS,
@@ -104,6 +110,8 @@ find "${1:-.}" -name benchmark.json -print0 | while IFS= read -r -d '' filename;
     .measurements."rhdh-developer-hub".cpu.max,
     .measurements."rhdh-developer-hub".memory.mean,
     .measurements."rhdh-developer-hub".memory.max,
+    .measurements.nodejs.test.nodejs_heap_size_used_bytes.mean,
+    .measurements.nodejs.test.nodejs_heap_size_used_bytes.max,
     .measurements."rhdh-postgresql".count_ready.mean,
     .measurements."rhdh-postgresql".cpu.mean,
     .measurements."rhdh-postgresql".cpu.max,
diff --git a/ci-scripts/scalability/collect-results.sh b/ci-scripts/scalability/collect-results.sh
index 820bac9..a14b008 100755
--- a/ci-scripts/scalability/collect-results.sh
+++ b/ci-scripts/scalability/collect-results.sh
@@ -22,6 +22,8 @@ read -ra active_users_spawn_rate <<<"${SCALE_ACTIVE_USERS_SPAWN_RATES:-1:1 200:4
 
 read -ra bs_users_groups <<<"${SCALE_BS_USERS_GROUPS:-1:1 10000:2500}"
 
+read -ra rbac_policy_size <<<"${SCALE_RBAC_POLICY_SIZE:-10000}"
+
 read -ra catalog_apis_components <<<"${SCALE_CATALOG_SIZES:-1:1 10000:10000}"
 
 read -ra replicas <<<"${SCALE_REPLICAS:-5}"
@@ -42,33 +44,34 @@ for w in "${workers[@]}"; do
     IFS=":" read -ra tokens <<<"${bu_bg}"
     bu="${tokens[0]}" # backstage users
     [[ "${#tokens[@]}" == 1 ]] && bg="" || bg="${tokens[1]}" # backstage groups
-    for s in "${db_storages[@]}"; do
-      for au_sr in "${active_users_spawn_rate[@]}"; do
-        IFS=":" read -ra tokens <<<"${au_sr}"
-        active_users=${tokens[0]}
-        output="$ARTIFACT_DIR/scalability_c-${r}r-db_${s}-${bu}bu-${bg}bg-${w}w-${active_users}u.csv"
-        header="CatalogSize${csv_delim}Apis${csv_delim}Components${csv_delim}MaxActiveUsers${csv_delim}AverageRPS${csv_delim}MaxRPS${csv_delim}AverageRT${csv_delim}MaxRT${csv_delim}Failures${csv_delim}FailRate${csv_delim}DBStorageUsed${csv_delim}DBStorageAvailable${csv_delim}DBStorageCapacity"
-        for cr_cl in "${cpu_requests_limits[@]}"; do
-          IFS=":" read -ra tokens <<<"${cr_cl}"
-          cr="${tokens[0]}" # cpu requests
-          [[ "${#tokens[@]}" == 1 ]] && cl="" || cl="${tokens[1]}" # cpu limits
-          for mr_ml in "${memory_requests_limits[@]}"; do
-            IFS=":" read -ra tokens <<<"${mr_ml}"
-            mr="${tokens[0]}" # memory requests
-            [[ "${#tokens[@]}" == 1 ]] && ml="" || ml="${tokens[1]}" # memory limits
-            echo "$header" >"$output"
-            for a_c in "${catalog_apis_components[@]}"; do
-              IFS=":" read -ra tokens <<<"${a_c}"
-              a="${tokens[0]}" # apis
-              [[ "${#tokens[@]}" == 1 ]] && c="" || c="${tokens[1]}" # components
-              index="${r}r-db_${s}-${bu}bu-${bg}bg-${w}w-${cr}cr-${cl}cl-${mr}mr-${ml}ml-${a}a-${c}c"
-              iteration="${index}/test/${active_users}u"
-              echo "[$iteration] Looking for benchmark.json..."
-              benchmark_json="$(find "${ARTIFACT_DIR}" -name benchmark.json | grep "$iteration" || true)"
-              if [ -n "$benchmark_json" ]; then
-                benchmark_json="$(readlink -m "$benchmark_json")"
-                echo "[$iteration] Gathering data from $benchmark_json"
-                jq_cmd="\"$((a + c))\" \
+    for rbs in "${rbac_policy_size[@]}"; do
+      for s in "${db_storages[@]}"; do
+        for au_sr in "${active_users_spawn_rate[@]}"; do
+          IFS=":" read -ra tokens <<<"${au_sr}"
+          active_users=${tokens[0]}
+          output="$ARTIFACT_DIR/scalability_c-${r}r-db_${s}-${bu}bu-${bg}bg-${rbs}rbs-${w}w-${active_users}u.csv"
+          header="CatalogSize${csv_delim}Apis${csv_delim}Components${csv_delim}MaxActiveUsers${csv_delim}AverageRPS${csv_delim}MaxRPS${csv_delim}AverageRT${csv_delim}MaxRT${csv_delim}Failures${csv_delim}FailRate${csv_delim}DBStorageUsed${csv_delim}DBStorageAvailable${csv_delim}DBStorageCapacity"
+          for cr_cl in "${cpu_requests_limits[@]}"; do
+            IFS=":" read -ra tokens <<<"${cr_cl}"
+            cr="${tokens[0]}" # cpu requests
+            [[ "${#tokens[@]}" == 1 ]] && cl="" || cl="${tokens[1]}" # cpu limits
+            for mr_ml in "${memory_requests_limits[@]}"; do
+              IFS=":" read -ra tokens <<<"${mr_ml}"
+              mr="${tokens[0]}" # memory requests
+              [[ "${#tokens[@]}" == 1 ]] && ml="" || ml="${tokens[1]}" # memory limits
+              echo "$header" >"$output"
+              for a_c in "${catalog_apis_components[@]}"; do
+                IFS=":" read -ra tokens <<<"${a_c}"
+                a="${tokens[0]}" # apis
+                [[ "${#tokens[@]}" == 1 ]] && c="" || c="${tokens[1]}" # components
+                index="${r}r-db_${s}-${bu}bu-${bg}bg-${rbs}rbs-${w}w-${cr}cr-${cl}cl-${mr}mr-${ml}ml-${a}a-${c}c"
+                iteration="${index}/test/${active_users}u"
+                echo "[$iteration] Looking for benchmark.json..."
+                benchmark_json="$(find "${ARTIFACT_DIR}" -name benchmark.json | grep "$iteration" || true)"
+                if [ -n "$benchmark_json" ]; then
+                  benchmark_json="$(readlink -m "$benchmark_json")"
+                  echo "[$iteration] Gathering data from $benchmark_json"
+                  jq_cmd="\"$((a + c))\" \
                     + $csv_delim_quoted + \"${a}\" \
                     + $csv_delim_quoted + \"${c}\" \
                     + $csv_delim_quoted + (.results.locust_users.max | tostring) \
@@ -81,14 +84,15 @@ for w in "${workers[@]}"; do
                     + $csv_delim_quoted + (.measurements.cluster.pv_stats.test.\"rhdh-postgresql\".used_bytes.max | tostring) \
                     + $csv_delim_quoted + (.measurements.cluster.pv_stats.test.\"rhdh-postgresql\".available_bytes.min | tostring) \
                     + $csv_delim_quoted + (.measurements.cluster.pv_stats.test.\"rhdh-postgresql\".capacity_bytes.max | tostring)"
-                sed -Ee 's/: ([0-9]+\.[0-9]*[X]+[0-9e\+-]*|[0-9]*X+[0-9]*\.[0-9e\+-]*|[0-9]*X+[0-9]*\.[0-9]*X+[0-9e\+-]+)/: "\1"/g' "$benchmark_json" | jq -rc "$jq_cmd" >>"$output"
-              else
-                echo "[$iteration] Unable to find benchmark.json"
-                for _ in $(seq 1 "$(echo "$header" | tr -cd "$csv_delim" | wc -c)"); do
-                  echo -n ";" >>"$output"
-                done
-                echo >>"$output"
-              fi
+                  sed -Ee 's/: ([0-9]+\.[0-9]*[X]+[0-9e\+-]*|[0-9]*X+[0-9]*\.[0-9e\+-]*|[0-9]*X+[0-9]*\.[0-9]*X+[0-9e\+-]+)/: "\1"/g' "$benchmark_json" | jq -rc "$jq_cmd" >>"$output"
+                else
+                  echo "[$iteration] Unable to find benchmark.json"
+                  for _ in $(seq 1 "$(echo "$header" | tr -cd "$csv_delim" | wc -c)"); do
+                    echo -n ";" >>"$output"
+                  done
+                  echo >>"$output"
+                fi
+              done
             done
           done
         done
diff --git a/ci-scripts/scalability/test-scalability.sh b/ci-scripts/scalability/test-scalability.sh
index 6318eeb..1a534a9 100755
--- a/ci-scripts/scalability/test-scalability.sh
+++ b/ci-scripts/scalability/test-scalability.sh
@@ -27,6 +27,8 @@ read -ra active_users_spawn_rate <<<"${SCALE_ACTIVE_USERS_SPAWN_RATES:-1:1 200:4
 
 read -ra bs_users_groups <<<"${SCALE_BS_USERS_GROUPS:-1:1 10000:2500}"
 
+read -ra rbac_policy_size <<<"${SCALE_RBAC_POLICY_SIZE:-10000}"
+
 read -ra catalog_apis_components <<<"${SCALE_CATALOG_SIZES:-1:1 10000:10000}"
 
 read -ra replicas <<<"${SCALE_REPLICAS:-5}"
@@ -91,64 +93,67 @@ for w in "${workers[@]}"; do
     IFS=":" read -ra tokens <<<"${bu_bg}"
     bu="${tokens[0]}" # backstage users
     [[ "${#tokens[@]}" == 1 ]] && bg="" || bg="${tokens[1]}" # backstage components
-    for cr_cl in "${cpu_requests_limits[@]}"; do
-      IFS=":" read -ra tokens <<<"${cr_cl}"
-      cr="${tokens[0]}" # cpu requests
-      [[ "${#tokens[@]}" == 1 ]] && cl="" || cl="${tokens[1]}" # cpu limits
-      for mr_ml in "${memory_requests_limits[@]}"; do
-        IFS=":" read -ra tokens <<<"${mr_ml}"
-        mr="${tokens[0]}" # memory requests
-        [[ "${#tokens[@]}" == 1 ]] && ml="" || ml="${tokens[1]}" # memory limits
-        for a_c in "${catalog_apis_components[@]}"; do
-          IFS=":" read -ra tokens <<<"${a_c}"
-          a="${tokens[0]}" # apis
-          [[ "${#tokens[@]}" == 1 ]] && c="" || c="${tokens[1]}" # components
-          for r in "${replicas[@]}"; do
-            for s in "${db_storages[@]}"; do
-              echo
-              echo "/// Setting up RHDH for scalability test ///"
-              echo
-              set -x
-              export RHDH_DEPLOYMENT_REPLICAS="$r"
-              export RHDH_DB_REPLICAS="$r"
-              export RHDH_DB_STORAGE="$s"
-              export RHDH_RESOURCES_CPU_REQUESTS="$cr"
-              export RHDH_RESOURCES_CPU_LIMITS="$cl"
-              export RHDH_RESOURCES_MEMORY_REQUESTS="$mr"
-              export RHDH_RESOURCES_MEMORY_LIMITS="$ml"
-              export RHDH_KEYCLOAK_REPLICAS=$r
-              export BACKSTAGE_USER_COUNT=$bu
-              export GROUP_COUNT=$bg
-              export WORKERS=$w
-              export API_COUNT=$a
-              export COMPONENT_COUNT=$c
-              index="${r}r-db_${s}-${bu}bu-${bg}bg-${w}w-${cr}cr-${cl}cl-${mr}mr-${ml}ml-${a}a-${c}c"
-              set +x
-              oc login "$OPENSHIFT_API" -u "$OPENSHIFT_USERNAME" -p "$OPENSHIFT_PASSWORD" --insecure-skip-tls-verify=true
-              make clean-local undeploy-rhdh
-              setup_artifacts="$SCALABILITY_ARTIFACTS/$index/setup"
-              mkdir -p "$setup_artifacts"
-              ARTIFACT_DIR=$setup_artifacts ./ci-scripts/setup.sh |& tee "$setup_artifacts/setup.log"
-              wait_for_indexing |& tee "$setup_artifacts/after-setup-search.log"
-              for au_sr in "${active_users_spawn_rate[@]}"; do
-                IFS=":" read -ra tokens <<<"${au_sr}"
-                au=${tokens[0]} # active users
-                [[ "${#tokens[@]}" == 1 ]] && sr="" || sr="${tokens[1]}" # spawn rate
+    for rbs in "${rbac_policy_size[@]}"; do
+      for cr_cl in "${cpu_requests_limits[@]}"; do
+        IFS=":" read -ra tokens <<<"${cr_cl}"
+        cr="${tokens[0]}" # cpu requests
+        [[ "${#tokens[@]}" == 1 ]] && cl="" || cl="${tokens[1]}" # cpu limits
+        for mr_ml in "${memory_requests_limits[@]}"; do
+          IFS=":" read -ra tokens <<<"${mr_ml}"
+          mr="${tokens[0]}" # memory requests
+          [[ "${#tokens[@]}" == 1 ]] && ml="" || ml="${tokens[1]}" # memory limits
+          for a_c in "${catalog_apis_components[@]}"; do
+            IFS=":" read -ra tokens <<<"${a_c}"
+            a="${tokens[0]}" # apis
+            [[ "${#tokens[@]}" == 1 ]] && c="" || c="${tokens[1]}" # components
+            for r in "${replicas[@]}"; do
+              for s in "${db_storages[@]}"; do
                 echo
-                echo "/// Running the scalability test ///"
+                echo "/// Setting up RHDH for scalability test ///"
                 echo
                 set -x
-                export SCENARIO=${SCENARIO:-search-catalog}
-                export USERS="${au}"
-                export DURATION=${DURATION:-5m}
-                export SPAWN_RATE="${sr}"
+                export RHDH_DEPLOYMENT_REPLICAS="$r"
+                export RHDH_DB_REPLICAS="$r"
+                export RHDH_DB_STORAGE="$s"
+                export RHDH_RESOURCES_CPU_REQUESTS="$cr"
+                export RHDH_RESOURCES_CPU_LIMITS="$cl"
+                export RHDH_RESOURCES_MEMORY_REQUESTS="$mr"
+                export RHDH_RESOURCES_MEMORY_LIMITS="$ml"
+                export RHDH_KEYCLOAK_REPLICAS="${RHDH_KEYCLOAK_REPLICAS:-$r}"
+                export BACKSTAGE_USER_COUNT=$bu
+                export GROUP_COUNT=$bg
+                export RBAC_POLICY_SIZE="$rbs"
+                export WORKERS=$w
+                export API_COUNT=$a
+                export COMPONENT_COUNT=$c
+                index="${r}r-db_${s}-${bu}bu-${bg}bg-${rbs}rbs-${w}w-${cr}cr-${cl}cl-${mr}mr-${ml}ml-${a}a-${c}c"
                 set +x
-                make clean
-                test_artifacts="$SCALABILITY_ARTIFACTS/$index/test/${au}u"
-                mkdir -p "$test_artifacts"
-                wait_for_indexing |& tee "$test_artifacts/before-test-search.log"
-                ARTIFACT_DIR=$test_artifacts ./ci-scripts/test.sh |& tee "$test_artifacts/test.log"
-                ARTIFACT_DIR=$test_artifacts ./ci-scripts/collect-results.sh |& tee "$test_artifacts/collect-results.log"
+                oc login "$OPENSHIFT_API" -u "$OPENSHIFT_USERNAME" -p "$OPENSHIFT_PASSWORD" --insecure-skip-tls-verify=true
+                make clean-local undeploy-rhdh
+                setup_artifacts="$SCALABILITY_ARTIFACTS/$index/setup"
+                mkdir -p "$setup_artifacts"
+                ARTIFACT_DIR=$setup_artifacts ./ci-scripts/setup.sh |& tee "$setup_artifacts/setup.log"
+                wait_for_indexing |& tee "$setup_artifacts/after-setup-search.log"
+                for au_sr in "${active_users_spawn_rate[@]}"; do
+                  IFS=":" read -ra tokens <<<"${au_sr}"
+                  au=${tokens[0]} # active users
+                  [[ "${#tokens[@]}" == 1 ]] && sr="" || sr="${tokens[1]}" # spawn rate
+                  echo
+                  echo "/// Running the scalability test ///"
+                  echo
+                  set -x
+                  export SCENARIO=${SCENARIO:-search-catalog}
+                  export USERS="${au}"
+                  export DURATION=${DURATION:-5m}
+                  export SPAWN_RATE="${sr}"
+                  set +x
+                  make clean
+                  test_artifacts="$SCALABILITY_ARTIFACTS/$index/test/${au}u"
+                  mkdir -p "$test_artifacts"
+                  wait_for_indexing |& tee "$test_artifacts/before-test-search.log"
+                  ARTIFACT_DIR=$test_artifacts ./ci-scripts/test.sh |& tee "$test_artifacts/test.log"
+                  ARTIFACT_DIR=$test_artifacts ./ci-scripts/collect-results.sh |& tee "$test_artifacts/collect-results.log"
+                done
               done
             done
           done
diff --git a/ci-scripts/scalability/test.sh b/ci-scripts/scalability/test.sh
index 8afdeef..1487e36 100755
--- a/ci-scripts/scalability/test.sh
+++ b/ci-scripts/scalability/test.sh
@@ -8,7 +8,7 @@ SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 # shellcheck disable=SC1090,SC1091
 source "$(readlink -m "$SCRIPT_DIR"/../../test.env)"
 
-export SCENARIO DURATION WAIT_FOR_SEARCH_INDEX PRE_LOAD_DB SCALE_WORKERS SCALE_ACTIVE_USERS_SPAWN_RATES SCALE_BS_USERS_GROUPS SCALE_CATALOG_SIZES SCALE_REPLICAS SCALE_DB_STORAGES
+export SCENARIO DURATION WAIT_FOR_SEARCH_INDEX PRE_LOAD_DB SCALE_WORKERS SCALE_ACTIVE_USERS_SPAWN_RATES SCALE_BS_USERS_GROUPS SCALE_RBAC_POLICY_SIZE SCALE_CATALOG_SIZES SCALE_REPLICAS SCALE_DB_STORAGES
 
 echo -e "\n === Running RHDH scalability test ===\n"
 make test-scalability
diff --git a/config/cluster_read_config.test.yaml b/config/cluster_read_config.test.yaml
index 2499d1d..b93dbd3 100644
--- a/config/cluster_read_config.test.yaml
+++ b/config/cluster_read_config.test.yaml
@@ -84,6 +84,8 @@
       'COMPONENT_COUNT',
       'BACKSTAGE_USER_COUNT',
       'GROUP_COUNT',
+      'RBAC_POLICY',
+      'RBAC_POLICY_SIZE',
       'WAIT_FOR_SEARCH_INDEX',
       'SCALE_WORKERS',
       'SCALE_ACTIVE_USERS_SPAWN_RATES',
diff --git a/test.env b/test.env
index b93513b..e290e65 100644
--- a/test.env
+++ b/test.env
@@ -55,12 +55,15 @@
 # export LOCUST_EXTRA_CMD=--debug=true
 # export ARTIFACT_DIR=.artifacts
 # export ENABLE_RBAC=false
+# export RBAC_POLICY=all_groups_admin
+# export RBAC_POLICY_SIZE=10000
 # export ENABLE_PROFILING=false
 
 ## Scalability testing
 # export SCALE_WORKERS="1 2"
 # export SCALE_ACTIVE_USERS_SPAWN_RATES="1:1 200:40"
 # export SCALE_BS_USERS_GROUPS="1:1 1000:250"
+# export SCALE_RBAC_POLICY_SIZE="1 10000"
 # export SCALE_CATALOG_SIZES="1:1 2500:2500"
 # export SCALE_REPLICAS="1 2"
 # export SCALE_DB_STORAGES="1Gi 2Gi"
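
Example usage of the knobs introduced by this patch (a minimal sketch, not part of the diff itself; it assumes the scripts are invoked from the repository root, that ci-scripts/setup.sh drives deploy.sh the same way the scalability flow above does, and the policy size 500 is purely illustrative):

    # Single deployment with RBAC enabled and a fixed-size "static" policy.
    # RBAC_POLICY defaults to all_groups_admin; RBAC_POLICY_SIZE falls back to GROUP_COUNT when unset.
    export ENABLE_RBAC=true
    export RBAC_POLICY=static
    export RBAC_POLICY_SIZE=500   # illustrative value, not a recommendation
    ./ci-scripts/setup.sh

    # Scalability sweep over several policy sizes: test.sh exports
    # SCALE_RBAC_POLICY_SIZE so test-scalability.sh iterates over each value.
    export SCALE_RBAC_POLICY_SIZE="1 10000"
    ./ci-scripts/scalability/test.sh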