diff --git a/Makefile b/Makefile index 25cbfd1..97cb497 100644 --- a/Makefile +++ b/Makefile @@ -175,7 +175,7 @@ test: $(TMP_DIR) $(ARTIFACT_DIR) ifneq ($(shell test '$(AUTH_PROVIDER)' == 'keycloak' && echo 1 || echo 0),0) $(eval key_pass := $(shell oc -n rhdh-performance get secret perf-test-secrets -o template --template='{{.data.keycloak_user_pass}}' | base64 -d)) $(eval key_host := $(shell oc -n rhdh-performance get routes/keycloak -o template --template='{{.spec.host}}' )) - $(eval LOCUST_EXTRA_CMD := --keycloak-host $(key_host) --keycloak-password $(key_pass) ) + $(eval LOCUST_EXTRA_CMD := $(LOCUST_EXTRA_CMD) --keycloak-host $(key_host) --keycloak-password $(key_pass) ) ifneq ($(shell test $(USERS) -gt $(WORKERS) && echo 1 || echo 0),0) @echo "users greater than workers " else diff --git a/ci-scripts/rhdh-setup/create_resource.sh b/ci-scripts/rhdh-setup/create_resource.sh index 5e7675d..07c7887 100755 --- a/ci-scripts/rhdh-setup/create_resource.sh +++ b/ci-scripts/rhdh-setup/create_resource.sh @@ -279,7 +279,7 @@ get_token() { log_token_err "Unable to get token, re-attempting" fi else - keycloak_pass=$(oc -n "${RHDH_NAMESPACE}" get secret credential-example-sso -o template --template='{{.data.ADMIN_PASSWORD}}' | base64 -d) + keycloak_pass=$(oc -n "${RHDH_NAMESPACE}" get secret credential-rhdh-sso -o template --template='{{.data.ADMIN_PASSWORD}}' | base64 -d) if ! keycloak_token >"$token_file"; then log_token_err "Unable to get token, re-attempting" fi diff --git a/ci-scripts/rhdh-setup/template/keycloak/keycloak.yaml b/ci-scripts/rhdh-setup/template/keycloak/keycloak.yaml index 8b5a43e..b1ff5c7 100644 --- a/ci-scripts/rhdh-setup/template/keycloak/keycloak.yaml +++ b/ci-scripts/rhdh-setup/template/keycloak/keycloak.yaml @@ -1,7 +1,7 @@ apiVersion: keycloak.org/v1alpha1 kind: Keycloak metadata: - name: example-sso + name: rhdh-sso labels: app: sso spec: diff --git a/locust-test-template.yaml b/locust-test-template.yaml index e54b0ee..895f019 100644 --- a/locust-test-template.yaml +++ b/locust-test-template.yaml @@ -12,5 +12,6 @@ spec: --run-time ${DURATION} ${LOCUST_EXTRA_CMD} workerCommandSeed: --locustfile /lotest/src/${SCENARIO}.py + ${LOCUST_EXTRA_CMD} workerReplicas: ${WORKERS} configMap: locust.${SCENARIO} diff --git a/scenarios/mvp-1dot1.metrics.yaml b/scenarios/mvp-1dot1.metrics.yaml new file mode 100644 index 0000000..794fefa --- /dev/null +++ b/scenarios/mvp-1dot1.metrics.yaml @@ -0,0 +1,40 @@ +# Results +{%macro results_scenario(name) -%} +- name: results.{{name}}.locust_requests_avg_response_time + monitoring_query: locust_requests_avg_response_time{name="{{name}}"} + monitoring_step: 15 +- name: results.{{name}}.locust_requests_avg_content_length + monitoring_query: locust_requests_avg_content_length{name="{{name}}"} + monitoring_step: 15 +- name: results.{{name}}.locust_requests_current_rps + monitoring_query: locust_requests_current_rps{name="{{name}}"} + monitoring_step: 15 +- name: results.{{name}}.locust_requests_current_fail_per_sec + monitoring_query: locust_requests_current_fail_per_sec{name="{{name}}"} + monitoring_step: 15 +- name: results.{{name}}.locust_requests_num_failures + monitoring_query: locust_requests_num_failures{name="{{name}}"} + monitoring_step: 15 +- name: results.{{name}}.locust_errors + monitoring_query: locust_errors{name="{{name}}"} + monitoring_step: 15 +{%- endmacro %} + +{{ results_scenario('/api/catalog/entities') }} +{{ results_scenario('/api/catalog/entities?filter=kind%3Dapi') }} +{{ 
results_scenario('/api/catalog/entities?filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entities?filter=kind%3Dcomponent%2Cspec.type%3Dlibrary') }} +{{ results_scenario('/api/catalog/entity-facets') }} +{{ results_scenario('/api/catalog/entity-facets?facet=kind') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.namespace') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.namespace&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.namespace&filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.tags') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.tags&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entity-facets?facet=metadata.tags&filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entity-facets?facet=relations.ownedBy') }} +{{ results_scenario('/api/catalog/entity-facets?facet=spec.lifecycle') }} +{{ results_scenario('/api/catalog/entity-facets?facet=spec.lifecycle&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entity-facets?facet=spec.lifecycle&filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entity-facets?facet=spec.type&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entity-facets?facet=spec.type&filter=kind%3Dcomponent') }} diff --git a/scenarios/mvp-1dot1.py b/scenarios/mvp-1dot1.py new file mode 100644 index 0000000..b7444c2 --- /dev/null +++ b/scenarios/mvp-1dot1.py @@ -0,0 +1,216 @@ +from locust import HttpUser, events, task +from locust.runners import MasterRunner, WorkerRunner +from urllib3.exceptions import InsecureRequestWarning +import urllib.parse +import json +import re +import urllib3 + +urllib3.disable_warnings(InsecureRequestWarning) + +__version__ = "1" + +usernames = [] + +entity_facets_params = {} + +entity_facets_params["kind"] = { + "facet": "kind", +} + +entity_facets_params["relations.ownedBy"] = { + "facet": "relations.ownedBy", +} + +entity_facets_params["metadata.namespace"] = { + "facet": "metadata.namespace", +} + +entity_facets_params["spec.lifecycle"] = { + "facet": "spec.lifecycle", +} + +entity_facets_params["metadata.tags"] = { + "facet": "metadata.tags", +} + +entity_facets_params["component/spec.lifecycle"] = { + "facet": "spec.lifecycle", + "filter": "kind=component" +} + +entity_facets_params["component/spec.type"] = { + "facet": "spec.type", + "filter": "kind=component" +} + +entity_facets_params["component/metadata.namespace"] = { + "facet": "metadata.namespace", + "filter": "kind=component" +} + +entity_facets_params["component/metadata.tags"] = { + "facet": "metadata.tags", + "filter": "kind=component" +} + +entity_facets_params["api/spec.lifecycle"] = { + "facet": "spec.lifecycle", + "filter": "kind=api" +} + +entity_facets_params["api/spec.type"] = { + "facet": "spec.type", + "filter": "kind=api" +} + +entity_facets_params["api/metadata.namespace"] = { + "facet": "metadata.namespace", + "filter": "kind=api" +} + +entity_facets_params["api/metadata.tags"] = { + "facet": "metadata.tags", + "filter": "kind=api" +} + +entities_params = {} + +entities_params["component"] = { + "filter": "kind=component", +} + +entities_params["component/library"] = { + "filter": "kind=component,spec.type=library", +} + +entities_params["api"] = { + "filter": "kind=api", +} + + +base_path_facets = "/api/catalog/entity-facets" +base_path_entities = "/api/catalog/entities" + + + +def setup_test_users(environment, msg, **kwargs): + # Fired when the worker receives a 
message of type 'test_users' + usernames.extend(map(lambda u: u, msg.data)) + +@events.init.add_listener +def on_locust_init(environment, **_kwargs): + if not isinstance(environment.runner, MasterRunner): + environment.runner.register_message("test_users", setup_test_users) + +@events.test_start.add_listener +def on_test_start(environment, **_kwargs): + # When the test is started, evenly divides list between + # worker nodes to ensure unique data across threads + if not isinstance(environment.runner, WorkerRunner): + users = [] + for i in range(1,int(environment.runner.target_user_count)+1): + users.append(f"test{i}") + + worker_count = environment.runner.worker_count + chunk_size = int(len(users) / worker_count) + + for i, worker in enumerate(environment.runner.clients): + start_index = i * chunk_size + + if i + 1 < worker_count: + end_index = start_index + chunk_size + else: + end_index = len(users) + + data = users[start_index:end_index] + environment.runner.send_message("test_users", data, worker) + +@events.init_command_line_parser.add_listener +def _(parser): + parser.add_argument("--keycloak-host", type=str, default="") + parser.add_argument("--keycloak-password", is_secret=True, default="") + + + +class MVPTest(HttpUser): + + def on_start(self): + self.client.verify = False + if self.environment.parsed_options.keycloak_host: + r = self.client.get('/api/auth/oauth2Proxy/refresh', verify=False) + qs_str=urllib.parse.parse_qs(r.url) + STATE=qs_str['state'] + login_cookies=r.cookies + pattern = r'action="([^"]*)"' + LOGIN_URL_tmp=re.findall(pattern, str(r.content))[0] + LOGIN_URL=LOGIN_URL_tmp.replace("&", "&") + qs_str=urllib.parse.parse_qs(LOGIN_URL) + TAB_ID=qs_str['tab_id'] + EXECUTION=qs_str['execution'] + + param = {'client_id': self.CLIENTID, 'tab_id': TAB_ID, 'execution': EXECUTION} + form = {'username': self.USERNAME, 'password': self.PASSWORD, 'credentialId': ''} + r = self.client.post(LOGIN_URL, verify=False, data=form, params=param) + + r = self.client.get(self.REFRESH_URL, verify=False) + json_dict = json.loads(r.content) + TOKEN=json_dict['backstageIdentity']['token'] + + self.HEADER = {'Authorization': 'Bearer ' + TOKEN} + else : + r = self.client.get('/api/auth/guest/refresh', verify=False) + json_dict = json.loads(r.content) + TOKEN=json_dict['backstageIdentity']['token'] + + self.HEADER = {'Authorization': 'Bearer ' + TOKEN} + + def __init__(self, parent): + super().__init__(parent) + self.HEADER='' + if self.environment.parsed_options.keycloak_host: + self.USERNAME = usernames.pop() + self.KEYCLOAK_URL=f'https://{self.environment.parsed_options.keycloak_host}/auth' + self.REDIRECT_URL=f'{self.environment.host}/oauth2/callback' + self.REFRESH_URL=f'{self.environment.host}/api/auth/oauth2Proxy/refresh' + + self.PASSWORD=self.environment.parsed_options.keycloak_password + self.REALM="backstage" + self.CLIENTID="backstage" + + def entitiy_facets(self, query) -> None: + self.client.get(base_path_facets, + verify=False, + headers=self.HEADER, + params=entity_facets_params[query]) + + def entities(self, query) -> None: + self.client.get(base_path_entities, + verify=False, + headers=self.HEADER, + params=entities_params[query]) + + @task + def get_kind(self) -> None: + self.entitiy_facets("kind") + self.entitiy_facets("relations.ownedBy") + self.entitiy_facets("metadata.namespace") + self.entitiy_facets("spec.lifecycle") + self.entitiy_facets("metadata.tags") + self.entities("component") + self.entitiy_facets("component/spec.lifecycle") + 
self.entitiy_facets("component/spec.type") + self.entitiy_facets("component/metadata.namespace") + self.entitiy_facets("component/metadata.tags") + self.entities("api") + self.entitiy_facets("api/spec.lifecycle") + self.entitiy_facets("api/spec.type") + self.entitiy_facets("api/metadata.namespace") + self.entitiy_facets("api/metadata.tags") + self.entities("component") + self.entitiy_facets("component/spec.lifecycle") + self.entitiy_facets("component/spec.type") + self.entitiy_facets("component/metadata.namespace") + self.entitiy_facets("component/metadata.tags") + self.entities("component/library") + self.entities("component") diff --git a/scenarios/mvp.metrics.yaml b/scenarios/mvp.metrics.yaml index 794fefa..05e2939 100644 --- a/scenarios/mvp.metrics.yaml +++ b/scenarios/mvp.metrics.yaml @@ -20,10 +20,11 @@ monitoring_step: 15 {%- endmacro %} -{{ results_scenario('/api/catalog/entities') }} -{{ results_scenario('/api/catalog/entities?filter=kind%3Dapi') }} -{{ results_scenario('/api/catalog/entities?filter=kind%3Dcomponent') }} -{{ results_scenario('/api/catalog/entities?filter=kind%3Dcomponent%2Cspec.type%3Dlibrary') }} +{{ results_scenario('/api/catalog/entities/by-query') }} +{{ results_scenario('/api/catalog/entities/by-query?limit=0&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entities/by-query?limit=20&orderField=metadata.name%2Casc&filter=kind%3Dapi') }} +{{ results_scenario('/api/catalog/entities/by-query?limit=0&filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entities/by-query?limit=20&orderField=metadata.name%2Casc&filter=kind%3Dcomponent') }} {{ results_scenario('/api/catalog/entity-facets') }} {{ results_scenario('/api/catalog/entity-facets?facet=kind') }} {{ results_scenario('/api/catalog/entity-facets?facet=metadata.namespace') }} @@ -38,3 +39,5 @@ {{ results_scenario('/api/catalog/entity-facets?facet=spec.lifecycle&filter=kind%3Dcomponent') }} {{ results_scenario('/api/catalog/entity-facets?facet=spec.type&filter=kind%3Dapi') }} {{ results_scenario('/api/catalog/entity-facets?facet=spec.type&filter=kind%3Dcomponent') }} +{{ results_scenario('/api/catalog/entities/by-refs') }} +#... 
to be continued \ No newline at end of file diff --git a/scenarios/mvp.py b/scenarios/mvp.py index b7444c2..2464d16 100644 --- a/scenarios/mvp.py +++ b/scenarios/mvp.py @@ -74,42 +74,48 @@ "filter": "kind=api" } -entities_params = {} -entities_params["component"] = { - "filter": "kind=component", -} +def get_entities_by_query_params(kind=None, limit=0, user_ref=None, group_ref=None, additional_params={}): + params = {} + params["limit"] = limit + if limit > 0: + params["orderField"] = "metadata.name,asc" -entities_params["component/library"] = { - "filter": "kind=component,spec.type=library", -} + if kind is not None: + filter = f"kind={kind}" + if user_ref is not None: + filter += f",relations.ownedBy={user_ref}" + if group_ref is not None: + filter += f",relations.ownedBy={group_ref}" + if (not all(v is None for v in [kind, user_ref, group_ref])): + params["filter"] = filter -entities_params["api"] = { - "filter": "kind=api", -} + params.update(additional_params) + return params base_path_facets = "/api/catalog/entity-facets" base_path_entities = "/api/catalog/entities" - def setup_test_users(environment, msg, **kwargs): # Fired when the worker receives a message of type 'test_users' usernames.extend(map(lambda u: u, msg.data)) + @events.init.add_listener def on_locust_init(environment, **_kwargs): if not isinstance(environment.runner, MasterRunner): environment.runner.register_message("test_users", setup_test_users) + @events.test_start.add_listener def on_test_start(environment, **_kwargs): # When the test is started, evenly divides list between # worker nodes to ensure unique data across threads if not isinstance(environment.runner, WorkerRunner): users = [] - for i in range(1,int(environment.runner.target_user_count)+1): + for i in range(1, int(environment.runner.target_user_count)+1): users.append(f"test{i}") worker_count = environment.runner.worker_count @@ -126,57 +132,79 @@ def on_test_start(environment, **_kwargs): data = users[start_index:end_index] environment.runner.send_message("test_users", data, worker) + @events.init_command_line_parser.add_listener def _(parser): parser.add_argument("--keycloak-host", type=str, default="") parser.add_argument("--keycloak-password", is_secret=True, default="") + parser.add_argument("--debug", type=bool, default=False) - -class MVPTest(HttpUser): +class MVP_1dot2_Test(HttpUser): def on_start(self): self.client.verify = False if self.environment.parsed_options.keycloak_host: r = self.client.get('/api/auth/oauth2Proxy/refresh', verify=False) - qs_str=urllib.parse.parse_qs(r.url) - STATE=qs_str['state'] - login_cookies=r.cookies + qs_str = urllib.parse.parse_qs(r.url) + STATE = qs_str['state'] + login_cookies = r.cookies pattern = r'action="([^"]*)"' - LOGIN_URL_tmp=re.findall(pattern, str(r.content))[0] - LOGIN_URL=LOGIN_URL_tmp.replace("&", "&") - qs_str=urllib.parse.parse_qs(LOGIN_URL) - TAB_ID=qs_str['tab_id'] - EXECUTION=qs_str['execution'] - - param = {'client_id': self.CLIENTID, 'tab_id': TAB_ID, 'execution': EXECUTION} - form = {'username': self.USERNAME, 'password': self.PASSWORD, 'credentialId': ''} - r = self.client.post(LOGIN_URL, verify=False, data=form, params=param) + LOGIN_URL_tmp = re.findall(pattern, str(r.content))[0] + LOGIN_URL = LOGIN_URL_tmp.replace("&", "&") + qs_str = urllib.parse.parse_qs(LOGIN_URL) + TAB_ID = qs_str['tab_id'] + EXECUTION = qs_str['execution'] + + param = {'client_id': self.CLIENTID, + 'tab_id': TAB_ID, 'execution': EXECUTION} + form = {'username': self.USERNAME, + 'password': self.PASSWORD, 
'credentialId': ''} + r = self.client.post(LOGIN_URL, verify=False, + data=form, params=param) r = self.client.get(self.REFRESH_URL, verify=False) json_dict = json.loads(r.content) - TOKEN=json_dict['backstageIdentity']['token'] + TOKEN = json_dict['backstageIdentity']['token'] + idetity_refs = json_dict['backstageIdentity']['identity']['ownershipEntityRefs'] + for id_ref in idetity_refs: + if str(id_ref).startswith("group"): + self.GROUP_REF = id_ref + continue + if str(id_ref).startswith("user"): + self.USER_REF = id_ref self.HEADER = {'Authorization': 'Bearer ' + TOKEN} - else : + else: r = self.client.get('/api/auth/guest/refresh', verify=False) json_dict = json.loads(r.content) - TOKEN=json_dict['backstageIdentity']['token'] + TOKEN = json_dict['backstageIdentity']['token'] + + idetity_refs = json_dict['backstageIdentity']['identity']['ownershipEntityRefs'] + for id_ref in idetity_refs: + if str(id_ref).startswith("group"): + self.GROUP_REF = id_ref + continue + if str(id_ref).startswith("user"): + self.USER_REF = id_ref self.HEADER = {'Authorization': 'Bearer ' + TOKEN} def __init__(self, parent): super().__init__(parent) - self.HEADER='' + self.HEADER = '' if self.environment.parsed_options.keycloak_host: self.USERNAME = usernames.pop() - self.KEYCLOAK_URL=f'https://{self.environment.parsed_options.keycloak_host}/auth' - self.REDIRECT_URL=f'{self.environment.host}/oauth2/callback' - self.REFRESH_URL=f'{self.environment.host}/api/auth/oauth2Proxy/refresh' + host = self.environment.parsed_options.keycloak_host + self.KEYCLOAK_URL = f'https://{host}/auth' + host = self.environment.host + self.REDIRECT_URL = f'{host}/oauth2/callback' + host = self.environment.host + self.REFRESH_URL = f'{host}/api/auth/oauth2Proxy/refresh' - self.PASSWORD=self.environment.parsed_options.keycloak_password - self.REALM="backstage" - self.CLIENTID="backstage" + self.PASSWORD = self.environment.parsed_options.keycloak_password + self.REALM = "backstage" + self.CLIENTID = "backstage" def entitiy_facets(self, query) -> None: self.client.get(base_path_facets, @@ -184,33 +212,118 @@ def entitiy_facets(self, query) -> None: headers=self.HEADER, params=entity_facets_params[query]) - def entities(self, query) -> None: - self.client.get(base_path_entities, - verify=False, - headers=self.HEADER, - params=entities_params[query]) + def entities_by_query(self, kind=None, limit=0, user_ref=None, group_ref=None, additional_params={}): + r = self.client.get(f"{base_path_entities}/by-query", + verify=False, + headers=self.HEADER, + params=get_entities_by_query_params(kind, limit, user_ref, group_ref, additional_params)) + if self.environment.parsed_options.debug: + size = sum(len(chunk) for chunk in r.iter_content(8196)) + debug_output = f"\n[DEBUG][entities_by_query]" + debug_output += f" kind={kind}" + debug_output += f", limit={limit}" + debug_output += f", user_ref={user_ref}" + debug_output += f", group_ref={group_ref}" + debug_output += f", additional_params={additional_params}" + debug_output += f", response_size={size}" + debug_output += f", response={r.content}\n" + print(debug_output) + return r + + def entities_by_refs(self, refs=[]): + entity_refs = {"entityRefs": refs} + r = self.client.post(f"{base_path_entities}/by-refs", + verify=False, + headers=self.HEADER, + json=entity_refs) + if self.environment.parsed_options.debug: + size = sum(len(chunk) for chunk in r.iter_content(8196)) + debug_output = f"\n[DEBUG][entities_by_refs]" + debug_output += f", refs={refs}" + debug_output += f", response_size={size}" + 
debug_output += f", response={r.content}\n" + print(debug_output) @task - def get_kind(self) -> None: - self.entitiy_facets("kind") + def execute(self) -> None: + # Load Catalog self.entitiy_facets("relations.ownedBy") - self.entitiy_facets("metadata.namespace") + self.entitiy_facets("kind") self.entitiy_facets("spec.lifecycle") self.entitiy_facets("metadata.tags") - self.entities("component") - self.entitiy_facets("component/spec.lifecycle") + self.entitiy_facets("metadata.namespace") + self.entities_by_query(kind="component", limit=20) + self.entities_by_query(kind="component", limit=20) self.entitiy_facets("component/spec.type") - self.entitiy_facets("component/metadata.namespace") + self.entities_by_query( + kind="component", limit=0, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) + self.entities_by_query(kind="component", limit=0) + self.entitiy_facets("component/spec.lifecycle") self.entitiy_facets("component/metadata.tags") - self.entities("api") - self.entitiy_facets("api/spec.lifecycle") + self.entitiy_facets("component/metadata.namespace") + self.entities_by_refs([self.GROUP_REF]) + self.entities_by_query( + kind="component", limit=20, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) + + # Switch to API + self.entities_by_query( + kind="api", limit=20, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) self.entitiy_facets("api/spec.type") - self.entitiy_facets("api/metadata.namespace") + self.entities_by_query( + kind="api", limit=0, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) + self.entities_by_query(kind="api", limit=0) + self.entitiy_facets("api/spec.lifecycle") self.entitiy_facets("api/metadata.tags") - self.entities("component") + self.entitiy_facets("api/metadata.namespace") + + # Switch to Component + self.entities_by_query( + kind="component", limit=20, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) + self.entitiy_facets("component/spec.lifecycle") + self.entities_by_query( + kind="component", limit=0, + user_ref=self.USER_REF, group_ref=self.GROUP_REF) + self.entities_by_query(kind="component", limit=0) self.entitiy_facets("component/spec.lifecycle") - self.entitiy_facets("component/spec.type") - self.entitiy_facets("component/metadata.namespace") self.entitiy_facets("component/metadata.tags") - self.entities("component/library") - self.entities("component") + self.entitiy_facets("component/metadata.namespace") + self.entities_by_refs([self.GROUP_REF]) + + # Select "library" + self.entities_by_query( + kind="component", limit=20, + user_ref=self.USER_REF, group_ref=self.GROUP_REF, + additional_params={"spec_type": "library"}) + self.entities_by_query( + kind="component", limit=0, + user_ref=self.USER_REF, group_ref=self.GROUP_REF, + additional_params={"spec_type": "library"}) + self.entities_by_query( + kind="component", limit=0, + additional_params={"spec_type": "library"}) + self.entities_by_query(kind="component", limit=20) + self.entities_by_refs([self.GROUP_REF]) + + # Select "all" + self.entities_by_query( + kind="component", limit=20, + group_ref=self.GROUP_REF) + self.entities_by_query( + kind="component", limit=0, + group_ref=self.GROUP_REF) + self.entities_by_refs([self.GROUP_REF]) + + # Select/Load next page + r = self.entities_by_query( + kind="component", limit=20, + group_ref=self.GROUP_REF) + json_dict = json.loads(r.content) + next_cursor = json_dict["pageInfo"]["nextCursor"] + self.entities_by_query( + limit=20, + additional_params={"cursor": next_cursor}) diff --git a/test.env b/test.env index f65730e..f969748 100644 --- a/test.env 
+++ b/test.env @@ -3,29 +3,29 @@ # To override system environment variables, uncomment the variables down bellow. It will be sourced by the ci-scripts. ## Scenario -# export SCENARIO=baseline-test +# export SCENARIO=mvp # export HOST=http://localhost -# export USERS=100 -# export WORKERS=5 +export USERS=1 +export WORKERS=1 # export DURATION=1m -# export SPAWN_RATE=20 -# export WAIT_FOR_SEARCH_INDEX=false +export SPAWN_RATE=1 +export WAIT_FOR_SEARCH_INDEX=false ## RHDH database population -# export PRE_LOAD_DB=true -# export BACKSTAGE_USER_COUNT=1 -# export GROUP_COUNT=1 -# export API_COUNT=1 -# export COMPONENT_COUNT=1 -# export KEYCLOAK_USER_PASS=changeme -# export AUTH_PROVIDER= +export PRE_LOAD_DB=true +export BACKSTAGE_USER_COUNT=1000 +export GROUP_COUNT=10 +export API_COUNT=10000 +export COMPONENT_COUNT=10000 +export KEYCLOAK_USER_PASS=Y2hhbmdlbWUK +export AUTH_PROVIDER=keycloak # export POPULATION_CONCURRENCY=10 # export COMPONENT_SHARD_SIZE=500 ## RHDH installed via Helm -# export RHDH_INSTALL_METHOD=helm +export RHDH_INSTALL_METHOD=helm # export RHDH_NAMESPACE=rhdh-performance-helm -# export RHDH_HELM_REPO=https://raw.githubusercontent.com/rhdh-bot/openshift-helm-charts/rhdh-1.2-rhel-9/installation +export RHDH_HELM_REPO=https://raw.githubusercontent.com/rhdh-bot/openshift-helm-charts/rhdh-1.2-rhel-9/installation # export RHDH_HELM_CHART=redhat-developer-hub # export RHDH_HELM_CHART_VERSION= # export RHDH_HELM_RELEASE_NAME=rhdh @@ -42,17 +42,17 @@ # export RHDH_OLM_CHANNEL=fast ## RHDH horizontal scaling -# export RHDH_DEPLOYMENT_REPLICAS=1 -# export RHDH_DB_REPLICAS=1 -# export RHDH_DB_STORAGE=1Gi +export RHDH_DEPLOYMENT_REPLICAS=1 +export RHDH_DB_REPLICAS=1 +export RHDH_DB_STORAGE=5Gi # export RHDH_RESOURCES_CPU_REQUESTS= # export RHDH_RESOURCES_CPU_LIMITS= # export RHDH_RESOURCES_MEMORY_REQUESTS= # export RHDH_RESOURCES_MEMORY_LIMITS= -# export RHDH_KEYCLOAK_REPLICAS=1 +export RHDH_KEYCLOAK_REPLICAS=1 ## Extras -# export LOCUST_EXTRA_CMD= +# export LOCUST_EXTRA_CMD=--debug=true # export ARTIFACT_DIR=.artifacts -# export ENABLE_RBAC=false +export ENABLE_RBAC=true # export ENABLE_PROFILING=false
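
A minimal standalone sketch (not part of the diff above) of the paginated /api/catalog/entities/by-query flow that the new execute() task drives: fetch a first page with limit/orderField/filter, read pageInfo.nextCursor, then request the next page with that cursor. It assumes guest auth is enabled on the target RHDH instance; the host value, the use of the `requests` library, and the "nextCursor" presence check are illustrative assumptions, not scenario code.

# Standalone sketch: same by-query pagination the MVP scenario exercises.
# Assumes guest auth is enabled; HOST is an assumed local value.
import requests
import urllib3

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

HOST = "http://localhost:7007"  # assumed host; point this at your RHDH route


def get_guest_token(host):
    # Same guest refresh endpoint the scenario uses in on_start()
    r = requests.get(f"{host}/api/auth/guest/refresh", verify=False)
    return r.json()["backstageIdentity"]["token"]


def entities_by_query(host, token, params):
    r = requests.get(f"{host}/api/catalog/entities/by-query",
                     headers={"Authorization": f"Bearer {token}"},
                     params=params, verify=False)
    r.raise_for_status()
    return r.json()


token = get_guest_token(HOST)
first_page = entities_by_query(HOST, token, {"limit": 20,
                                             "orderField": "metadata.name,asc",
                                             "filter": "kind=component"})
# "nextCursor" is only present when another page exists; the field name is
# taken from the scenario code, which reads pageInfo.nextCursor.
next_cursor = first_page["pageInfo"].get("nextCursor")
if next_cursor:
    entities_by_query(HOST, token, {"limit": 20, "cursor": next_cursor})

To see the per-request dumps that the scenario's entities_by_query()/entities_by_refs() print, set LOCUST_EXTRA_CMD=--debug=true (as in the commented test.env example); the Makefile now appends to LOCUST_EXTRA_CMD instead of overwriting it, and locust-test-template.yaml passes it to the worker command line as well.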